diff --git a/examples/aloha_real/Dockerfile b/examples/aloha_real/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..c8d5ba775f5384f071534325b97aa2528c5c5e0a
--- /dev/null
+++ b/examples/aloha_real/Dockerfile
@@ -0,0 +1,70 @@
+# Dockerfile for the Aloha real environment.
+
+# Build the container:
+# docker build . -t aloha_real -f examples/aloha_real/Dockerfile
+
+# Run the container:
+# docker run --rm -it --network=host -v /dev:/dev -v .:/app --privileged aloha_real /bin/bash
+
+# Base image pinned by digest for reproducible builds.
+FROM ros:noetic-robot@sha256:0e12e4db836e78c74c4b04c6d16f185d9a18d2b13cf5580747efa075eb6dc6e0
+SHELL ["/bin/bash", "-c"]
+
+# NOTE(review): ENV persists into the runtime image, so DEBIAN_FRONTEND stays set in
+# running containers; setting it inline on the apt-get RUN (or via ARG) keeps it build-only.
+ENV DEBIAN_FRONTEND=noninteractive
+# NOTE(review): the apt lists are only deleted in a later RUN (see the python build step),
+# which cannot shrink this layer; append `&& rm -rf /var/lib/apt/lists/*` to this same RUN.
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ cmake \
+ curl \
+ libffi-dev \
+ python3-rosdep \
+ python3-rosinstall \
+ python3-rosinstall-generator \
+ whiptail \
+ git \
+ wget \
+ openssh-client \
+ ros-noetic-cv-bridge \
+ ros-noetic-usb-cam \
+ ros-noetic-realsense2-camera \
+ keyboard-configuration
+
+WORKDIR /root
+# Fetch the Interbotix arm installer.
+# NOTE(review): this pulls the unpinned `main` branch with no checksum — the build is not
+# reproducible if upstream changes; consider pinning a commit SHA or verifying a digest.
+RUN curl 'https://raw.githubusercontent.com/Interbotix/interbotix_ros_manipulators/main/interbotix_ros_xsarms/install/amd64/xsarm_amd64_install.sh' > xsarm_amd64_install.sh
+RUN chmod +x xsarm_amd64_install.sh
+# -d noetic selects the ROS distro; -n runs the installer non-interactively.
+RUN export TZ='America/Los_Angeles' && ./xsarm_amd64_install.sh -d noetic -n
+
+# Build the ALOHA packages inside the Interbotix catkin workspace.
+COPY ./third_party/aloha /root/interbotix_ws/src/aloha
+RUN cd /root/interbotix_ws && source /opt/ros/noetic/setup.sh && source /root/interbotix_ws/devel/setup.sh && catkin_make
+
+# Install python 3.10 because this ROS image comes with 3.8
+# NOTE(review): `make install` replaces the /usr/local `python3` entry points; the
+# conventional way to install a second interpreter side by side is `make altinstall` —
+# confirm clobbering is intended. The .bashrc aliases below only apply to interactive
+# shells, not to ENTRYPOINT/CMD (those resolve python3 via PATH, where /usr/local wins).
+RUN mkdir /python && \
+ cd /python && \
+ wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz && \
+ tar -zxvf Python-3.10.14.tgz && \
+ cd Python-3.10.14 && \
+ ls -lhR && \
+ ./configure --enable-optimizations && \
+ make install && \
+ echo 'alias python3="/usr/local/bin/python3.10"' >> ~/.bashrc && \
+ echo 'alias python="/usr/local/bin/python3.10"' >> ~/.bashrc && \
+ cd ~ && rm -rf /python && \
+ rm -rf /var/lib/apt/lists/*
+
+# uv (pinned image tag) provides fast, lockfile-driven python installs.
+COPY --from=ghcr.io/astral-sh/uv:0.5.6 /uv /bin/uv
+ENV UV_HTTP_TIMEOUT=120
+ENV UV_LINK_MODE=copy
+COPY ./examples/aloha_real/requirements.txt /tmp/requirements.txt
+COPY ./packages/openpi-client/pyproject.toml /tmp/openpi-client/pyproject.toml
+RUN uv pip sync --python 3.10 --system /tmp/requirements.txt /tmp/openpi-client/pyproject.toml
+
+# Make the mounted app, the openpi client, and the aloha scripts importable at runtime.
+ENV PYTHONPATH=/app:/app/src:/app/packages/openpi-client/src:/root/interbotix_ws/src/aloha/aloha_scripts:/root/interbotix_ws/src/aloha
+WORKDIR /app
+
+# Create an entrypoint script to run the setup commands, followed by the command passed in.
+# NOTE(review): the script runs "$@" without `exec`, so the command is not PID 1 and may
+# not receive SIGTERM from `docker stop`; consider `exec "$@"` on the last line.
+RUN cat <<'EOF' > /usr/local/bin/entrypoint.sh
+#!/bin/bash
+source /opt/ros/noetic/setup.sh && source /root/interbotix_ws/devel/setup.sh && "$@"
+EOF
+RUN chmod +x /usr/local/bin/entrypoint.sh
+
+ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
+CMD ["python3", "/app/examples/aloha_real/main.py"]
diff --git a/examples/aloha_real/README.md b/examples/aloha_real/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..3addd4f580c2a665bd2a63ea7923825b89158f5c
--- /dev/null
+++ b/examples/aloha_real/README.md
@@ -0,0 +1,126 @@
+# Run Aloha (Real Robot)
+
+This example demonstrates how to run with a real robot using an [ALOHA setup](https://github.com/tonyzhaozh/aloha). See [here](../../docs/remote_inference.md) for instructions on how to load checkpoints and run inference. We list the relevant checkpoint paths for each provided fine-tuned model below.
+
+## Prerequisites
+
+This repo uses a fork of the ALOHA repo, with very minor modifications to use Realsense cameras.
+
+1. Follow the [hardware installation instructions](https://github.com/tonyzhaozh/aloha?tab=readme-ov-file#hardware-installation) in the ALOHA repo.
+1. Modify the `third_party/aloha/aloha_scripts/realsense_publisher.py` file to use serial numbers for your cameras.
+
+## With Docker
+
+```bash
+export SERVER_ARGS="--env ALOHA --default_prompt='take the toast out of the toaster'"
+docker compose -f examples/aloha_real/compose.yml up --build
+```
+
+## Without Docker
+
+Terminal window 1:
+
+```bash
+# Create virtual environment
+uv venv --python 3.10 examples/aloha_real/.venv
+source examples/aloha_real/.venv/bin/activate
+uv pip sync examples/aloha_real/requirements.txt
+uv pip install -e packages/openpi-client
+
+# Run the robot
+python examples/aloha_real/main.py
+```
+
+Terminal window 2:
+
+```bash
+roslaunch --wait aloha ros_nodes.launch
+```
+
+Terminal window 3:
+
+```bash
+uv run scripts/serve_policy.py --env ALOHA --default_prompt='take the toast out of the toaster'
+```
+
+## **ALOHA Checkpoint Guide**
+
+
+The `pi0_base` model can be used in zero shot for a simple task on the ALOHA platform, and we additionally provide two example fine-tuned checkpoints, “fold the towel” and “open the tupperware and put the food on the plate,” which can perform more advanced tasks on the ALOHA.
+
+While we’ve found the policies to work in unseen conditions across multiple ALOHA stations, we provide some pointers here on how best to set up scenes to maximize the chance of policy success. We cover the prompts to use for the policies, objects we’ve seen it work well on, and well-represented initial state distributions. Running these policies in zero shot is still a very experimental feature, and there is no guarantee that they will work on your robot. The recommended way to use `pi0_base` is by finetuning with data from the target robot.
+
+
+---
+
+### **Toast Task**
+
+This task involves the robot taking two pieces of toast out of a toaster and placing them on a plate.
+
+- **Checkpoint path**: `s3://openpi-assets/checkpoints/pi0_base`
+- **Prompt**: "take the toast out of the toaster"
+- **Objects needed**: Two pieces of toast, a plate, and a standard toaster.
+- **Object Distribution**:
+ - Works on both real toast and rubber fake toast
+ - Compatible with standard 2-slice toasters
+ - Works with plates of varying colors
+
+### **Scene Setup Guidelines**
+
+
+- The toaster should be positioned in the top-left quadrant of the workspace.
+- Both pieces of toast should start inside the toaster, with at least 1 cm of bread sticking out from the top.
+- The plate should be placed roughly in the lower-center of the workspace.
+- Works with both natural and synthetic lighting, but avoid making the scene too dark (e.g., don't place the setup inside an enclosed space or under a curtain).
+
+
+### **Towel Task**
+
+This task involves folding a small towel (e.g., roughly the size of a hand towel) into eighths.
+
+- **Checkpoint path**: `s3://openpi-assets/checkpoints/pi0_aloha_towel`
+- **Prompt**: "fold the towel"
+- **Object Distribution**:
+ - Works on towels of varying solid colors
+ - Performance is worse on heavily textured or striped towels
+
+### **Scene Setup Guidelines**
+
+
+- The towel should be flattened and roughly centered on the table.
+- Choose a towel that does not blend in with the table surface.
+
+
+### **Tupperware Task**
+
+This task involves opening a tupperware filled with food and pouring the contents onto a plate.
+
+- **Checkpoint path**: `s3://openpi-assets/checkpoints/pi0_aloha_tupperware`
+- **Prompt**: "open the tupperware and put the food on the plate"
+- **Objects needed**: Tupperware, food (or food-like items), and a plate.
+- **Object Distribution**:
+ - Works on various types of fake food (e.g., fake chicken nuggets, fries, and fried chicken).
+ - Compatible with tupperware of different lid colors and shapes, with best performance on square tupperware with a corner flap (see images below).
+ - The policy has seen plates of varying solid colors.
+
+### **Scene Setup Guidelines**
+
+
+- Best performance observed when both the tupperware and plate are roughly centered in the workspace.
+- Positioning:
+ - Tupperware should be on the left.
+ - Plate should be on the right or bottom.
+ - The tupperware flap should point toward the plate.
+
+## Training on your own Aloha dataset
+
+1. Convert the dataset to the LeRobot dataset v2.0 format.
+
+ We provide a script [convert_aloha_data_to_lerobot.py](./convert_aloha_data_to_lerobot.py) that converts the dataset to the LeRobot dataset v2.0 format. As an example we have converted the `aloha_pen_uncap_diverse_raw` dataset from the [BiPlay repo](https://huggingface.co/datasets/oier-mees/BiPlay/tree/main/aloha_pen_uncap_diverse_raw) and uploaded it to the HuggingFace Hub as [physical-intelligence/aloha_pen_uncap_diverse](https://huggingface.co/datasets/physical-intelligence/aloha_pen_uncap_diverse).
+
+
+2. Define a training config that uses the custom dataset.
+
+ We provide the [pi0_aloha_pen_uncap config](../../src/openpi/training/config.py) as an example. You should refer to the root [README](../../README.md) for how to run training with the new config.
+
+IMPORTANT: Our base checkpoint includes normalization stats from various common robot configurations. When fine-tuning a base checkpoint with a custom dataset from one of these configurations, we recommend using the corresponding normalization stats provided in the base checkpoint. In the example, this is done by specifying the trossen asset_id and a path to the pretrained checkpoint’s asset directory within the AssetsConfig.
\ No newline at end of file
diff --git a/packages/openpi-client/pyproject.toml b/packages/openpi-client/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..553f7ef37aea9c55c6fd35043aa71cbe5da97d26
--- /dev/null
+++ b/packages/openpi-client/pyproject.toml
@@ -0,0 +1,25 @@
+[project]
+name = "openpi-client"
+version = "0.1.0"
+requires-python = ">=3.7"  # NOTE(review): runtime.py uses evaluated `list[...]` annotations (needs >= 3.9) — confirm this floor.
+dependencies = [
+ "dm-tree>=0.1.8",
+ "msgpack>=1.0.5",
+ "numpy>=1.21.6",
+ "pillow>=9.0.0",
+ "tree>=0.2.4",
+ "websockets>=11.0",
+]
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.uv]
+dev-dependencies = [
+ "pytest>=8.3.4",
+]
+
+[tool.ruff]
+line-length = 120
+target-version = "py37"
\ No newline at end of file
diff --git a/packages/openpi-client/src/openpi_client/__init__.py b/packages/openpi-client/src/openpi_client/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3dc1f76bc69e3f559bee6253b24fc93acee9e1f9
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/__init__.py
@@ -0,0 +1 @@
+# Package version; keep in sync with `version` in packages/openpi-client/pyproject.toml.
+__version__ = "0.1.0"
diff --git a/packages/openpi-client/src/openpi_client/action_chunk_broker.py b/packages/openpi-client/src/openpi_client/action_chunk_broker.py
new file mode 100644
index 0000000000000000000000000000000000000000..f95cdada02ec1061a52914777ad2b8ec4a4083d2
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/action_chunk_broker.py
@@ -0,0 +1,45 @@
+from typing import Dict
+
+import numpy as np
+import tree
+from typing_extensions import override
+
+from openpi_client import base_policy as _base_policy
+
+
+class ActionChunkBroker(_base_policy.BasePolicy):
+    """Wraps a policy to return action chunks one-at-a-time.
+
+    Assumes that the first dimension of all action fields is the chunk size.
+
+    A new inference call to the inner policy is only made when the current
+    list of chunks is exhausted.
+    """
+
+    def __init__(self, policy: _base_policy.BasePolicy, action_horizon: int):
+        # Inner policy whose chunked predictions are doled out one step at a time.
+        self._policy = policy
+
+        # Number of steps consumed from each chunk before re-querying the inner policy.
+        self._action_horizon = action_horizon
+        self._cur_step: int = 0  # Index of the next step to return from the cached chunk.
+
+        # Cached result of the last inner-policy call; None means "fetch on next infer()".
+        self._last_results: Dict[str, np.ndarray] | None = None
+
+    @override
+    def infer(self, obs: Dict) -> Dict:  # noqa: UP006
+        # Fetch a fresh chunk only when the cache is empty (first call, exhaustion, or reset).
+        if self._last_results is None:
+            self._last_results = self._policy.infer(obs)
+            self._cur_step = 0
+
+        # Slice step `_cur_step` out of every array leaf in the (possibly nested) results.
+        # NOTE(review): assumes every leaf has at least `action_horizon` entries along dim 0;
+        # a shorter chunk would raise IndexError here — confirm against the server's output.
+        results = tree.map_structure(lambda x: x[self._cur_step, ...], self._last_results)
+        self._cur_step += 1
+
+        # Drop the cache once the horizon is exhausted so the next call re-infers.
+        if self._cur_step >= self._action_horizon:
+            self._last_results = None
+
+        return results
+
+    @override
+    def reset(self) -> None:
+        # Reset both the wrapped policy and the local chunk cache.
+        self._policy.reset()
+        self._last_results = None
+        self._cur_step = 0
diff --git a/packages/openpi-client/src/openpi_client/base_policy.py b/packages/openpi-client/src/openpi_client/base_policy.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f4290651b1b7bab3bd9549b47876838f5b51629
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/base_policy.py
@@ -0,0 +1,12 @@
+import abc
+from typing import Dict
+
+
+class BasePolicy(abc.ABC):
+    """Abstract interface for action-producing policies (dict observation in, dict actions out)."""
+
+    @abc.abstractmethod
+    def infer(self, obs: Dict) -> Dict:
+        """Infer actions from observations."""
+
+    def reset(self) -> None:
+        """Reset the policy to its initial state.
+
+        Default is a no-op; stateful policies override this.
+        """
+        pass
diff --git a/packages/openpi-client/src/openpi_client/image_tools.py b/packages/openpi-client/src/openpi_client/image_tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a971b9d5f6b1495fd6cdea202ffa607d8b34bf0
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/image_tools.py
@@ -0,0 +1,58 @@
+import numpy as np
+from PIL import Image
+
+
+def convert_to_uint8(img: np.ndarray) -> np.ndarray:
+    """Converts an image to uint8 if it is a float image.
+
+    This is important for reducing the size of the image when sending it over the network.
+
+    NOTE(review): assumes float pixel values lie in [0, 1]; values outside that range wrap
+    around when cast to uint8 — confirm callers normalize first. Non-float images are
+    returned unchanged.
+    """
+    if np.issubdtype(img.dtype, np.floating):
+        img = (255 * img).astype(np.uint8)
+    return img
+
+
+def resize_with_pad(images: np.ndarray, height: int, width: int, method=Image.BILINEAR) -> np.ndarray:
+    """Replicates tf.image.resize_with_pad for multiple images using PIL. Resizes a batch of images to a target height.
+
+    Args:
+        images: A batch of images in [..., height, width, channel] format.
+        height: The target height of the image.
+        width: The target width of the image.
+        method: The interpolation method to use. Default is bilinear.
+
+    Returns:
+        The resized images in [..., height, width, channel].
+    """
+    # If the images are already the correct size, return them as is.
+    if images.shape[-3:-1] == (height, width):
+        return images
+
+    original_shape = images.shape
+
+    # Flatten all leading batch dimensions into one so each image can be handed to PIL,
+    # then restore the original batch shape on the way out.
+    images = images.reshape(-1, *original_shape[-3:])
+    resized = np.stack([_resize_with_pad_pil(Image.fromarray(im), height, width, method=method) for im in images])
+    return resized.reshape(*original_shape[:-3], *resized.shape[-3:])
+
+
+def _resize_with_pad_pil(image: Image.Image, height: int, width: int, method: int) -> Image.Image:
+    """Replicates tf.image.resize_with_pad for one image using PIL. Resizes an image to a target height and
+    width without distortion by padding with zeros.
+
+    Unlike the jax version, note that PIL uses [width, height, channel] ordering instead of [batch, h, w, c].
+    """
+    cur_width, cur_height = image.size
+    if cur_width == width and cur_height == height:
+        return image  # No need to resize if the image is already the correct size.
+
+    # Scale by the limiting dimension so the resized image fits entirely inside (width, height).
+    ratio = max(cur_width / width, cur_height / height)
+    resized_height = int(cur_height / ratio)
+    resized_width = int(cur_width / ratio)
+    resized_image = image.resize((resized_width, resized_height), resample=method)
+
+    # Paste the resized image centered on a zero (black) canvas of the target size.
+    zero_image = Image.new(resized_image.mode, (width, height), 0)
+    pad_height = max(0, int((height - resized_height) / 2))
+    pad_width = max(0, int((width - resized_width) / 2))
+    zero_image.paste(resized_image, (pad_width, pad_height))
+    assert zero_image.size == (width, height)
+    return zero_image
diff --git a/packages/openpi-client/src/openpi_client/image_tools_test.py b/packages/openpi-client/src/openpi_client/image_tools_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d4b4b92030ea869712b312581e26243035aafba
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/image_tools_test.py
@@ -0,0 +1,37 @@
+import numpy as np
+
+import openpi_client.image_tools as image_tools
+
+
+def test_resize_with_pad_shapes():
+    """Shape-only checks for resize_with_pad across upscale, downscale, no-op, and odd-padding cases."""
+    # Test case 1: Resize image with larger dimensions
+    images = np.zeros((2, 10, 10, 3), dtype=np.uint8)  # Input images of shape (batch_size, height, width, channels)
+    height = 20
+    width = 20
+    resized_images = image_tools.resize_with_pad(images, height, width)
+    assert resized_images.shape == (2, height, width, 3)
+    assert np.all(resized_images == 0)
+
+    # Test case 2: Resize image with smaller dimensions
+    images = np.zeros((3, 30, 30, 3), dtype=np.uint8)
+    height = 15
+    width = 15
+    resized_images = image_tools.resize_with_pad(images, height, width)
+    assert resized_images.shape == (3, height, width, 3)
+    assert np.all(resized_images == 0)
+
+    # Test case 3: Resize image with the same dimensions
+    images = np.zeros((1, 50, 50, 3), dtype=np.uint8)
+    height = 50
+    width = 50
+    resized_images = image_tools.resize_with_pad(images, height, width)
+    assert resized_images.shape == (1, height, width, 3)
+    assert np.all(resized_images == 0)
+
+    # Test case 4: Resize image with odd-numbered padding
+    images = np.zeros((1, 256, 320, 3), dtype=np.uint8)
+    height = 60
+    width = 80
+    resized_images = image_tools.resize_with_pad(images, height, width)
+    assert resized_images.shape == (1, height, width, 3)
+    assert np.all(resized_images == 0)
diff --git a/packages/openpi-client/src/openpi_client/msgpack_numpy.py b/packages/openpi-client/src/openpi_client/msgpack_numpy.py
new file mode 100644
index 0000000000000000000000000000000000000000..007f755edf54565579376b077eec7f7f715e1b96
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/msgpack_numpy.py
@@ -0,0 +1,57 @@
+"""Adds NumPy array support to msgpack.
+
+msgpack is good for (de)serializing data over a network for multiple reasons:
+- msgpack is secure (as opposed to pickle/dill/etc which allow for arbitrary code execution)
+- msgpack is widely used and has good cross-language support
+- msgpack does not require a schema (as opposed to protobuf/flatbuffers/etc) which is convenient in dynamically typed
+ languages like Python and JavaScript
+- msgpack is fast and efficient (as opposed to readable formats like JSON/YAML/etc); I found that msgpack was ~4x faster
+ than pickle for serializing large arrays using the below strategy
+
+The code below is adapted from https://github.com/lebedov/msgpack-numpy. The reason not to use that library directly is
+that it falls back to pickle for object arrays.
+"""
+
+import functools
+
+import msgpack
+import numpy as np
+
+
+def pack_array(obj):
+    """msgpack `default` hook that encodes numpy arrays and scalars as tagged dicts."""
+    # Reject structured (V), object (O), and complex (c) dtypes up front: they cannot be
+    # round-tripped from raw bytes + dtype string without falling back to pickle.
+    if (isinstance(obj, (np.ndarray, np.generic))) and obj.dtype.kind in ("V", "O", "c"):
+        raise ValueError(f"Unsupported dtype: {obj.dtype}")
+
+    if isinstance(obj, np.ndarray):
+        # Arrays travel as raw bytes plus the dtype string and shape needed to rebuild them.
+        return {
+            b"__ndarray__": True,
+            b"data": obj.tobytes(),
+            b"dtype": obj.dtype.str,
+            b"shape": obj.shape,
+        }
+
+    if isinstance(obj, np.generic):
+        # Numpy scalars travel as a native Python value plus their dtype string.
+        return {
+            b"__npgeneric__": True,
+            b"data": obj.item(),
+            b"dtype": obj.dtype.str,
+        }
+
+    # Anything else falls through to msgpack's normal handling.
+    return obj
+
+
+def unpack_array(obj):
+    """msgpack `object_hook` that rebuilds numpy arrays/scalars packed by `pack_array`."""
+    if b"__ndarray__" in obj:
+        # Reconstruct the array as a view over the received bytes. Note the result is
+        # read-only because `bytes` is an immutable buffer.
+        return np.ndarray(buffer=obj[b"data"], dtype=np.dtype(obj[b"dtype"]), shape=obj[b"shape"])
+
+    if b"__npgeneric__" in obj:
+        # Rebuild a numpy scalar from its dtype string and native Python value.
+        return np.dtype(obj[b"dtype"]).type(obj[b"data"])
+
+    return obj
+
+
+# Drop-in replacements for the msgpack entry points with numpy support pre-wired.
+Packer = functools.partial(msgpack.Packer, default=pack_array)
+packb = functools.partial(msgpack.packb, default=pack_array)
+
+Unpacker = functools.partial(msgpack.Unpacker, object_hook=unpack_array)
+unpackb = functools.partial(msgpack.unpackb, object_hook=unpack_array)
diff --git a/packages/openpi-client/src/openpi_client/runtime/agent.py b/packages/openpi-client/src/openpi_client/runtime/agent.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2c3ab66ef618ad9ecbff7b81ad9340a4604128c
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/runtime/agent.py
@@ -0,0 +1,17 @@
+import abc
+
+
+class Agent(abc.ABC):
+    """An Agent is the thing with agency, i.e. the entity that makes decisions.
+
+    Agents receive observations about the state of the world, and return actions
+    to take in response.
+    """
+
+    @abc.abstractmethod
+    def get_action(self, observation: dict) -> dict:
+        """Query the agent for the next action."""
+
+    @abc.abstractmethod
+    def reset(self) -> None:
+        """Reset the agent to its initial state.
+
+        Called by the runtime once at the start of each episode.
+        """
diff --git a/packages/openpi-client/src/openpi_client/runtime/environment.py b/packages/openpi-client/src/openpi_client/runtime/environment.py
new file mode 100644
index 0000000000000000000000000000000000000000..664ac4678aaaa3aecf52268a6a09d1d1fc974226
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/runtime/environment.py
@@ -0,0 +1,32 @@
+import abc
+
+
+class Environment(abc.ABC):
+    """An Environment represents the robot and the environment it inhabits.
+
+    The primary contract of environments is that they can be queried for observations
+    about their state, and have actions applied to them to change that state.
+    """
+
+    @abc.abstractmethod
+    def reset(self) -> None:
+        """Reset the environment to its initial state.
+
+        This will be called once before starting each episode.
+        """
+
+    @abc.abstractmethod
+    def is_episode_complete(self) -> bool:
+        """Allow the environment to signal that the episode is complete.
+
+        This will be called after each step. It should return `True` if the episode is
+        complete (either successfully or unsuccessfully), and `False` otherwise.
+        """
+
+    @abc.abstractmethod
+    def get_observation(self) -> dict:
+        """Query the environment for the current state."""
+
+    @abc.abstractmethod
+    def apply_action(self, action: dict) -> None:
+        """Take an action in the environment.
+
+        Called once per runtime step, immediately after `get_observation`.
+        """
diff --git a/packages/openpi-client/src/openpi_client/runtime/runtime.py b/packages/openpi-client/src/openpi_client/runtime/runtime.py
new file mode 100644
index 0000000000000000000000000000000000000000..9552be091a26e163d60cab8071df4716524bf2e8
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/runtime/runtime.py
@@ -0,0 +1,92 @@
+import logging
+import threading
+import time
+
+from openpi_client.runtime import agent as _agent
+from openpi_client.runtime import environment as _environment
+from openpi_client.runtime import subscriber as _subscriber
+
+
+class Runtime:
+    """The core module orchestrating interactions between key components of the system.
+
+    Drives the environment/agent loop for `num_episodes` episodes, optionally rate-limited
+    to `max_hz` steps per second and capped at `max_episode_steps` steps per episode.
+    """
+
+    def __init__(
+        self,
+        environment: _environment.Environment,
+        agent: _agent.Agent,
+        subscribers: list[_subscriber.Subscriber],
+        max_hz: float = 0,
+        num_episodes: int = 1,
+        max_episode_steps: int = 0,
+    ) -> None:
+        # NOTE(review): the evaluated `list[...]` annotation above requires Python >= 3.9,
+        # stricter than the package's declared `requires-python = ">=3.7"` — confirm floor.
+        self._environment = environment
+        self._agent = agent
+        self._subscribers = subscribers
+        self._max_hz = max_hz  # Step-rate cap in Hz; 0 (or less) means run unthrottled.
+        self._num_episodes = num_episodes
+        self._max_episode_steps = max_episode_steps  # 0 means no per-episode step limit.
+
+        self._in_episode = False  # Cleared by mark_episode_complete() to stop the loop.
+        self._episode_steps = 0
+
+    def run(self) -> None:
+        """Runs all episodes back to back, then resets the environment one final time.
+
+        NOTE(review): the original docstring referenced a `stop()` method that does not
+        exist; episodes are ended via `mark_episode_complete()` — confirm intent.
+        """
+        for _ in range(self._num_episodes):
+            self._run_episode()
+
+        # Final reset, this is important for real environments to move the robot to its home position.
+        self._environment.reset()
+
+    def run_in_new_thread(self) -> threading.Thread:
+        """Runs the runtime loop in a new thread.
+
+        Returns the started thread so callers can join() it.
+        """
+        thread = threading.Thread(target=self.run)
+        thread.start()
+        return thread
+
+    def mark_episode_complete(self) -> None:
+        """Marks the end of an episode.
+
+        NOTE(review): may be called from a thread other than the runtime loop (the loop
+        itself calls it from _step); it only writes a bool flag, but confirm no stronger
+        synchronization is required.
+        """
+        self._in_episode = False
+
+    def _run_episode(self) -> None:
+        """Runs a single episode: reset everything, loop _step until complete, notify subscribers."""
+        logging.info("Starting episode...")
+        self._environment.reset()
+        self._agent.reset()
+        for subscriber in self._subscribers:
+            subscriber.on_episode_start()
+
+        self._in_episode = True
+        self._episode_steps = 0
+        # Target duration of one step; 0 disables throttling entirely.
+        step_time = 1 / self._max_hz if self._max_hz > 0 else 0
+        last_step_time = time.time()
+
+        while self._in_episode:
+            self._step()
+            self._episode_steps += 1
+
+            # Sleep to maintain the desired frame rate
+            now = time.time()
+            dt = now - last_step_time
+            if dt < step_time:
+                time.sleep(step_time - dt)
+                last_step_time = time.time()
+            else:
+                # Step ran longer than the budget; carry on without sleeping.
+                last_step_time = now
+
+        logging.info("Episode completed.")
+        for subscriber in self._subscribers:
+            subscriber.on_episode_end()
+
+    def _step(self) -> None:
+        """A single step of the runtime loop: observe, act, apply, notify, check termination."""
+        observation = self._environment.get_observation()
+        action = self._agent.get_action(observation)
+        self._environment.apply_action(action)
+
+        # Notify subscribers only after the action has been applied.
+        for subscriber in self._subscribers:
+            subscriber.on_step(observation, action)
+
+        # End the episode when the environment says so, or when the step cap is reached.
+        if self._environment.is_episode_complete() or (
+            self._max_episode_steps > 0 and self._episode_steps >= self._max_episode_steps
+        ):
+            self.mark_episode_complete()
diff --git a/packages/openpi-client/src/openpi_client/runtime/subscriber.py b/packages/openpi-client/src/openpi_client/runtime/subscriber.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c69edaa8e814dfcfe56b78b774578fe37f79428
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/runtime/subscriber.py
@@ -0,0 +1,20 @@
+import abc
+
+
+class Subscriber(abc.ABC):
+    """Subscribes to events in the runtime.
+
+    Subscribers can be used to save data, visualize, etc.
+    """
+
+    @abc.abstractmethod
+    def on_episode_start(self) -> None:
+        """Called when an episode starts."""
+
+    @abc.abstractmethod
+    def on_step(self, observation: dict, action: dict) -> None:
+        """Append a step to the episode.
+
+        Called once per runtime step, after the action has been applied.
+        """
+
+    @abc.abstractmethod
+    def on_episode_end(self) -> None:
+        """Called when an episode ends."""
diff --git a/packages/openpi-client/src/openpi_client/websocket_client_policy.py b/packages/openpi-client/src/openpi_client/websocket_client_policy.py
new file mode 100644
index 0000000000000000000000000000000000000000..e309fbb93ed2d66af3f2241c2f228335ac137293
--- /dev/null
+++ b/packages/openpi-client/src/openpi_client/websocket_client_policy.py
@@ -0,0 +1,49 @@
+import logging
+import time
+from typing import Dict, Tuple
+
+import websockets.sync.client
+from typing_extensions import override
+
+from openpi_client import base_policy as _base_policy
+from openpi_client import msgpack_numpy
+
+
+class WebsocketClientPolicy(_base_policy.BasePolicy):
+    """Implements the Policy interface by communicating with a server over websocket.
+
+    See WebsocketPolicyServer for a corresponding server implementation.
+    """
+
+    def __init__(self, host: str = "0.0.0.0", port: int = 8000) -> None:
+        # NOTE(review): "0.0.0.0" is conventionally a bind address, not a connect target;
+        # most OSes route it to localhost, but "localhost" would be clearer — confirm intent.
+        self._uri = f"ws://{host}:{port}"
+        self._packer = msgpack_numpy.Packer()
+        # Blocks until the server accepts a connection; also captures its metadata message.
+        self._ws, self._server_metadata = self._wait_for_server()
+
+    def get_server_metadata(self) -> Dict:
+        """Returns the metadata dict the server sent when the connection was established."""
+        return self._server_metadata
+
+    def _wait_for_server(self) -> Tuple[websockets.sync.client.ClientConnection, Dict]:
+        """Retries the connection every 5 seconds until the server accepts it.
+
+        Returns the open connection and the first message received (server metadata).
+        """
+        logging.info(f"Waiting for server at {self._uri}...")
+        while True:
+            try:
+                # compression=None skips per-message compression; max_size=None lifts the
+                # incoming-message size limit — presumably because observations/actions
+                # contain large raw arrays (TODO confirm).
+                conn = websockets.sync.client.connect(self._uri, compression=None, max_size=None)
+                metadata = msgpack_numpy.unpackb(conn.recv())
+                return conn, metadata
+            except ConnectionRefusedError:
+                # NOTE(review): only ConnectionRefusedError is retried; DNS errors or
+                # timeouts propagate to the caller — confirm that is intended.
+                logging.info("Still waiting for server...")
+                time.sleep(5)
+
+    @override
+    def infer(self, obs: Dict) -> Dict:  # noqa: UP006
+        # One synchronous request/response round trip per inference call.
+        data = self._packer.pack(obs)
+        self._ws.send(data)
+        response = self._ws.recv()
+        if isinstance(response, str):
+            # we're expecting bytes; if the server sends a string, it's an error.
+            raise RuntimeError(f"Error in inference server:\n{response}")
+        return msgpack_numpy.unpackb(response)
+
+    @override
+    def reset(self) -> None:
+        # No client-side state to clear; the websocket connection is kept open.
+        pass
diff --git a/pi0_fast_base_params/d/900e9758c5a900a9363d9695ce317b05 b/pi0_fast_base_params/d/900e9758c5a900a9363d9695ce317b05
new file mode 100644
index 0000000000000000000000000000000000000000..bc6b2ea3a6318e8e06792ef93de61486b5b48a77
Binary files /dev/null and b/pi0_fast_base_params/d/900e9758c5a900a9363d9695ce317b05 differ
diff --git a/pi0_fast_base_params/ocdbt.process_0/d/188d9577d40fae6772f2a1e734d246a6 b/pi0_fast_base_params/ocdbt.process_0/d/188d9577d40fae6772f2a1e734d246a6
new file mode 100644
index 0000000000000000000000000000000000000000..a07c89df534eaa67db03bd79c5ea4f48aa26d7e7
Binary files /dev/null and b/pi0_fast_base_params/ocdbt.process_0/d/188d9577d40fae6772f2a1e734d246a6 differ
diff --git a/pi0_fast_base_params/ocdbt.process_0/d/82430346fa8fc21f6c02b925d2071bbe b/pi0_fast_base_params/ocdbt.process_0/d/82430346fa8fc21f6c02b925d2071bbe
new file mode 100644
index 0000000000000000000000000000000000000000..56ede5c93254a3e958435c5e15bcbe09934bc1eb
Binary files /dev/null and b/pi0_fast_base_params/ocdbt.process_0/d/82430346fa8fc21f6c02b925d2071bbe differ
diff --git a/pi0_fast_base_params/ocdbt.process_0/d/8c8f756cbcb196e0b8950de1511a1889 b/pi0_fast_base_params/ocdbt.process_0/d/8c8f756cbcb196e0b8950de1511a1889
new file mode 100644
index 0000000000000000000000000000000000000000..6d2efe66ab394e126cb43cdead050bc881d30b44
Binary files /dev/null and b/pi0_fast_base_params/ocdbt.process_0/d/8c8f756cbcb196e0b8950de1511a1889 differ
diff --git a/pi0_fast_base_params/ocdbt.process_0/d/c0125332e372918231585505ecbac6a0 b/pi0_fast_base_params/ocdbt.process_0/d/c0125332e372918231585505ecbac6a0
new file mode 100644
index 0000000000000000000000000000000000000000..58db609d8760080ff213d05591ed44ce787cc45e
Binary files /dev/null and b/pi0_fast_base_params/ocdbt.process_0/d/c0125332e372918231585505ecbac6a0 differ
diff --git a/pi0_fast_base_params/ocdbt.process_0/d/f55c8d160596d5002991decf8dc4edc6 b/pi0_fast_base_params/ocdbt.process_0/d/f55c8d160596d5002991decf8dc4edc6
new file mode 100644
index 0000000000000000000000000000000000000000..345184b669f6afa9e2ad6a94ac795fb83c2f9c70
Binary files /dev/null and b/pi0_fast_base_params/ocdbt.process_0/d/f55c8d160596d5002991decf8dc4edc6 differ
diff --git a/pi0_fast_base_params/ocdbt.process_0/d/fbf426851e50f4721bba2e803e51535c b/pi0_fast_base_params/ocdbt.process_0/d/fbf426851e50f4721bba2e803e51535c
new file mode 100644
index 0000000000000000000000000000000000000000..522c145a386363ccc2501069579aaa74ae9e1daf
Binary files /dev/null and b/pi0_fast_base_params/ocdbt.process_0/d/fbf426851e50f4721bba2e803e51535c differ
diff --git a/pi0_fast_base_params/ocdbt.process_0/manifest.ocdbt b/pi0_fast_base_params/ocdbt.process_0/manifest.ocdbt
new file mode 100644
index 0000000000000000000000000000000000000000..d94b5e29501e0dbd7c833cc858df215bbad52638
Binary files /dev/null and b/pi0_fast_base_params/ocdbt.process_0/manifest.ocdbt differ
diff --git a/rag/ricl_training_context_libero_10_test/metadata.json b/rag/ricl_training_context_libero_10_test/metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..a4b4477cabe21b38db765c45b3e043d4d815f916
--- /dev/null
+++ b/rag/ricl_training_context_libero_10_test/metadata.json
@@ -0,0 +1 @@
+[{"db_idx": 0, "episode_idx": 0, "frame_idx": 0, "global_frame_idx": 0, "task_index": 0}, {"db_idx": 1, "episode_idx": 0, "frame_idx": 1, "global_frame_idx": 1, "task_index": 0}, {"db_idx": 2, "episode_idx": 0, "frame_idx": 2, "global_frame_idx": 2, "task_index": 0}, {"db_idx": 3, "episode_idx": 0, "frame_idx": 3, "global_frame_idx": 3, "task_index": 0}, {"db_idx": 4, "episode_idx": 0, "frame_idx": 4, "global_frame_idx": 4, "task_index": 0}, {"db_idx": 5, "episode_idx": 0, "frame_idx": 5, "global_frame_idx": 5, "task_index": 0}, {"db_idx": 6, "episode_idx": 0, "frame_idx": 6, "global_frame_idx": 6, "task_index": 0}, {"db_idx": 7, "episode_idx": 0, "frame_idx": 7, "global_frame_idx": 7, "task_index": 0}, {"db_idx": 8, "episode_idx": 0, "frame_idx": 8, "global_frame_idx": 8, "task_index": 0}, {"db_idx": 9, "episode_idx": 0, "frame_idx": 9, "global_frame_idx": 9, "task_index": 0}, {"db_idx": 10, "episode_idx": 0, "frame_idx": 10, "global_frame_idx": 10, "task_index": 0}, {"db_idx": 11, "episode_idx": 0, "frame_idx": 11, "global_frame_idx": 11, "task_index": 0}, {"db_idx": 12, "episode_idx": 0, "frame_idx": 12, "global_frame_idx": 12, "task_index": 0}, {"db_idx": 13, "episode_idx": 0, "frame_idx": 13, "global_frame_idx": 13, "task_index": 0}, {"db_idx": 14, "episode_idx": 0, "frame_idx": 14, "global_frame_idx": 14, "task_index": 0}, {"db_idx": 15, "episode_idx": 0, "frame_idx": 15, "global_frame_idx": 15, "task_index": 0}, {"db_idx": 16, "episode_idx": 0, "frame_idx": 16, "global_frame_idx": 16, "task_index": 0}, {"db_idx": 17, "episode_idx": 0, "frame_idx": 17, "global_frame_idx": 17, "task_index": 0}, {"db_idx": 18, "episode_idx": 0, "frame_idx": 18, "global_frame_idx": 18, "task_index": 0}, {"db_idx": 19, "episode_idx": 0, "frame_idx": 19, "global_frame_idx": 19, "task_index": 0}, {"db_idx": 20, "episode_idx": 0, "frame_idx": 20, "global_frame_idx": 20, "task_index": 0}, {"db_idx": 21, "episode_idx": 0, "frame_idx": 21, "global_frame_idx": 21, "task_index": 0}, 
{"db_idx": 22, "episode_idx": 0, "frame_idx": 22, "global_frame_idx": 22, "task_index": 0}, {"db_idx": 23, "episode_idx": 0, "frame_idx": 23, "global_frame_idx": 23, "task_index": 0}, {"db_idx": 24, "episode_idx": 0, "frame_idx": 24, "global_frame_idx": 24, "task_index": 0}, {"db_idx": 25, "episode_idx": 0, "frame_idx": 25, "global_frame_idx": 25, "task_index": 0}, {"db_idx": 26, "episode_idx": 0, "frame_idx": 26, "global_frame_idx": 26, "task_index": 0}, {"db_idx": 27, "episode_idx": 0, "frame_idx": 27, "global_frame_idx": 27, "task_index": 0}, {"db_idx": 28, "episode_idx": 0, "frame_idx": 28, "global_frame_idx": 28, "task_index": 0}, {"db_idx": 29, "episode_idx": 0, "frame_idx": 29, "global_frame_idx": 29, "task_index": 0}, {"db_idx": 30, "episode_idx": 0, "frame_idx": 30, "global_frame_idx": 30, "task_index": 0}, {"db_idx": 31, "episode_idx": 0, "frame_idx": 31, "global_frame_idx": 31, "task_index": 0}, {"db_idx": 32, "episode_idx": 0, "frame_idx": 32, "global_frame_idx": 32, "task_index": 0}, {"db_idx": 33, "episode_idx": 0, "frame_idx": 33, "global_frame_idx": 33, "task_index": 0}, {"db_idx": 34, "episode_idx": 0, "frame_idx": 34, "global_frame_idx": 34, "task_index": 0}, {"db_idx": 35, "episode_idx": 0, "frame_idx": 35, "global_frame_idx": 35, "task_index": 0}, {"db_idx": 36, "episode_idx": 0, "frame_idx": 36, "global_frame_idx": 36, "task_index": 0}, {"db_idx": 37, "episode_idx": 0, "frame_idx": 37, "global_frame_idx": 37, "task_index": 0}, {"db_idx": 38, "episode_idx": 0, "frame_idx": 38, "global_frame_idx": 38, "task_index": 0}, {"db_idx": 39, "episode_idx": 0, "frame_idx": 39, "global_frame_idx": 39, "task_index": 0}, {"db_idx": 40, "episode_idx": 0, "frame_idx": 40, "global_frame_idx": 40, "task_index": 0}, {"db_idx": 41, "episode_idx": 0, "frame_idx": 41, "global_frame_idx": 41, "task_index": 0}, {"db_idx": 42, "episode_idx": 0, "frame_idx": 42, "global_frame_idx": 42, "task_index": 0}, {"db_idx": 43, "episode_idx": 0, "frame_idx": 43, 
"global_frame_idx": 43, "task_index": 0}, {"db_idx": 44, "episode_idx": 0, "frame_idx": 44, "global_frame_idx": 44, "task_index": 0}, {"db_idx": 45, "episode_idx": 0, "frame_idx": 45, "global_frame_idx": 45, "task_index": 0}, {"db_idx": 46, "episode_idx": 0, "frame_idx": 46, "global_frame_idx": 46, "task_index": 0}, {"db_idx": 47, "episode_idx": 0, "frame_idx": 47, "global_frame_idx": 47, "task_index": 0}, {"db_idx": 48, "episode_idx": 0, "frame_idx": 48, "global_frame_idx": 48, "task_index": 0}, {"db_idx": 49, "episode_idx": 0, "frame_idx": 49, "global_frame_idx": 49, "task_index": 0}, {"db_idx": 50, "episode_idx": 0, "frame_idx": 50, "global_frame_idx": 50, "task_index": 0}, {"db_idx": 51, "episode_idx": 0, "frame_idx": 51, "global_frame_idx": 51, "task_index": 0}, {"db_idx": 52, "episode_idx": 0, "frame_idx": 52, "global_frame_idx": 52, "task_index": 0}, {"db_idx": 53, "episode_idx": 0, "frame_idx": 53, "global_frame_idx": 53, "task_index": 0}, {"db_idx": 54, "episode_idx": 0, "frame_idx": 54, "global_frame_idx": 54, "task_index": 0}, {"db_idx": 55, "episode_idx": 0, "frame_idx": 55, "global_frame_idx": 55, "task_index": 0}, {"db_idx": 56, "episode_idx": 0, "frame_idx": 56, "global_frame_idx": 56, "task_index": 0}, {"db_idx": 57, "episode_idx": 0, "frame_idx": 57, "global_frame_idx": 57, "task_index": 0}, {"db_idx": 58, "episode_idx": 0, "frame_idx": 58, "global_frame_idx": 58, "task_index": 0}, {"db_idx": 59, "episode_idx": 0, "frame_idx": 59, "global_frame_idx": 59, "task_index": 0}, {"db_idx": 60, "episode_idx": 0, "frame_idx": 60, "global_frame_idx": 60, "task_index": 0}, {"db_idx": 61, "episode_idx": 0, "frame_idx": 61, "global_frame_idx": 61, "task_index": 0}, {"db_idx": 62, "episode_idx": 0, "frame_idx": 62, "global_frame_idx": 62, "task_index": 0}, {"db_idx": 63, "episode_idx": 0, "frame_idx": 63, "global_frame_idx": 63, "task_index": 0}, {"db_idx": 64, "episode_idx": 0, "frame_idx": 64, "global_frame_idx": 64, "task_index": 0}, {"db_idx": 65, 
"episode_idx": 0, "frame_idx": 65, "global_frame_idx": 65, "task_index": 0}, {"db_idx": 66, "episode_idx": 0, "frame_idx": 66, "global_frame_idx": 66, "task_index": 0}, {"db_idx": 67, "episode_idx": 0, "frame_idx": 67, "global_frame_idx": 67, "task_index": 0}, {"db_idx": 68, "episode_idx": 0, "frame_idx": 68, "global_frame_idx": 68, "task_index": 0}, {"db_idx": 69, "episode_idx": 0, "frame_idx": 69, "global_frame_idx": 69, "task_index": 0}, {"db_idx": 70, "episode_idx": 0, "frame_idx": 70, "global_frame_idx": 70, "task_index": 0}, {"db_idx": 71, "episode_idx": 0, "frame_idx": 71, "global_frame_idx": 71, "task_index": 0}, {"db_idx": 72, "episode_idx": 0, "frame_idx": 72, "global_frame_idx": 72, "task_index": 0}, {"db_idx": 73, "episode_idx": 0, "frame_idx": 73, "global_frame_idx": 73, "task_index": 0}, {"db_idx": 74, "episode_idx": 0, "frame_idx": 74, "global_frame_idx": 74, "task_index": 0}, {"db_idx": 75, "episode_idx": 0, "frame_idx": 75, "global_frame_idx": 75, "task_index": 0}, {"db_idx": 76, "episode_idx": 0, "frame_idx": 76, "global_frame_idx": 76, "task_index": 0}, {"db_idx": 77, "episode_idx": 0, "frame_idx": 77, "global_frame_idx": 77, "task_index": 0}, {"db_idx": 78, "episode_idx": 0, "frame_idx": 78, "global_frame_idx": 78, "task_index": 0}, {"db_idx": 79, "episode_idx": 0, "frame_idx": 79, "global_frame_idx": 79, "task_index": 0}, {"db_idx": 80, "episode_idx": 0, "frame_idx": 80, "global_frame_idx": 80, "task_index": 0}, {"db_idx": 81, "episode_idx": 0, "frame_idx": 81, "global_frame_idx": 81, "task_index": 0}, {"db_idx": 82, "episode_idx": 0, "frame_idx": 82, "global_frame_idx": 82, "task_index": 0}, {"db_idx": 83, "episode_idx": 0, "frame_idx": 83, "global_frame_idx": 83, "task_index": 0}, {"db_idx": 84, "episode_idx": 0, "frame_idx": 84, "global_frame_idx": 84, "task_index": 0}, {"db_idx": 85, "episode_idx": 0, "frame_idx": 85, "global_frame_idx": 85, "task_index": 0}, {"db_idx": 86, "episode_idx": 0, "frame_idx": 86, "global_frame_idx": 86, 
"task_index": 0}, {"db_idx": 87, "episode_idx": 0, "frame_idx": 87, "global_frame_idx": 87, "task_index": 0}, {"db_idx": 88, "episode_idx": 0, "frame_idx": 88, "global_frame_idx": 88, "task_index": 0}, {"db_idx": 89, "episode_idx": 0, "frame_idx": 89, "global_frame_idx": 89, "task_index": 0}, {"db_idx": 90, "episode_idx": 0, "frame_idx": 90, "global_frame_idx": 90, "task_index": 0}, {"db_idx": 91, "episode_idx": 0, "frame_idx": 91, "global_frame_idx": 91, "task_index": 0}, {"db_idx": 92, "episode_idx": 0, "frame_idx": 92, "global_frame_idx": 92, "task_index": 0}, {"db_idx": 93, "episode_idx": 0, "frame_idx": 93, "global_frame_idx": 93, "task_index": 0}, {"db_idx": 94, "episode_idx": 0, "frame_idx": 94, "global_frame_idx": 94, "task_index": 0}, {"db_idx": 95, "episode_idx": 0, "frame_idx": 95, "global_frame_idx": 95, "task_index": 0}, {"db_idx": 96, "episode_idx": 0, "frame_idx": 96, "global_frame_idx": 96, "task_index": 0}, {"db_idx": 97, "episode_idx": 0, "frame_idx": 97, "global_frame_idx": 97, "task_index": 0}, {"db_idx": 98, "episode_idx": 0, "frame_idx": 98, "global_frame_idx": 98, "task_index": 0}, {"db_idx": 99, "episode_idx": 0, "frame_idx": 99, "global_frame_idx": 99, "task_index": 0}, {"db_idx": 100, "episode_idx": 0, "frame_idx": 100, "global_frame_idx": 100, "task_index": 0}, {"db_idx": 101, "episode_idx": 0, "frame_idx": 101, "global_frame_idx": 101, "task_index": 0}, {"db_idx": 102, "episode_idx": 0, "frame_idx": 102, "global_frame_idx": 102, "task_index": 0}, {"db_idx": 103, "episode_idx": 0, "frame_idx": 103, "global_frame_idx": 103, "task_index": 0}, {"db_idx": 104, "episode_idx": 0, "frame_idx": 104, "global_frame_idx": 104, "task_index": 0}, {"db_idx": 105, "episode_idx": 0, "frame_idx": 105, "global_frame_idx": 105, "task_index": 0}, {"db_idx": 106, "episode_idx": 0, "frame_idx": 106, "global_frame_idx": 106, "task_index": 0}, {"db_idx": 107, "episode_idx": 0, "frame_idx": 107, "global_frame_idx": 107, "task_index": 0}, {"db_idx": 108, 
"episode_idx": 0, "frame_idx": 108, "global_frame_idx": 108, "task_index": 0}, {"db_idx": 109, "episode_idx": 0, "frame_idx": 109, "global_frame_idx": 109, "task_index": 0}, {"db_idx": 110, "episode_idx": 0, "frame_idx": 110, "global_frame_idx": 110, "task_index": 0}, {"db_idx": 111, "episode_idx": 0, "frame_idx": 111, "global_frame_idx": 111, "task_index": 0}, {"db_idx": 112, "episode_idx": 0, "frame_idx": 112, "global_frame_idx": 112, "task_index": 0}, {"db_idx": 113, "episode_idx": 0, "frame_idx": 113, "global_frame_idx": 113, "task_index": 0}, {"db_idx": 114, "episode_idx": 0, "frame_idx": 114, "global_frame_idx": 114, "task_index": 0}, {"db_idx": 115, "episode_idx": 0, "frame_idx": 115, "global_frame_idx": 115, "task_index": 0}, {"db_idx": 116, "episode_idx": 0, "frame_idx": 116, "global_frame_idx": 116, "task_index": 0}, {"db_idx": 117, "episode_idx": 0, "frame_idx": 117, "global_frame_idx": 117, "task_index": 0}, {"db_idx": 118, "episode_idx": 0, "frame_idx": 118, "global_frame_idx": 118, "task_index": 0}, {"db_idx": 119, "episode_idx": 0, "frame_idx": 119, "global_frame_idx": 119, "task_index": 0}, {"db_idx": 120, "episode_idx": 0, "frame_idx": 120, "global_frame_idx": 120, "task_index": 0}, {"db_idx": 121, "episode_idx": 0, "frame_idx": 121, "global_frame_idx": 121, "task_index": 0}, {"db_idx": 122, "episode_idx": 0, "frame_idx": 122, "global_frame_idx": 122, "task_index": 0}, {"db_idx": 123, "episode_idx": 0, "frame_idx": 123, "global_frame_idx": 123, "task_index": 0}, {"db_idx": 124, "episode_idx": 0, "frame_idx": 124, "global_frame_idx": 124, "task_index": 0}, {"db_idx": 125, "episode_idx": 0, "frame_idx": 125, "global_frame_idx": 125, "task_index": 0}, {"db_idx": 126, "episode_idx": 0, "frame_idx": 126, "global_frame_idx": 126, "task_index": 0}, {"db_idx": 127, "episode_idx": 0, "frame_idx": 127, "global_frame_idx": 127, "task_index": 0}, {"db_idx": 128, "episode_idx": 0, "frame_idx": 128, "global_frame_idx": 128, "task_index": 0}, {"db_idx": 129, 
"episode_idx": 0, "frame_idx": 129, "global_frame_idx": 129, "task_index": 0}, {"db_idx": 130, "episode_idx": 0, "frame_idx": 130, "global_frame_idx": 130, "task_index": 0}, {"db_idx": 131, "episode_idx": 0, "frame_idx": 131, "global_frame_idx": 131, "task_index": 0}, {"db_idx": 132, "episode_idx": 0, "frame_idx": 132, "global_frame_idx": 132, "task_index": 0}, {"db_idx": 133, "episode_idx": 0, "frame_idx": 133, "global_frame_idx": 133, "task_index": 0}, {"db_idx": 134, "episode_idx": 0, "frame_idx": 134, "global_frame_idx": 134, "task_index": 0}, {"db_idx": 135, "episode_idx": 0, "frame_idx": 135, "global_frame_idx": 135, "task_index": 0}, {"db_idx": 136, "episode_idx": 0, "frame_idx": 136, "global_frame_idx": 136, "task_index": 0}, {"db_idx": 137, "episode_idx": 0, "frame_idx": 137, "global_frame_idx": 137, "task_index": 0}, {"db_idx": 138, "episode_idx": 0, "frame_idx": 138, "global_frame_idx": 138, "task_index": 0}, {"db_idx": 139, "episode_idx": 0, "frame_idx": 139, "global_frame_idx": 139, "task_index": 0}, {"db_idx": 140, "episode_idx": 0, "frame_idx": 140, "global_frame_idx": 140, "task_index": 0}, {"db_idx": 141, "episode_idx": 0, "frame_idx": 141, "global_frame_idx": 141, "task_index": 0}, {"db_idx": 142, "episode_idx": 0, "frame_idx": 142, "global_frame_idx": 142, "task_index": 0}, {"db_idx": 143, "episode_idx": 0, "frame_idx": 143, "global_frame_idx": 143, "task_index": 0}, {"db_idx": 144, "episode_idx": 0, "frame_idx": 144, "global_frame_idx": 144, "task_index": 0}, {"db_idx": 145, "episode_idx": 0, "frame_idx": 145, "global_frame_idx": 145, "task_index": 0}, {"db_idx": 146, "episode_idx": 0, "frame_idx": 146, "global_frame_idx": 146, "task_index": 0}, {"db_idx": 147, "episode_idx": 0, "frame_idx": 147, "global_frame_idx": 147, "task_index": 0}, {"db_idx": 148, "episode_idx": 0, "frame_idx": 148, "global_frame_idx": 148, "task_index": 0}, {"db_idx": 149, "episode_idx": 0, "frame_idx": 149, "global_frame_idx": 149, "task_index": 0}, {"db_idx": 150, 
"episode_idx": 0, "frame_idx": 150, "global_frame_idx": 150, "task_index": 0}, {"db_idx": 151, "episode_idx": 0, "frame_idx": 151, "global_frame_idx": 151, "task_index": 0}, {"db_idx": 152, "episode_idx": 0, "frame_idx": 152, "global_frame_idx": 152, "task_index": 0}, {"db_idx": 153, "episode_idx": 0, "frame_idx": 153, "global_frame_idx": 153, "task_index": 0}, {"db_idx": 154, "episode_idx": 0, "frame_idx": 154, "global_frame_idx": 154, "task_index": 0}, {"db_idx": 155, "episode_idx": 0, "frame_idx": 155, "global_frame_idx": 155, "task_index": 0}, {"db_idx": 156, "episode_idx": 0, "frame_idx": 156, "global_frame_idx": 156, "task_index": 0}, {"db_idx": 157, "episode_idx": 0, "frame_idx": 157, "global_frame_idx": 157, "task_index": 0}, {"db_idx": 158, "episode_idx": 0, "frame_idx": 158, "global_frame_idx": 158, "task_index": 0}, {"db_idx": 159, "episode_idx": 0, "frame_idx": 159, "global_frame_idx": 159, "task_index": 0}, {"db_idx": 160, "episode_idx": 0, "frame_idx": 160, "global_frame_idx": 160, "task_index": 0}, {"db_idx": 161, "episode_idx": 0, "frame_idx": 161, "global_frame_idx": 161, "task_index": 0}, {"db_idx": 162, "episode_idx": 0, "frame_idx": 162, "global_frame_idx": 162, "task_index": 0}, {"db_idx": 163, "episode_idx": 0, "frame_idx": 163, "global_frame_idx": 163, "task_index": 0}, {"db_idx": 164, "episode_idx": 0, "frame_idx": 164, "global_frame_idx": 164, "task_index": 0}, {"db_idx": 165, "episode_idx": 0, "frame_idx": 165, "global_frame_idx": 165, "task_index": 0}, {"db_idx": 166, "episode_idx": 0, "frame_idx": 166, "global_frame_idx": 166, "task_index": 0}, {"db_idx": 167, "episode_idx": 0, "frame_idx": 167, "global_frame_idx": 167, "task_index": 0}, {"db_idx": 168, "episode_idx": 0, "frame_idx": 168, "global_frame_idx": 168, "task_index": 0}, {"db_idx": 169, "episode_idx": 0, "frame_idx": 169, "global_frame_idx": 169, "task_index": 0}, {"db_idx": 170, "episode_idx": 0, "frame_idx": 170, "global_frame_idx": 170, "task_index": 0}, {"db_idx": 171, 
"episode_idx": 0, "frame_idx": 171, "global_frame_idx": 171, "task_index": 0}, {"db_idx": 172, "episode_idx": 0, "frame_idx": 172, "global_frame_idx": 172, "task_index": 0}, {"db_idx": 173, "episode_idx": 0, "frame_idx": 173, "global_frame_idx": 173, "task_index": 0}, {"db_idx": 174, "episode_idx": 0, "frame_idx": 174, "global_frame_idx": 174, "task_index": 0}, {"db_idx": 175, "episode_idx": 0, "frame_idx": 175, "global_frame_idx": 175, "task_index": 0}, {"db_idx": 176, "episode_idx": 0, "frame_idx": 176, "global_frame_idx": 176, "task_index": 0}, {"db_idx": 177, "episode_idx": 0, "frame_idx": 177, "global_frame_idx": 177, "task_index": 0}, {"db_idx": 178, "episode_idx": 0, "frame_idx": 178, "global_frame_idx": 178, "task_index": 0}, {"db_idx": 179, "episode_idx": 0, "frame_idx": 179, "global_frame_idx": 179, "task_index": 0}, {"db_idx": 180, "episode_idx": 0, "frame_idx": 180, "global_frame_idx": 180, "task_index": 0}, {"db_idx": 181, "episode_idx": 0, "frame_idx": 181, "global_frame_idx": 181, "task_index": 0}, {"db_idx": 182, "episode_idx": 0, "frame_idx": 182, "global_frame_idx": 182, "task_index": 0}, {"db_idx": 183, "episode_idx": 0, "frame_idx": 183, "global_frame_idx": 183, "task_index": 0}, {"db_idx": 184, "episode_idx": 0, "frame_idx": 184, "global_frame_idx": 184, "task_index": 0}, {"db_idx": 185, "episode_idx": 0, "frame_idx": 185, "global_frame_idx": 185, "task_index": 0}, {"db_idx": 186, "episode_idx": 0, "frame_idx": 186, "global_frame_idx": 186, "task_index": 0}, {"db_idx": 187, "episode_idx": 0, "frame_idx": 187, "global_frame_idx": 187, "task_index": 0}, {"db_idx": 188, "episode_idx": 0, "frame_idx": 188, "global_frame_idx": 188, "task_index": 0}, {"db_idx": 189, "episode_idx": 0, "frame_idx": 189, "global_frame_idx": 189, "task_index": 0}, {"db_idx": 190, "episode_idx": 0, "frame_idx": 190, "global_frame_idx": 190, "task_index": 0}, {"db_idx": 191, "episode_idx": 0, "frame_idx": 191, "global_frame_idx": 191, "task_index": 0}, {"db_idx": 192, 
"episode_idx": 0, "frame_idx": 192, "global_frame_idx": 192, "task_index": 0}, {"db_idx": 193, "episode_idx": 0, "frame_idx": 193, "global_frame_idx": 193, "task_index": 0}, {"db_idx": 194, "episode_idx": 0, "frame_idx": 194, "global_frame_idx": 194, "task_index": 0}, {"db_idx": 195, "episode_idx": 0, "frame_idx": 195, "global_frame_idx": 195, "task_index": 0}, {"db_idx": 196, "episode_idx": 0, "frame_idx": 196, "global_frame_idx": 196, "task_index": 0}, {"db_idx": 197, "episode_idx": 0, "frame_idx": 197, "global_frame_idx": 197, "task_index": 0}, {"db_idx": 198, "episode_idx": 0, "frame_idx": 198, "global_frame_idx": 198, "task_index": 0}, {"db_idx": 199, "episode_idx": 0, "frame_idx": 199, "global_frame_idx": 199, "task_index": 0}, {"db_idx": 200, "episode_idx": 0, "frame_idx": 200, "global_frame_idx": 200, "task_index": 0}, {"db_idx": 201, "episode_idx": 0, "frame_idx": 201, "global_frame_idx": 201, "task_index": 0}, {"db_idx": 202, "episode_idx": 0, "frame_idx": 202, "global_frame_idx": 202, "task_index": 0}, {"db_idx": 203, "episode_idx": 0, "frame_idx": 203, "global_frame_idx": 203, "task_index": 0}, {"db_idx": 204, "episode_idx": 0, "frame_idx": 204, "global_frame_idx": 204, "task_index": 0}, {"db_idx": 205, "episode_idx": 0, "frame_idx": 205, "global_frame_idx": 205, "task_index": 0}, {"db_idx": 206, "episode_idx": 0, "frame_idx": 206, "global_frame_idx": 206, "task_index": 0}, {"db_idx": 207, "episode_idx": 0, "frame_idx": 207, "global_frame_idx": 207, "task_index": 0}, {"db_idx": 208, "episode_idx": 0, "frame_idx": 208, "global_frame_idx": 208, "task_index": 0}, {"db_idx": 209, "episode_idx": 0, "frame_idx": 209, "global_frame_idx": 209, "task_index": 0}, {"db_idx": 210, "episode_idx": 0, "frame_idx": 210, "global_frame_idx": 210, "task_index": 0}, {"db_idx": 211, "episode_idx": 0, "frame_idx": 211, "global_frame_idx": 211, "task_index": 0}, {"db_idx": 212, "episode_idx": 0, "frame_idx": 212, "global_frame_idx": 212, "task_index": 0}, {"db_idx": 213, 
"episode_idx": 0, "frame_idx": 213, "global_frame_idx": 213, "task_index": 0}, {"db_idx": 214, "episode_idx": 0, "frame_idx": 214, "global_frame_idx": 214, "task_index": 0}, {"db_idx": 215, "episode_idx": 0, "frame_idx": 215, "global_frame_idx": 215, "task_index": 0}, {"db_idx": 216, "episode_idx": 0, "frame_idx": 216, "global_frame_idx": 216, "task_index": 0}, {"db_idx": 217, "episode_idx": 0, "frame_idx": 217, "global_frame_idx": 217, "task_index": 0}, {"db_idx": 218, "episode_idx": 0, "frame_idx": 218, "global_frame_idx": 218, "task_index": 0}, {"db_idx": 219, "episode_idx": 0, "frame_idx": 219, "global_frame_idx": 219, "task_index": 0}, {"db_idx": 220, "episode_idx": 0, "frame_idx": 220, "global_frame_idx": 220, "task_index": 0}, {"db_idx": 221, "episode_idx": 0, "frame_idx": 221, "global_frame_idx": 221, "task_index": 0}, {"db_idx": 222, "episode_idx": 0, "frame_idx": 222, "global_frame_idx": 222, "task_index": 0}, {"db_idx": 223, "episode_idx": 0, "frame_idx": 223, "global_frame_idx": 223, "task_index": 0}, {"db_idx": 224, "episode_idx": 0, "frame_idx": 224, "global_frame_idx": 224, "task_index": 0}, {"db_idx": 225, "episode_idx": 0, "frame_idx": 225, "global_frame_idx": 225, "task_index": 0}, {"db_idx": 226, "episode_idx": 0, "frame_idx": 226, "global_frame_idx": 226, "task_index": 0}, {"db_idx": 227, "episode_idx": 0, "frame_idx": 227, "global_frame_idx": 227, "task_index": 0}, {"db_idx": 228, "episode_idx": 0, "frame_idx": 228, "global_frame_idx": 228, "task_index": 0}, {"db_idx": 229, "episode_idx": 0, "frame_idx": 229, "global_frame_idx": 229, "task_index": 0}, {"db_idx": 230, "episode_idx": 0, "frame_idx": 230, "global_frame_idx": 230, "task_index": 0}, {"db_idx": 231, "episode_idx": 0, "frame_idx": 231, "global_frame_idx": 231, "task_index": 0}, {"db_idx": 232, "episode_idx": 0, "frame_idx": 232, "global_frame_idx": 232, "task_index": 0}, {"db_idx": 233, "episode_idx": 0, "frame_idx": 233, "global_frame_idx": 233, "task_index": 0}, {"db_idx": 234, 
"episode_idx": 0, "frame_idx": 234, "global_frame_idx": 234, "task_index": 0}, {"db_idx": 235, "episode_idx": 0, "frame_idx": 235, "global_frame_idx": 235, "task_index": 0}, {"db_idx": 236, "episode_idx": 0, "frame_idx": 236, "global_frame_idx": 236, "task_index": 0}, {"db_idx": 237, "episode_idx": 0, "frame_idx": 237, "global_frame_idx": 237, "task_index": 0}, {"db_idx": 238, "episode_idx": 0, "frame_idx": 238, "global_frame_idx": 238, "task_index": 0}, {"db_idx": 239, "episode_idx": 0, "frame_idx": 239, "global_frame_idx": 239, "task_index": 0}, {"db_idx": 240, "episode_idx": 0, "frame_idx": 240, "global_frame_idx": 240, "task_index": 0}, {"db_idx": 241, "episode_idx": 0, "frame_idx": 241, "global_frame_idx": 241, "task_index": 0}, {"db_idx": 242, "episode_idx": 0, "frame_idx": 242, "global_frame_idx": 242, "task_index": 0}, {"db_idx": 243, "episode_idx": 0, "frame_idx": 243, "global_frame_idx": 243, "task_index": 0}, {"db_idx": 244, "episode_idx": 0, "frame_idx": 244, "global_frame_idx": 244, "task_index": 0}, {"db_idx": 245, "episode_idx": 0, "frame_idx": 245, "global_frame_idx": 245, "task_index": 0}, {"db_idx": 246, "episode_idx": 0, "frame_idx": 246, "global_frame_idx": 246, "task_index": 0}, {"db_idx": 247, "episode_idx": 0, "frame_idx": 247, "global_frame_idx": 247, "task_index": 0}, {"db_idx": 248, "episode_idx": 0, "frame_idx": 248, "global_frame_idx": 248, "task_index": 0}, {"db_idx": 249, "episode_idx": 0, "frame_idx": 249, "global_frame_idx": 249, "task_index": 0}, {"db_idx": 250, "episode_idx": 0, "frame_idx": 250, "global_frame_idx": 250, "task_index": 0}, {"db_idx": 251, "episode_idx": 0, "frame_idx": 251, "global_frame_idx": 251, "task_index": 0}, {"db_idx": 252, "episode_idx": 0, "frame_idx": 252, "global_frame_idx": 252, "task_index": 0}, {"db_idx": 253, "episode_idx": 0, "frame_idx": 253, "global_frame_idx": 253, "task_index": 0}, {"db_idx": 254, "episode_idx": 0, "frame_idx": 254, "global_frame_idx": 254, "task_index": 0}, {"db_idx": 255, 
"episode_idx": 0, "frame_idx": 255, "global_frame_idx": 255, "task_index": 0}, {"db_idx": 256, "episode_idx": 0, "frame_idx": 256, "global_frame_idx": 256, "task_index": 0}, {"db_idx": 257, "episode_idx": 0, "frame_idx": 257, "global_frame_idx": 257, "task_index": 0}, {"db_idx": 258, "episode_idx": 0, "frame_idx": 258, "global_frame_idx": 258, "task_index": 0}, {"db_idx": 259, "episode_idx": 0, "frame_idx": 259, "global_frame_idx": 259, "task_index": 0}, {"db_idx": 260, "episode_idx": 0, "frame_idx": 260, "global_frame_idx": 260, "task_index": 0}, {"db_idx": 261, "episode_idx": 1, "frame_idx": 0, "global_frame_idx": 261, "task_index": 0}, {"db_idx": 262, "episode_idx": 1, "frame_idx": 1, "global_frame_idx": 262, "task_index": 0}, {"db_idx": 263, "episode_idx": 1, "frame_idx": 2, "global_frame_idx": 263, "task_index": 0}, {"db_idx": 264, "episode_idx": 1, "frame_idx": 3, "global_frame_idx": 264, "task_index": 0}, {"db_idx": 265, "episode_idx": 1, "frame_idx": 4, "global_frame_idx": 265, "task_index": 0}, {"db_idx": 266, "episode_idx": 1, "frame_idx": 5, "global_frame_idx": 266, "task_index": 0}, {"db_idx": 267, "episode_idx": 1, "frame_idx": 6, "global_frame_idx": 267, "task_index": 0}, {"db_idx": 268, "episode_idx": 1, "frame_idx": 7, "global_frame_idx": 268, "task_index": 0}, {"db_idx": 269, "episode_idx": 1, "frame_idx": 8, "global_frame_idx": 269, "task_index": 0}, {"db_idx": 270, "episode_idx": 1, "frame_idx": 9, "global_frame_idx": 270, "task_index": 0}, {"db_idx": 271, "episode_idx": 1, "frame_idx": 10, "global_frame_idx": 271, "task_index": 0}, {"db_idx": 272, "episode_idx": 1, "frame_idx": 11, "global_frame_idx": 272, "task_index": 0}, {"db_idx": 273, "episode_idx": 1, "frame_idx": 12, "global_frame_idx": 273, "task_index": 0}, {"db_idx": 274, "episode_idx": 1, "frame_idx": 13, "global_frame_idx": 274, "task_index": 0}, {"db_idx": 275, "episode_idx": 1, "frame_idx": 14, "global_frame_idx": 275, "task_index": 0}, {"db_idx": 276, "episode_idx": 1, 
"frame_idx": 15, "global_frame_idx": 276, "task_index": 0}, {"db_idx": 277, "episode_idx": 1, "frame_idx": 16, "global_frame_idx": 277, "task_index": 0}, {"db_idx": 278, "episode_idx": 1, "frame_idx": 17, "global_frame_idx": 278, "task_index": 0}, {"db_idx": 279, "episode_idx": 1, "frame_idx": 18, "global_frame_idx": 279, "task_index": 0}, {"db_idx": 280, "episode_idx": 1, "frame_idx": 19, "global_frame_idx": 280, "task_index": 0}, {"db_idx": 281, "episode_idx": 1, "frame_idx": 20, "global_frame_idx": 281, "task_index": 0}, {"db_idx": 282, "episode_idx": 1, "frame_idx": 21, "global_frame_idx": 282, "task_index": 0}, {"db_idx": 283, "episode_idx": 1, "frame_idx": 22, "global_frame_idx": 283, "task_index": 0}, {"db_idx": 284, "episode_idx": 1, "frame_idx": 23, "global_frame_idx": 284, "task_index": 0}, {"db_idx": 285, "episode_idx": 1, "frame_idx": 24, "global_frame_idx": 285, "task_index": 0}, {"db_idx": 286, "episode_idx": 1, "frame_idx": 25, "global_frame_idx": 286, "task_index": 0}, {"db_idx": 287, "episode_idx": 1, "frame_idx": 26, "global_frame_idx": 287, "task_index": 0}, {"db_idx": 288, "episode_idx": 1, "frame_idx": 27, "global_frame_idx": 288, "task_index": 0}, {"db_idx": 289, "episode_idx": 1, "frame_idx": 28, "global_frame_idx": 289, "task_index": 0}, {"db_idx": 290, "episode_idx": 1, "frame_idx": 29, "global_frame_idx": 290, "task_index": 0}, {"db_idx": 291, "episode_idx": 1, "frame_idx": 30, "global_frame_idx": 291, "task_index": 0}, {"db_idx": 292, "episode_idx": 1, "frame_idx": 31, "global_frame_idx": 292, "task_index": 0}, {"db_idx": 293, "episode_idx": 1, "frame_idx": 32, "global_frame_idx": 293, "task_index": 0}, {"db_idx": 294, "episode_idx": 1, "frame_idx": 33, "global_frame_idx": 294, "task_index": 0}, {"db_idx": 295, "episode_idx": 1, "frame_idx": 34, "global_frame_idx": 295, "task_index": 0}, {"db_idx": 296, "episode_idx": 1, "frame_idx": 35, "global_frame_idx": 296, "task_index": 0}, {"db_idx": 297, "episode_idx": 1, "frame_idx": 36, 
"global_frame_idx": 297, "task_index": 0}, {"db_idx": 298, "episode_idx": 1, "frame_idx": 37, "global_frame_idx": 298, "task_index": 0}, {"db_idx": 299, "episode_idx": 1, "frame_idx": 38, "global_frame_idx": 299, "task_index": 0}, {"db_idx": 300, "episode_idx": 1, "frame_idx": 39, "global_frame_idx": 300, "task_index": 0}, {"db_idx": 301, "episode_idx": 1, "frame_idx": 40, "global_frame_idx": 301, "task_index": 0}, {"db_idx": 302, "episode_idx": 1, "frame_idx": 41, "global_frame_idx": 302, "task_index": 0}, {"db_idx": 303, "episode_idx": 1, "frame_idx": 42, "global_frame_idx": 303, "task_index": 0}, {"db_idx": 304, "episode_idx": 1, "frame_idx": 43, "global_frame_idx": 304, "task_index": 0}, {"db_idx": 305, "episode_idx": 1, "frame_idx": 44, "global_frame_idx": 305, "task_index": 0}, {"db_idx": 306, "episode_idx": 1, "frame_idx": 45, "global_frame_idx": 306, "task_index": 0}, {"db_idx": 307, "episode_idx": 1, "frame_idx": 46, "global_frame_idx": 307, "task_index": 0}, {"db_idx": 308, "episode_idx": 1, "frame_idx": 47, "global_frame_idx": 308, "task_index": 0}, {"db_idx": 309, "episode_idx": 1, "frame_idx": 48, "global_frame_idx": 309, "task_index": 0}, {"db_idx": 310, "episode_idx": 1, "frame_idx": 49, "global_frame_idx": 310, "task_index": 0}, {"db_idx": 311, "episode_idx": 1, "frame_idx": 50, "global_frame_idx": 311, "task_index": 0}, {"db_idx": 312, "episode_idx": 1, "frame_idx": 51, "global_frame_idx": 312, "task_index": 0}, {"db_idx": 313, "episode_idx": 1, "frame_idx": 52, "global_frame_idx": 313, "task_index": 0}, {"db_idx": 314, "episode_idx": 1, "frame_idx": 53, "global_frame_idx": 314, "task_index": 0}, {"db_idx": 315, "episode_idx": 1, "frame_idx": 54, "global_frame_idx": 315, "task_index": 0}, {"db_idx": 316, "episode_idx": 1, "frame_idx": 55, "global_frame_idx": 316, "task_index": 0}, {"db_idx": 317, "episode_idx": 1, "frame_idx": 56, "global_frame_idx": 317, "task_index": 0}, {"db_idx": 318, "episode_idx": 1, "frame_idx": 57, "global_frame_idx": 318, 
"task_index": 0}, {"db_idx": 319, "episode_idx": 1, "frame_idx": 58, "global_frame_idx": 319, "task_index": 0}, {"db_idx": 320, "episode_idx": 1, "frame_idx": 59, "global_frame_idx": 320, "task_index": 0}, {"db_idx": 321, "episode_idx": 1, "frame_idx": 60, "global_frame_idx": 321, "task_index": 0}, {"db_idx": 322, "episode_idx": 1, "frame_idx": 61, "global_frame_idx": 322, "task_index": 0}, {"db_idx": 323, "episode_idx": 1, "frame_idx": 62, "global_frame_idx": 323, "task_index": 0}, {"db_idx": 324, "episode_idx": 1, "frame_idx": 63, "global_frame_idx": 324, "task_index": 0}, {"db_idx": 325, "episode_idx": 1, "frame_idx": 64, "global_frame_idx": 325, "task_index": 0}, {"db_idx": 326, "episode_idx": 1, "frame_idx": 65, "global_frame_idx": 326, "task_index": 0}, {"db_idx": 327, "episode_idx": 1, "frame_idx": 66, "global_frame_idx": 327, "task_index": 0}, {"db_idx": 328, "episode_idx": 1, "frame_idx": 67, "global_frame_idx": 328, "task_index": 0}, {"db_idx": 329, "episode_idx": 1, "frame_idx": 68, "global_frame_idx": 329, "task_index": 0}, {"db_idx": 330, "episode_idx": 1, "frame_idx": 69, "global_frame_idx": 330, "task_index": 0}, {"db_idx": 331, "episode_idx": 1, "frame_idx": 70, "global_frame_idx": 331, "task_index": 0}, {"db_idx": 332, "episode_idx": 1, "frame_idx": 71, "global_frame_idx": 332, "task_index": 0}, {"db_idx": 333, "episode_idx": 1, "frame_idx": 72, "global_frame_idx": 333, "task_index": 0}, {"db_idx": 334, "episode_idx": 1, "frame_idx": 73, "global_frame_idx": 334, "task_index": 0}, {"db_idx": 335, "episode_idx": 1, "frame_idx": 74, "global_frame_idx": 335, "task_index": 0}, {"db_idx": 336, "episode_idx": 1, "frame_idx": 75, "global_frame_idx": 336, "task_index": 0}, {"db_idx": 337, "episode_idx": 1, "frame_idx": 76, "global_frame_idx": 337, "task_index": 0}, {"db_idx": 338, "episode_idx": 1, "frame_idx": 77, "global_frame_idx": 338, "task_index": 0}, {"db_idx": 339, "episode_idx": 1, "frame_idx": 78, "global_frame_idx": 339, "task_index": 0}, 
{"db_idx": 340, "episode_idx": 1, "frame_idx": 79, "global_frame_idx": 340, "task_index": 0}, {"db_idx": 341, "episode_idx": 1, "frame_idx": 80, "global_frame_idx": 341, "task_index": 0}, {"db_idx": 342, "episode_idx": 1, "frame_idx": 81, "global_frame_idx": 342, "task_index": 0}, {"db_idx": 343, "episode_idx": 1, "frame_idx": 82, "global_frame_idx": 343, "task_index": 0}, {"db_idx": 344, "episode_idx": 1, "frame_idx": 83, "global_frame_idx": 344, "task_index": 0}, {"db_idx": 345, "episode_idx": 1, "frame_idx": 84, "global_frame_idx": 345, "task_index": 0}, {"db_idx": 346, "episode_idx": 1, "frame_idx": 85, "global_frame_idx": 346, "task_index": 0}, {"db_idx": 347, "episode_idx": 1, "frame_idx": 86, "global_frame_idx": 347, "task_index": 0}, {"db_idx": 348, "episode_idx": 1, "frame_idx": 87, "global_frame_idx": 348, "task_index": 0}, {"db_idx": 349, "episode_idx": 1, "frame_idx": 88, "global_frame_idx": 349, "task_index": 0}, {"db_idx": 350, "episode_idx": 1, "frame_idx": 89, "global_frame_idx": 350, "task_index": 0}, {"db_idx": 351, "episode_idx": 1, "frame_idx": 90, "global_frame_idx": 351, "task_index": 0}, {"db_idx": 352, "episode_idx": 1, "frame_idx": 91, "global_frame_idx": 352, "task_index": 0}, {"db_idx": 353, "episode_idx": 1, "frame_idx": 92, "global_frame_idx": 353, "task_index": 0}, {"db_idx": 354, "episode_idx": 1, "frame_idx": 93, "global_frame_idx": 354, "task_index": 0}, {"db_idx": 355, "episode_idx": 1, "frame_idx": 94, "global_frame_idx": 355, "task_index": 0}, {"db_idx": 356, "episode_idx": 1, "frame_idx": 95, "global_frame_idx": 356, "task_index": 0}, {"db_idx": 357, "episode_idx": 1, "frame_idx": 96, "global_frame_idx": 357, "task_index": 0}, {"db_idx": 358, "episode_idx": 1, "frame_idx": 97, "global_frame_idx": 358, "task_index": 0}, {"db_idx": 359, "episode_idx": 1, "frame_idx": 98, "global_frame_idx": 359, "task_index": 0}, {"db_idx": 360, "episode_idx": 1, "frame_idx": 99, "global_frame_idx": 360, "task_index": 0}, {"db_idx": 361, 
"episode_idx": 1, "frame_idx": 100, "global_frame_idx": 361, "task_index": 0}, {"db_idx": 362, "episode_idx": 1, "frame_idx": 101, "global_frame_idx": 362, "task_index": 0}, {"db_idx": 363, "episode_idx": 1, "frame_idx": 102, "global_frame_idx": 363, "task_index": 0}, {"db_idx": 364, "episode_idx": 1, "frame_idx": 103, "global_frame_idx": 364, "task_index": 0}, {"db_idx": 365, "episode_idx": 1, "frame_idx": 104, "global_frame_idx": 365, "task_index": 0}, {"db_idx": 366, "episode_idx": 1, "frame_idx": 105, "global_frame_idx": 366, "task_index": 0}, {"db_idx": 367, "episode_idx": 1, "frame_idx": 106, "global_frame_idx": 367, "task_index": 0}, {"db_idx": 368, "episode_idx": 1, "frame_idx": 107, "global_frame_idx": 368, "task_index": 0}, {"db_idx": 369, "episode_idx": 1, "frame_idx": 108, "global_frame_idx": 369, "task_index": 0}, {"db_idx": 370, "episode_idx": 1, "frame_idx": 109, "global_frame_idx": 370, "task_index": 0}, {"db_idx": 371, "episode_idx": 1, "frame_idx": 110, "global_frame_idx": 371, "task_index": 0}, {"db_idx": 372, "episode_idx": 1, "frame_idx": 111, "global_frame_idx": 372, "task_index": 0}, {"db_idx": 373, "episode_idx": 1, "frame_idx": 112, "global_frame_idx": 373, "task_index": 0}, {"db_idx": 374, "episode_idx": 1, "frame_idx": 113, "global_frame_idx": 374, "task_index": 0}, {"db_idx": 375, "episode_idx": 1, "frame_idx": 114, "global_frame_idx": 375, "task_index": 0}, {"db_idx": 376, "episode_idx": 1, "frame_idx": 115, "global_frame_idx": 376, "task_index": 0}, {"db_idx": 377, "episode_idx": 1, "frame_idx": 116, "global_frame_idx": 377, "task_index": 0}, {"db_idx": 378, "episode_idx": 1, "frame_idx": 117, "global_frame_idx": 378, "task_index": 0}, {"db_idx": 379, "episode_idx": 1, "frame_idx": 118, "global_frame_idx": 379, "task_index": 0}, {"db_idx": 380, "episode_idx": 1, "frame_idx": 119, "global_frame_idx": 380, "task_index": 0}, {"db_idx": 381, "episode_idx": 1, "frame_idx": 120, "global_frame_idx": 381, "task_index": 0}, {"db_idx": 382, 
"episode_idx": 1, "frame_idx": 121, "global_frame_idx": 382, "task_index": 0}, {"db_idx": 383, "episode_idx": 1, "frame_idx": 122, "global_frame_idx": 383, "task_index": 0}, {"db_idx": 384, "episode_idx": 1, "frame_idx": 123, "global_frame_idx": 384, "task_index": 0}, {"db_idx": 385, "episode_idx": 1, "frame_idx": 124, "global_frame_idx": 385, "task_index": 0}, {"db_idx": 386, "episode_idx": 1, "frame_idx": 125, "global_frame_idx": 386, "task_index": 0}, {"db_idx": 387, "episode_idx": 1, "frame_idx": 126, "global_frame_idx": 387, "task_index": 0}, {"db_idx": 388, "episode_idx": 1, "frame_idx": 127, "global_frame_idx": 388, "task_index": 0}, {"db_idx": 389, "episode_idx": 1, "frame_idx": 128, "global_frame_idx": 389, "task_index": 0}, {"db_idx": 390, "episode_idx": 1, "frame_idx": 129, "global_frame_idx": 390, "task_index": 0}, {"db_idx": 391, "episode_idx": 1, "frame_idx": 130, "global_frame_idx": 391, "task_index": 0}, {"db_idx": 392, "episode_idx": 1, "frame_idx": 131, "global_frame_idx": 392, "task_index": 0}, {"db_idx": 393, "episode_idx": 1, "frame_idx": 132, "global_frame_idx": 393, "task_index": 0}, {"db_idx": 394, "episode_idx": 1, "frame_idx": 133, "global_frame_idx": 394, "task_index": 0}, {"db_idx": 395, "episode_idx": 1, "frame_idx": 134, "global_frame_idx": 395, "task_index": 0}, {"db_idx": 396, "episode_idx": 1, "frame_idx": 135, "global_frame_idx": 396, "task_index": 0}, {"db_idx": 397, "episode_idx": 1, "frame_idx": 136, "global_frame_idx": 397, "task_index": 0}, {"db_idx": 398, "episode_idx": 1, "frame_idx": 137, "global_frame_idx": 398, "task_index": 0}, {"db_idx": 399, "episode_idx": 1, "frame_idx": 138, "global_frame_idx": 399, "task_index": 0}, {"db_idx": 400, "episode_idx": 1, "frame_idx": 139, "global_frame_idx": 400, "task_index": 0}, {"db_idx": 401, "episode_idx": 1, "frame_idx": 140, "global_frame_idx": 401, "task_index": 0}, {"db_idx": 402, "episode_idx": 1, "frame_idx": 141, "global_frame_idx": 402, "task_index": 0}, {"db_idx": 403, 
"episode_idx": 1, "frame_idx": 142, "global_frame_idx": 403, "task_index": 0}, {"db_idx": 404, "episode_idx": 1, "frame_idx": 143, "global_frame_idx": 404, "task_index": 0}, {"db_idx": 405, "episode_idx": 1, "frame_idx": 144, "global_frame_idx": 405, "task_index": 0}, {"db_idx": 406, "episode_idx": 1, "frame_idx": 145, "global_frame_idx": 406, "task_index": 0}, {"db_idx": 407, "episode_idx": 1, "frame_idx": 146, "global_frame_idx": 407, "task_index": 0}, {"db_idx": 408, "episode_idx": 1, "frame_idx": 147, "global_frame_idx": 408, "task_index": 0}, {"db_idx": 409, "episode_idx": 1, "frame_idx": 148, "global_frame_idx": 409, "task_index": 0}, {"db_idx": 410, "episode_idx": 1, "frame_idx": 149, "global_frame_idx": 410, "task_index": 0}, {"db_idx": 411, "episode_idx": 1, "frame_idx": 150, "global_frame_idx": 411, "task_index": 0}, {"db_idx": 412, "episode_idx": 1, "frame_idx": 151, "global_frame_idx": 412, "task_index": 0}, {"db_idx": 413, "episode_idx": 1, "frame_idx": 152, "global_frame_idx": 413, "task_index": 0}, {"db_idx": 414, "episode_idx": 1, "frame_idx": 153, "global_frame_idx": 414, "task_index": 0}, {"db_idx": 415, "episode_idx": 1, "frame_idx": 154, "global_frame_idx": 415, "task_index": 0}, {"db_idx": 416, "episode_idx": 1, "frame_idx": 155, "global_frame_idx": 416, "task_index": 0}, {"db_idx": 417, "episode_idx": 1, "frame_idx": 156, "global_frame_idx": 417, "task_index": 0}, {"db_idx": 418, "episode_idx": 1, "frame_idx": 157, "global_frame_idx": 418, "task_index": 0}, {"db_idx": 419, "episode_idx": 1, "frame_idx": 158, "global_frame_idx": 419, "task_index": 0}, {"db_idx": 420, "episode_idx": 1, "frame_idx": 159, "global_frame_idx": 420, "task_index": 0}, {"db_idx": 421, "episode_idx": 1, "frame_idx": 160, "global_frame_idx": 421, "task_index": 0}, {"db_idx": 422, "episode_idx": 1, "frame_idx": 161, "global_frame_idx": 422, "task_index": 0}, {"db_idx": 423, "episode_idx": 1, "frame_idx": 162, "global_frame_idx": 423, "task_index": 0}, {"db_idx": 424, 
"episode_idx": 1, "frame_idx": 163, "global_frame_idx": 424, "task_index": 0}, {"db_idx": 425, "episode_idx": 1, "frame_idx": 164, "global_frame_idx": 425, "task_index": 0}, {"db_idx": 426, "episode_idx": 1, "frame_idx": 165, "global_frame_idx": 426, "task_index": 0}, {"db_idx": 427, "episode_idx": 1, "frame_idx": 166, "global_frame_idx": 427, "task_index": 0}, {"db_idx": 428, "episode_idx": 1, "frame_idx": 167, "global_frame_idx": 428, "task_index": 0}, {"db_idx": 429, "episode_idx": 1, "frame_idx": 168, "global_frame_idx": 429, "task_index": 0}, {"db_idx": 430, "episode_idx": 1, "frame_idx": 169, "global_frame_idx": 430, "task_index": 0}, {"db_idx": 431, "episode_idx": 1, "frame_idx": 170, "global_frame_idx": 431, "task_index": 0}, {"db_idx": 432, "episode_idx": 1, "frame_idx": 171, "global_frame_idx": 432, "task_index": 0}, {"db_idx": 433, "episode_idx": 1, "frame_idx": 172, "global_frame_idx": 433, "task_index": 0}, {"db_idx": 434, "episode_idx": 1, "frame_idx": 173, "global_frame_idx": 434, "task_index": 0}, {"db_idx": 435, "episode_idx": 1, "frame_idx": 174, "global_frame_idx": 435, "task_index": 0}, {"db_idx": 436, "episode_idx": 1, "frame_idx": 175, "global_frame_idx": 436, "task_index": 0}, {"db_idx": 437, "episode_idx": 1, "frame_idx": 176, "global_frame_idx": 437, "task_index": 0}, {"db_idx": 438, "episode_idx": 1, "frame_idx": 177, "global_frame_idx": 438, "task_index": 0}, {"db_idx": 439, "episode_idx": 1, "frame_idx": 178, "global_frame_idx": 439, "task_index": 0}, {"db_idx": 440, "episode_idx": 1, "frame_idx": 179, "global_frame_idx": 440, "task_index": 0}, {"db_idx": 441, "episode_idx": 1, "frame_idx": 180, "global_frame_idx": 441, "task_index": 0}, {"db_idx": 442, "episode_idx": 1, "frame_idx": 181, "global_frame_idx": 442, "task_index": 0}, {"db_idx": 443, "episode_idx": 1, "frame_idx": 182, "global_frame_idx": 443, "task_index": 0}, {"db_idx": 444, "episode_idx": 1, "frame_idx": 183, "global_frame_idx": 444, "task_index": 0}, {"db_idx": 445, 
"episode_idx": 1, "frame_idx": 184, "global_frame_idx": 445, "task_index": 0}, {"db_idx": 446, "episode_idx": 1, "frame_idx": 185, "global_frame_idx": 446, "task_index": 0}, {"db_idx": 447, "episode_idx": 1, "frame_idx": 186, "global_frame_idx": 447, "task_index": 0}, {"db_idx": 448, "episode_idx": 1, "frame_idx": 187, "global_frame_idx": 448, "task_index": 0}, {"db_idx": 449, "episode_idx": 1, "frame_idx": 188, "global_frame_idx": 449, "task_index": 0}, {"db_idx": 450, "episode_idx": 1, "frame_idx": 189, "global_frame_idx": 450, "task_index": 0}, {"db_idx": 451, "episode_idx": 1, "frame_idx": 190, "global_frame_idx": 451, "task_index": 0}, {"db_idx": 452, "episode_idx": 1, "frame_idx": 191, "global_frame_idx": 452, "task_index": 0}, {"db_idx": 453, "episode_idx": 1, "frame_idx": 192, "global_frame_idx": 453, "task_index": 0}, {"db_idx": 454, "episode_idx": 1, "frame_idx": 193, "global_frame_idx": 454, "task_index": 0}, {"db_idx": 455, "episode_idx": 1, "frame_idx": 194, "global_frame_idx": 455, "task_index": 0}, {"db_idx": 456, "episode_idx": 1, "frame_idx": 195, "global_frame_idx": 456, "task_index": 0}, {"db_idx": 457, "episode_idx": 1, "frame_idx": 196, "global_frame_idx": 457, "task_index": 0}, {"db_idx": 458, "episode_idx": 1, "frame_idx": 197, "global_frame_idx": 458, "task_index": 0}, {"db_idx": 459, "episode_idx": 1, "frame_idx": 198, "global_frame_idx": 459, "task_index": 0}, {"db_idx": 460, "episode_idx": 1, "frame_idx": 199, "global_frame_idx": 460, "task_index": 0}, {"db_idx": 461, "episode_idx": 1, "frame_idx": 200, "global_frame_idx": 461, "task_index": 0}, {"db_idx": 462, "episode_idx": 1, "frame_idx": 201, "global_frame_idx": 462, "task_index": 0}, {"db_idx": 463, "episode_idx": 1, "frame_idx": 202, "global_frame_idx": 463, "task_index": 0}, {"db_idx": 464, "episode_idx": 1, "frame_idx": 203, "global_frame_idx": 464, "task_index": 0}, {"db_idx": 465, "episode_idx": 1, "frame_idx": 204, "global_frame_idx": 465, "task_index": 0}, {"db_idx": 466, 
"episode_idx": 1, "frame_idx": 205, "global_frame_idx": 466, "task_index": 0}, {"db_idx": 467, "episode_idx": 1, "frame_idx": 206, "global_frame_idx": 467, "task_index": 0}, {"db_idx": 468, "episode_idx": 1, "frame_idx": 207, "global_frame_idx": 468, "task_index": 0}, {"db_idx": 469, "episode_idx": 1, "frame_idx": 208, "global_frame_idx": 469, "task_index": 0}, {"db_idx": 470, "episode_idx": 1, "frame_idx": 209, "global_frame_idx": 470, "task_index": 0}, {"db_idx": 471, "episode_idx": 1, "frame_idx": 210, "global_frame_idx": 471, "task_index": 0}, {"db_idx": 472, "episode_idx": 1, "frame_idx": 211, "global_frame_idx": 472, "task_index": 0}, {"db_idx": 473, "episode_idx": 1, "frame_idx": 212, "global_frame_idx": 473, "task_index": 0}, {"db_idx": 474, "episode_idx": 1, "frame_idx": 213, "global_frame_idx": 474, "task_index": 0}, {"db_idx": 475, "episode_idx": 1, "frame_idx": 214, "global_frame_idx": 475, "task_index": 0}, {"db_idx": 476, "episode_idx": 1, "frame_idx": 215, "global_frame_idx": 476, "task_index": 0}, {"db_idx": 477, "episode_idx": 1, "frame_idx": 216, "global_frame_idx": 477, "task_index": 0}, {"db_idx": 478, "episode_idx": 1, "frame_idx": 217, "global_frame_idx": 478, "task_index": 0}, {"db_idx": 479, "episode_idx": 1, "frame_idx": 218, "global_frame_idx": 479, "task_index": 0}, {"db_idx": 480, "episode_idx": 2, "frame_idx": 0, "global_frame_idx": 480, "task_index": 0}, {"db_idx": 481, "episode_idx": 2, "frame_idx": 1, "global_frame_idx": 481, "task_index": 0}, {"db_idx": 482, "episode_idx": 2, "frame_idx": 2, "global_frame_idx": 482, "task_index": 0}, {"db_idx": 483, "episode_idx": 2, "frame_idx": 3, "global_frame_idx": 483, "task_index": 0}, {"db_idx": 484, "episode_idx": 2, "frame_idx": 4, "global_frame_idx": 484, "task_index": 0}, {"db_idx": 485, "episode_idx": 2, "frame_idx": 5, "global_frame_idx": 485, "task_index": 0}, {"db_idx": 486, "episode_idx": 2, "frame_idx": 6, "global_frame_idx": 486, "task_index": 0}, {"db_idx": 487, "episode_idx": 2, 
"frame_idx": 7, "global_frame_idx": 487, "task_index": 0}, {"db_idx": 488, "episode_idx": 2, "frame_idx": 8, "global_frame_idx": 488, "task_index": 0}, {"db_idx": 489, "episode_idx": 2, "frame_idx": 9, "global_frame_idx": 489, "task_index": 0}, {"db_idx": 490, "episode_idx": 2, "frame_idx": 10, "global_frame_idx": 490, "task_index": 0}, {"db_idx": 491, "episode_idx": 2, "frame_idx": 11, "global_frame_idx": 491, "task_index": 0}, {"db_idx": 492, "episode_idx": 2, "frame_idx": 12, "global_frame_idx": 492, "task_index": 0}, {"db_idx": 493, "episode_idx": 2, "frame_idx": 13, "global_frame_idx": 493, "task_index": 0}, {"db_idx": 494, "episode_idx": 2, "frame_idx": 14, "global_frame_idx": 494, "task_index": 0}, {"db_idx": 495, "episode_idx": 2, "frame_idx": 15, "global_frame_idx": 495, "task_index": 0}, {"db_idx": 496, "episode_idx": 2, "frame_idx": 16, "global_frame_idx": 496, "task_index": 0}, {"db_idx": 497, "episode_idx": 2, "frame_idx": 17, "global_frame_idx": 497, "task_index": 0}, {"db_idx": 498, "episode_idx": 2, "frame_idx": 18, "global_frame_idx": 498, "task_index": 0}, {"db_idx": 499, "episode_idx": 2, "frame_idx": 19, "global_frame_idx": 499, "task_index": 0}, {"db_idx": 500, "episode_idx": 2, "frame_idx": 20, "global_frame_idx": 500, "task_index": 0}, {"db_idx": 501, "episode_idx": 2, "frame_idx": 21, "global_frame_idx": 501, "task_index": 0}, {"db_idx": 502, "episode_idx": 2, "frame_idx": 22, "global_frame_idx": 502, "task_index": 0}, {"db_idx": 503, "episode_idx": 2, "frame_idx": 23, "global_frame_idx": 503, "task_index": 0}, {"db_idx": 504, "episode_idx": 2, "frame_idx": 24, "global_frame_idx": 504, "task_index": 0}, {"db_idx": 505, "episode_idx": 2, "frame_idx": 25, "global_frame_idx": 505, "task_index": 0}, {"db_idx": 506, "episode_idx": 2, "frame_idx": 26, "global_frame_idx": 506, "task_index": 0}, {"db_idx": 507, "episode_idx": 2, "frame_idx": 27, "global_frame_idx": 507, "task_index": 0}, {"db_idx": 508, "episode_idx": 2, "frame_idx": 28, 
"global_frame_idx": 508, "task_index": 0}, {"db_idx": 509, "episode_idx": 2, "frame_idx": 29, "global_frame_idx": 509, "task_index": 0}, {"db_idx": 510, "episode_idx": 2, "frame_idx": 30, "global_frame_idx": 510, "task_index": 0}, {"db_idx": 511, "episode_idx": 2, "frame_idx": 31, "global_frame_idx": 511, "task_index": 0}, {"db_idx": 512, "episode_idx": 2, "frame_idx": 32, "global_frame_idx": 512, "task_index": 0}, {"db_idx": 513, "episode_idx": 2, "frame_idx": 33, "global_frame_idx": 513, "task_index": 0}, {"db_idx": 514, "episode_idx": 2, "frame_idx": 34, "global_frame_idx": 514, "task_index": 0}, {"db_idx": 515, "episode_idx": 2, "frame_idx": 35, "global_frame_idx": 515, "task_index": 0}, {"db_idx": 516, "episode_idx": 2, "frame_idx": 36, "global_frame_idx": 516, "task_index": 0}, {"db_idx": 517, "episode_idx": 2, "frame_idx": 37, "global_frame_idx": 517, "task_index": 0}, {"db_idx": 518, "episode_idx": 2, "frame_idx": 38, "global_frame_idx": 518, "task_index": 0}, {"db_idx": 519, "episode_idx": 2, "frame_idx": 39, "global_frame_idx": 519, "task_index": 0}, {"db_idx": 520, "episode_idx": 2, "frame_idx": 40, "global_frame_idx": 520, "task_index": 0}, {"db_idx": 521, "episode_idx": 2, "frame_idx": 41, "global_frame_idx": 521, "task_index": 0}, {"db_idx": 522, "episode_idx": 2, "frame_idx": 42, "global_frame_idx": 522, "task_index": 0}, {"db_idx": 523, "episode_idx": 2, "frame_idx": 43, "global_frame_idx": 523, "task_index": 0}, {"db_idx": 524, "episode_idx": 2, "frame_idx": 44, "global_frame_idx": 524, "task_index": 0}, {"db_idx": 525, "episode_idx": 2, "frame_idx": 45, "global_frame_idx": 525, "task_index": 0}, {"db_idx": 526, "episode_idx": 2, "frame_idx": 46, "global_frame_idx": 526, "task_index": 0}, {"db_idx": 527, "episode_idx": 2, "frame_idx": 47, "global_frame_idx": 527, "task_index": 0}, {"db_idx": 528, "episode_idx": 2, "frame_idx": 48, "global_frame_idx": 528, "task_index": 0}, {"db_idx": 529, "episode_idx": 2, "frame_idx": 49, "global_frame_idx": 529, 
"task_index": 0}, {"db_idx": 530, "episode_idx": 2, "frame_idx": 50, "global_frame_idx": 530, "task_index": 0}, {"db_idx": 531, "episode_idx": 2, "frame_idx": 51, "global_frame_idx": 531, "task_index": 0}, {"db_idx": 532, "episode_idx": 2, "frame_idx": 52, "global_frame_idx": 532, "task_index": 0}, {"db_idx": 533, "episode_idx": 2, "frame_idx": 53, "global_frame_idx": 533, "task_index": 0}, {"db_idx": 534, "episode_idx": 2, "frame_idx": 54, "global_frame_idx": 534, "task_index": 0}, {"db_idx": 535, "episode_idx": 2, "frame_idx": 55, "global_frame_idx": 535, "task_index": 0}, {"db_idx": 536, "episode_idx": 2, "frame_idx": 56, "global_frame_idx": 536, "task_index": 0}, {"db_idx": 537, "episode_idx": 2, "frame_idx": 57, "global_frame_idx": 537, "task_index": 0}, {"db_idx": 538, "episode_idx": 2, "frame_idx": 58, "global_frame_idx": 538, "task_index": 0}, {"db_idx": 539, "episode_idx": 2, "frame_idx": 59, "global_frame_idx": 539, "task_index": 0}, {"db_idx": 540, "episode_idx": 2, "frame_idx": 60, "global_frame_idx": 540, "task_index": 0}, {"db_idx": 541, "episode_idx": 2, "frame_idx": 61, "global_frame_idx": 541, "task_index": 0}, {"db_idx": 542, "episode_idx": 2, "frame_idx": 62, "global_frame_idx": 542, "task_index": 0}, {"db_idx": 543, "episode_idx": 2, "frame_idx": 63, "global_frame_idx": 543, "task_index": 0}, {"db_idx": 544, "episode_idx": 2, "frame_idx": 64, "global_frame_idx": 544, "task_index": 0}, {"db_idx": 545, "episode_idx": 2, "frame_idx": 65, "global_frame_idx": 545, "task_index": 0}, {"db_idx": 546, "episode_idx": 2, "frame_idx": 66, "global_frame_idx": 546, "task_index": 0}, {"db_idx": 547, "episode_idx": 2, "frame_idx": 67, "global_frame_idx": 547, "task_index": 0}, {"db_idx": 548, "episode_idx": 2, "frame_idx": 68, "global_frame_idx": 548, "task_index": 0}, {"db_idx": 549, "episode_idx": 2, "frame_idx": 69, "global_frame_idx": 549, "task_index": 0}, {"db_idx": 550, "episode_idx": 2, "frame_idx": 70, "global_frame_idx": 550, "task_index": 0}, 
{"db_idx": 551, "episode_idx": 2, "frame_idx": 71, "global_frame_idx": 551, "task_index": 0}, {"db_idx": 552, "episode_idx": 2, "frame_idx": 72, "global_frame_idx": 552, "task_index": 0}, {"db_idx": 553, "episode_idx": 2, "frame_idx": 73, "global_frame_idx": 553, "task_index": 0}, {"db_idx": 554, "episode_idx": 2, "frame_idx": 74, "global_frame_idx": 554, "task_index": 0}, {"db_idx": 555, "episode_idx": 2, "frame_idx": 75, "global_frame_idx": 555, "task_index": 0}, {"db_idx": 556, "episode_idx": 2, "frame_idx": 76, "global_frame_idx": 556, "task_index": 0}, {"db_idx": 557, "episode_idx": 2, "frame_idx": 77, "global_frame_idx": 557, "task_index": 0}, {"db_idx": 558, "episode_idx": 2, "frame_idx": 78, "global_frame_idx": 558, "task_index": 0}, {"db_idx": 559, "episode_idx": 2, "frame_idx": 79, "global_frame_idx": 559, "task_index": 0}, {"db_idx": 560, "episode_idx": 2, "frame_idx": 80, "global_frame_idx": 560, "task_index": 0}, {"db_idx": 561, "episode_idx": 2, "frame_idx": 81, "global_frame_idx": 561, "task_index": 0}, {"db_idx": 562, "episode_idx": 2, "frame_idx": 82, "global_frame_idx": 562, "task_index": 0}, {"db_idx": 563, "episode_idx": 2, "frame_idx": 83, "global_frame_idx": 563, "task_index": 0}, {"db_idx": 564, "episode_idx": 2, "frame_idx": 84, "global_frame_idx": 564, "task_index": 0}, {"db_idx": 565, "episode_idx": 2, "frame_idx": 85, "global_frame_idx": 565, "task_index": 0}, {"db_idx": 566, "episode_idx": 2, "frame_idx": 86, "global_frame_idx": 566, "task_index": 0}, {"db_idx": 567, "episode_idx": 2, "frame_idx": 87, "global_frame_idx": 567, "task_index": 0}, {"db_idx": 568, "episode_idx": 2, "frame_idx": 88, "global_frame_idx": 568, "task_index": 0}, {"db_idx": 569, "episode_idx": 2, "frame_idx": 89, "global_frame_idx": 569, "task_index": 0}, {"db_idx": 570, "episode_idx": 2, "frame_idx": 90, "global_frame_idx": 570, "task_index": 0}, {"db_idx": 571, "episode_idx": 2, "frame_idx": 91, "global_frame_idx": 571, "task_index": 0}, {"db_idx": 572, 
"episode_idx": 2, "frame_idx": 92, "global_frame_idx": 572, "task_index": 0}, {"db_idx": 573, "episode_idx": 2, "frame_idx": 93, "global_frame_idx": 573, "task_index": 0}, {"db_idx": 574, "episode_idx": 2, "frame_idx": 94, "global_frame_idx": 574, "task_index": 0}, {"db_idx": 575, "episode_idx": 2, "frame_idx": 95, "global_frame_idx": 575, "task_index": 0}, {"db_idx": 576, "episode_idx": 2, "frame_idx": 96, "global_frame_idx": 576, "task_index": 0}, {"db_idx": 577, "episode_idx": 2, "frame_idx": 97, "global_frame_idx": 577, "task_index": 0}, {"db_idx": 578, "episode_idx": 2, "frame_idx": 98, "global_frame_idx": 578, "task_index": 0}, {"db_idx": 579, "episode_idx": 2, "frame_idx": 99, "global_frame_idx": 579, "task_index": 0}, {"db_idx": 580, "episode_idx": 2, "frame_idx": 100, "global_frame_idx": 580, "task_index": 0}, {"db_idx": 581, "episode_idx": 2, "frame_idx": 101, "global_frame_idx": 581, "task_index": 0}, {"db_idx": 582, "episode_idx": 2, "frame_idx": 102, "global_frame_idx": 582, "task_index": 0}, {"db_idx": 583, "episode_idx": 2, "frame_idx": 103, "global_frame_idx": 583, "task_index": 0}, {"db_idx": 584, "episode_idx": 2, "frame_idx": 104, "global_frame_idx": 584, "task_index": 0}, {"db_idx": 585, "episode_idx": 2, "frame_idx": 105, "global_frame_idx": 585, "task_index": 0}, {"db_idx": 586, "episode_idx": 2, "frame_idx": 106, "global_frame_idx": 586, "task_index": 0}, {"db_idx": 587, "episode_idx": 2, "frame_idx": 107, "global_frame_idx": 587, "task_index": 0}, {"db_idx": 588, "episode_idx": 2, "frame_idx": 108, "global_frame_idx": 588, "task_index": 0}, {"db_idx": 589, "episode_idx": 2, "frame_idx": 109, "global_frame_idx": 589, "task_index": 0}, {"db_idx": 590, "episode_idx": 2, "frame_idx": 110, "global_frame_idx": 590, "task_index": 0}, {"db_idx": 591, "episode_idx": 2, "frame_idx": 111, "global_frame_idx": 591, "task_index": 0}, {"db_idx": 592, "episode_idx": 2, "frame_idx": 112, "global_frame_idx": 592, "task_index": 0}, {"db_idx": 593, 
"episode_idx": 2, "frame_idx": 113, "global_frame_idx": 593, "task_index": 0}, {"db_idx": 594, "episode_idx": 2, "frame_idx": 114, "global_frame_idx": 594, "task_index": 0}, {"db_idx": 595, "episode_idx": 2, "frame_idx": 115, "global_frame_idx": 595, "task_index": 0}, {"db_idx": 596, "episode_idx": 2, "frame_idx": 116, "global_frame_idx": 596, "task_index": 0}, {"db_idx": 597, "episode_idx": 2, "frame_idx": 117, "global_frame_idx": 597, "task_index": 0}, {"db_idx": 598, "episode_idx": 2, "frame_idx": 118, "global_frame_idx": 598, "task_index": 0}, {"db_idx": 599, "episode_idx": 2, "frame_idx": 119, "global_frame_idx": 599, "task_index": 0}, {"db_idx": 600, "episode_idx": 2, "frame_idx": 120, "global_frame_idx": 600, "task_index": 0}, {"db_idx": 601, "episode_idx": 2, "frame_idx": 121, "global_frame_idx": 601, "task_index": 0}, {"db_idx": 602, "episode_idx": 2, "frame_idx": 122, "global_frame_idx": 602, "task_index": 0}, {"db_idx": 603, "episode_idx": 2, "frame_idx": 123, "global_frame_idx": 603, "task_index": 0}, {"db_idx": 604, "episode_idx": 2, "frame_idx": 124, "global_frame_idx": 604, "task_index": 0}, {"db_idx": 605, "episode_idx": 2, "frame_idx": 125, "global_frame_idx": 605, "task_index": 0}, {"db_idx": 606, "episode_idx": 2, "frame_idx": 126, "global_frame_idx": 606, "task_index": 0}, {"db_idx": 607, "episode_idx": 2, "frame_idx": 127, "global_frame_idx": 607, "task_index": 0}, {"db_idx": 608, "episode_idx": 2, "frame_idx": 128, "global_frame_idx": 608, "task_index": 0}, {"db_idx": 609, "episode_idx": 2, "frame_idx": 129, "global_frame_idx": 609, "task_index": 0}, {"db_idx": 610, "episode_idx": 2, "frame_idx": 130, "global_frame_idx": 610, "task_index": 0}, {"db_idx": 611, "episode_idx": 2, "frame_idx": 131, "global_frame_idx": 611, "task_index": 0}, {"db_idx": 612, "episode_idx": 2, "frame_idx": 132, "global_frame_idx": 612, "task_index": 0}, {"db_idx": 613, "episode_idx": 2, "frame_idx": 133, "global_frame_idx": 613, "task_index": 0}, {"db_idx": 614, 
"episode_idx": 2, "frame_idx": 134, "global_frame_idx": 614, "task_index": 0}, {"db_idx": 615, "episode_idx": 2, "frame_idx": 135, "global_frame_idx": 615, "task_index": 0}, {"db_idx": 616, "episode_idx": 2, "frame_idx": 136, "global_frame_idx": 616, "task_index": 0}, {"db_idx": 617, "episode_idx": 2, "frame_idx": 137, "global_frame_idx": 617, "task_index": 0}, {"db_idx": 618, "episode_idx": 2, "frame_idx": 138, "global_frame_idx": 618, "task_index": 0}, {"db_idx": 619, "episode_idx": 2, "frame_idx": 139, "global_frame_idx": 619, "task_index": 0}, {"db_idx": 620, "episode_idx": 2, "frame_idx": 140, "global_frame_idx": 620, "task_index": 0}, {"db_idx": 621, "episode_idx": 2, "frame_idx": 141, "global_frame_idx": 621, "task_index": 0}, {"db_idx": 622, "episode_idx": 2, "frame_idx": 142, "global_frame_idx": 622, "task_index": 0}, {"db_idx": 623, "episode_idx": 2, "frame_idx": 143, "global_frame_idx": 623, "task_index": 0}, {"db_idx": 624, "episode_idx": 2, "frame_idx": 144, "global_frame_idx": 624, "task_index": 0}, {"db_idx": 625, "episode_idx": 2, "frame_idx": 145, "global_frame_idx": 625, "task_index": 0}, {"db_idx": 626, "episode_idx": 2, "frame_idx": 146, "global_frame_idx": 626, "task_index": 0}, {"db_idx": 627, "episode_idx": 2, "frame_idx": 147, "global_frame_idx": 627, "task_index": 0}, {"db_idx": 628, "episode_idx": 2, "frame_idx": 148, "global_frame_idx": 628, "task_index": 0}, {"db_idx": 629, "episode_idx": 2, "frame_idx": 149, "global_frame_idx": 629, "task_index": 0}, {"db_idx": 630, "episode_idx": 2, "frame_idx": 150, "global_frame_idx": 630, "task_index": 0}, {"db_idx": 631, "episode_idx": 2, "frame_idx": 151, "global_frame_idx": 631, "task_index": 0}, {"db_idx": 632, "episode_idx": 2, "frame_idx": 152, "global_frame_idx": 632, "task_index": 0}, {"db_idx": 633, "episode_idx": 2, "frame_idx": 153, "global_frame_idx": 633, "task_index": 0}, {"db_idx": 634, "episode_idx": 2, "frame_idx": 154, "global_frame_idx": 634, "task_index": 0}, {"db_idx": 635, 
"episode_idx": 2, "frame_idx": 155, "global_frame_idx": 635, "task_index": 0}, {"db_idx": 636, "episode_idx": 2, "frame_idx": 156, "global_frame_idx": 636, "task_index": 0}, {"db_idx": 637, "episode_idx": 2, "frame_idx": 157, "global_frame_idx": 637, "task_index": 0}, {"db_idx": 638, "episode_idx": 2, "frame_idx": 158, "global_frame_idx": 638, "task_index": 0}, {"db_idx": 639, "episode_idx": 2, "frame_idx": 159, "global_frame_idx": 639, "task_index": 0}, {"db_idx": 640, "episode_idx": 2, "frame_idx": 160, "global_frame_idx": 640, "task_index": 0}, {"db_idx": 641, "episode_idx": 2, "frame_idx": 161, "global_frame_idx": 641, "task_index": 0}, {"db_idx": 642, "episode_idx": 2, "frame_idx": 162, "global_frame_idx": 642, "task_index": 0}, {"db_idx": 643, "episode_idx": 2, "frame_idx": 163, "global_frame_idx": 643, "task_index": 0}, {"db_idx": 644, "episode_idx": 2, "frame_idx": 164, "global_frame_idx": 644, "task_index": 0}, {"db_idx": 645, "episode_idx": 2, "frame_idx": 165, "global_frame_idx": 645, "task_index": 0}, {"db_idx": 646, "episode_idx": 2, "frame_idx": 166, "global_frame_idx": 646, "task_index": 0}, {"db_idx": 647, "episode_idx": 2, "frame_idx": 167, "global_frame_idx": 647, "task_index": 0}, {"db_idx": 648, "episode_idx": 2, "frame_idx": 168, "global_frame_idx": 648, "task_index": 0}, {"db_idx": 649, "episode_idx": 2, "frame_idx": 169, "global_frame_idx": 649, "task_index": 0}, {"db_idx": 650, "episode_idx": 2, "frame_idx": 170, "global_frame_idx": 650, "task_index": 0}, {"db_idx": 651, "episode_idx": 2, "frame_idx": 171, "global_frame_idx": 651, "task_index": 0}, {"db_idx": 652, "episode_idx": 2, "frame_idx": 172, "global_frame_idx": 652, "task_index": 0}, {"db_idx": 653, "episode_idx": 2, "frame_idx": 173, "global_frame_idx": 653, "task_index": 0}, {"db_idx": 654, "episode_idx": 2, "frame_idx": 174, "global_frame_idx": 654, "task_index": 0}, {"db_idx": 655, "episode_idx": 2, "frame_idx": 175, "global_frame_idx": 655, "task_index": 0}, {"db_idx": 656, 
"episode_idx": 2, "frame_idx": 176, "global_frame_idx": 656, "task_index": 0}, {"db_idx": 657, "episode_idx": 2, "frame_idx": 177, "global_frame_idx": 657, "task_index": 0}, {"db_idx": 658, "episode_idx": 2, "frame_idx": 178, "global_frame_idx": 658, "task_index": 0}, {"db_idx": 659, "episode_idx": 2, "frame_idx": 179, "global_frame_idx": 659, "task_index": 0}, {"db_idx": 660, "episode_idx": 2, "frame_idx": 180, "global_frame_idx": 660, "task_index": 0}, {"db_idx": 661, "episode_idx": 2, "frame_idx": 181, "global_frame_idx": 661, "task_index": 0}, {"db_idx": 662, "episode_idx": 2, "frame_idx": 182, "global_frame_idx": 662, "task_index": 0}, {"db_idx": 663, "episode_idx": 2, "frame_idx": 183, "global_frame_idx": 663, "task_index": 0}, {"db_idx": 664, "episode_idx": 2, "frame_idx": 184, "global_frame_idx": 664, "task_index": 0}, {"db_idx": 665, "episode_idx": 2, "frame_idx": 185, "global_frame_idx": 665, "task_index": 0}, {"db_idx": 666, "episode_idx": 2, "frame_idx": 186, "global_frame_idx": 666, "task_index": 0}, {"db_idx": 667, "episode_idx": 2, "frame_idx": 187, "global_frame_idx": 667, "task_index": 0}, {"db_idx": 668, "episode_idx": 2, "frame_idx": 188, "global_frame_idx": 668, "task_index": 0}, {"db_idx": 669, "episode_idx": 2, "frame_idx": 189, "global_frame_idx": 669, "task_index": 0}, {"db_idx": 670, "episode_idx": 2, "frame_idx": 190, "global_frame_idx": 670, "task_index": 0}, {"db_idx": 671, "episode_idx": 2, "frame_idx": 191, "global_frame_idx": 671, "task_index": 0}, {"db_idx": 672, "episode_idx": 2, "frame_idx": 192, "global_frame_idx": 672, "task_index": 0}, {"db_idx": 673, "episode_idx": 2, "frame_idx": 193, "global_frame_idx": 673, "task_index": 0}, {"db_idx": 674, "episode_idx": 2, "frame_idx": 194, "global_frame_idx": 674, "task_index": 0}, {"db_idx": 675, "episode_idx": 2, "frame_idx": 195, "global_frame_idx": 675, "task_index": 0}, {"db_idx": 676, "episode_idx": 2, "frame_idx": 196, "global_frame_idx": 676, "task_index": 0}, {"db_idx": 677, 
"episode_idx": 2, "frame_idx": 197, "global_frame_idx": 677, "task_index": 0}, {"db_idx": 678, "episode_idx": 2, "frame_idx": 198, "global_frame_idx": 678, "task_index": 0}, {"db_idx": 679, "episode_idx": 2, "frame_idx": 199, "global_frame_idx": 679, "task_index": 0}, {"db_idx": 680, "episode_idx": 2, "frame_idx": 200, "global_frame_idx": 680, "task_index": 0}, {"db_idx": 681, "episode_idx": 2, "frame_idx": 201, "global_frame_idx": 681, "task_index": 0}, {"db_idx": 682, "episode_idx": 2, "frame_idx": 202, "global_frame_idx": 682, "task_index": 0}, {"db_idx": 683, "episode_idx": 2, "frame_idx": 203, "global_frame_idx": 683, "task_index": 0}, {"db_idx": 684, "episode_idx": 2, "frame_idx": 204, "global_frame_idx": 684, "task_index": 0}, {"db_idx": 685, "episode_idx": 2, "frame_idx": 205, "global_frame_idx": 685, "task_index": 0}, {"db_idx": 686, "episode_idx": 2, "frame_idx": 206, "global_frame_idx": 686, "task_index": 0}, {"db_idx": 687, "episode_idx": 2, "frame_idx": 207, "global_frame_idx": 687, "task_index": 0}, {"db_idx": 688, "episode_idx": 2, "frame_idx": 208, "global_frame_idx": 688, "task_index": 0}, {"db_idx": 689, "episode_idx": 2, "frame_idx": 209, "global_frame_idx": 689, "task_index": 0}, {"db_idx": 690, "episode_idx": 2, "frame_idx": 210, "global_frame_idx": 690, "task_index": 0}, {"db_idx": 691, "episode_idx": 2, "frame_idx": 211, "global_frame_idx": 691, "task_index": 0}, {"db_idx": 692, "episode_idx": 2, "frame_idx": 212, "global_frame_idx": 692, "task_index": 0}, {"db_idx": 693, "episode_idx": 2, "frame_idx": 213, "global_frame_idx": 693, "task_index": 0}, {"db_idx": 694, "episode_idx": 2, "frame_idx": 214, "global_frame_idx": 694, "task_index": 0}, {"db_idx": 695, "episode_idx": 2, "frame_idx": 215, "global_frame_idx": 695, "task_index": 0}, {"db_idx": 696, "episode_idx": 2, "frame_idx": 216, "global_frame_idx": 696, "task_index": 0}, {"db_idx": 697, "episode_idx": 2, "frame_idx": 217, "global_frame_idx": 697, "task_index": 0}, {"db_idx": 698, 
"episode_idx": 2, "frame_idx": 218, "global_frame_idx": 698, "task_index": 0}, {"db_idx": 699, "episode_idx": 2, "frame_idx": 219, "global_frame_idx": 699, "task_index": 0}, {"db_idx": 700, "episode_idx": 2, "frame_idx": 220, "global_frame_idx": 700, "task_index": 0}, {"db_idx": 701, "episode_idx": 2, "frame_idx": 221, "global_frame_idx": 701, "task_index": 0}, {"db_idx": 702, "episode_idx": 2, "frame_idx": 222, "global_frame_idx": 702, "task_index": 0}, {"db_idx": 703, "episode_idx": 2, "frame_idx": 223, "global_frame_idx": 703, "task_index": 0}, {"db_idx": 704, "episode_idx": 2, "frame_idx": 224, "global_frame_idx": 704, "task_index": 0}, {"db_idx": 705, "episode_idx": 2, "frame_idx": 225, "global_frame_idx": 705, "task_index": 0}, {"db_idx": 706, "episode_idx": 2, "frame_idx": 226, "global_frame_idx": 706, "task_index": 0}, {"db_idx": 707, "episode_idx": 2, "frame_idx": 227, "global_frame_idx": 707, "task_index": 0}, {"db_idx": 708, "episode_idx": 2, "frame_idx": 228, "global_frame_idx": 708, "task_index": 0}, {"db_idx": 709, "episode_idx": 2, "frame_idx": 229, "global_frame_idx": 709, "task_index": 0}, {"db_idx": 710, "episode_idx": 2, "frame_idx": 230, "global_frame_idx": 710, "task_index": 0}, {"db_idx": 711, "episode_idx": 2, "frame_idx": 231, "global_frame_idx": 711, "task_index": 0}, {"db_idx": 712, "episode_idx": 2, "frame_idx": 232, "global_frame_idx": 712, "task_index": 0}, {"db_idx": 713, "episode_idx": 2, "frame_idx": 233, "global_frame_idx": 713, "task_index": 0}, {"db_idx": 714, "episode_idx": 2, "frame_idx": 234, "global_frame_idx": 714, "task_index": 0}, {"db_idx": 715, "episode_idx": 2, "frame_idx": 235, "global_frame_idx": 715, "task_index": 0}, {"db_idx": 716, "episode_idx": 2, "frame_idx": 236, "global_frame_idx": 716, "task_index": 0}, {"db_idx": 717, "episode_idx": 2, "frame_idx": 237, "global_frame_idx": 717, "task_index": 0}, {"db_idx": 718, "episode_idx": 2, "frame_idx": 238, "global_frame_idx": 718, "task_index": 0}, {"db_idx": 719, 
"episode_idx": 2, "frame_idx": 239, "global_frame_idx": 719, "task_index": 0}, {"db_idx": 720, "episode_idx": 2, "frame_idx": 240, "global_frame_idx": 720, "task_index": 0}, {"db_idx": 721, "episode_idx": 2, "frame_idx": 241, "global_frame_idx": 721, "task_index": 0}, {"db_idx": 722, "episode_idx": 2, "frame_idx": 242, "global_frame_idx": 722, "task_index": 0}, {"db_idx": 723, "episode_idx": 2, "frame_idx": 243, "global_frame_idx": 723, "task_index": 0}, {"db_idx": 724, "episode_idx": 2, "frame_idx": 244, "global_frame_idx": 724, "task_index": 0}, {"db_idx": 725, "episode_idx": 2, "frame_idx": 245, "global_frame_idx": 725, "task_index": 0}, {"db_idx": 726, "episode_idx": 2, "frame_idx": 246, "global_frame_idx": 726, "task_index": 0}, {"db_idx": 727, "episode_idx": 2, "frame_idx": 247, "global_frame_idx": 727, "task_index": 0}, {"db_idx": 728, "episode_idx": 2, "frame_idx": 248, "global_frame_idx": 728, "task_index": 0}, {"db_idx": 729, "episode_idx": 2, "frame_idx": 249, "global_frame_idx": 729, "task_index": 0}, {"db_idx": 730, "episode_idx": 2, "frame_idx": 250, "global_frame_idx": 730, "task_index": 0}, {"db_idx": 731, "episode_idx": 2, "frame_idx": 251, "global_frame_idx": 731, "task_index": 0}, {"db_idx": 732, "episode_idx": 2, "frame_idx": 252, "global_frame_idx": 732, "task_index": 0}, {"db_idx": 733, "episode_idx": 2, "frame_idx": 253, "global_frame_idx": 733, "task_index": 0}, {"db_idx": 734, "episode_idx": 2, "frame_idx": 254, "global_frame_idx": 734, "task_index": 0}, {"db_idx": 735, "episode_idx": 2, "frame_idx": 255, "global_frame_idx": 735, "task_index": 0}, {"db_idx": 736, "episode_idx": 2, "frame_idx": 256, "global_frame_idx": 736, "task_index": 0}, {"db_idx": 737, "episode_idx": 2, "frame_idx": 257, "global_frame_idx": 737, "task_index": 0}, {"db_idx": 738, "episode_idx": 2, "frame_idx": 258, "global_frame_idx": 738, "task_index": 0}, {"db_idx": 739, "episode_idx": 2, "frame_idx": 259, "global_frame_idx": 739, "task_index": 0}, {"db_idx": 740, 
"episode_idx": 2, "frame_idx": 260, "global_frame_idx": 740, "task_index": 0}, {"db_idx": 741, "episode_idx": 2, "frame_idx": 261, "global_frame_idx": 741, "task_index": 0}, {"db_idx": 742, "episode_idx": 2, "frame_idx": 262, "global_frame_idx": 742, "task_index": 0}, {"db_idx": 743, "episode_idx": 2, "frame_idx": 263, "global_frame_idx": 743, "task_index": 0}, {"db_idx": 744, "episode_idx": 2, "frame_idx": 264, "global_frame_idx": 744, "task_index": 0}, {"db_idx": 745, "episode_idx": 2, "frame_idx": 265, "global_frame_idx": 745, "task_index": 0}, {"db_idx": 746, "episode_idx": 2, "frame_idx": 266, "global_frame_idx": 746, "task_index": 0}, {"db_idx": 747, "episode_idx": 2, "frame_idx": 267, "global_frame_idx": 747, "task_index": 0}, {"db_idx": 748, "episode_idx": 2, "frame_idx": 268, "global_frame_idx": 748, "task_index": 0}, {"db_idx": 749, "episode_idx": 2, "frame_idx": 269, "global_frame_idx": 749, "task_index": 0}, {"db_idx": 750, "episode_idx": 2, "frame_idx": 270, "global_frame_idx": 750, "task_index": 0}, {"db_idx": 751, "episode_idx": 2, "frame_idx": 271, "global_frame_idx": 751, "task_index": 0}, {"db_idx": 752, "episode_idx": 2, "frame_idx": 272, "global_frame_idx": 752, "task_index": 0}, {"db_idx": 753, "episode_idx": 2, "frame_idx": 273, "global_frame_idx": 753, "task_index": 0}, {"db_idx": 754, "episode_idx": 2, "frame_idx": 274, "global_frame_idx": 754, "task_index": 0}, {"db_idx": 755, "episode_idx": 2, "frame_idx": 275, "global_frame_idx": 755, "task_index": 0}, {"db_idx": 756, "episode_idx": 2, "frame_idx": 276, "global_frame_idx": 756, "task_index": 0}, {"db_idx": 757, "episode_idx": 2, "frame_idx": 277, "global_frame_idx": 757, "task_index": 0}, {"db_idx": 758, "episode_idx": 2, "frame_idx": 278, "global_frame_idx": 758, "task_index": 0}, {"db_idx": 759, "episode_idx": 2, "frame_idx": 279, "global_frame_idx": 759, "task_index": 0}, {"db_idx": 760, "episode_idx": 2, "frame_idx": 280, "global_frame_idx": 760, "task_index": 0}, {"db_idx": 761, 
"episode_idx": 2, "frame_idx": 281, "global_frame_idx": 761, "task_index": 0}, {"db_idx": 762, "episode_idx": 2, "frame_idx": 282, "global_frame_idx": 762, "task_index": 0}, {"db_idx": 763, "episode_idx": 2, "frame_idx": 283, "global_frame_idx": 763, "task_index": 0}, {"db_idx": 764, "episode_idx": 2, "frame_idx": 284, "global_frame_idx": 764, "task_index": 0}, {"db_idx": 765, "episode_idx": 2, "frame_idx": 285, "global_frame_idx": 765, "task_index": 0}, {"db_idx": 766, "episode_idx": 2, "frame_idx": 286, "global_frame_idx": 766, "task_index": 0}, {"db_idx": 767, "episode_idx": 2, "frame_idx": 287, "global_frame_idx": 767, "task_index": 0}, {"db_idx": 768, "episode_idx": 3, "frame_idx": 0, "global_frame_idx": 768, "task_index": 0}, {"db_idx": 769, "episode_idx": 3, "frame_idx": 1, "global_frame_idx": 769, "task_index": 0}, {"db_idx": 770, "episode_idx": 3, "frame_idx": 2, "global_frame_idx": 770, "task_index": 0}, {"db_idx": 771, "episode_idx": 3, "frame_idx": 3, "global_frame_idx": 771, "task_index": 0}, {"db_idx": 772, "episode_idx": 3, "frame_idx": 4, "global_frame_idx": 772, "task_index": 0}, {"db_idx": 773, "episode_idx": 3, "frame_idx": 5, "global_frame_idx": 773, "task_index": 0}, {"db_idx": 774, "episode_idx": 3, "frame_idx": 6, "global_frame_idx": 774, "task_index": 0}, {"db_idx": 775, "episode_idx": 3, "frame_idx": 7, "global_frame_idx": 775, "task_index": 0}, {"db_idx": 776, "episode_idx": 3, "frame_idx": 8, "global_frame_idx": 776, "task_index": 0}, {"db_idx": 777, "episode_idx": 3, "frame_idx": 9, "global_frame_idx": 777, "task_index": 0}, {"db_idx": 778, "episode_idx": 3, "frame_idx": 10, "global_frame_idx": 778, "task_index": 0}, {"db_idx": 779, "episode_idx": 3, "frame_idx": 11, "global_frame_idx": 779, "task_index": 0}, {"db_idx": 780, "episode_idx": 3, "frame_idx": 12, "global_frame_idx": 780, "task_index": 0}, {"db_idx": 781, "episode_idx": 3, "frame_idx": 13, "global_frame_idx": 781, "task_index": 0}, {"db_idx": 782, "episode_idx": 3, 
"frame_idx": 14, "global_frame_idx": 782, "task_index": 0}, {"db_idx": 783, "episode_idx": 3, "frame_idx": 15, "global_frame_idx": 783, "task_index": 0}, {"db_idx": 784, "episode_idx": 3, "frame_idx": 16, "global_frame_idx": 784, "task_index": 0}, {"db_idx": 785, "episode_idx": 3, "frame_idx": 17, "global_frame_idx": 785, "task_index": 0}, {"db_idx": 786, "episode_idx": 3, "frame_idx": 18, "global_frame_idx": 786, "task_index": 0}, {"db_idx": 787, "episode_idx": 3, "frame_idx": 19, "global_frame_idx": 787, "task_index": 0}, {"db_idx": 788, "episode_idx": 3, "frame_idx": 20, "global_frame_idx": 788, "task_index": 0}, {"db_idx": 789, "episode_idx": 3, "frame_idx": 21, "global_frame_idx": 789, "task_index": 0}, {"db_idx": 790, "episode_idx": 3, "frame_idx": 22, "global_frame_idx": 790, "task_index": 0}, {"db_idx": 791, "episode_idx": 3, "frame_idx": 23, "global_frame_idx": 791, "task_index": 0}, {"db_idx": 792, "episode_idx": 3, "frame_idx": 24, "global_frame_idx": 792, "task_index": 0}, {"db_idx": 793, "episode_idx": 3, "frame_idx": 25, "global_frame_idx": 793, "task_index": 0}, {"db_idx": 794, "episode_idx": 3, "frame_idx": 26, "global_frame_idx": 794, "task_index": 0}, {"db_idx": 795, "episode_idx": 3, "frame_idx": 27, "global_frame_idx": 795, "task_index": 0}, {"db_idx": 796, "episode_idx": 3, "frame_idx": 28, "global_frame_idx": 796, "task_index": 0}, {"db_idx": 797, "episode_idx": 3, "frame_idx": 29, "global_frame_idx": 797, "task_index": 0}, {"db_idx": 798, "episode_idx": 3, "frame_idx": 30, "global_frame_idx": 798, "task_index": 0}, {"db_idx": 799, "episode_idx": 3, "frame_idx": 31, "global_frame_idx": 799, "task_index": 0}, {"db_idx": 800, "episode_idx": 3, "frame_idx": 32, "global_frame_idx": 800, "task_index": 0}, {"db_idx": 801, "episode_idx": 3, "frame_idx": 33, "global_frame_idx": 801, "task_index": 0}, {"db_idx": 802, "episode_idx": 3, "frame_idx": 34, "global_frame_idx": 802, "task_index": 0}, {"db_idx": 803, "episode_idx": 3, "frame_idx": 35, 
"global_frame_idx": 803, "task_index": 0}, {"db_idx": 804, "episode_idx": 3, "frame_idx": 36, "global_frame_idx": 804, "task_index": 0}, {"db_idx": 805, "episode_idx": 3, "frame_idx": 37, "global_frame_idx": 805, "task_index": 0}, {"db_idx": 806, "episode_idx": 3, "frame_idx": 38, "global_frame_idx": 806, "task_index": 0}, {"db_idx": 807, "episode_idx": 3, "frame_idx": 39, "global_frame_idx": 807, "task_index": 0}, {"db_idx": 808, "episode_idx": 3, "frame_idx": 40, "global_frame_idx": 808, "task_index": 0}, {"db_idx": 809, "episode_idx": 3, "frame_idx": 41, "global_frame_idx": 809, "task_index": 0}, {"db_idx": 810, "episode_idx": 3, "frame_idx": 42, "global_frame_idx": 810, "task_index": 0}, {"db_idx": 811, "episode_idx": 3, "frame_idx": 43, "global_frame_idx": 811, "task_index": 0}, {"db_idx": 812, "episode_idx": 3, "frame_idx": 44, "global_frame_idx": 812, "task_index": 0}, {"db_idx": 813, "episode_idx": 3, "frame_idx": 45, "global_frame_idx": 813, "task_index": 0}, {"db_idx": 814, "episode_idx": 3, "frame_idx": 46, "global_frame_idx": 814, "task_index": 0}, {"db_idx": 815, "episode_idx": 3, "frame_idx": 47, "global_frame_idx": 815, "task_index": 0}, {"db_idx": 816, "episode_idx": 3, "frame_idx": 48, "global_frame_idx": 816, "task_index": 0}, {"db_idx": 817, "episode_idx": 3, "frame_idx": 49, "global_frame_idx": 817, "task_index": 0}, {"db_idx": 818, "episode_idx": 3, "frame_idx": 50, "global_frame_idx": 818, "task_index": 0}, {"db_idx": 819, "episode_idx": 3, "frame_idx": 51, "global_frame_idx": 819, "task_index": 0}, {"db_idx": 820, "episode_idx": 3, "frame_idx": 52, "global_frame_idx": 820, "task_index": 0}, {"db_idx": 821, "episode_idx": 3, "frame_idx": 53, "global_frame_idx": 821, "task_index": 0}, {"db_idx": 822, "episode_idx": 3, "frame_idx": 54, "global_frame_idx": 822, "task_index": 0}, {"db_idx": 823, "episode_idx": 3, "frame_idx": 55, "global_frame_idx": 823, "task_index": 0}, {"db_idx": 824, "episode_idx": 3, "frame_idx": 56, "global_frame_idx": 824, 
"task_index": 0}, {"db_idx": 825, "episode_idx": 3, "frame_idx": 57, "global_frame_idx": 825, "task_index": 0}, {"db_idx": 826, "episode_idx": 3, "frame_idx": 58, "global_frame_idx": 826, "task_index": 0}, {"db_idx": 827, "episode_idx": 3, "frame_idx": 59, "global_frame_idx": 827, "task_index": 0}, {"db_idx": 828, "episode_idx": 3, "frame_idx": 60, "global_frame_idx": 828, "task_index": 0}, {"db_idx": 829, "episode_idx": 3, "frame_idx": 61, "global_frame_idx": 829, "task_index": 0}, {"db_idx": 830, "episode_idx": 3, "frame_idx": 62, "global_frame_idx": 830, "task_index": 0}, {"db_idx": 831, "episode_idx": 3, "frame_idx": 63, "global_frame_idx": 831, "task_index": 0}, {"db_idx": 832, "episode_idx": 3, "frame_idx": 64, "global_frame_idx": 832, "task_index": 0}, {"db_idx": 833, "episode_idx": 3, "frame_idx": 65, "global_frame_idx": 833, "task_index": 0}, {"db_idx": 834, "episode_idx": 3, "frame_idx": 66, "global_frame_idx": 834, "task_index": 0}, {"db_idx": 835, "episode_idx": 3, "frame_idx": 67, "global_frame_idx": 835, "task_index": 0}, {"db_idx": 836, "episode_idx": 3, "frame_idx": 68, "global_frame_idx": 836, "task_index": 0}, {"db_idx": 837, "episode_idx": 3, "frame_idx": 69, "global_frame_idx": 837, "task_index": 0}, {"db_idx": 838, "episode_idx": 3, "frame_idx": 70, "global_frame_idx": 838, "task_index": 0}, {"db_idx": 839, "episode_idx": 3, "frame_idx": 71, "global_frame_idx": 839, "task_index": 0}, {"db_idx": 840, "episode_idx": 3, "frame_idx": 72, "global_frame_idx": 840, "task_index": 0}, {"db_idx": 841, "episode_idx": 3, "frame_idx": 73, "global_frame_idx": 841, "task_index": 0}, {"db_idx": 842, "episode_idx": 3, "frame_idx": 74, "global_frame_idx": 842, "task_index": 0}, {"db_idx": 843, "episode_idx": 3, "frame_idx": 75, "global_frame_idx": 843, "task_index": 0}, {"db_idx": 844, "episode_idx": 3, "frame_idx": 76, "global_frame_idx": 844, "task_index": 0}, {"db_idx": 845, "episode_idx": 3, "frame_idx": 77, "global_frame_idx": 845, "task_index": 0}, 
{"db_idx": 846, "episode_idx": 3, "frame_idx": 78, "global_frame_idx": 846, "task_index": 0}, {"db_idx": 847, "episode_idx": 3, "frame_idx": 79, "global_frame_idx": 847, "task_index": 0}, {"db_idx": 848, "episode_idx": 3, "frame_idx": 80, "global_frame_idx": 848, "task_index": 0}, {"db_idx": 849, "episode_idx": 3, "frame_idx": 81, "global_frame_idx": 849, "task_index": 0}, {"db_idx": 850, "episode_idx": 3, "frame_idx": 82, "global_frame_idx": 850, "task_index": 0}, {"db_idx": 851, "episode_idx": 3, "frame_idx": 83, "global_frame_idx": 851, "task_index": 0}, {"db_idx": 852, "episode_idx": 3, "frame_idx": 84, "global_frame_idx": 852, "task_index": 0}, {"db_idx": 853, "episode_idx": 3, "frame_idx": 85, "global_frame_idx": 853, "task_index": 0}, {"db_idx": 854, "episode_idx": 3, "frame_idx": 86, "global_frame_idx": 854, "task_index": 0}, {"db_idx": 855, "episode_idx": 3, "frame_idx": 87, "global_frame_idx": 855, "task_index": 0}, {"db_idx": 856, "episode_idx": 3, "frame_idx": 88, "global_frame_idx": 856, "task_index": 0}, {"db_idx": 857, "episode_idx": 3, "frame_idx": 89, "global_frame_idx": 857, "task_index": 0}, {"db_idx": 858, "episode_idx": 3, "frame_idx": 90, "global_frame_idx": 858, "task_index": 0}, {"db_idx": 859, "episode_idx": 3, "frame_idx": 91, "global_frame_idx": 859, "task_index": 0}, {"db_idx": 860, "episode_idx": 3, "frame_idx": 92, "global_frame_idx": 860, "task_index": 0}, {"db_idx": 861, "episode_idx": 3, "frame_idx": 93, "global_frame_idx": 861, "task_index": 0}, {"db_idx": 862, "episode_idx": 3, "frame_idx": 94, "global_frame_idx": 862, "task_index": 0}, {"db_idx": 863, "episode_idx": 3, "frame_idx": 95, "global_frame_idx": 863, "task_index": 0}, {"db_idx": 864, "episode_idx": 3, "frame_idx": 96, "global_frame_idx": 864, "task_index": 0}, {"db_idx": 865, "episode_idx": 3, "frame_idx": 97, "global_frame_idx": 865, "task_index": 0}, {"db_idx": 866, "episode_idx": 3, "frame_idx": 98, "global_frame_idx": 866, "task_index": 0}, {"db_idx": 867, 
"episode_idx": 3, "frame_idx": 99, "global_frame_idx": 867, "task_index": 0}, {"db_idx": 868, "episode_idx": 3, "frame_idx": 100, "global_frame_idx": 868, "task_index": 0}, {"db_idx": 869, "episode_idx": 3, "frame_idx": 101, "global_frame_idx": 869, "task_index": 0}, {"db_idx": 870, "episode_idx": 3, "frame_idx": 102, "global_frame_idx": 870, "task_index": 0}, {"db_idx": 871, "episode_idx": 3, "frame_idx": 103, "global_frame_idx": 871, "task_index": 0}, {"db_idx": 872, "episode_idx": 3, "frame_idx": 104, "global_frame_idx": 872, "task_index": 0}, {"db_idx": 873, "episode_idx": 3, "frame_idx": 105, "global_frame_idx": 873, "task_index": 0}, {"db_idx": 874, "episode_idx": 3, "frame_idx": 106, "global_frame_idx": 874, "task_index": 0}, {"db_idx": 875, "episode_idx": 3, "frame_idx": 107, "global_frame_idx": 875, "task_index": 0}, {"db_idx": 876, "episode_idx": 3, "frame_idx": 108, "global_frame_idx": 876, "task_index": 0}, {"db_idx": 877, "episode_idx": 3, "frame_idx": 109, "global_frame_idx": 877, "task_index": 0}, {"db_idx": 878, "episode_idx": 3, "frame_idx": 110, "global_frame_idx": 878, "task_index": 0}, {"db_idx": 879, "episode_idx": 3, "frame_idx": 111, "global_frame_idx": 879, "task_index": 0}, {"db_idx": 880, "episode_idx": 3, "frame_idx": 112, "global_frame_idx": 880, "task_index": 0}, {"db_idx": 881, "episode_idx": 3, "frame_idx": 113, "global_frame_idx": 881, "task_index": 0}, {"db_idx": 882, "episode_idx": 3, "frame_idx": 114, "global_frame_idx": 882, "task_index": 0}, {"db_idx": 883, "episode_idx": 3, "frame_idx": 115, "global_frame_idx": 883, "task_index": 0}, {"db_idx": 884, "episode_idx": 3, "frame_idx": 116, "global_frame_idx": 884, "task_index": 0}, {"db_idx": 885, "episode_idx": 3, "frame_idx": 117, "global_frame_idx": 885, "task_index": 0}, {"db_idx": 886, "episode_idx": 3, "frame_idx": 118, "global_frame_idx": 886, "task_index": 0}, {"db_idx": 887, "episode_idx": 3, "frame_idx": 119, "global_frame_idx": 887, "task_index": 0}, {"db_idx": 888, 
"episode_idx": 3, "frame_idx": 120, "global_frame_idx": 888, "task_index": 0}, {"db_idx": 889, "episode_idx": 3, "frame_idx": 121, "global_frame_idx": 889, "task_index": 0}, {"db_idx": 890, "episode_idx": 3, "frame_idx": 122, "global_frame_idx": 890, "task_index": 0}, {"db_idx": 891, "episode_idx": 3, "frame_idx": 123, "global_frame_idx": 891, "task_index": 0}, {"db_idx": 892, "episode_idx": 3, "frame_idx": 124, "global_frame_idx": 892, "task_index": 0}, {"db_idx": 893, "episode_idx": 3, "frame_idx": 125, "global_frame_idx": 893, "task_index": 0}, {"db_idx": 894, "episode_idx": 3, "frame_idx": 126, "global_frame_idx": 894, "task_index": 0}, {"db_idx": 895, "episode_idx": 3, "frame_idx": 127, "global_frame_idx": 895, "task_index": 0}, {"db_idx": 896, "episode_idx": 3, "frame_idx": 128, "global_frame_idx": 896, "task_index": 0}, {"db_idx": 897, "episode_idx": 3, "frame_idx": 129, "global_frame_idx": 897, "task_index": 0}, {"db_idx": 898, "episode_idx": 3, "frame_idx": 130, "global_frame_idx": 898, "task_index": 0}, {"db_idx": 899, "episode_idx": 3, "frame_idx": 131, "global_frame_idx": 899, "task_index": 0}, {"db_idx": 900, "episode_idx": 3, "frame_idx": 132, "global_frame_idx": 900, "task_index": 0}, {"db_idx": 901, "episode_idx": 3, "frame_idx": 133, "global_frame_idx": 901, "task_index": 0}, {"db_idx": 902, "episode_idx": 3, "frame_idx": 134, "global_frame_idx": 902, "task_index": 0}, {"db_idx": 903, "episode_idx": 3, "frame_idx": 135, "global_frame_idx": 903, "task_index": 0}, {"db_idx": 904, "episode_idx": 3, "frame_idx": 136, "global_frame_idx": 904, "task_index": 0}, {"db_idx": 905, "episode_idx": 3, "frame_idx": 137, "global_frame_idx": 905, "task_index": 0}, {"db_idx": 906, "episode_idx": 3, "frame_idx": 138, "global_frame_idx": 906, "task_index": 0}, {"db_idx": 907, "episode_idx": 3, "frame_idx": 139, "global_frame_idx": 907, "task_index": 0}, {"db_idx": 908, "episode_idx": 3, "frame_idx": 140, "global_frame_idx": 908, "task_index": 0}, {"db_idx": 909, 
"episode_idx": 3, "frame_idx": 141, "global_frame_idx": 909, "task_index": 0}, {"db_idx": 910, "episode_idx": 3, "frame_idx": 142, "global_frame_idx": 910, "task_index": 0}, {"db_idx": 911, "episode_idx": 3, "frame_idx": 143, "global_frame_idx": 911, "task_index": 0}, {"db_idx": 912, "episode_idx": 3, "frame_idx": 144, "global_frame_idx": 912, "task_index": 0}, {"db_idx": 913, "episode_idx": 3, "frame_idx": 145, "global_frame_idx": 913, "task_index": 0}, {"db_idx": 914, "episode_idx": 3, "frame_idx": 146, "global_frame_idx": 914, "task_index": 0}, {"db_idx": 915, "episode_idx": 3, "frame_idx": 147, "global_frame_idx": 915, "task_index": 0}, {"db_idx": 916, "episode_idx": 3, "frame_idx": 148, "global_frame_idx": 916, "task_index": 0}, {"db_idx": 917, "episode_idx": 3, "frame_idx": 149, "global_frame_idx": 917, "task_index": 0}, {"db_idx": 918, "episode_idx": 3, "frame_idx": 150, "global_frame_idx": 918, "task_index": 0}, {"db_idx": 919, "episode_idx": 3, "frame_idx": 151, "global_frame_idx": 919, "task_index": 0}, {"db_idx": 920, "episode_idx": 3, "frame_idx": 152, "global_frame_idx": 920, "task_index": 0}, {"db_idx": 921, "episode_idx": 3, "frame_idx": 153, "global_frame_idx": 921, "task_index": 0}, {"db_idx": 922, "episode_idx": 3, "frame_idx": 154, "global_frame_idx": 922, "task_index": 0}, {"db_idx": 923, "episode_idx": 3, "frame_idx": 155, "global_frame_idx": 923, "task_index": 0}, {"db_idx": 924, "episode_idx": 3, "frame_idx": 156, "global_frame_idx": 924, "task_index": 0}, {"db_idx": 925, "episode_idx": 3, "frame_idx": 157, "global_frame_idx": 925, "task_index": 0}, {"db_idx": 926, "episode_idx": 3, "frame_idx": 158, "global_frame_idx": 926, "task_index": 0}, {"db_idx": 927, "episode_idx": 3, "frame_idx": 159, "global_frame_idx": 927, "task_index": 0}, {"db_idx": 928, "episode_idx": 3, "frame_idx": 160, "global_frame_idx": 928, "task_index": 0}, {"db_idx": 929, "episode_idx": 3, "frame_idx": 161, "global_frame_idx": 929, "task_index": 0}, {"db_idx": 930, 
"episode_idx": 3, "frame_idx": 162, "global_frame_idx": 930, "task_index": 0}, {"db_idx": 931, "episode_idx": 3, "frame_idx": 163, "global_frame_idx": 931, "task_index": 0}, {"db_idx": 932, "episode_idx": 3, "frame_idx": 164, "global_frame_idx": 932, "task_index": 0}, {"db_idx": 933, "episode_idx": 3, "frame_idx": 165, "global_frame_idx": 933, "task_index": 0}, {"db_idx": 934, "episode_idx": 3, "frame_idx": 166, "global_frame_idx": 934, "task_index": 0}, {"db_idx": 935, "episode_idx": 3, "frame_idx": 167, "global_frame_idx": 935, "task_index": 0}, {"db_idx": 936, "episode_idx": 3, "frame_idx": 168, "global_frame_idx": 936, "task_index": 0}, {"db_idx": 937, "episode_idx": 3, "frame_idx": 169, "global_frame_idx": 937, "task_index": 0}, {"db_idx": 938, "episode_idx": 3, "frame_idx": 170, "global_frame_idx": 938, "task_index": 0}, {"db_idx": 939, "episode_idx": 3, "frame_idx": 171, "global_frame_idx": 939, "task_index": 0}, {"db_idx": 940, "episode_idx": 3, "frame_idx": 172, "global_frame_idx": 940, "task_index": 0}, {"db_idx": 941, "episode_idx": 3, "frame_idx": 173, "global_frame_idx": 941, "task_index": 0}, {"db_idx": 942, "episode_idx": 3, "frame_idx": 174, "global_frame_idx": 942, "task_index": 0}, {"db_idx": 943, "episode_idx": 3, "frame_idx": 175, "global_frame_idx": 943, "task_index": 0}, {"db_idx": 944, "episode_idx": 3, "frame_idx": 176, "global_frame_idx": 944, "task_index": 0}, {"db_idx": 945, "episode_idx": 3, "frame_idx": 177, "global_frame_idx": 945, "task_index": 0}, {"db_idx": 946, "episode_idx": 3, "frame_idx": 178, "global_frame_idx": 946, "task_index": 0}, {"db_idx": 947, "episode_idx": 3, "frame_idx": 179, "global_frame_idx": 947, "task_index": 0}, {"db_idx": 948, "episode_idx": 3, "frame_idx": 180, "global_frame_idx": 948, "task_index": 0}, {"db_idx": 949, "episode_idx": 3, "frame_idx": 181, "global_frame_idx": 949, "task_index": 0}, {"db_idx": 950, "episode_idx": 3, "frame_idx": 182, "global_frame_idx": 950, "task_index": 0}, {"db_idx": 951, 
"episode_idx": 3, "frame_idx": 183, "global_frame_idx": 951, "task_index": 0}, {"db_idx": 952, "episode_idx": 3, "frame_idx": 184, "global_frame_idx": 952, "task_index": 0}, {"db_idx": 953, "episode_idx": 3, "frame_idx": 185, "global_frame_idx": 953, "task_index": 0}, {"db_idx": 954, "episode_idx": 3, "frame_idx": 186, "global_frame_idx": 954, "task_index": 0}, {"db_idx": 955, "episode_idx": 3, "frame_idx": 187, "global_frame_idx": 955, "task_index": 0}, {"db_idx": 956, "episode_idx": 3, "frame_idx": 188, "global_frame_idx": 956, "task_index": 0}, {"db_idx": 957, "episode_idx": 3, "frame_idx": 189, "global_frame_idx": 957, "task_index": 0}, {"db_idx": 958, "episode_idx": 3, "frame_idx": 190, "global_frame_idx": 958, "task_index": 0}, {"db_idx": 959, "episode_idx": 3, "frame_idx": 191, "global_frame_idx": 959, "task_index": 0}, {"db_idx": 960, "episode_idx": 3, "frame_idx": 192, "global_frame_idx": 960, "task_index": 0}, {"db_idx": 961, "episode_idx": 3, "frame_idx": 193, "global_frame_idx": 961, "task_index": 0}, {"db_idx": 962, "episode_idx": 3, "frame_idx": 194, "global_frame_idx": 962, "task_index": 0}, {"db_idx": 963, "episode_idx": 3, "frame_idx": 195, "global_frame_idx": 963, "task_index": 0}, {"db_idx": 964, "episode_idx": 3, "frame_idx": 196, "global_frame_idx": 964, "task_index": 0}, {"db_idx": 965, "episode_idx": 3, "frame_idx": 197, "global_frame_idx": 965, "task_index": 0}, {"db_idx": 966, "episode_idx": 3, "frame_idx": 198, "global_frame_idx": 966, "task_index": 0}, {"db_idx": 967, "episode_idx": 3, "frame_idx": 199, "global_frame_idx": 967, "task_index": 0}, {"db_idx": 968, "episode_idx": 3, "frame_idx": 200, "global_frame_idx": 968, "task_index": 0}, {"db_idx": 969, "episode_idx": 3, "frame_idx": 201, "global_frame_idx": 969, "task_index": 0}, {"db_idx": 970, "episode_idx": 3, "frame_idx": 202, "global_frame_idx": 970, "task_index": 0}, {"db_idx": 971, "episode_idx": 3, "frame_idx": 203, "global_frame_idx": 971, "task_index": 0}, {"db_idx": 972, 
"episode_idx": 3, "frame_idx": 204, "global_frame_idx": 972, "task_index": 0}, {"db_idx": 973, "episode_idx": 3, "frame_idx": 205, "global_frame_idx": 973, "task_index": 0}, {"db_idx": 974, "episode_idx": 3, "frame_idx": 206, "global_frame_idx": 974, "task_index": 0}, {"db_idx": 975, "episode_idx": 3, "frame_idx": 207, "global_frame_idx": 975, "task_index": 0}, {"db_idx": 976, "episode_idx": 3, "frame_idx": 208, "global_frame_idx": 976, "task_index": 0}, {"db_idx": 977, "episode_idx": 3, "frame_idx": 209, "global_frame_idx": 977, "task_index": 0}, {"db_idx": 978, "episode_idx": 3, "frame_idx": 210, "global_frame_idx": 978, "task_index": 0}, {"db_idx": 979, "episode_idx": 3, "frame_idx": 211, "global_frame_idx": 979, "task_index": 0}, {"db_idx": 980, "episode_idx": 3, "frame_idx": 212, "global_frame_idx": 980, "task_index": 0}, {"db_idx": 981, "episode_idx": 3, "frame_idx": 213, "global_frame_idx": 981, "task_index": 0}, {"db_idx": 982, "episode_idx": 3, "frame_idx": 214, "global_frame_idx": 982, "task_index": 0}, {"db_idx": 983, "episode_idx": 3, "frame_idx": 215, "global_frame_idx": 983, "task_index": 0}, {"db_idx": 984, "episode_idx": 3, "frame_idx": 216, "global_frame_idx": 984, "task_index": 0}, {"db_idx": 985, "episode_idx": 3, "frame_idx": 217, "global_frame_idx": 985, "task_index": 0}, {"db_idx": 986, "episode_idx": 3, "frame_idx": 218, "global_frame_idx": 986, "task_index": 0}, {"db_idx": 987, "episode_idx": 3, "frame_idx": 219, "global_frame_idx": 987, "task_index": 0}, {"db_idx": 988, "episode_idx": 3, "frame_idx": 220, "global_frame_idx": 988, "task_index": 0}, {"db_idx": 989, "episode_idx": 3, "frame_idx": 221, "global_frame_idx": 989, "task_index": 0}, {"db_idx": 990, "episode_idx": 3, "frame_idx": 222, "global_frame_idx": 990, "task_index": 0}, {"db_idx": 991, "episode_idx": 3, "frame_idx": 223, "global_frame_idx": 991, "task_index": 0}, {"db_idx": 992, "episode_idx": 3, "frame_idx": 224, "global_frame_idx": 992, "task_index": 0}, {"db_idx": 993, 
"episode_idx": 3, "frame_idx": 225, "global_frame_idx": 993, "task_index": 0}, {"db_idx": 994, "episode_idx": 3, "frame_idx": 226, "global_frame_idx": 994, "task_index": 0}, {"db_idx": 995, "episode_idx": 3, "frame_idx": 227, "global_frame_idx": 995, "task_index": 0}, {"db_idx": 996, "episode_idx": 3, "frame_idx": 228, "global_frame_idx": 996, "task_index": 0}, {"db_idx": 997, "episode_idx": 3, "frame_idx": 229, "global_frame_idx": 997, "task_index": 0}, {"db_idx": 998, "episode_idx": 3, "frame_idx": 230, "global_frame_idx": 998, "task_index": 0}, {"db_idx": 999, "episode_idx": 3, "frame_idx": 231, "global_frame_idx": 999, "task_index": 0}, {"db_idx": 1000, "episode_idx": 3, "frame_idx": 232, "global_frame_idx": 1000, "task_index": 0}, {"db_idx": 1001, "episode_idx": 3, "frame_idx": 233, "global_frame_idx": 1001, "task_index": 0}, {"db_idx": 1002, "episode_idx": 3, "frame_idx": 234, "global_frame_idx": 1002, "task_index": 0}, {"db_idx": 1003, "episode_idx": 3, "frame_idx": 235, "global_frame_idx": 1003, "task_index": 0}, {"db_idx": 1004, "episode_idx": 3, "frame_idx": 236, "global_frame_idx": 1004, "task_index": 0}, {"db_idx": 1005, "episode_idx": 3, "frame_idx": 237, "global_frame_idx": 1005, "task_index": 0}, {"db_idx": 1006, "episode_idx": 3, "frame_idx": 238, "global_frame_idx": 1006, "task_index": 0}, {"db_idx": 1007, "episode_idx": 3, "frame_idx": 239, "global_frame_idx": 1007, "task_index": 0}, {"db_idx": 1008, "episode_idx": 3, "frame_idx": 240, "global_frame_idx": 1008, "task_index": 0}, {"db_idx": 1009, "episode_idx": 3, "frame_idx": 241, "global_frame_idx": 1009, "task_index": 0}, {"db_idx": 1010, "episode_idx": 3, "frame_idx": 242, "global_frame_idx": 1010, "task_index": 0}, {"db_idx": 1011, "episode_idx": 3, "frame_idx": 243, "global_frame_idx": 1011, "task_index": 0}, {"db_idx": 1012, "episode_idx": 3, "frame_idx": 244, "global_frame_idx": 1012, "task_index": 0}, {"db_idx": 1013, "episode_idx": 3, "frame_idx": 245, "global_frame_idx": 1013, 
"task_index": 0}, {"db_idx": 1014, "episode_idx": 3, "frame_idx": 246, "global_frame_idx": 1014, "task_index": 0}, {"db_idx": 1015, "episode_idx": 3, "frame_idx": 247, "global_frame_idx": 1015, "task_index": 0}, {"db_idx": 1016, "episode_idx": 3, "frame_idx": 248, "global_frame_idx": 1016, "task_index": 0}, {"db_idx": 1017, "episode_idx": 3, "frame_idx": 249, "global_frame_idx": 1017, "task_index": 0}, {"db_idx": 1018, "episode_idx": 3, "frame_idx": 250, "global_frame_idx": 1018, "task_index": 0}, {"db_idx": 1019, "episode_idx": 3, "frame_idx": 251, "global_frame_idx": 1019, "task_index": 0}, {"db_idx": 1020, "episode_idx": 3, "frame_idx": 252, "global_frame_idx": 1020, "task_index": 0}, {"db_idx": 1021, "episode_idx": 3, "frame_idx": 253, "global_frame_idx": 1021, "task_index": 0}, {"db_idx": 1022, "episode_idx": 3, "frame_idx": 254, "global_frame_idx": 1022, "task_index": 0}, {"db_idx": 1023, "episode_idx": 4, "frame_idx": 0, "global_frame_idx": 1023, "task_index": 0}, {"db_idx": 1024, "episode_idx": 4, "frame_idx": 1, "global_frame_idx": 1024, "task_index": 0}, {"db_idx": 1025, "episode_idx": 4, "frame_idx": 2, "global_frame_idx": 1025, "task_index": 0}, {"db_idx": 1026, "episode_idx": 4, "frame_idx": 3, "global_frame_idx": 1026, "task_index": 0}, {"db_idx": 1027, "episode_idx": 4, "frame_idx": 4, "global_frame_idx": 1027, "task_index": 0}, {"db_idx": 1028, "episode_idx": 4, "frame_idx": 5, "global_frame_idx": 1028, "task_index": 0}, {"db_idx": 1029, "episode_idx": 4, "frame_idx": 6, "global_frame_idx": 1029, "task_index": 0}, {"db_idx": 1030, "episode_idx": 4, "frame_idx": 7, "global_frame_idx": 1030, "task_index": 0}, {"db_idx": 1031, "episode_idx": 4, "frame_idx": 8, "global_frame_idx": 1031, "task_index": 0}, {"db_idx": 1032, "episode_idx": 4, "frame_idx": 9, "global_frame_idx": 1032, "task_index": 0}, {"db_idx": 1033, "episode_idx": 4, "frame_idx": 10, "global_frame_idx": 1033, "task_index": 0}, {"db_idx": 1034, "episode_idx": 4, "frame_idx": 11, 
"global_frame_idx": 1034, "task_index": 0}, {"db_idx": 1035, "episode_idx": 4, "frame_idx": 12, "global_frame_idx": 1035, "task_index": 0}, {"db_idx": 1036, "episode_idx": 4, "frame_idx": 13, "global_frame_idx": 1036, "task_index": 0}, {"db_idx": 1037, "episode_idx": 4, "frame_idx": 14, "global_frame_idx": 1037, "task_index": 0}, {"db_idx": 1038, "episode_idx": 4, "frame_idx": 15, "global_frame_idx": 1038, "task_index": 0}, {"db_idx": 1039, "episode_idx": 4, "frame_idx": 16, "global_frame_idx": 1039, "task_index": 0}, {"db_idx": 1040, "episode_idx": 4, "frame_idx": 17, "global_frame_idx": 1040, "task_index": 0}, {"db_idx": 1041, "episode_idx": 4, "frame_idx": 18, "global_frame_idx": 1041, "task_index": 0}, {"db_idx": 1042, "episode_idx": 4, "frame_idx": 19, "global_frame_idx": 1042, "task_index": 0}, {"db_idx": 1043, "episode_idx": 4, "frame_idx": 20, "global_frame_idx": 1043, "task_index": 0}, {"db_idx": 1044, "episode_idx": 4, "frame_idx": 21, "global_frame_idx": 1044, "task_index": 0}, {"db_idx": 1045, "episode_idx": 4, "frame_idx": 22, "global_frame_idx": 1045, "task_index": 0}, {"db_idx": 1046, "episode_idx": 4, "frame_idx": 23, "global_frame_idx": 1046, "task_index": 0}, {"db_idx": 1047, "episode_idx": 4, "frame_idx": 24, "global_frame_idx": 1047, "task_index": 0}, {"db_idx": 1048, "episode_idx": 4, "frame_idx": 25, "global_frame_idx": 1048, "task_index": 0}, {"db_idx": 1049, "episode_idx": 4, "frame_idx": 26, "global_frame_idx": 1049, "task_index": 0}, {"db_idx": 1050, "episode_idx": 4, "frame_idx": 27, "global_frame_idx": 1050, "task_index": 0}, {"db_idx": 1051, "episode_idx": 4, "frame_idx": 28, "global_frame_idx": 1051, "task_index": 0}, {"db_idx": 1052, "episode_idx": 4, "frame_idx": 29, "global_frame_idx": 1052, "task_index": 0}, {"db_idx": 1053, "episode_idx": 4, "frame_idx": 30, "global_frame_idx": 1053, "task_index": 0}, {"db_idx": 1054, "episode_idx": 4, "frame_idx": 31, "global_frame_idx": 1054, "task_index": 0}, {"db_idx": 1055, "episode_idx": 4, 
"frame_idx": 32, "global_frame_idx": 1055, "task_index": 0}, {"db_idx": 1056, "episode_idx": 4, "frame_idx": 33, "global_frame_idx": 1056, "task_index": 0}, {"db_idx": 1057, "episode_idx": 4, "frame_idx": 34, "global_frame_idx": 1057, "task_index": 0}, {"db_idx": 1058, "episode_idx": 4, "frame_idx": 35, "global_frame_idx": 1058, "task_index": 0}, {"db_idx": 1059, "episode_idx": 4, "frame_idx": 36, "global_frame_idx": 1059, "task_index": 0}, {"db_idx": 1060, "episode_idx": 4, "frame_idx": 37, "global_frame_idx": 1060, "task_index": 0}, {"db_idx": 1061, "episode_idx": 4, "frame_idx": 38, "global_frame_idx": 1061, "task_index": 0}, {"db_idx": 1062, "episode_idx": 4, "frame_idx": 39, "global_frame_idx": 1062, "task_index": 0}, {"db_idx": 1063, "episode_idx": 4, "frame_idx": 40, "global_frame_idx": 1063, "task_index": 0}, {"db_idx": 1064, "episode_idx": 4, "frame_idx": 41, "global_frame_idx": 1064, "task_index": 0}, {"db_idx": 1065, "episode_idx": 4, "frame_idx": 42, "global_frame_idx": 1065, "task_index": 0}, {"db_idx": 1066, "episode_idx": 4, "frame_idx": 43, "global_frame_idx": 1066, "task_index": 0}, {"db_idx": 1067, "episode_idx": 4, "frame_idx": 44, "global_frame_idx": 1067, "task_index": 0}, {"db_idx": 1068, "episode_idx": 4, "frame_idx": 45, "global_frame_idx": 1068, "task_index": 0}, {"db_idx": 1069, "episode_idx": 4, "frame_idx": 46, "global_frame_idx": 1069, "task_index": 0}, {"db_idx": 1070, "episode_idx": 4, "frame_idx": 47, "global_frame_idx": 1070, "task_index": 0}, {"db_idx": 1071, "episode_idx": 4, "frame_idx": 48, "global_frame_idx": 1071, "task_index": 0}, {"db_idx": 1072, "episode_idx": 4, "frame_idx": 49, "global_frame_idx": 1072, "task_index": 0}, {"db_idx": 1073, "episode_idx": 4, "frame_idx": 50, "global_frame_idx": 1073, "task_index": 0}, {"db_idx": 1074, "episode_idx": 4, "frame_idx": 51, "global_frame_idx": 1074, "task_index": 0}, {"db_idx": 1075, "episode_idx": 4, "frame_idx": 52, "global_frame_idx": 1075, "task_index": 0}, {"db_idx": 1076, 
"episode_idx": 4, "frame_idx": 53, "global_frame_idx": 1076, "task_index": 0}, {"db_idx": 1077, "episode_idx": 4, "frame_idx": 54, "global_frame_idx": 1077, "task_index": 0}, {"db_idx": 1078, "episode_idx": 4, "frame_idx": 55, "global_frame_idx": 1078, "task_index": 0}, {"db_idx": 1079, "episode_idx": 4, "frame_idx": 56, "global_frame_idx": 1079, "task_index": 0}, {"db_idx": 1080, "episode_idx": 4, "frame_idx": 57, "global_frame_idx": 1080, "task_index": 0}, {"db_idx": 1081, "episode_idx": 4, "frame_idx": 58, "global_frame_idx": 1081, "task_index": 0}, {"db_idx": 1082, "episode_idx": 4, "frame_idx": 59, "global_frame_idx": 1082, "task_index": 0}, {"db_idx": 1083, "episode_idx": 4, "frame_idx": 60, "global_frame_idx": 1083, "task_index": 0}, {"db_idx": 1084, "episode_idx": 4, "frame_idx": 61, "global_frame_idx": 1084, "task_index": 0}, {"db_idx": 1085, "episode_idx": 4, "frame_idx": 62, "global_frame_idx": 1085, "task_index": 0}, {"db_idx": 1086, "episode_idx": 4, "frame_idx": 63, "global_frame_idx": 1086, "task_index": 0}, {"db_idx": 1087, "episode_idx": 4, "frame_idx": 64, "global_frame_idx": 1087, "task_index": 0}, {"db_idx": 1088, "episode_idx": 4, "frame_idx": 65, "global_frame_idx": 1088, "task_index": 0}, {"db_idx": 1089, "episode_idx": 4, "frame_idx": 66, "global_frame_idx": 1089, "task_index": 0}, {"db_idx": 1090, "episode_idx": 4, "frame_idx": 67, "global_frame_idx": 1090, "task_index": 0}, {"db_idx": 1091, "episode_idx": 4, "frame_idx": 68, "global_frame_idx": 1091, "task_index": 0}, {"db_idx": 1092, "episode_idx": 4, "frame_idx": 69, "global_frame_idx": 1092, "task_index": 0}, {"db_idx": 1093, "episode_idx": 4, "frame_idx": 70, "global_frame_idx": 1093, "task_index": 0}, {"db_idx": 1094, "episode_idx": 4, "frame_idx": 71, "global_frame_idx": 1094, "task_index": 0}, {"db_idx": 1095, "episode_idx": 4, "frame_idx": 72, "global_frame_idx": 1095, "task_index": 0}, {"db_idx": 1096, "episode_idx": 4, "frame_idx": 73, "global_frame_idx": 1096, "task_index": 0}, 
{"db_idx": 1097, "episode_idx": 4, "frame_idx": 74, "global_frame_idx": 1097, "task_index": 0}, {"db_idx": 1098, "episode_idx": 4, "frame_idx": 75, "global_frame_idx": 1098, "task_index": 0}, {"db_idx": 1099, "episode_idx": 4, "frame_idx": 76, "global_frame_idx": 1099, "task_index": 0}, {"db_idx": 1100, "episode_idx": 4, "frame_idx": 77, "global_frame_idx": 1100, "task_index": 0}, {"db_idx": 1101, "episode_idx": 4, "frame_idx": 78, "global_frame_idx": 1101, "task_index": 0}, {"db_idx": 1102, "episode_idx": 4, "frame_idx": 79, "global_frame_idx": 1102, "task_index": 0}, {"db_idx": 1103, "episode_idx": 4, "frame_idx": 80, "global_frame_idx": 1103, "task_index": 0}, {"db_idx": 1104, "episode_idx": 4, "frame_idx": 81, "global_frame_idx": 1104, "task_index": 0}, {"db_idx": 1105, "episode_idx": 4, "frame_idx": 82, "global_frame_idx": 1105, "task_index": 0}, {"db_idx": 1106, "episode_idx": 4, "frame_idx": 83, "global_frame_idx": 1106, "task_index": 0}, {"db_idx": 1107, "episode_idx": 4, "frame_idx": 84, "global_frame_idx": 1107, "task_index": 0}, {"db_idx": 1108, "episode_idx": 4, "frame_idx": 85, "global_frame_idx": 1108, "task_index": 0}, {"db_idx": 1109, "episode_idx": 4, "frame_idx": 86, "global_frame_idx": 1109, "task_index": 0}, {"db_idx": 1110, "episode_idx": 4, "frame_idx": 87, "global_frame_idx": 1110, "task_index": 0}, {"db_idx": 1111, "episode_idx": 4, "frame_idx": 88, "global_frame_idx": 1111, "task_index": 0}, {"db_idx": 1112, "episode_idx": 4, "frame_idx": 89, "global_frame_idx": 1112, "task_index": 0}, {"db_idx": 1113, "episode_idx": 4, "frame_idx": 90, "global_frame_idx": 1113, "task_index": 0}, {"db_idx": 1114, "episode_idx": 4, "frame_idx": 91, "global_frame_idx": 1114, "task_index": 0}, {"db_idx": 1115, "episode_idx": 4, "frame_idx": 92, "global_frame_idx": 1115, "task_index": 0}, {"db_idx": 1116, "episode_idx": 4, "frame_idx": 93, "global_frame_idx": 1116, "task_index": 0}, {"db_idx": 1117, "episode_idx": 4, "frame_idx": 94, "global_frame_idx": 1117, 
"task_index": 0}, {"db_idx": 1118, "episode_idx": 4, "frame_idx": 95, "global_frame_idx": 1118, "task_index": 0}, {"db_idx": 1119, "episode_idx": 4, "frame_idx": 96, "global_frame_idx": 1119, "task_index": 0}, {"db_idx": 1120, "episode_idx": 4, "frame_idx": 97, "global_frame_idx": 1120, "task_index": 0}, {"db_idx": 1121, "episode_idx": 4, "frame_idx": 98, "global_frame_idx": 1121, "task_index": 0}, {"db_idx": 1122, "episode_idx": 4, "frame_idx": 99, "global_frame_idx": 1122, "task_index": 0}, {"db_idx": 1123, "episode_idx": 4, "frame_idx": 100, "global_frame_idx": 1123, "task_index": 0}, {"db_idx": 1124, "episode_idx": 4, "frame_idx": 101, "global_frame_idx": 1124, "task_index": 0}, {"db_idx": 1125, "episode_idx": 4, "frame_idx": 102, "global_frame_idx": 1125, "task_index": 0}, {"db_idx": 1126, "episode_idx": 4, "frame_idx": 103, "global_frame_idx": 1126, "task_index": 0}, {"db_idx": 1127, "episode_idx": 4, "frame_idx": 104, "global_frame_idx": 1127, "task_index": 0}, {"db_idx": 1128, "episode_idx": 4, "frame_idx": 105, "global_frame_idx": 1128, "task_index": 0}, {"db_idx": 1129, "episode_idx": 4, "frame_idx": 106, "global_frame_idx": 1129, "task_index": 0}, {"db_idx": 1130, "episode_idx": 4, "frame_idx": 107, "global_frame_idx": 1130, "task_index": 0}, {"db_idx": 1131, "episode_idx": 4, "frame_idx": 108, "global_frame_idx": 1131, "task_index": 0}, {"db_idx": 1132, "episode_idx": 4, "frame_idx": 109, "global_frame_idx": 1132, "task_index": 0}, {"db_idx": 1133, "episode_idx": 4, "frame_idx": 110, "global_frame_idx": 1133, "task_index": 0}, {"db_idx": 1134, "episode_idx": 4, "frame_idx": 111, "global_frame_idx": 1134, "task_index": 0}, {"db_idx": 1135, "episode_idx": 4, "frame_idx": 112, "global_frame_idx": 1135, "task_index": 0}, {"db_idx": 1136, "episode_idx": 4, "frame_idx": 113, "global_frame_idx": 1136, "task_index": 0}, {"db_idx": 1137, "episode_idx": 4, "frame_idx": 114, "global_frame_idx": 1137, "task_index": 0}, {"db_idx": 1138, "episode_idx": 4, 
"frame_idx": 115, "global_frame_idx": 1138, "task_index": 0}, {"db_idx": 1139, "episode_idx": 4, "frame_idx": 116, "global_frame_idx": 1139, "task_index": 0}, {"db_idx": 1140, "episode_idx": 4, "frame_idx": 117, "global_frame_idx": 1140, "task_index": 0}, {"db_idx": 1141, "episode_idx": 4, "frame_idx": 118, "global_frame_idx": 1141, "task_index": 0}, {"db_idx": 1142, "episode_idx": 4, "frame_idx": 119, "global_frame_idx": 1142, "task_index": 0}, {"db_idx": 1143, "episode_idx": 4, "frame_idx": 120, "global_frame_idx": 1143, "task_index": 0}, {"db_idx": 1144, "episode_idx": 4, "frame_idx": 121, "global_frame_idx": 1144, "task_index": 0}, {"db_idx": 1145, "episode_idx": 4, "frame_idx": 122, "global_frame_idx": 1145, "task_index": 0}, {"db_idx": 1146, "episode_idx": 4, "frame_idx": 123, "global_frame_idx": 1146, "task_index": 0}, {"db_idx": 1147, "episode_idx": 4, "frame_idx": 124, "global_frame_idx": 1147, "task_index": 0}, {"db_idx": 1148, "episode_idx": 4, "frame_idx": 125, "global_frame_idx": 1148, "task_index": 0}, {"db_idx": 1149, "episode_idx": 4, "frame_idx": 126, "global_frame_idx": 1149, "task_index": 0}, {"db_idx": 1150, "episode_idx": 4, "frame_idx": 127, "global_frame_idx": 1150, "task_index": 0}, {"db_idx": 1151, "episode_idx": 4, "frame_idx": 128, "global_frame_idx": 1151, "task_index": 0}, {"db_idx": 1152, "episode_idx": 4, "frame_idx": 129, "global_frame_idx": 1152, "task_index": 0}, {"db_idx": 1153, "episode_idx": 4, "frame_idx": 130, "global_frame_idx": 1153, "task_index": 0}, {"db_idx": 1154, "episode_idx": 4, "frame_idx": 131, "global_frame_idx": 1154, "task_index": 0}, {"db_idx": 1155, "episode_idx": 4, "frame_idx": 132, "global_frame_idx": 1155, "task_index": 0}, {"db_idx": 1156, "episode_idx": 4, "frame_idx": 133, "global_frame_idx": 1156, "task_index": 0}, {"db_idx": 1157, "episode_idx": 4, "frame_idx": 134, "global_frame_idx": 1157, "task_index": 0}, {"db_idx": 1158, "episode_idx": 4, "frame_idx": 135, "global_frame_idx": 1158, "task_index": 
0}, {"db_idx": 1159, "episode_idx": 4, "frame_idx": 136, "global_frame_idx": 1159, "task_index": 0}, {"db_idx": 1160, "episode_idx": 4, "frame_idx": 137, "global_frame_idx": 1160, "task_index": 0}, {"db_idx": 1161, "episode_idx": 4, "frame_idx": 138, "global_frame_idx": 1161, "task_index": 0}, {"db_idx": 1162, "episode_idx": 4, "frame_idx": 139, "global_frame_idx": 1162, "task_index": 0}, {"db_idx": 1163, "episode_idx": 4, "frame_idx": 140, "global_frame_idx": 1163, "task_index": 0}, {"db_idx": 1164, "episode_idx": 4, "frame_idx": 141, "global_frame_idx": 1164, "task_index": 0}, {"db_idx": 1165, "episode_idx": 4, "frame_idx": 142, "global_frame_idx": 1165, "task_index": 0}, {"db_idx": 1166, "episode_idx": 4, "frame_idx": 143, "global_frame_idx": 1166, "task_index": 0}, {"db_idx": 1167, "episode_idx": 4, "frame_idx": 144, "global_frame_idx": 1167, "task_index": 0}, {"db_idx": 1168, "episode_idx": 4, "frame_idx": 145, "global_frame_idx": 1168, "task_index": 0}, {"db_idx": 1169, "episode_idx": 4, "frame_idx": 146, "global_frame_idx": 1169, "task_index": 0}, {"db_idx": 1170, "episode_idx": 4, "frame_idx": 147, "global_frame_idx": 1170, "task_index": 0}, {"db_idx": 1171, "episode_idx": 4, "frame_idx": 148, "global_frame_idx": 1171, "task_index": 0}, {"db_idx": 1172, "episode_idx": 4, "frame_idx": 149, "global_frame_idx": 1172, "task_index": 0}, {"db_idx": 1173, "episode_idx": 4, "frame_idx": 150, "global_frame_idx": 1173, "task_index": 0}, {"db_idx": 1174, "episode_idx": 4, "frame_idx": 151, "global_frame_idx": 1174, "task_index": 0}, {"db_idx": 1175, "episode_idx": 4, "frame_idx": 152, "global_frame_idx": 1175, "task_index": 0}, {"db_idx": 1176, "episode_idx": 4, "frame_idx": 153, "global_frame_idx": 1176, "task_index": 0}, {"db_idx": 1177, "episode_idx": 4, "frame_idx": 154, "global_frame_idx": 1177, "task_index": 0}, {"db_idx": 1178, "episode_idx": 4, "frame_idx": 155, "global_frame_idx": 1178, "task_index": 0}, {"db_idx": 1179, "episode_idx": 4, "frame_idx": 156, 
"global_frame_idx": 1179, "task_index": 0}, {"db_idx": 1180, "episode_idx": 4, "frame_idx": 157, "global_frame_idx": 1180, "task_index": 0}, {"db_idx": 1181, "episode_idx": 4, "frame_idx": 158, "global_frame_idx": 1181, "task_index": 0}, {"db_idx": 1182, "episode_idx": 4, "frame_idx": 159, "global_frame_idx": 1182, "task_index": 0}, {"db_idx": 1183, "episode_idx": 4, "frame_idx": 160, "global_frame_idx": 1183, "task_index": 0}, {"db_idx": 1184, "episode_idx": 4, "frame_idx": 161, "global_frame_idx": 1184, "task_index": 0}, {"db_idx": 1185, "episode_idx": 4, "frame_idx": 162, "global_frame_idx": 1185, "task_index": 0}, {"db_idx": 1186, "episode_idx": 4, "frame_idx": 163, "global_frame_idx": 1186, "task_index": 0}, {"db_idx": 1187, "episode_idx": 4, "frame_idx": 164, "global_frame_idx": 1187, "task_index": 0}, {"db_idx": 1188, "episode_idx": 4, "frame_idx": 165, "global_frame_idx": 1188, "task_index": 0}, {"db_idx": 1189, "episode_idx": 4, "frame_idx": 166, "global_frame_idx": 1189, "task_index": 0}, {"db_idx": 1190, "episode_idx": 4, "frame_idx": 167, "global_frame_idx": 1190, "task_index": 0}, {"db_idx": 1191, "episode_idx": 4, "frame_idx": 168, "global_frame_idx": 1191, "task_index": 0}, {"db_idx": 1192, "episode_idx": 4, "frame_idx": 169, "global_frame_idx": 1192, "task_index": 0}, {"db_idx": 1193, "episode_idx": 4, "frame_idx": 170, "global_frame_idx": 1193, "task_index": 0}, {"db_idx": 1194, "episode_idx": 4, "frame_idx": 171, "global_frame_idx": 1194, "task_index": 0}, {"db_idx": 1195, "episode_idx": 4, "frame_idx": 172, "global_frame_idx": 1195, "task_index": 0}, {"db_idx": 1196, "episode_idx": 4, "frame_idx": 173, "global_frame_idx": 1196, "task_index": 0}, {"db_idx": 1197, "episode_idx": 4, "frame_idx": 174, "global_frame_idx": 1197, "task_index": 0}, {"db_idx": 1198, "episode_idx": 4, "frame_idx": 175, "global_frame_idx": 1198, "task_index": 0}, {"db_idx": 1199, "episode_idx": 4, "frame_idx": 176, "global_frame_idx": 1199, "task_index": 0}, {"db_idx": 
1200, "episode_idx": 4, "frame_idx": 177, "global_frame_idx": 1200, "task_index": 0}, {"db_idx": 1201, "episode_idx": 4, "frame_idx": 178, "global_frame_idx": 1201, "task_index": 0}, {"db_idx": 1202, "episode_idx": 4, "frame_idx": 179, "global_frame_idx": 1202, "task_index": 0}, {"db_idx": 1203, "episode_idx": 4, "frame_idx": 180, "global_frame_idx": 1203, "task_index": 0}, {"db_idx": 1204, "episode_idx": 4, "frame_idx": 181, "global_frame_idx": 1204, "task_index": 0}, {"db_idx": 1205, "episode_idx": 4, "frame_idx": 182, "global_frame_idx": 1205, "task_index": 0}, {"db_idx": 1206, "episode_idx": 4, "frame_idx": 183, "global_frame_idx": 1206, "task_index": 0}, {"db_idx": 1207, "episode_idx": 4, "frame_idx": 184, "global_frame_idx": 1207, "task_index": 0}, {"db_idx": 1208, "episode_idx": 4, "frame_idx": 185, "global_frame_idx": 1208, "task_index": 0}, {"db_idx": 1209, "episode_idx": 4, "frame_idx": 186, "global_frame_idx": 1209, "task_index": 0}, {"db_idx": 1210, "episode_idx": 4, "frame_idx": 187, "global_frame_idx": 1210, "task_index": 0}, {"db_idx": 1211, "episode_idx": 4, "frame_idx": 188, "global_frame_idx": 1211, "task_index": 0}, {"db_idx": 1212, "episode_idx": 4, "frame_idx": 189, "global_frame_idx": 1212, "task_index": 0}, {"db_idx": 1213, "episode_idx": 4, "frame_idx": 190, "global_frame_idx": 1213, "task_index": 0}, {"db_idx": 1214, "episode_idx": 4, "frame_idx": 191, "global_frame_idx": 1214, "task_index": 0}, {"db_idx": 1215, "episode_idx": 4, "frame_idx": 192, "global_frame_idx": 1215, "task_index": 0}, {"db_idx": 1216, "episode_idx": 4, "frame_idx": 193, "global_frame_idx": 1216, "task_index": 0}, {"db_idx": 1217, "episode_idx": 4, "frame_idx": 194, "global_frame_idx": 1217, "task_index": 0}, {"db_idx": 1218, "episode_idx": 4, "frame_idx": 195, "global_frame_idx": 1218, "task_index": 0}, {"db_idx": 1219, "episode_idx": 4, "frame_idx": 196, "global_frame_idx": 1219, "task_index": 0}, {"db_idx": 1220, "episode_idx": 4, "frame_idx": 197, 
"global_frame_idx": 1220, "task_index": 0}, {"db_idx": 1221, "episode_idx": 4, "frame_idx": 198, "global_frame_idx": 1221, "task_index": 0}, {"db_idx": 1222, "episode_idx": 4, "frame_idx": 199, "global_frame_idx": 1222, "task_index": 0}, {"db_idx": 1223, "episode_idx": 4, "frame_idx": 200, "global_frame_idx": 1223, "task_index": 0}, {"db_idx": 1224, "episode_idx": 4, "frame_idx": 201, "global_frame_idx": 1224, "task_index": 0}, {"db_idx": 1225, "episode_idx": 4, "frame_idx": 202, "global_frame_idx": 1225, "task_index": 0}, {"db_idx": 1226, "episode_idx": 4, "frame_idx": 203, "global_frame_idx": 1226, "task_index": 0}, {"db_idx": 1227, "episode_idx": 4, "frame_idx": 204, "global_frame_idx": 1227, "task_index": 0}, {"db_idx": 1228, "episode_idx": 4, "frame_idx": 205, "global_frame_idx": 1228, "task_index": 0}, {"db_idx": 1229, "episode_idx": 4, "frame_idx": 206, "global_frame_idx": 1229, "task_index": 0}, {"db_idx": 1230, "episode_idx": 4, "frame_idx": 207, "global_frame_idx": 1230, "task_index": 0}, {"db_idx": 1231, "episode_idx": 4, "frame_idx": 208, "global_frame_idx": 1231, "task_index": 0}, {"db_idx": 1232, "episode_idx": 4, "frame_idx": 209, "global_frame_idx": 1232, "task_index": 0}, {"db_idx": 1233, "episode_idx": 4, "frame_idx": 210, "global_frame_idx": 1233, "task_index": 0}, {"db_idx": 1234, "episode_idx": 4, "frame_idx": 211, "global_frame_idx": 1234, "task_index": 0}, {"db_idx": 1235, "episode_idx": 4, "frame_idx": 212, "global_frame_idx": 1235, "task_index": 0}, {"db_idx": 1236, "episode_idx": 4, "frame_idx": 213, "global_frame_idx": 1236, "task_index": 0}, {"db_idx": 1237, "episode_idx": 4, "frame_idx": 214, "global_frame_idx": 1237, "task_index": 0}, {"db_idx": 1238, "episode_idx": 4, "frame_idx": 215, "global_frame_idx": 1238, "task_index": 0}, {"db_idx": 1239, "episode_idx": 4, "frame_idx": 216, "global_frame_idx": 1239, "task_index": 0}, {"db_idx": 1240, "episode_idx": 4, "frame_idx": 217, "global_frame_idx": 1240, "task_index": 0}, {"db_idx": 
1241, "episode_idx": 4, "frame_idx": 218, "global_frame_idx": 1241, "task_index": 0}, {"db_idx": 1242, "episode_idx": 4, "frame_idx": 219, "global_frame_idx": 1242, "task_index": 0}, {"db_idx": 1243, "episode_idx": 4, "frame_idx": 220, "global_frame_idx": 1243, "task_index": 0}, {"db_idx": 1244, "episode_idx": 4, "frame_idx": 221, "global_frame_idx": 1244, "task_index": 0}, {"db_idx": 1245, "episode_idx": 4, "frame_idx": 222, "global_frame_idx": 1245, "task_index": 0}, {"db_idx": 1246, "episode_idx": 4, "frame_idx": 223, "global_frame_idx": 1246, "task_index": 0}, {"db_idx": 1247, "episode_idx": 4, "frame_idx": 224, "global_frame_idx": 1247, "task_index": 0}, {"db_idx": 1248, "episode_idx": 4, "frame_idx": 225, "global_frame_idx": 1248, "task_index": 0}, {"db_idx": 1249, "episode_idx": 4, "frame_idx": 226, "global_frame_idx": 1249, "task_index": 0}, {"db_idx": 1250, "episode_idx": 4, "frame_idx": 227, "global_frame_idx": 1250, "task_index": 0}, {"db_idx": 1251, "episode_idx": 4, "frame_idx": 228, "global_frame_idx": 1251, "task_index": 0}, {"db_idx": 1252, "episode_idx": 4, "frame_idx": 229, "global_frame_idx": 1252, "task_index": 0}, {"db_idx": 1253, "episode_idx": 4, "frame_idx": 230, "global_frame_idx": 1253, "task_index": 0}, {"db_idx": 1254, "episode_idx": 4, "frame_idx": 231, "global_frame_idx": 1254, "task_index": 0}, {"db_idx": 1255, "episode_idx": 4, "frame_idx": 232, "global_frame_idx": 1255, "task_index": 0}, {"db_idx": 1256, "episode_idx": 4, "frame_idx": 233, "global_frame_idx": 1256, "task_index": 0}, {"db_idx": 1257, "episode_idx": 4, "frame_idx": 234, "global_frame_idx": 1257, "task_index": 0}, {"db_idx": 1258, "episode_idx": 4, "frame_idx": 235, "global_frame_idx": 1258, "task_index": 0}, {"db_idx": 1259, "episode_idx": 4, "frame_idx": 236, "global_frame_idx": 1259, "task_index": 0}, {"db_idx": 1260, "episode_idx": 4, "frame_idx": 237, "global_frame_idx": 1260, "task_index": 0}, {"db_idx": 1261, "episode_idx": 4, "frame_idx": 238, 
"global_frame_idx": 1261, "task_index": 0}, {"db_idx": 1262, "episode_idx": 4, "frame_idx": 239, "global_frame_idx": 1262, "task_index": 0}, {"db_idx": 1263, "episode_idx": 4, "frame_idx": 240, "global_frame_idx": 1263, "task_index": 0}, {"db_idx": 1264, "episode_idx": 4, "frame_idx": 241, "global_frame_idx": 1264, "task_index": 0}, {"db_idx": 1265, "episode_idx": 4, "frame_idx": 242, "global_frame_idx": 1265, "task_index": 0}, {"db_idx": 1266, "episode_idx": 4, "frame_idx": 243, "global_frame_idx": 1266, "task_index": 0}, {"db_idx": 1267, "episode_idx": 4, "frame_idx": 244, "global_frame_idx": 1267, "task_index": 0}, {"db_idx": 1268, "episode_idx": 4, "frame_idx": 245, "global_frame_idx": 1268, "task_index": 0}, {"db_idx": 1269, "episode_idx": 4, "frame_idx": 246, "global_frame_idx": 1269, "task_index": 0}, {"db_idx": 1270, "episode_idx": 4, "frame_idx": 247, "global_frame_idx": 1270, "task_index": 0}, {"db_idx": 1271, "episode_idx": 4, "frame_idx": 248, "global_frame_idx": 1271, "task_index": 0}, {"db_idx": 1272, "episode_idx": 4, "frame_idx": 249, "global_frame_idx": 1272, "task_index": 0}, {"db_idx": 1273, "episode_idx": 4, "frame_idx": 250, "global_frame_idx": 1273, "task_index": 0}, {"db_idx": 1274, "episode_idx": 4, "frame_idx": 251, "global_frame_idx": 1274, "task_index": 0}, {"db_idx": 1275, "episode_idx": 4, "frame_idx": 252, "global_frame_idx": 1275, "task_index": 0}, {"db_idx": 1276, "episode_idx": 4, "frame_idx": 253, "global_frame_idx": 1276, "task_index": 0}, {"db_idx": 1277, "episode_idx": 4, "frame_idx": 254, "global_frame_idx": 1277, "task_index": 0}, {"db_idx": 1278, "episode_idx": 4, "frame_idx": 255, "global_frame_idx": 1278, "task_index": 0}, {"db_idx": 1279, "episode_idx": 4, "frame_idx": 256, "global_frame_idx": 1279, "task_index": 0}, {"db_idx": 1280, "episode_idx": 4, "frame_idx": 257, "global_frame_idx": 1280, "task_index": 0}, {"db_idx": 1281, "episode_idx": 4, "frame_idx": 258, "global_frame_idx": 1281, "task_index": 0}, {"db_idx": 
1282, "episode_idx": 4, "frame_idx": 259, "global_frame_idx": 1282, "task_index": 0}, {"db_idx": 1283, "episode_idx": 4, "frame_idx": 260, "global_frame_idx": 1283, "task_index": 0}, {"db_idx": 1284, "episode_idx": 4, "frame_idx": 261, "global_frame_idx": 1284, "task_index": 0}, {"db_idx": 1285, "episode_idx": 4, "frame_idx": 262, "global_frame_idx": 1285, "task_index": 0}, {"db_idx": 1286, "episode_idx": 4, "frame_idx": 263, "global_frame_idx": 1286, "task_index": 0}, {"db_idx": 1287, "episode_idx": 4, "frame_idx": 264, "global_frame_idx": 1287, "task_index": 0}, {"db_idx": 1288, "episode_idx": 4, "frame_idx": 265, "global_frame_idx": 1288, "task_index": 0}, {"db_idx": 1289, "episode_idx": 4, "frame_idx": 266, "global_frame_idx": 1289, "task_index": 0}, {"db_idx": 1290, "episode_idx": 4, "frame_idx": 267, "global_frame_idx": 1290, "task_index": 0}, {"db_idx": 1291, "episode_idx": 4, "frame_idx": 268, "global_frame_idx": 1291, "task_index": 0}, {"db_idx": 1292, "episode_idx": 4, "frame_idx": 269, "global_frame_idx": 1292, "task_index": 0}, {"db_idx": 1293, "episode_idx": 4, "frame_idx": 270, "global_frame_idx": 1293, "task_index": 0}, {"db_idx": 1294, "episode_idx": 4, "frame_idx": 271, "global_frame_idx": 1294, "task_index": 0}, {"db_idx": 1295, "episode_idx": 4, "frame_idx": 272, "global_frame_idx": 1295, "task_index": 0}, {"db_idx": 1296, "episode_idx": 4, "frame_idx": 273, "global_frame_idx": 1296, "task_index": 0}, {"db_idx": 1297, "episode_idx": 4, "frame_idx": 274, "global_frame_idx": 1297, "task_index": 0}, {"db_idx": 1298, "episode_idx": 4, "frame_idx": 275, "global_frame_idx": 1298, "task_index": 0}, {"db_idx": 1299, "episode_idx": 4, "frame_idx": 276, "global_frame_idx": 1299, "task_index": 0}, {"db_idx": 1300, "episode_idx": 4, "frame_idx": 277, "global_frame_idx": 1300, "task_index": 0}, {"db_idx": 1301, "episode_idx": 4, "frame_idx": 278, "global_frame_idx": 1301, "task_index": 0}, {"db_idx": 1302, "episode_idx": 4, "frame_idx": 279, 
"global_frame_idx": 1302, "task_index": 0}, {"db_idx": 1303, "episode_idx": 4, "frame_idx": 280, "global_frame_idx": 1303, "task_index": 0}, {"db_idx": 1304, "episode_idx": 4, "frame_idx": 281, "global_frame_idx": 1304, "task_index": 0}, {"db_idx": 1305, "episode_idx": 4, "frame_idx": 282, "global_frame_idx": 1305, "task_index": 0}, {"db_idx": 1306, "episode_idx": 4, "frame_idx": 283, "global_frame_idx": 1306, "task_index": 0}, {"db_idx": 1307, "episode_idx": 4, "frame_idx": 284, "global_frame_idx": 1307, "task_index": 0}, {"db_idx": 1308, "episode_idx": 4, "frame_idx": 285, "global_frame_idx": 1308, "task_index": 0}, {"db_idx": 1309, "episode_idx": 4, "frame_idx": 286, "global_frame_idx": 1309, "task_index": 0}, {"db_idx": 1310, "episode_idx": 4, "frame_idx": 287, "global_frame_idx": 1310, "task_index": 0}, {"db_idx": 1311, "episode_idx": 4, "frame_idx": 288, "global_frame_idx": 1311, "task_index": 0}, {"db_idx": 1312, "episode_idx": 4, "frame_idx": 289, "global_frame_idx": 1312, "task_index": 0}, {"db_idx": 1313, "episode_idx": 4, "frame_idx": 290, "global_frame_idx": 1313, "task_index": 0}, {"db_idx": 1314, "episode_idx": 4, "frame_idx": 291, "global_frame_idx": 1314, "task_index": 0}, {"db_idx": 1315, "episode_idx": 4, "frame_idx": 292, "global_frame_idx": 1315, "task_index": 0}, {"db_idx": 1316, "episode_idx": 4, "frame_idx": 293, "global_frame_idx": 1316, "task_index": 0}, {"db_idx": 1317, "episode_idx": 4, "frame_idx": 294, "global_frame_idx": 1317, "task_index": 0}, {"db_idx": 1318, "episode_idx": 4, "frame_idx": 295, "global_frame_idx": 1318, "task_index": 0}, {"db_idx": 1319, "episode_idx": 4, "frame_idx": 296, "global_frame_idx": 1319, "task_index": 0}, {"db_idx": 1320, "episode_idx": 4, "frame_idx": 297, "global_frame_idx": 1320, "task_index": 0}, {"db_idx": 1321, "episode_idx": 4, "frame_idx": 298, "global_frame_idx": 1321, "task_index": 0}, {"db_idx": 1322, "episode_idx": 4, "frame_idx": 299, "global_frame_idx": 1322, "task_index": 0}, {"db_idx": 
1323, "episode_idx": 4, "frame_idx": 300, "global_frame_idx": 1323, "task_index": 0}, {"db_idx": 1324, "episode_idx": 4, "frame_idx": 301, "global_frame_idx": 1324, "task_index": 0}, {"db_idx": 1325, "episode_idx": 4, "frame_idx": 302, "global_frame_idx": 1325, "task_index": 0}, {"db_idx": 1326, "episode_idx": 4, "frame_idx": 303, "global_frame_idx": 1326, "task_index": 0}, {"db_idx": 1327, "episode_idx": 4, "frame_idx": 304, "global_frame_idx": 1327, "task_index": 0}, {"db_idx": 1328, "episode_idx": 4, "frame_idx": 305, "global_frame_idx": 1328, "task_index": 0}, {"db_idx": 1329, "episode_idx": 4, "frame_idx": 306, "global_frame_idx": 1329, "task_index": 0}, {"db_idx": 1330, "episode_idx": 4, "frame_idx": 307, "global_frame_idx": 1330, "task_index": 0}, {"db_idx": 1331, "episode_idx": 4, "frame_idx": 308, "global_frame_idx": 1331, "task_index": 0}, {"db_idx": 1332, "episode_idx": 4, "frame_idx": 309, "global_frame_idx": 1332, "task_index": 0}, {"db_idx": 1333, "episode_idx": 4, "frame_idx": 310, "global_frame_idx": 1333, "task_index": 0}, {"db_idx": 1334, "episode_idx": 4, "frame_idx": 311, "global_frame_idx": 1334, "task_index": 0}, {"db_idx": 1335, "episode_idx": 5, "frame_idx": 0, "global_frame_idx": 1335, "task_index": 1}, {"db_idx": 1336, "episode_idx": 5, "frame_idx": 1, "global_frame_idx": 1336, "task_index": 1}, {"db_idx": 1337, "episode_idx": 5, "frame_idx": 2, "global_frame_idx": 1337, "task_index": 1}, {"db_idx": 1338, "episode_idx": 5, "frame_idx": 3, "global_frame_idx": 1338, "task_index": 1}, {"db_idx": 1339, "episode_idx": 5, "frame_idx": 4, "global_frame_idx": 1339, "task_index": 1}, {"db_idx": 1340, "episode_idx": 5, "frame_idx": 5, "global_frame_idx": 1340, "task_index": 1}, {"db_idx": 1341, "episode_idx": 5, "frame_idx": 6, "global_frame_idx": 1341, "task_index": 1}, {"db_idx": 1342, "episode_idx": 5, "frame_idx": 7, "global_frame_idx": 1342, "task_index": 1}, {"db_idx": 1343, "episode_idx": 5, "frame_idx": 8, "global_frame_idx": 1343, 
"task_index": 1}, {"db_idx": 1344, "episode_idx": 5, "frame_idx": 9, "global_frame_idx": 1344, "task_index": 1}, {"db_idx": 1345, "episode_idx": 5, "frame_idx": 10, "global_frame_idx": 1345, "task_index": 1}, {"db_idx": 1346, "episode_idx": 5, "frame_idx": 11, "global_frame_idx": 1346, "task_index": 1}, {"db_idx": 1347, "episode_idx": 5, "frame_idx": 12, "global_frame_idx": 1347, "task_index": 1}, {"db_idx": 1348, "episode_idx": 5, "frame_idx": 13, "global_frame_idx": 1348, "task_index": 1}, {"db_idx": 1349, "episode_idx": 5, "frame_idx": 14, "global_frame_idx": 1349, "task_index": 1}, {"db_idx": 1350, "episode_idx": 5, "frame_idx": 15, "global_frame_idx": 1350, "task_index": 1}, {"db_idx": 1351, "episode_idx": 5, "frame_idx": 16, "global_frame_idx": 1351, "task_index": 1}, {"db_idx": 1352, "episode_idx": 5, "frame_idx": 17, "global_frame_idx": 1352, "task_index": 1}, {"db_idx": 1353, "episode_idx": 5, "frame_idx": 18, "global_frame_idx": 1353, "task_index": 1}, {"db_idx": 1354, "episode_idx": 5, "frame_idx": 19, "global_frame_idx": 1354, "task_index": 1}, {"db_idx": 1355, "episode_idx": 5, "frame_idx": 20, "global_frame_idx": 1355, "task_index": 1}, {"db_idx": 1356, "episode_idx": 5, "frame_idx": 21, "global_frame_idx": 1356, "task_index": 1}, {"db_idx": 1357, "episode_idx": 5, "frame_idx": 22, "global_frame_idx": 1357, "task_index": 1}, {"db_idx": 1358, "episode_idx": 5, "frame_idx": 23, "global_frame_idx": 1358, "task_index": 1}, {"db_idx": 1359, "episode_idx": 5, "frame_idx": 24, "global_frame_idx": 1359, "task_index": 1}, {"db_idx": 1360, "episode_idx": 5, "frame_idx": 25, "global_frame_idx": 1360, "task_index": 1}, {"db_idx": 1361, "episode_idx": 5, "frame_idx": 26, "global_frame_idx": 1361, "task_index": 1}, {"db_idx": 1362, "episode_idx": 5, "frame_idx": 27, "global_frame_idx": 1362, "task_index": 1}, {"db_idx": 1363, "episode_idx": 5, "frame_idx": 28, "global_frame_idx": 1363, "task_index": 1}, {"db_idx": 1364, "episode_idx": 5, "frame_idx": 29, 
"global_frame_idx": 1364, "task_index": 1}, {"db_idx": 1365, "episode_idx": 5, "frame_idx": 30, "global_frame_idx": 1365, "task_index": 1}, {"db_idx": 1366, "episode_idx": 5, "frame_idx": 31, "global_frame_idx": 1366, "task_index": 1}, {"db_idx": 1367, "episode_idx": 5, "frame_idx": 32, "global_frame_idx": 1367, "task_index": 1}, {"db_idx": 1368, "episode_idx": 5, "frame_idx": 33, "global_frame_idx": 1368, "task_index": 1}, {"db_idx": 1369, "episode_idx": 5, "frame_idx": 34, "global_frame_idx": 1369, "task_index": 1}, {"db_idx": 1370, "episode_idx": 5, "frame_idx": 35, "global_frame_idx": 1370, "task_index": 1}, {"db_idx": 1371, "episode_idx": 5, "frame_idx": 36, "global_frame_idx": 1371, "task_index": 1}, {"db_idx": 1372, "episode_idx": 5, "frame_idx": 37, "global_frame_idx": 1372, "task_index": 1}, {"db_idx": 1373, "episode_idx": 5, "frame_idx": 38, "global_frame_idx": 1373, "task_index": 1}, {"db_idx": 1374, "episode_idx": 5, "frame_idx": 39, "global_frame_idx": 1374, "task_index": 1}, {"db_idx": 1375, "episode_idx": 5, "frame_idx": 40, "global_frame_idx": 1375, "task_index": 1}, {"db_idx": 1376, "episode_idx": 5, "frame_idx": 41, "global_frame_idx": 1376, "task_index": 1}, {"db_idx": 1377, "episode_idx": 5, "frame_idx": 42, "global_frame_idx": 1377, "task_index": 1}, {"db_idx": 1378, "episode_idx": 5, "frame_idx": 43, "global_frame_idx": 1378, "task_index": 1}, {"db_idx": 1379, "episode_idx": 5, "frame_idx": 44, "global_frame_idx": 1379, "task_index": 1}, {"db_idx": 1380, "episode_idx": 5, "frame_idx": 45, "global_frame_idx": 1380, "task_index": 1}, {"db_idx": 1381, "episode_idx": 5, "frame_idx": 46, "global_frame_idx": 1381, "task_index": 1}, {"db_idx": 1382, "episode_idx": 5, "frame_idx": 47, "global_frame_idx": 1382, "task_index": 1}, {"db_idx": 1383, "episode_idx": 5, "frame_idx": 48, "global_frame_idx": 1383, "task_index": 1}, {"db_idx": 1384, "episode_idx": 5, "frame_idx": 49, "global_frame_idx": 1384, "task_index": 1}, {"db_idx": 1385, "episode_idx": 5, 
"frame_idx": 50, "global_frame_idx": 1385, "task_index": 1}, {"db_idx": 1386, "episode_idx": 5, "frame_idx": 51, "global_frame_idx": 1386, "task_index": 1}, {"db_idx": 1387, "episode_idx": 5, "frame_idx": 52, "global_frame_idx": 1387, "task_index": 1}, {"db_idx": 1388, "episode_idx": 5, "frame_idx": 53, "global_frame_idx": 1388, "task_index": 1}, {"db_idx": 1389, "episode_idx": 5, "frame_idx": 54, "global_frame_idx": 1389, "task_index": 1}, {"db_idx": 1390, "episode_idx": 5, "frame_idx": 55, "global_frame_idx": 1390, "task_index": 1}, {"db_idx": 1391, "episode_idx": 5, "frame_idx": 56, "global_frame_idx": 1391, "task_index": 1}, {"db_idx": 1392, "episode_idx": 5, "frame_idx": 57, "global_frame_idx": 1392, "task_index": 1}, {"db_idx": 1393, "episode_idx": 5, "frame_idx": 58, "global_frame_idx": 1393, "task_index": 1}, {"db_idx": 1394, "episode_idx": 5, "frame_idx": 59, "global_frame_idx": 1394, "task_index": 1}, {"db_idx": 1395, "episode_idx": 5, "frame_idx": 60, "global_frame_idx": 1395, "task_index": 1}, {"db_idx": 1396, "episode_idx": 5, "frame_idx": 61, "global_frame_idx": 1396, "task_index": 1}, {"db_idx": 1397, "episode_idx": 5, "frame_idx": 62, "global_frame_idx": 1397, "task_index": 1}, {"db_idx": 1398, "episode_idx": 5, "frame_idx": 63, "global_frame_idx": 1398, "task_index": 1}, {"db_idx": 1399, "episode_idx": 5, "frame_idx": 64, "global_frame_idx": 1399, "task_index": 1}, {"db_idx": 1400, "episode_idx": 5, "frame_idx": 65, "global_frame_idx": 1400, "task_index": 1}, {"db_idx": 1401, "episode_idx": 5, "frame_idx": 66, "global_frame_idx": 1401, "task_index": 1}, {"db_idx": 1402, "episode_idx": 5, "frame_idx": 67, "global_frame_idx": 1402, "task_index": 1}, {"db_idx": 1403, "episode_idx": 5, "frame_idx": 68, "global_frame_idx": 1403, "task_index": 1}, {"db_idx": 1404, "episode_idx": 5, "frame_idx": 69, "global_frame_idx": 1404, "task_index": 1}, {"db_idx": 1405, "episode_idx": 5, "frame_idx": 70, "global_frame_idx": 1405, "task_index": 1}, {"db_idx": 1406, 
"episode_idx": 5, "frame_idx": 71, "global_frame_idx": 1406, "task_index": 1}, {"db_idx": 1407, "episode_idx": 5, "frame_idx": 72, "global_frame_idx": 1407, "task_index": 1}, {"db_idx": 1408, "episode_idx": 5, "frame_idx": 73, "global_frame_idx": 1408, "task_index": 1}, {"db_idx": 1409, "episode_idx": 5, "frame_idx": 74, "global_frame_idx": 1409, "task_index": 1}, {"db_idx": 1410, "episode_idx": 5, "frame_idx": 75, "global_frame_idx": 1410, "task_index": 1}, {"db_idx": 1411, "episode_idx": 5, "frame_idx": 76, "global_frame_idx": 1411, "task_index": 1}, {"db_idx": 1412, "episode_idx": 5, "frame_idx": 77, "global_frame_idx": 1412, "task_index": 1}, {"db_idx": 1413, "episode_idx": 5, "frame_idx": 78, "global_frame_idx": 1413, "task_index": 1}, {"db_idx": 1414, "episode_idx": 5, "frame_idx": 79, "global_frame_idx": 1414, "task_index": 1}, {"db_idx": 1415, "episode_idx": 5, "frame_idx": 80, "global_frame_idx": 1415, "task_index": 1}, {"db_idx": 1416, "episode_idx": 5, "frame_idx": 81, "global_frame_idx": 1416, "task_index": 1}, {"db_idx": 1417, "episode_idx": 5, "frame_idx": 82, "global_frame_idx": 1417, "task_index": 1}, {"db_idx": 1418, "episode_idx": 5, "frame_idx": 83, "global_frame_idx": 1418, "task_index": 1}, {"db_idx": 1419, "episode_idx": 5, "frame_idx": 84, "global_frame_idx": 1419, "task_index": 1}, {"db_idx": 1420, "episode_idx": 5, "frame_idx": 85, "global_frame_idx": 1420, "task_index": 1}, {"db_idx": 1421, "episode_idx": 5, "frame_idx": 86, "global_frame_idx": 1421, "task_index": 1}, {"db_idx": 1422, "episode_idx": 5, "frame_idx": 87, "global_frame_idx": 1422, "task_index": 1}, {"db_idx": 1423, "episode_idx": 5, "frame_idx": 88, "global_frame_idx": 1423, "task_index": 1}, {"db_idx": 1424, "episode_idx": 5, "frame_idx": 89, "global_frame_idx": 1424, "task_index": 1}, {"db_idx": 1425, "episode_idx": 5, "frame_idx": 90, "global_frame_idx": 1425, "task_index": 1}, {"db_idx": 1426, "episode_idx": 5, "frame_idx": 91, "global_frame_idx": 1426, "task_index": 1}, 
{"db_idx": 1427, "episode_idx": 5, "frame_idx": 92, "global_frame_idx": 1427, "task_index": 1}, {"db_idx": 1428, "episode_idx": 5, "frame_idx": 93, "global_frame_idx": 1428, "task_index": 1}, {"db_idx": 1429, "episode_idx": 5, "frame_idx": 94, "global_frame_idx": 1429, "task_index": 1}, {"db_idx": 1430, "episode_idx": 5, "frame_idx": 95, "global_frame_idx": 1430, "task_index": 1}, {"db_idx": 1431, "episode_idx": 5, "frame_idx": 96, "global_frame_idx": 1431, "task_index": 1}, {"db_idx": 1432, "episode_idx": 5, "frame_idx": 97, "global_frame_idx": 1432, "task_index": 1}, {"db_idx": 1433, "episode_idx": 5, "frame_idx": 98, "global_frame_idx": 1433, "task_index": 1}, {"db_idx": 1434, "episode_idx": 5, "frame_idx": 99, "global_frame_idx": 1434, "task_index": 1}, {"db_idx": 1435, "episode_idx": 5, "frame_idx": 100, "global_frame_idx": 1435, "task_index": 1}, {"db_idx": 1436, "episode_idx": 5, "frame_idx": 101, "global_frame_idx": 1436, "task_index": 1}, {"db_idx": 1437, "episode_idx": 5, "frame_idx": 102, "global_frame_idx": 1437, "task_index": 1}, {"db_idx": 1438, "episode_idx": 5, "frame_idx": 103, "global_frame_idx": 1438, "task_index": 1}, {"db_idx": 1439, "episode_idx": 5, "frame_idx": 104, "global_frame_idx": 1439, "task_index": 1}, {"db_idx": 1440, "episode_idx": 5, "frame_idx": 105, "global_frame_idx": 1440, "task_index": 1}, {"db_idx": 1441, "episode_idx": 5, "frame_idx": 106, "global_frame_idx": 1441, "task_index": 1}, {"db_idx": 1442, "episode_idx": 5, "frame_idx": 107, "global_frame_idx": 1442, "task_index": 1}, {"db_idx": 1443, "episode_idx": 5, "frame_idx": 108, "global_frame_idx": 1443, "task_index": 1}, {"db_idx": 1444, "episode_idx": 5, "frame_idx": 109, "global_frame_idx": 1444, "task_index": 1}, {"db_idx": 1445, "episode_idx": 5, "frame_idx": 110, "global_frame_idx": 1445, "task_index": 1}, {"db_idx": 1446, "episode_idx": 5, "frame_idx": 111, "global_frame_idx": 1446, "task_index": 1}, {"db_idx": 1447, "episode_idx": 5, "frame_idx": 112, 
"global_frame_idx": 1447, "task_index": 1}, {"db_idx": 1448, "episode_idx": 5, "frame_idx": 113, "global_frame_idx": 1448, "task_index": 1}, {"db_idx": 1449, "episode_idx": 5, "frame_idx": 114, "global_frame_idx": 1449, "task_index": 1}, {"db_idx": 1450, "episode_idx": 5, "frame_idx": 115, "global_frame_idx": 1450, "task_index": 1}, {"db_idx": 1451, "episode_idx": 5, "frame_idx": 116, "global_frame_idx": 1451, "task_index": 1}, {"db_idx": 1452, "episode_idx": 5, "frame_idx": 117, "global_frame_idx": 1452, "task_index": 1}, {"db_idx": 1453, "episode_idx": 5, "frame_idx": 118, "global_frame_idx": 1453, "task_index": 1}, {"db_idx": 1454, "episode_idx": 5, "frame_idx": 119, "global_frame_idx": 1454, "task_index": 1}, {"db_idx": 1455, "episode_idx": 5, "frame_idx": 120, "global_frame_idx": 1455, "task_index": 1}, {"db_idx": 1456, "episode_idx": 5, "frame_idx": 121, "global_frame_idx": 1456, "task_index": 1}, {"db_idx": 1457, "episode_idx": 5, "frame_idx": 122, "global_frame_idx": 1457, "task_index": 1}, {"db_idx": 1458, "episode_idx": 5, "frame_idx": 123, "global_frame_idx": 1458, "task_index": 1}, {"db_idx": 1459, "episode_idx": 5, "frame_idx": 124, "global_frame_idx": 1459, "task_index": 1}, {"db_idx": 1460, "episode_idx": 5, "frame_idx": 125, "global_frame_idx": 1460, "task_index": 1}, {"db_idx": 1461, "episode_idx": 5, "frame_idx": 126, "global_frame_idx": 1461, "task_index": 1}, {"db_idx": 1462, "episode_idx": 5, "frame_idx": 127, "global_frame_idx": 1462, "task_index": 1}, {"db_idx": 1463, "episode_idx": 5, "frame_idx": 128, "global_frame_idx": 1463, "task_index": 1}, {"db_idx": 1464, "episode_idx": 5, "frame_idx": 129, "global_frame_idx": 1464, "task_index": 1}, {"db_idx": 1465, "episode_idx": 5, "frame_idx": 130, "global_frame_idx": 1465, "task_index": 1}, {"db_idx": 1466, "episode_idx": 5, "frame_idx": 131, "global_frame_idx": 1466, "task_index": 1}, {"db_idx": 1467, "episode_idx": 5, "frame_idx": 132, "global_frame_idx": 1467, "task_index": 1}, {"db_idx": 
1468, "episode_idx": 5, "frame_idx": 133, "global_frame_idx": 1468, "task_index": 1}, {"db_idx": 1469, "episode_idx": 5, "frame_idx": 134, "global_frame_idx": 1469, "task_index": 1}, {"db_idx": 1470, "episode_idx": 5, "frame_idx": 135, "global_frame_idx": 1470, "task_index": 1}, {"db_idx": 1471, "episode_idx": 5, "frame_idx": 136, "global_frame_idx": 1471, "task_index": 1}, {"db_idx": 1472, "episode_idx": 5, "frame_idx": 137, "global_frame_idx": 1472, "task_index": 1}, {"db_idx": 1473, "episode_idx": 5, "frame_idx": 138, "global_frame_idx": 1473, "task_index": 1}, {"db_idx": 1474, "episode_idx": 5, "frame_idx": 139, "global_frame_idx": 1474, "task_index": 1}, {"db_idx": 1475, "episode_idx": 5, "frame_idx": 140, "global_frame_idx": 1475, "task_index": 1}, {"db_idx": 1476, "episode_idx": 5, "frame_idx": 141, "global_frame_idx": 1476, "task_index": 1}, {"db_idx": 1477, "episode_idx": 5, "frame_idx": 142, "global_frame_idx": 1477, "task_index": 1}, {"db_idx": 1478, "episode_idx": 5, "frame_idx": 143, "global_frame_idx": 1478, "task_index": 1}, {"db_idx": 1479, "episode_idx": 5, "frame_idx": 144, "global_frame_idx": 1479, "task_index": 1}, {"db_idx": 1480, "episode_idx": 5, "frame_idx": 145, "global_frame_idx": 1480, "task_index": 1}, {"db_idx": 1481, "episode_idx": 5, "frame_idx": 146, "global_frame_idx": 1481, "task_index": 1}, {"db_idx": 1482, "episode_idx": 5, "frame_idx": 147, "global_frame_idx": 1482, "task_index": 1}, {"db_idx": 1483, "episode_idx": 5, "frame_idx": 148, "global_frame_idx": 1483, "task_index": 1}, {"db_idx": 1484, "episode_idx": 5, "frame_idx": 149, "global_frame_idx": 1484, "task_index": 1}, {"db_idx": 1485, "episode_idx": 5, "frame_idx": 150, "global_frame_idx": 1485, "task_index": 1}, {"db_idx": 1486, "episode_idx": 5, "frame_idx": 151, "global_frame_idx": 1486, "task_index": 1}, {"db_idx": 1487, "episode_idx": 5, "frame_idx": 152, "global_frame_idx": 1487, "task_index": 1}, {"db_idx": 1488, "episode_idx": 5, "frame_idx": 153, 
"global_frame_idx": 1488, "task_index": 1}, {"db_idx": 1489, "episode_idx": 5, "frame_idx": 154, "global_frame_idx": 1489, "task_index": 1}, {"db_idx": 1490, "episode_idx": 5, "frame_idx": 155, "global_frame_idx": 1490, "task_index": 1}, {"db_idx": 1491, "episode_idx": 5, "frame_idx": 156, "global_frame_idx": 1491, "task_index": 1}, {"db_idx": 1492, "episode_idx": 5, "frame_idx": 157, "global_frame_idx": 1492, "task_index": 1}, {"db_idx": 1493, "episode_idx": 5, "frame_idx": 158, "global_frame_idx": 1493, "task_index": 1}, {"db_idx": 1494, "episode_idx": 5, "frame_idx": 159, "global_frame_idx": 1494, "task_index": 1}, {"db_idx": 1495, "episode_idx": 5, "frame_idx": 160, "global_frame_idx": 1495, "task_index": 1}, {"db_idx": 1496, "episode_idx": 5, "frame_idx": 161, "global_frame_idx": 1496, "task_index": 1}, {"db_idx": 1497, "episode_idx": 5, "frame_idx": 162, "global_frame_idx": 1497, "task_index": 1}, {"db_idx": 1498, "episode_idx": 5, "frame_idx": 163, "global_frame_idx": 1498, "task_index": 1}, {"db_idx": 1499, "episode_idx": 5, "frame_idx": 164, "global_frame_idx": 1499, "task_index": 1}, {"db_idx": 1500, "episode_idx": 5, "frame_idx": 165, "global_frame_idx": 1500, "task_index": 1}, {"db_idx": 1501, "episode_idx": 5, "frame_idx": 166, "global_frame_idx": 1501, "task_index": 1}, {"db_idx": 1502, "episode_idx": 5, "frame_idx": 167, "global_frame_idx": 1502, "task_index": 1}, {"db_idx": 1503, "episode_idx": 5, "frame_idx": 168, "global_frame_idx": 1503, "task_index": 1}, {"db_idx": 1504, "episode_idx": 5, "frame_idx": 169, "global_frame_idx": 1504, "task_index": 1}, {"db_idx": 1505, "episode_idx": 5, "frame_idx": 170, "global_frame_idx": 1505, "task_index": 1}, {"db_idx": 1506, "episode_idx": 5, "frame_idx": 171, "global_frame_idx": 1506, "task_index": 1}, {"db_idx": 1507, "episode_idx": 5, "frame_idx": 172, "global_frame_idx": 1507, "task_index": 1}, {"db_idx": 1508, "episode_idx": 5, "frame_idx": 173, "global_frame_idx": 1508, "task_index": 1}, {"db_idx": 
1509, "episode_idx": 5, "frame_idx": 174, "global_frame_idx": 1509, "task_index": 1}, {"db_idx": 1510, "episode_idx": 5, "frame_idx": 175, "global_frame_idx": 1510, "task_index": 1}, {"db_idx": 1511, "episode_idx": 5, "frame_idx": 176, "global_frame_idx": 1511, "task_index": 1}, {"db_idx": 1512, "episode_idx": 5, "frame_idx": 177, "global_frame_idx": 1512, "task_index": 1}, {"db_idx": 1513, "episode_idx": 5, "frame_idx": 178, "global_frame_idx": 1513, "task_index": 1}, {"db_idx": 1514, "episode_idx": 5, "frame_idx": 179, "global_frame_idx": 1514, "task_index": 1}, {"db_idx": 1515, "episode_idx": 5, "frame_idx": 180, "global_frame_idx": 1515, "task_index": 1}, {"db_idx": 1516, "episode_idx": 5, "frame_idx": 181, "global_frame_idx": 1516, "task_index": 1}, {"db_idx": 1517, "episode_idx": 5, "frame_idx": 182, "global_frame_idx": 1517, "task_index": 1}, {"db_idx": 1518, "episode_idx": 5, "frame_idx": 183, "global_frame_idx": 1518, "task_index": 1}, {"db_idx": 1519, "episode_idx": 5, "frame_idx": 184, "global_frame_idx": 1519, "task_index": 1}, {"db_idx": 1520, "episode_idx": 5, "frame_idx": 185, "global_frame_idx": 1520, "task_index": 1}, {"db_idx": 1521, "episode_idx": 5, "frame_idx": 186, "global_frame_idx": 1521, "task_index": 1}, {"db_idx": 1522, "episode_idx": 5, "frame_idx": 187, "global_frame_idx": 1522, "task_index": 1}, {"db_idx": 1523, "episode_idx": 5, "frame_idx": 188, "global_frame_idx": 1523, "task_index": 1}, {"db_idx": 1524, "episode_idx": 5, "frame_idx": 189, "global_frame_idx": 1524, "task_index": 1}, {"db_idx": 1525, "episode_idx": 5, "frame_idx": 190, "global_frame_idx": 1525, "task_index": 1}, {"db_idx": 1526, "episode_idx": 5, "frame_idx": 191, "global_frame_idx": 1526, "task_index": 1}, {"db_idx": 1527, "episode_idx": 5, "frame_idx": 192, "global_frame_idx": 1527, "task_index": 1}, {"db_idx": 1528, "episode_idx": 5, "frame_idx": 193, "global_frame_idx": 1528, "task_index": 1}, {"db_idx": 1529, "episode_idx": 5, "frame_idx": 194, 
"global_frame_idx": 1529, "task_index": 1}, {"db_idx": 1530, "episode_idx": 5, "frame_idx": 195, "global_frame_idx": 1530, "task_index": 1}, {"db_idx": 1531, "episode_idx": 5, "frame_idx": 196, "global_frame_idx": 1531, "task_index": 1}, {"db_idx": 1532, "episode_idx": 5, "frame_idx": 197, "global_frame_idx": 1532, "task_index": 1}, {"db_idx": 1533, "episode_idx": 5, "frame_idx": 198, "global_frame_idx": 1533, "task_index": 1}, {"db_idx": 1534, "episode_idx": 5, "frame_idx": 199, "global_frame_idx": 1534, "task_index": 1}, {"db_idx": 1535, "episode_idx": 5, "frame_idx": 200, "global_frame_idx": 1535, "task_index": 1}, {"db_idx": 1536, "episode_idx": 5, "frame_idx": 201, "global_frame_idx": 1536, "task_index": 1}, {"db_idx": 1537, "episode_idx": 5, "frame_idx": 202, "global_frame_idx": 1537, "task_index": 1}, {"db_idx": 1538, "episode_idx": 5, "frame_idx": 203, "global_frame_idx": 1538, "task_index": 1}, {"db_idx": 1539, "episode_idx": 5, "frame_idx": 204, "global_frame_idx": 1539, "task_index": 1}, {"db_idx": 1540, "episode_idx": 5, "frame_idx": 205, "global_frame_idx": 1540, "task_index": 1}, {"db_idx": 1541, "episode_idx": 5, "frame_idx": 206, "global_frame_idx": 1541, "task_index": 1}, {"db_idx": 1542, "episode_idx": 5, "frame_idx": 207, "global_frame_idx": 1542, "task_index": 1}, {"db_idx": 1543, "episode_idx": 5, "frame_idx": 208, "global_frame_idx": 1543, "task_index": 1}, {"db_idx": 1544, "episode_idx": 5, "frame_idx": 209, "global_frame_idx": 1544, "task_index": 1}, {"db_idx": 1545, "episode_idx": 5, "frame_idx": 210, "global_frame_idx": 1545, "task_index": 1}, {"db_idx": 1546, "episode_idx": 5, "frame_idx": 211, "global_frame_idx": 1546, "task_index": 1}, {"db_idx": 1547, "episode_idx": 5, "frame_idx": 212, "global_frame_idx": 1547, "task_index": 1}, {"db_idx": 1548, "episode_idx": 5, "frame_idx": 213, "global_frame_idx": 1548, "task_index": 1}, {"db_idx": 1549, "episode_idx": 5, "frame_idx": 214, "global_frame_idx": 1549, "task_index": 1}, {"db_idx": 
1550, "episode_idx": 5, "frame_idx": 215, "global_frame_idx": 1550, "task_index": 1}, {"db_idx": 1551, "episode_idx": 5, "frame_idx": 216, "global_frame_idx": 1551, "task_index": 1}, {"db_idx": 1552, "episode_idx": 5, "frame_idx": 217, "global_frame_idx": 1552, "task_index": 1}, {"db_idx": 1553, "episode_idx": 5, "frame_idx": 218, "global_frame_idx": 1553, "task_index": 1}, {"db_idx": 1554, "episode_idx": 5, "frame_idx": 219, "global_frame_idx": 1554, "task_index": 1}, {"db_idx": 1555, "episode_idx": 5, "frame_idx": 220, "global_frame_idx": 1555, "task_index": 1}, {"db_idx": 1556, "episode_idx": 5, "frame_idx": 221, "global_frame_idx": 1556, "task_index": 1}, {"db_idx": 1557, "episode_idx": 5, "frame_idx": 222, "global_frame_idx": 1557, "task_index": 1}, {"db_idx": 1558, "episode_idx": 5, "frame_idx": 223, "global_frame_idx": 1558, "task_index": 1}, {"db_idx": 1559, "episode_idx": 5, "frame_idx": 224, "global_frame_idx": 1559, "task_index": 1}, {"db_idx": 1560, "episode_idx": 5, "frame_idx": 225, "global_frame_idx": 1560, "task_index": 1}, {"db_idx": 1561, "episode_idx": 5, "frame_idx": 226, "global_frame_idx": 1561, "task_index": 1}, {"db_idx": 1562, "episode_idx": 5, "frame_idx": 227, "global_frame_idx": 1562, "task_index": 1}, {"db_idx": 1563, "episode_idx": 5, "frame_idx": 228, "global_frame_idx": 1563, "task_index": 1}, {"db_idx": 1564, "episode_idx": 5, "frame_idx": 229, "global_frame_idx": 1564, "task_index": 1}, {"db_idx": 1565, "episode_idx": 5, "frame_idx": 230, "global_frame_idx": 1565, "task_index": 1}, {"db_idx": 1566, "episode_idx": 5, "frame_idx": 231, "global_frame_idx": 1566, "task_index": 1}, {"db_idx": 1567, "episode_idx": 5, "frame_idx": 232, "global_frame_idx": 1567, "task_index": 1}, {"db_idx": 1568, "episode_idx": 5, "frame_idx": 233, "global_frame_idx": 1568, "task_index": 1}, {"db_idx": 1569, "episode_idx": 5, "frame_idx": 234, "global_frame_idx": 1569, "task_index": 1}, {"db_idx": 1570, "episode_idx": 5, "frame_idx": 235, 
"global_frame_idx": 1570, "task_index": 1}, {"db_idx": 1571, "episode_idx": 5, "frame_idx": 236, "global_frame_idx": 1571, "task_index": 1}, {"db_idx": 1572, "episode_idx": 5, "frame_idx": 237, "global_frame_idx": 1572, "task_index": 1}, {"db_idx": 1573, "episode_idx": 5, "frame_idx": 238, "global_frame_idx": 1573, "task_index": 1}, {"db_idx": 1574, "episode_idx": 5, "frame_idx": 239, "global_frame_idx": 1574, "task_index": 1}, {"db_idx": 1575, "episode_idx": 5, "frame_idx": 240, "global_frame_idx": 1575, "task_index": 1}, {"db_idx": 1576, "episode_idx": 5, "frame_idx": 241, "global_frame_idx": 1576, "task_index": 1}, {"db_idx": 1577, "episode_idx": 5, "frame_idx": 242, "global_frame_idx": 1577, "task_index": 1}, {"db_idx": 1578, "episode_idx": 5, "frame_idx": 243, "global_frame_idx": 1578, "task_index": 1}, {"db_idx": 1579, "episode_idx": 5, "frame_idx": 244, "global_frame_idx": 1579, "task_index": 1}, {"db_idx": 1580, "episode_idx": 5, "frame_idx": 245, "global_frame_idx": 1580, "task_index": 1}, {"db_idx": 1581, "episode_idx": 5, "frame_idx": 246, "global_frame_idx": 1581, "task_index": 1}, {"db_idx": 1582, "episode_idx": 5, "frame_idx": 247, "global_frame_idx": 1582, "task_index": 1}, {"db_idx": 1583, "episode_idx": 5, "frame_idx": 248, "global_frame_idx": 1583, "task_index": 1}, {"db_idx": 1584, "episode_idx": 5, "frame_idx": 249, "global_frame_idx": 1584, "task_index": 1}, {"db_idx": 1585, "episode_idx": 5, "frame_idx": 250, "global_frame_idx": 1585, "task_index": 1}, {"db_idx": 1586, "episode_idx": 5, "frame_idx": 251, "global_frame_idx": 1586, "task_index": 1}, {"db_idx": 1587, "episode_idx": 5, "frame_idx": 252, "global_frame_idx": 1587, "task_index": 1}, {"db_idx": 1588, "episode_idx": 5, "frame_idx": 253, "global_frame_idx": 1588, "task_index": 1}, {"db_idx": 1589, "episode_idx": 5, "frame_idx": 254, "global_frame_idx": 1589, "task_index": 1}, {"db_idx": 1590, "episode_idx": 5, "frame_idx": 255, "global_frame_idx": 1590, "task_index": 1}, {"db_idx": 
1591, "episode_idx": 5, "frame_idx": 256, "global_frame_idx": 1591, "task_index": 1}, {"db_idx": 1592, "episode_idx": 5, "frame_idx": 257, "global_frame_idx": 1592, "task_index": 1}, {"db_idx": 1593, "episode_idx": 5, "frame_idx": 258, "global_frame_idx": 1593, "task_index": 1}, {"db_idx": 1594, "episode_idx": 5, "frame_idx": 259, "global_frame_idx": 1594, "task_index": 1}, {"db_idx": 1595, "episode_idx": 5, "frame_idx": 260, "global_frame_idx": 1595, "task_index": 1}, {"db_idx": 1596, "episode_idx": 5, "frame_idx": 261, "global_frame_idx": 1596, "task_index": 1}, {"db_idx": 1597, "episode_idx": 5, "frame_idx": 262, "global_frame_idx": 1597, "task_index": 1}, {"db_idx": 1598, "episode_idx": 5, "frame_idx": 263, "global_frame_idx": 1598, "task_index": 1}, {"db_idx": 1599, "episode_idx": 5, "frame_idx": 264, "global_frame_idx": 1599, "task_index": 1}, {"db_idx": 1600, "episode_idx": 5, "frame_idx": 265, "global_frame_idx": 1600, "task_index": 1}, {"db_idx": 1601, "episode_idx": 5, "frame_idx": 266, "global_frame_idx": 1601, "task_index": 1}, {"db_idx": 1602, "episode_idx": 5, "frame_idx": 267, "global_frame_idx": 1602, "task_index": 1}, {"db_idx": 1603, "episode_idx": 5, "frame_idx": 268, "global_frame_idx": 1603, "task_index": 1}, {"db_idx": 1604, "episode_idx": 5, "frame_idx": 269, "global_frame_idx": 1604, "task_index": 1}, {"db_idx": 1605, "episode_idx": 5, "frame_idx": 270, "global_frame_idx": 1605, "task_index": 1}, {"db_idx": 1606, "episode_idx": 5, "frame_idx": 271, "global_frame_idx": 1606, "task_index": 1}, {"db_idx": 1607, "episode_idx": 5, "frame_idx": 272, "global_frame_idx": 1607, "task_index": 1}, {"db_idx": 1608, "episode_idx": 5, "frame_idx": 273, "global_frame_idx": 1608, "task_index": 1}, {"db_idx": 1609, "episode_idx": 5, "frame_idx": 274, "global_frame_idx": 1609, "task_index": 1}, {"db_idx": 1610, "episode_idx": 5, "frame_idx": 275, "global_frame_idx": 1610, "task_index": 1}, {"db_idx": 1611, "episode_idx": 5, "frame_idx": 276, 
"global_frame_idx": 1611, "task_index": 1}, {"db_idx": 1612, "episode_idx": 5, "frame_idx": 277, "global_frame_idx": 1612, "task_index": 1}, {"db_idx": 1613, "episode_idx": 5, "frame_idx": 278, "global_frame_idx": 1613, "task_index": 1}, {"db_idx": 1614, "episode_idx": 5, "frame_idx": 279, "global_frame_idx": 1614, "task_index": 1}, {"db_idx": 1615, "episode_idx": 5, "frame_idx": 280, "global_frame_idx": 1615, "task_index": 1}, {"db_idx": 1616, "episode_idx": 5, "frame_idx": 281, "global_frame_idx": 1616, "task_index": 1}, {"db_idx": 1617, "episode_idx": 5, "frame_idx": 282, "global_frame_idx": 1617, "task_index": 1}, {"db_idx": 1618, "episode_idx": 5, "frame_idx": 283, "global_frame_idx": 1618, "task_index": 1}, {"db_idx": 1619, "episode_idx": 5, "frame_idx": 284, "global_frame_idx": 1619, "task_index": 1}, {"db_idx": 1620, "episode_idx": 5, "frame_idx": 285, "global_frame_idx": 1620, "task_index": 1}, {"db_idx": 1621, "episode_idx": 5, "frame_idx": 286, "global_frame_idx": 1621, "task_index": 1}, {"db_idx": 1622, "episode_idx": 5, "frame_idx": 287, "global_frame_idx": 1622, "task_index": 1}, {"db_idx": 1623, "episode_idx": 5, "frame_idx": 288, "global_frame_idx": 1623, "task_index": 1}, {"db_idx": 1624, "episode_idx": 5, "frame_idx": 289, "global_frame_idx": 1624, "task_index": 1}, {"db_idx": 1625, "episode_idx": 5, "frame_idx": 290, "global_frame_idx": 1625, "task_index": 1}, {"db_idx": 1626, "episode_idx": 5, "frame_idx": 291, "global_frame_idx": 1626, "task_index": 1}, {"db_idx": 1627, "episode_idx": 5, "frame_idx": 292, "global_frame_idx": 1627, "task_index": 1}, {"db_idx": 1628, "episode_idx": 5, "frame_idx": 293, "global_frame_idx": 1628, "task_index": 1}, {"db_idx": 1629, "episode_idx": 5, "frame_idx": 294, "global_frame_idx": 1629, "task_index": 1}, {"db_idx": 1630, "episode_idx": 5, "frame_idx": 295, "global_frame_idx": 1630, "task_index": 1}, {"db_idx": 1631, "episode_idx": 5, "frame_idx": 296, "global_frame_idx": 1631, "task_index": 1}, {"db_idx": 
1632, "episode_idx": 5, "frame_idx": 297, "global_frame_idx": 1632, "task_index": 1}, {"db_idx": 1633, "episode_idx": 5, "frame_idx": 298, "global_frame_idx": 1633, "task_index": 1}, {"db_idx": 1634, "episode_idx": 5, "frame_idx": 299, "global_frame_idx": 1634, "task_index": 1}, {"db_idx": 1635, "episode_idx": 5, "frame_idx": 300, "global_frame_idx": 1635, "task_index": 1}, {"db_idx": 1636, "episode_idx": 5, "frame_idx": 301, "global_frame_idx": 1636, "task_index": 1}, {"db_idx": 1637, "episode_idx": 5, "frame_idx": 302, "global_frame_idx": 1637, "task_index": 1}, {"db_idx": 1638, "episode_idx": 5, "frame_idx": 303, "global_frame_idx": 1638, "task_index": 1}, {"db_idx": 1639, "episode_idx": 5, "frame_idx": 304, "global_frame_idx": 1639, "task_index": 1}, {"db_idx": 1640, "episode_idx": 5, "frame_idx": 305, "global_frame_idx": 1640, "task_index": 1}, {"db_idx": 1641, "episode_idx": 5, "frame_idx": 306, "global_frame_idx": 1641, "task_index": 1}, {"db_idx": 1642, "episode_idx": 5, "frame_idx": 307, "global_frame_idx": 1642, "task_index": 1}, {"db_idx": 1643, "episode_idx": 5, "frame_idx": 308, "global_frame_idx": 1643, "task_index": 1}, {"db_idx": 1644, "episode_idx": 5, "frame_idx": 309, "global_frame_idx": 1644, "task_index": 1}, {"db_idx": 1645, "episode_idx": 5, "frame_idx": 310, "global_frame_idx": 1645, "task_index": 1}, {"db_idx": 1646, "episode_idx": 5, "frame_idx": 311, "global_frame_idx": 1646, "task_index": 1}, {"db_idx": 1647, "episode_idx": 5, "frame_idx": 312, "global_frame_idx": 1647, "task_index": 1}, {"db_idx": 1648, "episode_idx": 5, "frame_idx": 313, "global_frame_idx": 1648, "task_index": 1}, {"db_idx": 1649, "episode_idx": 5, "frame_idx": 314, "global_frame_idx": 1649, "task_index": 1}, {"db_idx": 1650, "episode_idx": 5, "frame_idx": 315, "global_frame_idx": 1650, "task_index": 1}, {"db_idx": 1651, "episode_idx": 5, "frame_idx": 316, "global_frame_idx": 1651, "task_index": 1}, {"db_idx": 1652, "episode_idx": 5, "frame_idx": 317, 
"global_frame_idx": 1652, "task_index": 1}, {"db_idx": 1653, "episode_idx": 5, "frame_idx": 318, "global_frame_idx": 1653, "task_index": 1}, {"db_idx": 1654, "episode_idx": 5, "frame_idx": 319, "global_frame_idx": 1654, "task_index": 1}, {"db_idx": 1655, "episode_idx": 5, "frame_idx": 320, "global_frame_idx": 1655, "task_index": 1}, {"db_idx": 1656, "episode_idx": 5, "frame_idx": 321, "global_frame_idx": 1656, "task_index": 1}, {"db_idx": 1657, "episode_idx": 5, "frame_idx": 322, "global_frame_idx": 1657, "task_index": 1}, {"db_idx": 1658, "episode_idx": 5, "frame_idx": 323, "global_frame_idx": 1658, "task_index": 1}, {"db_idx": 1659, "episode_idx": 5, "frame_idx": 324, "global_frame_idx": 1659, "task_index": 1}, {"db_idx": 1660, "episode_idx": 5, "frame_idx": 325, "global_frame_idx": 1660, "task_index": 1}, {"db_idx": 1661, "episode_idx": 5, "frame_idx": 326, "global_frame_idx": 1661, "task_index": 1}, {"db_idx": 1662, "episode_idx": 5, "frame_idx": 327, "global_frame_idx": 1662, "task_index": 1}, {"db_idx": 1663, "episode_idx": 5, "frame_idx": 328, "global_frame_idx": 1663, "task_index": 1}, {"db_idx": 1664, "episode_idx": 5, "frame_idx": 329, "global_frame_idx": 1664, "task_index": 1}, {"db_idx": 1665, "episode_idx": 5, "frame_idx": 330, "global_frame_idx": 1665, "task_index": 1}, {"db_idx": 1666, "episode_idx": 5, "frame_idx": 331, "global_frame_idx": 1666, "task_index": 1}, {"db_idx": 1667, "episode_idx": 5, "frame_idx": 332, "global_frame_idx": 1667, "task_index": 1}, {"db_idx": 1668, "episode_idx": 5, "frame_idx": 333, "global_frame_idx": 1668, "task_index": 1}, {"db_idx": 1669, "episode_idx": 5, "frame_idx": 334, "global_frame_idx": 1669, "task_index": 1}, {"db_idx": 1670, "episode_idx": 5, "frame_idx": 335, "global_frame_idx": 1670, "task_index": 1}, {"db_idx": 1671, "episode_idx": 5, "frame_idx": 336, "global_frame_idx": 1671, "task_index": 1}, {"db_idx": 1672, "episode_idx": 5, "frame_idx": 337, "global_frame_idx": 1672, "task_index": 1}, {"db_idx": 
1673, "episode_idx": 5, "frame_idx": 338, "global_frame_idx": 1673, "task_index": 1}, {"db_idx": 1674, "episode_idx": 5, "frame_idx": 339, "global_frame_idx": 1674, "task_index": 1}, {"db_idx": 1675, "episode_idx": 5, "frame_idx": 340, "global_frame_idx": 1675, "task_index": 1}, {"db_idx": 1676, "episode_idx": 5, "frame_idx": 341, "global_frame_idx": 1676, "task_index": 1}, {"db_idx": 1677, "episode_idx": 5, "frame_idx": 342, "global_frame_idx": 1677, "task_index": 1}, {"db_idx": 1678, "episode_idx": 5, "frame_idx": 343, "global_frame_idx": 1678, "task_index": 1}, {"db_idx": 1679, "episode_idx": 5, "frame_idx": 344, "global_frame_idx": 1679, "task_index": 1}, {"db_idx": 1680, "episode_idx": 5, "frame_idx": 345, "global_frame_idx": 1680, "task_index": 1}, {"db_idx": 1681, "episode_idx": 5, "frame_idx": 346, "global_frame_idx": 1681, "task_index": 1}, {"db_idx": 1682, "episode_idx": 5, "frame_idx": 347, "global_frame_idx": 1682, "task_index": 1}, {"db_idx": 1683, "episode_idx": 5, "frame_idx": 348, "global_frame_idx": 1683, "task_index": 1}, {"db_idx": 1684, "episode_idx": 5, "frame_idx": 349, "global_frame_idx": 1684, "task_index": 1}, {"db_idx": 1685, "episode_idx": 5, "frame_idx": 350, "global_frame_idx": 1685, "task_index": 1}, {"db_idx": 1686, "episode_idx": 5, "frame_idx": 351, "global_frame_idx": 1686, "task_index": 1}, {"db_idx": 1687, "episode_idx": 5, "frame_idx": 352, "global_frame_idx": 1687, "task_index": 1}, {"db_idx": 1688, "episode_idx": 5, "frame_idx": 353, "global_frame_idx": 1688, "task_index": 1}, {"db_idx": 1689, "episode_idx": 5, "frame_idx": 354, "global_frame_idx": 1689, "task_index": 1}, {"db_idx": 1690, "episode_idx": 5, "frame_idx": 355, "global_frame_idx": 1690, "task_index": 1}, {"db_idx": 1691, "episode_idx": 5, "frame_idx": 356, "global_frame_idx": 1691, "task_index": 1}, {"db_idx": 1692, "episode_idx": 5, "frame_idx": 357, "global_frame_idx": 1692, "task_index": 1}, {"db_idx": 1693, "episode_idx": 5, "frame_idx": 358, 
"global_frame_idx": 1693, "task_index": 1}, {"db_idx": 1694, "episode_idx": 5, "frame_idx": 359, "global_frame_idx": 1694, "task_index": 1}, {"db_idx": 1695, "episode_idx": 5, "frame_idx": 360, "global_frame_idx": 1695, "task_index": 1}, {"db_idx": 1696, "episode_idx": 5, "frame_idx": 361, "global_frame_idx": 1696, "task_index": 1}, {"db_idx": 1697, "episode_idx": 5, "frame_idx": 362, "global_frame_idx": 1697, "task_index": 1}, {"db_idx": 1698, "episode_idx": 5, "frame_idx": 363, "global_frame_idx": 1698, "task_index": 1}, {"db_idx": 1699, "episode_idx": 5, "frame_idx": 364, "global_frame_idx": 1699, "task_index": 1}, {"db_idx": 1700, "episode_idx": 5, "frame_idx": 365, "global_frame_idx": 1700, "task_index": 1}, {"db_idx": 1701, "episode_idx": 5, "frame_idx": 366, "global_frame_idx": 1701, "task_index": 1}, {"db_idx": 1702, "episode_idx": 5, "frame_idx": 367, "global_frame_idx": 1702, "task_index": 1}, {"db_idx": 1703, "episode_idx": 5, "frame_idx": 368, "global_frame_idx": 1703, "task_index": 1}, {"db_idx": 1704, "episode_idx": 5, "frame_idx": 369, "global_frame_idx": 1704, "task_index": 1}, {"db_idx": 1705, "episode_idx": 5, "frame_idx": 370, "global_frame_idx": 1705, "task_index": 1}, {"db_idx": 1706, "episode_idx": 5, "frame_idx": 371, "global_frame_idx": 1706, "task_index": 1}, {"db_idx": 1707, "episode_idx": 5, "frame_idx": 372, "global_frame_idx": 1707, "task_index": 1}, {"db_idx": 1708, "episode_idx": 5, "frame_idx": 373, "global_frame_idx": 1708, "task_index": 1}, {"db_idx": 1709, "episode_idx": 5, "frame_idx": 374, "global_frame_idx": 1709, "task_index": 1}, {"db_idx": 1710, "episode_idx": 5, "frame_idx": 375, "global_frame_idx": 1710, "task_index": 1}, {"db_idx": 1711, "episode_idx": 5, "frame_idx": 376, "global_frame_idx": 1711, "task_index": 1}, {"db_idx": 1712, "episode_idx": 5, "frame_idx": 377, "global_frame_idx": 1712, "task_index": 1}, {"db_idx": 1713, "episode_idx": 5, "frame_idx": 378, "global_frame_idx": 1713, "task_index": 1}, {"db_idx": 
1714, "episode_idx": 5, "frame_idx": 379, "global_frame_idx": 1714, "task_index": 1}, {"db_idx": 1715, "episode_idx": 5, "frame_idx": 380, "global_frame_idx": 1715, "task_index": 1}, {"db_idx": 1716, "episode_idx": 5, "frame_idx": 381, "global_frame_idx": 1716, "task_index": 1}, {"db_idx": 1717, "episode_idx": 5, "frame_idx": 382, "global_frame_idx": 1717, "task_index": 1}, {"db_idx": 1718, "episode_idx": 5, "frame_idx": 383, "global_frame_idx": 1718, "task_index": 1}, {"db_idx": 1719, "episode_idx": 5, "frame_idx": 384, "global_frame_idx": 1719, "task_index": 1}, {"db_idx": 1720, "episode_idx": 5, "frame_idx": 385, "global_frame_idx": 1720, "task_index": 1}, {"db_idx": 1721, "episode_idx": 5, "frame_idx": 386, "global_frame_idx": 1721, "task_index": 1}, {"db_idx": 1722, "episode_idx": 5, "frame_idx": 387, "global_frame_idx": 1722, "task_index": 1}, {"db_idx": 1723, "episode_idx": 6, "frame_idx": 0, "global_frame_idx": 1723, "task_index": 1}, {"db_idx": 1724, "episode_idx": 6, "frame_idx": 1, "global_frame_idx": 1724, "task_index": 1}, {"db_idx": 1725, "episode_idx": 6, "frame_idx": 2, "global_frame_idx": 1725, "task_index": 1}, {"db_idx": 1726, "episode_idx": 6, "frame_idx": 3, "global_frame_idx": 1726, "task_index": 1}, {"db_idx": 1727, "episode_idx": 6, "frame_idx": 4, "global_frame_idx": 1727, "task_index": 1}, {"db_idx": 1728, "episode_idx": 6, "frame_idx": 5, "global_frame_idx": 1728, "task_index": 1}, {"db_idx": 1729, "episode_idx": 6, "frame_idx": 6, "global_frame_idx": 1729, "task_index": 1}, {"db_idx": 1730, "episode_idx": 6, "frame_idx": 7, "global_frame_idx": 1730, "task_index": 1}, {"db_idx": 1731, "episode_idx": 6, "frame_idx": 8, "global_frame_idx": 1731, "task_index": 1}, {"db_idx": 1732, "episode_idx": 6, "frame_idx": 9, "global_frame_idx": 1732, "task_index": 1}, {"db_idx": 1733, "episode_idx": 6, "frame_idx": 10, "global_frame_idx": 1733, "task_index": 1}, {"db_idx": 1734, "episode_idx": 6, "frame_idx": 11, "global_frame_idx": 1734, "task_index": 
1}, {"db_idx": 1735, "episode_idx": 6, "frame_idx": 12, "global_frame_idx": 1735, "task_index": 1}, {"db_idx": 1736, "episode_idx": 6, "frame_idx": 13, "global_frame_idx": 1736, "task_index": 1}, {"db_idx": 1737, "episode_idx": 6, "frame_idx": 14, "global_frame_idx": 1737, "task_index": 1}, {"db_idx": 1738, "episode_idx": 6, "frame_idx": 15, "global_frame_idx": 1738, "task_index": 1}, {"db_idx": 1739, "episode_idx": 6, "frame_idx": 16, "global_frame_idx": 1739, "task_index": 1}, {"db_idx": 1740, "episode_idx": 6, "frame_idx": 17, "global_frame_idx": 1740, "task_index": 1}, {"db_idx": 1741, "episode_idx": 6, "frame_idx": 18, "global_frame_idx": 1741, "task_index": 1}, {"db_idx": 1742, "episode_idx": 6, "frame_idx": 19, "global_frame_idx": 1742, "task_index": 1}, {"db_idx": 1743, "episode_idx": 6, "frame_idx": 20, "global_frame_idx": 1743, "task_index": 1}, {"db_idx": 1744, "episode_idx": 6, "frame_idx": 21, "global_frame_idx": 1744, "task_index": 1}, {"db_idx": 1745, "episode_idx": 6, "frame_idx": 22, "global_frame_idx": 1745, "task_index": 1}, {"db_idx": 1746, "episode_idx": 6, "frame_idx": 23, "global_frame_idx": 1746, "task_index": 1}, {"db_idx": 1747, "episode_idx": 6, "frame_idx": 24, "global_frame_idx": 1747, "task_index": 1}, {"db_idx": 1748, "episode_idx": 6, "frame_idx": 25, "global_frame_idx": 1748, "task_index": 1}, {"db_idx": 1749, "episode_idx": 6, "frame_idx": 26, "global_frame_idx": 1749, "task_index": 1}, {"db_idx": 1750, "episode_idx": 6, "frame_idx": 27, "global_frame_idx": 1750, "task_index": 1}, {"db_idx": 1751, "episode_idx": 6, "frame_idx": 28, "global_frame_idx": 1751, "task_index": 1}, {"db_idx": 1752, "episode_idx": 6, "frame_idx": 29, "global_frame_idx": 1752, "task_index": 1}, {"db_idx": 1753, "episode_idx": 6, "frame_idx": 30, "global_frame_idx": 1753, "task_index": 1}, {"db_idx": 1754, "episode_idx": 6, "frame_idx": 31, "global_frame_idx": 1754, "task_index": 1}, {"db_idx": 1755, "episode_idx": 6, "frame_idx": 32, "global_frame_idx": 
1755, "task_index": 1}, {"db_idx": 1756, "episode_idx": 6, "frame_idx": 33, "global_frame_idx": 1756, "task_index": 1}, {"db_idx": 1757, "episode_idx": 6, "frame_idx": 34, "global_frame_idx": 1757, "task_index": 1}, {"db_idx": 1758, "episode_idx": 6, "frame_idx": 35, "global_frame_idx": 1758, "task_index": 1}, {"db_idx": 1759, "episode_idx": 6, "frame_idx": 36, "global_frame_idx": 1759, "task_index": 1}, {"db_idx": 1760, "episode_idx": 6, "frame_idx": 37, "global_frame_idx": 1760, "task_index": 1}, {"db_idx": 1761, "episode_idx": 6, "frame_idx": 38, "global_frame_idx": 1761, "task_index": 1}, {"db_idx": 1762, "episode_idx": 6, "frame_idx": 39, "global_frame_idx": 1762, "task_index": 1}, {"db_idx": 1763, "episode_idx": 6, "frame_idx": 40, "global_frame_idx": 1763, "task_index": 1}, {"db_idx": 1764, "episode_idx": 6, "frame_idx": 41, "global_frame_idx": 1764, "task_index": 1}, {"db_idx": 1765, "episode_idx": 6, "frame_idx": 42, "global_frame_idx": 1765, "task_index": 1}, {"db_idx": 1766, "episode_idx": 6, "frame_idx": 43, "global_frame_idx": 1766, "task_index": 1}, {"db_idx": 1767, "episode_idx": 6, "frame_idx": 44, "global_frame_idx": 1767, "task_index": 1}, {"db_idx": 1768, "episode_idx": 6, "frame_idx": 45, "global_frame_idx": 1768, "task_index": 1}, {"db_idx": 1769, "episode_idx": 6, "frame_idx": 46, "global_frame_idx": 1769, "task_index": 1}, {"db_idx": 1770, "episode_idx": 6, "frame_idx": 47, "global_frame_idx": 1770, "task_index": 1}, {"db_idx": 1771, "episode_idx": 6, "frame_idx": 48, "global_frame_idx": 1771, "task_index": 1}, {"db_idx": 1772, "episode_idx": 6, "frame_idx": 49, "global_frame_idx": 1772, "task_index": 1}, {"db_idx": 1773, "episode_idx": 6, "frame_idx": 50, "global_frame_idx": 1773, "task_index": 1}, {"db_idx": 1774, "episode_idx": 6, "frame_idx": 51, "global_frame_idx": 1774, "task_index": 1}, {"db_idx": 1775, "episode_idx": 6, "frame_idx": 52, "global_frame_idx": 1775, "task_index": 1}, {"db_idx": 1776, "episode_idx": 6, "frame_idx": 53, 
"global_frame_idx": 1776, "task_index": 1}, {"db_idx": 1777, "episode_idx": 6, "frame_idx": 54, "global_frame_idx": 1777, "task_index": 1}, {"db_idx": 1778, "episode_idx": 6, "frame_idx": 55, "global_frame_idx": 1778, "task_index": 1}, {"db_idx": 1779, "episode_idx": 6, "frame_idx": 56, "global_frame_idx": 1779, "task_index": 1}, {"db_idx": 1780, "episode_idx": 6, "frame_idx": 57, "global_frame_idx": 1780, "task_index": 1}, {"db_idx": 1781, "episode_idx": 6, "frame_idx": 58, "global_frame_idx": 1781, "task_index": 1}, {"db_idx": 1782, "episode_idx": 6, "frame_idx": 59, "global_frame_idx": 1782, "task_index": 1}, {"db_idx": 1783, "episode_idx": 6, "frame_idx": 60, "global_frame_idx": 1783, "task_index": 1}, {"db_idx": 1784, "episode_idx": 6, "frame_idx": 61, "global_frame_idx": 1784, "task_index": 1}, {"db_idx": 1785, "episode_idx": 6, "frame_idx": 62, "global_frame_idx": 1785, "task_index": 1}, {"db_idx": 1786, "episode_idx": 6, "frame_idx": 63, "global_frame_idx": 1786, "task_index": 1}, {"db_idx": 1787, "episode_idx": 6, "frame_idx": 64, "global_frame_idx": 1787, "task_index": 1}, {"db_idx": 1788, "episode_idx": 6, "frame_idx": 65, "global_frame_idx": 1788, "task_index": 1}, {"db_idx": 1789, "episode_idx": 6, "frame_idx": 66, "global_frame_idx": 1789, "task_index": 1}, {"db_idx": 1790, "episode_idx": 6, "frame_idx": 67, "global_frame_idx": 1790, "task_index": 1}, {"db_idx": 1791, "episode_idx": 6, "frame_idx": 68, "global_frame_idx": 1791, "task_index": 1}, {"db_idx": 1792, "episode_idx": 6, "frame_idx": 69, "global_frame_idx": 1792, "task_index": 1}, {"db_idx": 1793, "episode_idx": 6, "frame_idx": 70, "global_frame_idx": 1793, "task_index": 1}, {"db_idx": 1794, "episode_idx": 6, "frame_idx": 71, "global_frame_idx": 1794, "task_index": 1}, {"db_idx": 1795, "episode_idx": 6, "frame_idx": 72, "global_frame_idx": 1795, "task_index": 1}, {"db_idx": 1796, "episode_idx": 6, "frame_idx": 73, "global_frame_idx": 1796, "task_index": 1}, {"db_idx": 1797, "episode_idx": 6, 
"frame_idx": 74, "global_frame_idx": 1797, "task_index": 1}, {"db_idx": 1798, "episode_idx": 6, "frame_idx": 75, "global_frame_idx": 1798, "task_index": 1}, {"db_idx": 1799, "episode_idx": 6, "frame_idx": 76, "global_frame_idx": 1799, "task_index": 1}, {"db_idx": 1800, "episode_idx": 6, "frame_idx": 77, "global_frame_idx": 1800, "task_index": 1}, {"db_idx": 1801, "episode_idx": 6, "frame_idx": 78, "global_frame_idx": 1801, "task_index": 1}, {"db_idx": 1802, "episode_idx": 6, "frame_idx": 79, "global_frame_idx": 1802, "task_index": 1}, {"db_idx": 1803, "episode_idx": 6, "frame_idx": 80, "global_frame_idx": 1803, "task_index": 1}, {"db_idx": 1804, "episode_idx": 6, "frame_idx": 81, "global_frame_idx": 1804, "task_index": 1}, {"db_idx": 1805, "episode_idx": 6, "frame_idx": 82, "global_frame_idx": 1805, "task_index": 1}, {"db_idx": 1806, "episode_idx": 6, "frame_idx": 83, "global_frame_idx": 1806, "task_index": 1}, {"db_idx": 1807, "episode_idx": 6, "frame_idx": 84, "global_frame_idx": 1807, "task_index": 1}, {"db_idx": 1808, "episode_idx": 6, "frame_idx": 85, "global_frame_idx": 1808, "task_index": 1}, {"db_idx": 1809, "episode_idx": 6, "frame_idx": 86, "global_frame_idx": 1809, "task_index": 1}, {"db_idx": 1810, "episode_idx": 6, "frame_idx": 87, "global_frame_idx": 1810, "task_index": 1}, {"db_idx": 1811, "episode_idx": 6, "frame_idx": 88, "global_frame_idx": 1811, "task_index": 1}, {"db_idx": 1812, "episode_idx": 6, "frame_idx": 89, "global_frame_idx": 1812, "task_index": 1}, {"db_idx": 1813, "episode_idx": 6, "frame_idx": 90, "global_frame_idx": 1813, "task_index": 1}, {"db_idx": 1814, "episode_idx": 6, "frame_idx": 91, "global_frame_idx": 1814, "task_index": 1}, {"db_idx": 1815, "episode_idx": 6, "frame_idx": 92, "global_frame_idx": 1815, "task_index": 1}, {"db_idx": 1816, "episode_idx": 6, "frame_idx": 93, "global_frame_idx": 1816, "task_index": 1}, {"db_idx": 1817, "episode_idx": 6, "frame_idx": 94, "global_frame_idx": 1817, "task_index": 1}, {"db_idx": 1818, 
"episode_idx": 6, "frame_idx": 95, "global_frame_idx": 1818, "task_index": 1}, {"db_idx": 1819, "episode_idx": 6, "frame_idx": 96, "global_frame_idx": 1819, "task_index": 1}, {"db_idx": 1820, "episode_idx": 6, "frame_idx": 97, "global_frame_idx": 1820, "task_index": 1}, {"db_idx": 1821, "episode_idx": 6, "frame_idx": 98, "global_frame_idx": 1821, "task_index": 1}, {"db_idx": 1822, "episode_idx": 6, "frame_idx": 99, "global_frame_idx": 1822, "task_index": 1}, {"db_idx": 1823, "episode_idx": 6, "frame_idx": 100, "global_frame_idx": 1823, "task_index": 1}, {"db_idx": 1824, "episode_idx": 6, "frame_idx": 101, "global_frame_idx": 1824, "task_index": 1}, {"db_idx": 1825, "episode_idx": 6, "frame_idx": 102, "global_frame_idx": 1825, "task_index": 1}, {"db_idx": 1826, "episode_idx": 6, "frame_idx": 103, "global_frame_idx": 1826, "task_index": 1}, {"db_idx": 1827, "episode_idx": 6, "frame_idx": 104, "global_frame_idx": 1827, "task_index": 1}, {"db_idx": 1828, "episode_idx": 6, "frame_idx": 105, "global_frame_idx": 1828, "task_index": 1}, {"db_idx": 1829, "episode_idx": 6, "frame_idx": 106, "global_frame_idx": 1829, "task_index": 1}, {"db_idx": 1830, "episode_idx": 6, "frame_idx": 107, "global_frame_idx": 1830, "task_index": 1}, {"db_idx": 1831, "episode_idx": 6, "frame_idx": 108, "global_frame_idx": 1831, "task_index": 1}, {"db_idx": 1832, "episode_idx": 6, "frame_idx": 109, "global_frame_idx": 1832, "task_index": 1}, {"db_idx": 1833, "episode_idx": 6, "frame_idx": 110, "global_frame_idx": 1833, "task_index": 1}, {"db_idx": 1834, "episode_idx": 6, "frame_idx": 111, "global_frame_idx": 1834, "task_index": 1}, {"db_idx": 1835, "episode_idx": 6, "frame_idx": 112, "global_frame_idx": 1835, "task_index": 1}, {"db_idx": 1836, "episode_idx": 6, "frame_idx": 113, "global_frame_idx": 1836, "task_index": 1}, {"db_idx": 1837, "episode_idx": 6, "frame_idx": 114, "global_frame_idx": 1837, "task_index": 1}, {"db_idx": 1838, "episode_idx": 6, "frame_idx": 115, "global_frame_idx": 1838, 
"task_index": 1}, {"db_idx": 1839, "episode_idx": 6, "frame_idx": 116, "global_frame_idx": 1839, "task_index": 1}, {"db_idx": 1840, "episode_idx": 6, "frame_idx": 117, "global_frame_idx": 1840, "task_index": 1}, {"db_idx": 1841, "episode_idx": 6, "frame_idx": 118, "global_frame_idx": 1841, "task_index": 1}, {"db_idx": 1842, "episode_idx": 6, "frame_idx": 119, "global_frame_idx": 1842, "task_index": 1}, {"db_idx": 1843, "episode_idx": 6, "frame_idx": 120, "global_frame_idx": 1843, "task_index": 1}, {"db_idx": 1844, "episode_idx": 6, "frame_idx": 121, "global_frame_idx": 1844, "task_index": 1}, {"db_idx": 1845, "episode_idx": 6, "frame_idx": 122, "global_frame_idx": 1845, "task_index": 1}, {"db_idx": 1846, "episode_idx": 6, "frame_idx": 123, "global_frame_idx": 1846, "task_index": 1}, {"db_idx": 1847, "episode_idx": 6, "frame_idx": 124, "global_frame_idx": 1847, "task_index": 1}, {"db_idx": 1848, "episode_idx": 6, "frame_idx": 125, "global_frame_idx": 1848, "task_index": 1}, {"db_idx": 1849, "episode_idx": 6, "frame_idx": 126, "global_frame_idx": 1849, "task_index": 1}, {"db_idx": 1850, "episode_idx": 6, "frame_idx": 127, "global_frame_idx": 1850, "task_index": 1}, {"db_idx": 1851, "episode_idx": 6, "frame_idx": 128, "global_frame_idx": 1851, "task_index": 1}, {"db_idx": 1852, "episode_idx": 6, "frame_idx": 129, "global_frame_idx": 1852, "task_index": 1}, {"db_idx": 1853, "episode_idx": 6, "frame_idx": 130, "global_frame_idx": 1853, "task_index": 1}, {"db_idx": 1854, "episode_idx": 6, "frame_idx": 131, "global_frame_idx": 1854, "task_index": 1}, {"db_idx": 1855, "episode_idx": 6, "frame_idx": 132, "global_frame_idx": 1855, "task_index": 1}, {"db_idx": 1856, "episode_idx": 6, "frame_idx": 133, "global_frame_idx": 1856, "task_index": 1}, {"db_idx": 1857, "episode_idx": 6, "frame_idx": 134, "global_frame_idx": 1857, "task_index": 1}, {"db_idx": 1858, "episode_idx": 6, "frame_idx": 135, "global_frame_idx": 1858, "task_index": 1}, {"db_idx": 1859, "episode_idx": 6, 
"frame_idx": 136, "global_frame_idx": 1859, "task_index": 1}, {"db_idx": 1860, "episode_idx": 6, "frame_idx": 137, "global_frame_idx": 1860, "task_index": 1}, {"db_idx": 1861, "episode_idx": 6, "frame_idx": 138, "global_frame_idx": 1861, "task_index": 1}, {"db_idx": 1862, "episode_idx": 6, "frame_idx": 139, "global_frame_idx": 1862, "task_index": 1}, {"db_idx": 1863, "episode_idx": 6, "frame_idx": 140, "global_frame_idx": 1863, "task_index": 1}, {"db_idx": 1864, "episode_idx": 6, "frame_idx": 141, "global_frame_idx": 1864, "task_index": 1}, {"db_idx": 1865, "episode_idx": 6, "frame_idx": 142, "global_frame_idx": 1865, "task_index": 1}, {"db_idx": 1866, "episode_idx": 6, "frame_idx": 143, "global_frame_idx": 1866, "task_index": 1}, {"db_idx": 1867, "episode_idx": 6, "frame_idx": 144, "global_frame_idx": 1867, "task_index": 1}, {"db_idx": 1868, "episode_idx": 6, "frame_idx": 145, "global_frame_idx": 1868, "task_index": 1}, {"db_idx": 1869, "episode_idx": 6, "frame_idx": 146, "global_frame_idx": 1869, "task_index": 1}, {"db_idx": 1870, "episode_idx": 6, "frame_idx": 147, "global_frame_idx": 1870, "task_index": 1}, {"db_idx": 1871, "episode_idx": 6, "frame_idx": 148, "global_frame_idx": 1871, "task_index": 1}, {"db_idx": 1872, "episode_idx": 6, "frame_idx": 149, "global_frame_idx": 1872, "task_index": 1}, {"db_idx": 1873, "episode_idx": 6, "frame_idx": 150, "global_frame_idx": 1873, "task_index": 1}, {"db_idx": 1874, "episode_idx": 6, "frame_idx": 151, "global_frame_idx": 1874, "task_index": 1}, {"db_idx": 1875, "episode_idx": 6, "frame_idx": 152, "global_frame_idx": 1875, "task_index": 1}, {"db_idx": 1876, "episode_idx": 6, "frame_idx": 153, "global_frame_idx": 1876, "task_index": 1}, {"db_idx": 1877, "episode_idx": 6, "frame_idx": 154, "global_frame_idx": 1877, "task_index": 1}, {"db_idx": 1878, "episode_idx": 6, "frame_idx": 155, "global_frame_idx": 1878, "task_index": 1}, {"db_idx": 1879, "episode_idx": 6, "frame_idx": 156, "global_frame_idx": 1879, "task_index": 
1}, {"db_idx": 1880, "episode_idx": 6, "frame_idx": 157, "global_frame_idx": 1880, "task_index": 1}, {"db_idx": 1881, "episode_idx": 6, "frame_idx": 158, "global_frame_idx": 1881, "task_index": 1}, {"db_idx": 1882, "episode_idx": 6, "frame_idx": 159, "global_frame_idx": 1882, "task_index": 1}, {"db_idx": 1883, "episode_idx": 6, "frame_idx": 160, "global_frame_idx": 1883, "task_index": 1}, {"db_idx": 1884, "episode_idx": 6, "frame_idx": 161, "global_frame_idx": 1884, "task_index": 1}, {"db_idx": 1885, "episode_idx": 6, "frame_idx": 162, "global_frame_idx": 1885, "task_index": 1}, {"db_idx": 1886, "episode_idx": 6, "frame_idx": 163, "global_frame_idx": 1886, "task_index": 1}, {"db_idx": 1887, "episode_idx": 6, "frame_idx": 164, "global_frame_idx": 1887, "task_index": 1}, {"db_idx": 1888, "episode_idx": 6, "frame_idx": 165, "global_frame_idx": 1888, "task_index": 1}, {"db_idx": 1889, "episode_idx": 6, "frame_idx": 166, "global_frame_idx": 1889, "task_index": 1}, {"db_idx": 1890, "episode_idx": 6, "frame_idx": 167, "global_frame_idx": 1890, "task_index": 1}, {"db_idx": 1891, "episode_idx": 6, "frame_idx": 168, "global_frame_idx": 1891, "task_index": 1}, {"db_idx": 1892, "episode_idx": 6, "frame_idx": 169, "global_frame_idx": 1892, "task_index": 1}, {"db_idx": 1893, "episode_idx": 6, "frame_idx": 170, "global_frame_idx": 1893, "task_index": 1}, {"db_idx": 1894, "episode_idx": 6, "frame_idx": 171, "global_frame_idx": 1894, "task_index": 1}, {"db_idx": 1895, "episode_idx": 6, "frame_idx": 172, "global_frame_idx": 1895, "task_index": 1}, {"db_idx": 1896, "episode_idx": 6, "frame_idx": 173, "global_frame_idx": 1896, "task_index": 1}, {"db_idx": 1897, "episode_idx": 6, "frame_idx": 174, "global_frame_idx": 1897, "task_index": 1}, {"db_idx": 1898, "episode_idx": 6, "frame_idx": 175, "global_frame_idx": 1898, "task_index": 1}, {"db_idx": 1899, "episode_idx": 6, "frame_idx": 176, "global_frame_idx": 1899, "task_index": 1}, {"db_idx": 1900, "episode_idx": 6, "frame_idx": 177, 
"global_frame_idx": 1900, "task_index": 1}, {"db_idx": 1901, "episode_idx": 6, "frame_idx": 178, "global_frame_idx": 1901, "task_index": 1}, {"db_idx": 1902, "episode_idx": 6, "frame_idx": 179, "global_frame_idx": 1902, "task_index": 1}, {"db_idx": 1903, "episode_idx": 6, "frame_idx": 180, "global_frame_idx": 1903, "task_index": 1}, {"db_idx": 1904, "episode_idx": 6, "frame_idx": 181, "global_frame_idx": 1904, "task_index": 1}, {"db_idx": 1905, "episode_idx": 6, "frame_idx": 182, "global_frame_idx": 1905, "task_index": 1}, {"db_idx": 1906, "episode_idx": 6, "frame_idx": 183, "global_frame_idx": 1906, "task_index": 1}, {"db_idx": 1907, "episode_idx": 6, "frame_idx": 184, "global_frame_idx": 1907, "task_index": 1}, {"db_idx": 1908, "episode_idx": 6, "frame_idx": 185, "global_frame_idx": 1908, "task_index": 1}, {"db_idx": 1909, "episode_idx": 6, "frame_idx": 186, "global_frame_idx": 1909, "task_index": 1}, {"db_idx": 1910, "episode_idx": 6, "frame_idx": 187, "global_frame_idx": 1910, "task_index": 1}, {"db_idx": 1911, "episode_idx": 6, "frame_idx": 188, "global_frame_idx": 1911, "task_index": 1}, {"db_idx": 1912, "episode_idx": 6, "frame_idx": 189, "global_frame_idx": 1912, "task_index": 1}, {"db_idx": 1913, "episode_idx": 6, "frame_idx": 190, "global_frame_idx": 1913, "task_index": 1}, {"db_idx": 1914, "episode_idx": 6, "frame_idx": 191, "global_frame_idx": 1914, "task_index": 1}, {"db_idx": 1915, "episode_idx": 6, "frame_idx": 192, "global_frame_idx": 1915, "task_index": 1}, {"db_idx": 1916, "episode_idx": 6, "frame_idx": 193, "global_frame_idx": 1916, "task_index": 1}, {"db_idx": 1917, "episode_idx": 6, "frame_idx": 194, "global_frame_idx": 1917, "task_index": 1}, {"db_idx": 1918, "episode_idx": 6, "frame_idx": 195, "global_frame_idx": 1918, "task_index": 1}, {"db_idx": 1919, "episode_idx": 6, "frame_idx": 196, "global_frame_idx": 1919, "task_index": 1}, {"db_idx": 1920, "episode_idx": 6, "frame_idx": 197, "global_frame_idx": 1920, "task_index": 1}, {"db_idx": 
1921, "episode_idx": 6, "frame_idx": 198, "global_frame_idx": 1921, "task_index": 1}, {"db_idx": 1922, "episode_idx": 6, "frame_idx": 199, "global_frame_idx": 1922, "task_index": 1}, {"db_idx": 1923, "episode_idx": 6, "frame_idx": 200, "global_frame_idx": 1923, "task_index": 1}, {"db_idx": 1924, "episode_idx": 6, "frame_idx": 201, "global_frame_idx": 1924, "task_index": 1}, {"db_idx": 1925, "episode_idx": 6, "frame_idx": 202, "global_frame_idx": 1925, "task_index": 1}, {"db_idx": 1926, "episode_idx": 6, "frame_idx": 203, "global_frame_idx": 1926, "task_index": 1}, {"db_idx": 1927, "episode_idx": 6, "frame_idx": 204, "global_frame_idx": 1927, "task_index": 1}, {"db_idx": 1928, "episode_idx": 6, "frame_idx": 205, "global_frame_idx": 1928, "task_index": 1}, {"db_idx": 1929, "episode_idx": 6, "frame_idx": 206, "global_frame_idx": 1929, "task_index": 1}, {"db_idx": 1930, "episode_idx": 6, "frame_idx": 207, "global_frame_idx": 1930, "task_index": 1}, {"db_idx": 1931, "episode_idx": 6, "frame_idx": 208, "global_frame_idx": 1931, "task_index": 1}, {"db_idx": 1932, "episode_idx": 6, "frame_idx": 209, "global_frame_idx": 1932, "task_index": 1}, {"db_idx": 1933, "episode_idx": 6, "frame_idx": 210, "global_frame_idx": 1933, "task_index": 1}, {"db_idx": 1934, "episode_idx": 6, "frame_idx": 211, "global_frame_idx": 1934, "task_index": 1}, {"db_idx": 1935, "episode_idx": 6, "frame_idx": 212, "global_frame_idx": 1935, "task_index": 1}, {"db_idx": 1936, "episode_idx": 6, "frame_idx": 213, "global_frame_idx": 1936, "task_index": 1}, {"db_idx": 1937, "episode_idx": 6, "frame_idx": 214, "global_frame_idx": 1937, "task_index": 1}, {"db_idx": 1938, "episode_idx": 6, "frame_idx": 215, "global_frame_idx": 1938, "task_index": 1}, {"db_idx": 1939, "episode_idx": 6, "frame_idx": 216, "global_frame_idx": 1939, "task_index": 1}, {"db_idx": 1940, "episode_idx": 6, "frame_idx": 217, "global_frame_idx": 1940, "task_index": 1}, {"db_idx": 1941, "episode_idx": 6, "frame_idx": 218, 
"global_frame_idx": 1941, "task_index": 1}, {"db_idx": 1942, "episode_idx": 6, "frame_idx": 219, "global_frame_idx": 1942, "task_index": 1}, {"db_idx": 1943, "episode_idx": 6, "frame_idx": 220, "global_frame_idx": 1943, "task_index": 1}, {"db_idx": 1944, "episode_idx": 6, "frame_idx": 221, "global_frame_idx": 1944, "task_index": 1}, {"db_idx": 1945, "episode_idx": 6, "frame_idx": 222, "global_frame_idx": 1945, "task_index": 1}, {"db_idx": 1946, "episode_idx": 6, "frame_idx": 223, "global_frame_idx": 1946, "task_index": 1}, {"db_idx": 1947, "episode_idx": 6, "frame_idx": 224, "global_frame_idx": 1947, "task_index": 1}, {"db_idx": 1948, "episode_idx": 6, "frame_idx": 225, "global_frame_idx": 1948, "task_index": 1}, {"db_idx": 1949, "episode_idx": 6, "frame_idx": 226, "global_frame_idx": 1949, "task_index": 1}, {"db_idx": 1950, "episode_idx": 6, "frame_idx": 227, "global_frame_idx": 1950, "task_index": 1}, {"db_idx": 1951, "episode_idx": 6, "frame_idx": 228, "global_frame_idx": 1951, "task_index": 1}, {"db_idx": 1952, "episode_idx": 6, "frame_idx": 229, "global_frame_idx": 1952, "task_index": 1}, {"db_idx": 1953, "episode_idx": 6, "frame_idx": 230, "global_frame_idx": 1953, "task_index": 1}, {"db_idx": 1954, "episode_idx": 6, "frame_idx": 231, "global_frame_idx": 1954, "task_index": 1}, {"db_idx": 1955, "episode_idx": 6, "frame_idx": 232, "global_frame_idx": 1955, "task_index": 1}, {"db_idx": 1956, "episode_idx": 6, "frame_idx": 233, "global_frame_idx": 1956, "task_index": 1}, {"db_idx": 1957, "episode_idx": 6, "frame_idx": 234, "global_frame_idx": 1957, "task_index": 1}, {"db_idx": 1958, "episode_idx": 6, "frame_idx": 235, "global_frame_idx": 1958, "task_index": 1}, {"db_idx": 1959, "episode_idx": 6, "frame_idx": 236, "global_frame_idx": 1959, "task_index": 1}, {"db_idx": 1960, "episode_idx": 6, "frame_idx": 237, "global_frame_idx": 1960, "task_index": 1}, {"db_idx": 1961, "episode_idx": 6, "frame_idx": 238, "global_frame_idx": 1961, "task_index": 1}, {"db_idx": 
1962, "episode_idx": 6, "frame_idx": 239, "global_frame_idx": 1962, "task_index": 1}, {"db_idx": 1963, "episode_idx": 6, "frame_idx": 240, "global_frame_idx": 1963, "task_index": 1}, {"db_idx": 1964, "episode_idx": 6, "frame_idx": 241, "global_frame_idx": 1964, "task_index": 1}, {"db_idx": 1965, "episode_idx": 6, "frame_idx": 242, "global_frame_idx": 1965, "task_index": 1}, {"db_idx": 1966, "episode_idx": 6, "frame_idx": 243, "global_frame_idx": 1966, "task_index": 1}, {"db_idx": 1967, "episode_idx": 6, "frame_idx": 244, "global_frame_idx": 1967, "task_index": 1}, {"db_idx": 1968, "episode_idx": 6, "frame_idx": 245, "global_frame_idx": 1968, "task_index": 1}, {"db_idx": 1969, "episode_idx": 6, "frame_idx": 246, "global_frame_idx": 1969, "task_index": 1}, {"db_idx": 1970, "episode_idx": 6, "frame_idx": 247, "global_frame_idx": 1970, "task_index": 1}, {"db_idx": 1971, "episode_idx": 6, "frame_idx": 248, "global_frame_idx": 1971, "task_index": 1}, {"db_idx": 1972, "episode_idx": 6, "frame_idx": 249, "global_frame_idx": 1972, "task_index": 1}, {"db_idx": 1973, "episode_idx": 6, "frame_idx": 250, "global_frame_idx": 1973, "task_index": 1}, {"db_idx": 1974, "episode_idx": 6, "frame_idx": 251, "global_frame_idx": 1974, "task_index": 1}, {"db_idx": 1975, "episode_idx": 6, "frame_idx": 252, "global_frame_idx": 1975, "task_index": 1}, {"db_idx": 1976, "episode_idx": 6, "frame_idx": 253, "global_frame_idx": 1976, "task_index": 1}, {"db_idx": 1977, "episode_idx": 6, "frame_idx": 254, "global_frame_idx": 1977, "task_index": 1}, {"db_idx": 1978, "episode_idx": 6, "frame_idx": 255, "global_frame_idx": 1978, "task_index": 1}, {"db_idx": 1979, "episode_idx": 6, "frame_idx": 256, "global_frame_idx": 1979, "task_index": 1}, {"db_idx": 1980, "episode_idx": 6, "frame_idx": 257, "global_frame_idx": 1980, "task_index": 1}, {"db_idx": 1981, "episode_idx": 6, "frame_idx": 258, "global_frame_idx": 1981, "task_index": 1}, {"db_idx": 1982, "episode_idx": 6, "frame_idx": 259, 
"global_frame_idx": 1982, "task_index": 1}, {"db_idx": 1983, "episode_idx": 6, "frame_idx": 260, "global_frame_idx": 1983, "task_index": 1}, {"db_idx": 1984, "episode_idx": 6, "frame_idx": 261, "global_frame_idx": 1984, "task_index": 1}, {"db_idx": 1985, "episode_idx": 6, "frame_idx": 262, "global_frame_idx": 1985, "task_index": 1}, {"db_idx": 1986, "episode_idx": 6, "frame_idx": 263, "global_frame_idx": 1986, "task_index": 1}, {"db_idx": 1987, "episode_idx": 6, "frame_idx": 264, "global_frame_idx": 1987, "task_index": 1}, {"db_idx": 1988, "episode_idx": 6, "frame_idx": 265, "global_frame_idx": 1988, "task_index": 1}, {"db_idx": 1989, "episode_idx": 6, "frame_idx": 266, "global_frame_idx": 1989, "task_index": 1}, {"db_idx": 1990, "episode_idx": 6, "frame_idx": 267, "global_frame_idx": 1990, "task_index": 1}, {"db_idx": 1991, "episode_idx": 6, "frame_idx": 268, "global_frame_idx": 1991, "task_index": 1}, {"db_idx": 1992, "episode_idx": 6, "frame_idx": 269, "global_frame_idx": 1992, "task_index": 1}, {"db_idx": 1993, "episode_idx": 6, "frame_idx": 270, "global_frame_idx": 1993, "task_index": 1}, {"db_idx": 1994, "episode_idx": 6, "frame_idx": 271, "global_frame_idx": 1994, "task_index": 1}, {"db_idx": 1995, "episode_idx": 6, "frame_idx": 272, "global_frame_idx": 1995, "task_index": 1}, {"db_idx": 1996, "episode_idx": 6, "frame_idx": 273, "global_frame_idx": 1996, "task_index": 1}, {"db_idx": 1997, "episode_idx": 6, "frame_idx": 274, "global_frame_idx": 1997, "task_index": 1}, {"db_idx": 1998, "episode_idx": 6, "frame_idx": 275, "global_frame_idx": 1998, "task_index": 1}, {"db_idx": 1999, "episode_idx": 6, "frame_idx": 276, "global_frame_idx": 1999, "task_index": 1}, {"db_idx": 2000, "episode_idx": 6, "frame_idx": 277, "global_frame_idx": 2000, "task_index": 1}, {"db_idx": 2001, "episode_idx": 6, "frame_idx": 278, "global_frame_idx": 2001, "task_index": 1}, {"db_idx": 2002, "episode_idx": 6, "frame_idx": 279, "global_frame_idx": 2002, "task_index": 1}, {"db_idx": 
2003, "episode_idx": 6, "frame_idx": 280, "global_frame_idx": 2003, "task_index": 1}, {"db_idx": 2004, "episode_idx": 6, "frame_idx": 281, "global_frame_idx": 2004, "task_index": 1}, {"db_idx": 2005, "episode_idx": 6, "frame_idx": 282, "global_frame_idx": 2005, "task_index": 1}, {"db_idx": 2006, "episode_idx": 6, "frame_idx": 283, "global_frame_idx": 2006, "task_index": 1}, {"db_idx": 2007, "episode_idx": 6, "frame_idx": 284, "global_frame_idx": 2007, "task_index": 1}, {"db_idx": 2008, "episode_idx": 6, "frame_idx": 285, "global_frame_idx": 2008, "task_index": 1}, {"db_idx": 2009, "episode_idx": 6, "frame_idx": 286, "global_frame_idx": 2009, "task_index": 1}, {"db_idx": 2010, "episode_idx": 6, "frame_idx": 287, "global_frame_idx": 2010, "task_index": 1}, {"db_idx": 2011, "episode_idx": 6, "frame_idx": 288, "global_frame_idx": 2011, "task_index": 1}, {"db_idx": 2012, "episode_idx": 6, "frame_idx": 289, "global_frame_idx": 2012, "task_index": 1}, {"db_idx": 2013, "episode_idx": 6, "frame_idx": 290, "global_frame_idx": 2013, "task_index": 1}, {"db_idx": 2014, "episode_idx": 6, "frame_idx": 291, "global_frame_idx": 2014, "task_index": 1}, {"db_idx": 2015, "episode_idx": 6, "frame_idx": 292, "global_frame_idx": 2015, "task_index": 1}, {"db_idx": 2016, "episode_idx": 6, "frame_idx": 293, "global_frame_idx": 2016, "task_index": 1}, {"db_idx": 2017, "episode_idx": 6, "frame_idx": 294, "global_frame_idx": 2017, "task_index": 1}, {"db_idx": 2018, "episode_idx": 6, "frame_idx": 295, "global_frame_idx": 2018, "task_index": 1}, {"db_idx": 2019, "episode_idx": 6, "frame_idx": 296, "global_frame_idx": 2019, "task_index": 1}, {"db_idx": 2020, "episode_idx": 6, "frame_idx": 297, "global_frame_idx": 2020, "task_index": 1}, {"db_idx": 2021, "episode_idx": 6, "frame_idx": 298, "global_frame_idx": 2021, "task_index": 1}, {"db_idx": 2022, "episode_idx": 6, "frame_idx": 299, "global_frame_idx": 2022, "task_index": 1}, {"db_idx": 2023, "episode_idx": 6, "frame_idx": 300, 
"global_frame_idx": 2023, "task_index": 1}, {"db_idx": 2024, "episode_idx": 6, "frame_idx": 301, "global_frame_idx": 2024, "task_index": 1}, {"db_idx": 2025, "episode_idx": 6, "frame_idx": 302, "global_frame_idx": 2025, "task_index": 1}, {"db_idx": 2026, "episode_idx": 6, "frame_idx": 303, "global_frame_idx": 2026, "task_index": 1}, {"db_idx": 2027, "episode_idx": 6, "frame_idx": 304, "global_frame_idx": 2027, "task_index": 1}, {"db_idx": 2028, "episode_idx": 6, "frame_idx": 305, "global_frame_idx": 2028, "task_index": 1}, {"db_idx": 2029, "episode_idx": 6, "frame_idx": 306, "global_frame_idx": 2029, "task_index": 1}, {"db_idx": 2030, "episode_idx": 6, "frame_idx": 307, "global_frame_idx": 2030, "task_index": 1}, {"db_idx": 2031, "episode_idx": 6, "frame_idx": 308, "global_frame_idx": 2031, "task_index": 1}, {"db_idx": 2032, "episode_idx": 6, "frame_idx": 309, "global_frame_idx": 2032, "task_index": 1}, {"db_idx": 2033, "episode_idx": 6, "frame_idx": 310, "global_frame_idx": 2033, "task_index": 1}, {"db_idx": 2034, "episode_idx": 6, "frame_idx": 311, "global_frame_idx": 2034, "task_index": 1}, {"db_idx": 2035, "episode_idx": 6, "frame_idx": 312, "global_frame_idx": 2035, "task_index": 1}, {"db_idx": 2036, "episode_idx": 6, "frame_idx": 313, "global_frame_idx": 2036, "task_index": 1}, {"db_idx": 2037, "episode_idx": 6, "frame_idx": 314, "global_frame_idx": 2037, "task_index": 1}, {"db_idx": 2038, "episode_idx": 6, "frame_idx": 315, "global_frame_idx": 2038, "task_index": 1}, {"db_idx": 2039, "episode_idx": 6, "frame_idx": 316, "global_frame_idx": 2039, "task_index": 1}, {"db_idx": 2040, "episode_idx": 6, "frame_idx": 317, "global_frame_idx": 2040, "task_index": 1}, {"db_idx": 2041, "episode_idx": 6, "frame_idx": 318, "global_frame_idx": 2041, "task_index": 1}, {"db_idx": 2042, "episode_idx": 6, "frame_idx": 319, "global_frame_idx": 2042, "task_index": 1}, {"db_idx": 2043, "episode_idx": 6, "frame_idx": 320, "global_frame_idx": 2043, "task_index": 1}, {"db_idx": 
2044, "episode_idx": 6, "frame_idx": 321, "global_frame_idx": 2044, "task_index": 1}, {"db_idx": 2045, "episode_idx": 6, "frame_idx": 322, "global_frame_idx": 2045, "task_index": 1}, {"db_idx": 2046, "episode_idx": 6, "frame_idx": 323, "global_frame_idx": 2046, "task_index": 1}, {"db_idx": 2047, "episode_idx": 6, "frame_idx": 324, "global_frame_idx": 2047, "task_index": 1}, {"db_idx": 2048, "episode_idx": 6, "frame_idx": 325, "global_frame_idx": 2048, "task_index": 1}, {"db_idx": 2049, "episode_idx": 6, "frame_idx": 326, "global_frame_idx": 2049, "task_index": 1}, {"db_idx": 2050, "episode_idx": 6, "frame_idx": 327, "global_frame_idx": 2050, "task_index": 1}, {"db_idx": 2051, "episode_idx": 6, "frame_idx": 328, "global_frame_idx": 2051, "task_index": 1}, {"db_idx": 2052, "episode_idx": 6, "frame_idx": 329, "global_frame_idx": 2052, "task_index": 1}, {"db_idx": 2053, "episode_idx": 6, "frame_idx": 330, "global_frame_idx": 2053, "task_index": 1}, {"db_idx": 2054, "episode_idx": 6, "frame_idx": 331, "global_frame_idx": 2054, "task_index": 1}, {"db_idx": 2055, "episode_idx": 6, "frame_idx": 332, "global_frame_idx": 2055, "task_index": 1}, {"db_idx": 2056, "episode_idx": 6, "frame_idx": 333, "global_frame_idx": 2056, "task_index": 1}, {"db_idx": 2057, "episode_idx": 6, "frame_idx": 334, "global_frame_idx": 2057, "task_index": 1}, {"db_idx": 2058, "episode_idx": 6, "frame_idx": 335, "global_frame_idx": 2058, "task_index": 1}, {"db_idx": 2059, "episode_idx": 6, "frame_idx": 336, "global_frame_idx": 2059, "task_index": 1}, {"db_idx": 2060, "episode_idx": 6, "frame_idx": 337, "global_frame_idx": 2060, "task_index": 1}, {"db_idx": 2061, "episode_idx": 6, "frame_idx": 338, "global_frame_idx": 2061, "task_index": 1}, {"db_idx": 2062, "episode_idx": 6, "frame_idx": 339, "global_frame_idx": 2062, "task_index": 1}, {"db_idx": 2063, "episode_idx": 6, "frame_idx": 340, "global_frame_idx": 2063, "task_index": 1}, {"db_idx": 2064, "episode_idx": 6, "frame_idx": 341, 
"global_frame_idx": 2064, "task_index": 1}, {"db_idx": 2065, "episode_idx": 6, "frame_idx": 342, "global_frame_idx": 2065, "task_index": 1}, {"db_idx": 2066, "episode_idx": 6, "frame_idx": 343, "global_frame_idx": 2066, "task_index": 1}, {"db_idx": 2067, "episode_idx": 6, "frame_idx": 344, "global_frame_idx": 2067, "task_index": 1}, {"db_idx": 2068, "episode_idx": 6, "frame_idx": 345, "global_frame_idx": 2068, "task_index": 1}, {"db_idx": 2069, "episode_idx": 6, "frame_idx": 346, "global_frame_idx": 2069, "task_index": 1}, {"db_idx": 2070, "episode_idx": 6, "frame_idx": 347, "global_frame_idx": 2070, "task_index": 1}, {"db_idx": 2071, "episode_idx": 6, "frame_idx": 348, "global_frame_idx": 2071, "task_index": 1}, {"db_idx": 2072, "episode_idx": 6, "frame_idx": 349, "global_frame_idx": 2072, "task_index": 1}, {"db_idx": 2073, "episode_idx": 6, "frame_idx": 350, "global_frame_idx": 2073, "task_index": 1}, {"db_idx": 2074, "episode_idx": 6, "frame_idx": 351, "global_frame_idx": 2074, "task_index": 1}, {"db_idx": 2075, "episode_idx": 6, "frame_idx": 352, "global_frame_idx": 2075, "task_index": 1}, {"db_idx": 2076, "episode_idx": 6, "frame_idx": 353, "global_frame_idx": 2076, "task_index": 1}, {"db_idx": 2077, "episode_idx": 6, "frame_idx": 354, "global_frame_idx": 2077, "task_index": 1}, {"db_idx": 2078, "episode_idx": 6, "frame_idx": 355, "global_frame_idx": 2078, "task_index": 1}, {"db_idx": 2079, "episode_idx": 6, "frame_idx": 356, "global_frame_idx": 2079, "task_index": 1}, {"db_idx": 2080, "episode_idx": 6, "frame_idx": 357, "global_frame_idx": 2080, "task_index": 1}, {"db_idx": 2081, "episode_idx": 6, "frame_idx": 358, "global_frame_idx": 2081, "task_index": 1}, {"db_idx": 2082, "episode_idx": 6, "frame_idx": 359, "global_frame_idx": 2082, "task_index": 1}, {"db_idx": 2083, "episode_idx": 6, "frame_idx": 360, "global_frame_idx": 2083, "task_index": 1}, {"db_idx": 2084, "episode_idx": 6, "frame_idx": 361, "global_frame_idx": 2084, "task_index": 1}, {"db_idx": 
2085, "episode_idx": 6, "frame_idx": 362, "global_frame_idx": 2085, "task_index": 1}, {"db_idx": 2086, "episode_idx": 6, "frame_idx": 363, "global_frame_idx": 2086, "task_index": 1}, {"db_idx": 2087, "episode_idx": 6, "frame_idx": 364, "global_frame_idx": 2087, "task_index": 1}, {"db_idx": 2088, "episode_idx": 6, "frame_idx": 365, "global_frame_idx": 2088, "task_index": 1}, {"db_idx": 2089, "episode_idx": 6, "frame_idx": 366, "global_frame_idx": 2089, "task_index": 1}, {"db_idx": 2090, "episode_idx": 6, "frame_idx": 367, "global_frame_idx": 2090, "task_index": 1}, {"db_idx": 2091, "episode_idx": 6, "frame_idx": 368, "global_frame_idx": 2091, "task_index": 1}, {"db_idx": 2092, "episode_idx": 6, "frame_idx": 369, "global_frame_idx": 2092, "task_index": 1}, {"db_idx": 2093, "episode_idx": 6, "frame_idx": 370, "global_frame_idx": 2093, "task_index": 1}, {"db_idx": 2094, "episode_idx": 6, "frame_idx": 371, "global_frame_idx": 2094, "task_index": 1}, {"db_idx": 2095, "episode_idx": 6, "frame_idx": 372, "global_frame_idx": 2095, "task_index": 1}, {"db_idx": 2096, "episode_idx": 6, "frame_idx": 373, "global_frame_idx": 2096, "task_index": 1}, {"db_idx": 2097, "episode_idx": 6, "frame_idx": 374, "global_frame_idx": 2097, "task_index": 1}, {"db_idx": 2098, "episode_idx": 6, "frame_idx": 375, "global_frame_idx": 2098, "task_index": 1}, {"db_idx": 2099, "episode_idx": 6, "frame_idx": 376, "global_frame_idx": 2099, "task_index": 1}, {"db_idx": 2100, "episode_idx": 6, "frame_idx": 377, "global_frame_idx": 2100, "task_index": 1}, {"db_idx": 2101, "episode_idx": 6, "frame_idx": 378, "global_frame_idx": 2101, "task_index": 1}, {"db_idx": 2102, "episode_idx": 6, "frame_idx": 379, "global_frame_idx": 2102, "task_index": 1}, {"db_idx": 2103, "episode_idx": 6, "frame_idx": 380, "global_frame_idx": 2103, "task_index": 1}, {"db_idx": 2104, "episode_idx": 6, "frame_idx": 381, "global_frame_idx": 2104, "task_index": 1}, {"db_idx": 2105, "episode_idx": 6, "frame_idx": 382, 
"global_frame_idx": 2105, "task_index": 1}, {"db_idx": 2106, "episode_idx": 6, "frame_idx": 383, "global_frame_idx": 2106, "task_index": 1}, {"db_idx": 2107, "episode_idx": 6, "frame_idx": 384, "global_frame_idx": 2107, "task_index": 1}, {"db_idx": 2108, "episode_idx": 6, "frame_idx": 385, "global_frame_idx": 2108, "task_index": 1}, {"db_idx": 2109, "episode_idx": 6, "frame_idx": 386, "global_frame_idx": 2109, "task_index": 1}, {"db_idx": 2110, "episode_idx": 6, "frame_idx": 387, "global_frame_idx": 2110, "task_index": 1}, {"db_idx": 2111, "episode_idx": 6, "frame_idx": 388, "global_frame_idx": 2111, "task_index": 1}, {"db_idx": 2112, "episode_idx": 6, "frame_idx": 389, "global_frame_idx": 2112, "task_index": 1}, {"db_idx": 2113, "episode_idx": 6, "frame_idx": 390, "global_frame_idx": 2113, "task_index": 1}, {"db_idx": 2114, "episode_idx": 6, "frame_idx": 391, "global_frame_idx": 2114, "task_index": 1}, {"db_idx": 2115, "episode_idx": 6, "frame_idx": 392, "global_frame_idx": 2115, "task_index": 1}, {"db_idx": 2116, "episode_idx": 6, "frame_idx": 393, "global_frame_idx": 2116, "task_index": 1}, {"db_idx": 2117, "episode_idx": 6, "frame_idx": 394, "global_frame_idx": 2117, "task_index": 1}, {"db_idx": 2118, "episode_idx": 6, "frame_idx": 395, "global_frame_idx": 2118, "task_index": 1}, {"db_idx": 2119, "episode_idx": 6, "frame_idx": 396, "global_frame_idx": 2119, "task_index": 1}, {"db_idx": 2120, "episode_idx": 6, "frame_idx": 397, "global_frame_idx": 2120, "task_index": 1}, {"db_idx": 2121, "episode_idx": 6, "frame_idx": 398, "global_frame_idx": 2121, "task_index": 1}, {"db_idx": 2122, "episode_idx": 6, "frame_idx": 399, "global_frame_idx": 2122, "task_index": 1}, {"db_idx": 2123, "episode_idx": 6, "frame_idx": 400, "global_frame_idx": 2123, "task_index": 1}, {"db_idx": 2124, "episode_idx": 6, "frame_idx": 401, "global_frame_idx": 2124, "task_index": 1}, {"db_idx": 2125, "episode_idx": 6, "frame_idx": 402, "global_frame_idx": 2125, "task_index": 1}, {"db_idx": 
2126, "episode_idx": 6, "frame_idx": 403, "global_frame_idx": 2126, "task_index": 1}, {"db_idx": 2127, "episode_idx": 6, "frame_idx": 404, "global_frame_idx": 2127, "task_index": 1}, {"db_idx": 2128, "episode_idx": 6, "frame_idx": 405, "global_frame_idx": 2128, "task_index": 1}, {"db_idx": 2129, "episode_idx": 6, "frame_idx": 406, "global_frame_idx": 2129, "task_index": 1}, {"db_idx": 2130, "episode_idx": 6, "frame_idx": 407, "global_frame_idx": 2130, "task_index": 1}, {"db_idx": 2131, "episode_idx": 6, "frame_idx": 408, "global_frame_idx": 2131, "task_index": 1}, {"db_idx": 2132, "episode_idx": 6, "frame_idx": 409, "global_frame_idx": 2132, "task_index": 1}, {"db_idx": 2133, "episode_idx": 6, "frame_idx": 410, "global_frame_idx": 2133, "task_index": 1}, {"db_idx": 2134, "episode_idx": 6, "frame_idx": 411, "global_frame_idx": 2134, "task_index": 1}, {"db_idx": 2135, "episode_idx": 6, "frame_idx": 412, "global_frame_idx": 2135, "task_index": 1}, {"db_idx": 2136, "episode_idx": 6, "frame_idx": 413, "global_frame_idx": 2136, "task_index": 1}, {"db_idx": 2137, "episode_idx": 6, "frame_idx": 414, "global_frame_idx": 2137, "task_index": 1}, {"db_idx": 2138, "episode_idx": 6, "frame_idx": 415, "global_frame_idx": 2138, "task_index": 1}, {"db_idx": 2139, "episode_idx": 6, "frame_idx": 416, "global_frame_idx": 2139, "task_index": 1}, {"db_idx": 2140, "episode_idx": 6, "frame_idx": 417, "global_frame_idx": 2140, "task_index": 1}, {"db_idx": 2141, "episode_idx": 6, "frame_idx": 418, "global_frame_idx": 2141, "task_index": 1}, {"db_idx": 2142, "episode_idx": 6, "frame_idx": 419, "global_frame_idx": 2142, "task_index": 1}, {"db_idx": 2143, "episode_idx": 6, "frame_idx": 420, "global_frame_idx": 2143, "task_index": 1}, {"db_idx": 2144, "episode_idx": 6, "frame_idx": 421, "global_frame_idx": 2144, "task_index": 1}, {"db_idx": 2145, "episode_idx": 6, "frame_idx": 422, "global_frame_idx": 2145, "task_index": 1}, {"db_idx": 2146, "episode_idx": 6, "frame_idx": 423, 
"global_frame_idx": 2146, "task_index": 1}, {"db_idx": 2147, "episode_idx": 6, "frame_idx": 424, "global_frame_idx": 2147, "task_index": 1}, {"db_idx": 2148, "episode_idx": 6, "frame_idx": 425, "global_frame_idx": 2148, "task_index": 1}, {"db_idx": 2149, "episode_idx": 6, "frame_idx": 426, "global_frame_idx": 2149, "task_index": 1}, {"db_idx": 2150, "episode_idx": 6, "frame_idx": 427, "global_frame_idx": 2150, "task_index": 1}, {"db_idx": 2151, "episode_idx": 6, "frame_idx": 428, "global_frame_idx": 2151, "task_index": 1}, {"db_idx": 2152, "episode_idx": 6, "frame_idx": 429, "global_frame_idx": 2152, "task_index": 1}, {"db_idx": 2153, "episode_idx": 6, "frame_idx": 430, "global_frame_idx": 2153, "task_index": 1}, {"db_idx": 2154, "episode_idx": 6, "frame_idx": 431, "global_frame_idx": 2154, "task_index": 1}, {"db_idx": 2155, "episode_idx": 6, "frame_idx": 432, "global_frame_idx": 2155, "task_index": 1}, {"db_idx": 2156, "episode_idx": 6, "frame_idx": 433, "global_frame_idx": 2156, "task_index": 1}, {"db_idx": 2157, "episode_idx": 6, "frame_idx": 434, "global_frame_idx": 2157, "task_index": 1}, {"db_idx": 2158, "episode_idx": 6, "frame_idx": 435, "global_frame_idx": 2158, "task_index": 1}, {"db_idx": 2159, "episode_idx": 7, "frame_idx": 0, "global_frame_idx": 2159, "task_index": 1}, {"db_idx": 2160, "episode_idx": 7, "frame_idx": 1, "global_frame_idx": 2160, "task_index": 1}, {"db_idx": 2161, "episode_idx": 7, "frame_idx": 2, "global_frame_idx": 2161, "task_index": 1}, {"db_idx": 2162, "episode_idx": 7, "frame_idx": 3, "global_frame_idx": 2162, "task_index": 1}, {"db_idx": 2163, "episode_idx": 7, "frame_idx": 4, "global_frame_idx": 2163, "task_index": 1}, {"db_idx": 2164, "episode_idx": 7, "frame_idx": 5, "global_frame_idx": 2164, "task_index": 1}, {"db_idx": 2165, "episode_idx": 7, "frame_idx": 6, "global_frame_idx": 2165, "task_index": 1}, {"db_idx": 2166, "episode_idx": 7, "frame_idx": 7, "global_frame_idx": 2166, "task_index": 1}, {"db_idx": 2167, "episode_idx": 
7, "frame_idx": 8, "global_frame_idx": 2167, "task_index": 1}, {"db_idx": 2168, "episode_idx": 7, "frame_idx": 9, "global_frame_idx": 2168, "task_index": 1}, {"db_idx": 2169, "episode_idx": 7, "frame_idx": 10, "global_frame_idx": 2169, "task_index": 1}, {"db_idx": 2170, "episode_idx": 7, "frame_idx": 11, "global_frame_idx": 2170, "task_index": 1}, {"db_idx": 2171, "episode_idx": 7, "frame_idx": 12, "global_frame_idx": 2171, "task_index": 1}, {"db_idx": 2172, "episode_idx": 7, "frame_idx": 13, "global_frame_idx": 2172, "task_index": 1}, {"db_idx": 2173, "episode_idx": 7, "frame_idx": 14, "global_frame_idx": 2173, "task_index": 1}, {"db_idx": 2174, "episode_idx": 7, "frame_idx": 15, "global_frame_idx": 2174, "task_index": 1}, {"db_idx": 2175, "episode_idx": 7, "frame_idx": 16, "global_frame_idx": 2175, "task_index": 1}, {"db_idx": 2176, "episode_idx": 7, "frame_idx": 17, "global_frame_idx": 2176, "task_index": 1}, {"db_idx": 2177, "episode_idx": 7, "frame_idx": 18, "global_frame_idx": 2177, "task_index": 1}, {"db_idx": 2178, "episode_idx": 7, "frame_idx": 19, "global_frame_idx": 2178, "task_index": 1}, {"db_idx": 2179, "episode_idx": 7, "frame_idx": 20, "global_frame_idx": 2179, "task_index": 1}, {"db_idx": 2180, "episode_idx": 7, "frame_idx": 21, "global_frame_idx": 2180, "task_index": 1}, {"db_idx": 2181, "episode_idx": 7, "frame_idx": 22, "global_frame_idx": 2181, "task_index": 1}, {"db_idx": 2182, "episode_idx": 7, "frame_idx": 23, "global_frame_idx": 2182, "task_index": 1}, {"db_idx": 2183, "episode_idx": 7, "frame_idx": 24, "global_frame_idx": 2183, "task_index": 1}, {"db_idx": 2184, "episode_idx": 7, "frame_idx": 25, "global_frame_idx": 2184, "task_index": 1}, {"db_idx": 2185, "episode_idx": 7, "frame_idx": 26, "global_frame_idx": 2185, "task_index": 1}, {"db_idx": 2186, "episode_idx": 7, "frame_idx": 27, "global_frame_idx": 2186, "task_index": 1}, {"db_idx": 2187, "episode_idx": 7, "frame_idx": 28, "global_frame_idx": 2187, "task_index": 1}, {"db_idx": 2188, 
"episode_idx": 7, "frame_idx": 29, "global_frame_idx": 2188, "task_index": 1}, {"db_idx": 2189, "episode_idx": 7, "frame_idx": 30, "global_frame_idx": 2189, "task_index": 1}, {"db_idx": 2190, "episode_idx": 7, "frame_idx": 31, "global_frame_idx": 2190, "task_index": 1}, {"db_idx": 2191, "episode_idx": 7, "frame_idx": 32, "global_frame_idx": 2191, "task_index": 1}, {"db_idx": 2192, "episode_idx": 7, "frame_idx": 33, "global_frame_idx": 2192, "task_index": 1}, {"db_idx": 2193, "episode_idx": 7, "frame_idx": 34, "global_frame_idx": 2193, "task_index": 1}, {"db_idx": 2194, "episode_idx": 7, "frame_idx": 35, "global_frame_idx": 2194, "task_index": 1}, {"db_idx": 2195, "episode_idx": 7, "frame_idx": 36, "global_frame_idx": 2195, "task_index": 1}, {"db_idx": 2196, "episode_idx": 7, "frame_idx": 37, "global_frame_idx": 2196, "task_index": 1}, {"db_idx": 2197, "episode_idx": 7, "frame_idx": 38, "global_frame_idx": 2197, "task_index": 1}, {"db_idx": 2198, "episode_idx": 7, "frame_idx": 39, "global_frame_idx": 2198, "task_index": 1}, {"db_idx": 2199, "episode_idx": 7, "frame_idx": 40, "global_frame_idx": 2199, "task_index": 1}, {"db_idx": 2200, "episode_idx": 7, "frame_idx": 41, "global_frame_idx": 2200, "task_index": 1}, {"db_idx": 2201, "episode_idx": 7, "frame_idx": 42, "global_frame_idx": 2201, "task_index": 1}, {"db_idx": 2202, "episode_idx": 7, "frame_idx": 43, "global_frame_idx": 2202, "task_index": 1}, {"db_idx": 2203, "episode_idx": 7, "frame_idx": 44, "global_frame_idx": 2203, "task_index": 1}, {"db_idx": 2204, "episode_idx": 7, "frame_idx": 45, "global_frame_idx": 2204, "task_index": 1}, {"db_idx": 2205, "episode_idx": 7, "frame_idx": 46, "global_frame_idx": 2205, "task_index": 1}, {"db_idx": 2206, "episode_idx": 7, "frame_idx": 47, "global_frame_idx": 2206, "task_index": 1}, {"db_idx": 2207, "episode_idx": 7, "frame_idx": 48, "global_frame_idx": 2207, "task_index": 1}, {"db_idx": 2208, "episode_idx": 7, "frame_idx": 49, "global_frame_idx": 2208, "task_index": 1}, 
{"db_idx": 2209, "episode_idx": 7, "frame_idx": 50, "global_frame_idx": 2209, "task_index": 1}, {"db_idx": 2210, "episode_idx": 7, "frame_idx": 51, "global_frame_idx": 2210, "task_index": 1}, {"db_idx": 2211, "episode_idx": 7, "frame_idx": 52, "global_frame_idx": 2211, "task_index": 1}, {"db_idx": 2212, "episode_idx": 7, "frame_idx": 53, "global_frame_idx": 2212, "task_index": 1}, {"db_idx": 2213, "episode_idx": 7, "frame_idx": 54, "global_frame_idx": 2213, "task_index": 1}, {"db_idx": 2214, "episode_idx": 7, "frame_idx": 55, "global_frame_idx": 2214, "task_index": 1}, {"db_idx": 2215, "episode_idx": 7, "frame_idx": 56, "global_frame_idx": 2215, "task_index": 1}, {"db_idx": 2216, "episode_idx": 7, "frame_idx": 57, "global_frame_idx": 2216, "task_index": 1}, {"db_idx": 2217, "episode_idx": 7, "frame_idx": 58, "global_frame_idx": 2217, "task_index": 1}, {"db_idx": 2218, "episode_idx": 7, "frame_idx": 59, "global_frame_idx": 2218, "task_index": 1}, {"db_idx": 2219, "episode_idx": 7, "frame_idx": 60, "global_frame_idx": 2219, "task_index": 1}, {"db_idx": 2220, "episode_idx": 7, "frame_idx": 61, "global_frame_idx": 2220, "task_index": 1}, {"db_idx": 2221, "episode_idx": 7, "frame_idx": 62, "global_frame_idx": 2221, "task_index": 1}, {"db_idx": 2222, "episode_idx": 7, "frame_idx": 63, "global_frame_idx": 2222, "task_index": 1}, {"db_idx": 2223, "episode_idx": 7, "frame_idx": 64, "global_frame_idx": 2223, "task_index": 1}, {"db_idx": 2224, "episode_idx": 7, "frame_idx": 65, "global_frame_idx": 2224, "task_index": 1}, {"db_idx": 2225, "episode_idx": 7, "frame_idx": 66, "global_frame_idx": 2225, "task_index": 1}, {"db_idx": 2226, "episode_idx": 7, "frame_idx": 67, "global_frame_idx": 2226, "task_index": 1}, {"db_idx": 2227, "episode_idx": 7, "frame_idx": 68, "global_frame_idx": 2227, "task_index": 1}, {"db_idx": 2228, "episode_idx": 7, "frame_idx": 69, "global_frame_idx": 2228, "task_index": 1}, {"db_idx": 2229, "episode_idx": 7, "frame_idx": 70, "global_frame_idx": 2229, 
"task_index": 1}, {"db_idx": 2230, "episode_idx": 7, "frame_idx": 71, "global_frame_idx": 2230, "task_index": 1}, {"db_idx": 2231, "episode_idx": 7, "frame_idx": 72, "global_frame_idx": 2231, "task_index": 1}, {"db_idx": 2232, "episode_idx": 7, "frame_idx": 73, "global_frame_idx": 2232, "task_index": 1}, {"db_idx": 2233, "episode_idx": 7, "frame_idx": 74, "global_frame_idx": 2233, "task_index": 1}, {"db_idx": 2234, "episode_idx": 7, "frame_idx": 75, "global_frame_idx": 2234, "task_index": 1}, {"db_idx": 2235, "episode_idx": 7, "frame_idx": 76, "global_frame_idx": 2235, "task_index": 1}, {"db_idx": 2236, "episode_idx": 7, "frame_idx": 77, "global_frame_idx": 2236, "task_index": 1}, {"db_idx": 2237, "episode_idx": 7, "frame_idx": 78, "global_frame_idx": 2237, "task_index": 1}, {"db_idx": 2238, "episode_idx": 7, "frame_idx": 79, "global_frame_idx": 2238, "task_index": 1}, {"db_idx": 2239, "episode_idx": 7, "frame_idx": 80, "global_frame_idx": 2239, "task_index": 1}, {"db_idx": 2240, "episode_idx": 7, "frame_idx": 81, "global_frame_idx": 2240, "task_index": 1}, {"db_idx": 2241, "episode_idx": 7, "frame_idx": 82, "global_frame_idx": 2241, "task_index": 1}, {"db_idx": 2242, "episode_idx": 7, "frame_idx": 83, "global_frame_idx": 2242, "task_index": 1}, {"db_idx": 2243, "episode_idx": 7, "frame_idx": 84, "global_frame_idx": 2243, "task_index": 1}, {"db_idx": 2244, "episode_idx": 7, "frame_idx": 85, "global_frame_idx": 2244, "task_index": 1}, {"db_idx": 2245, "episode_idx": 7, "frame_idx": 86, "global_frame_idx": 2245, "task_index": 1}, {"db_idx": 2246, "episode_idx": 7, "frame_idx": 87, "global_frame_idx": 2246, "task_index": 1}, {"db_idx": 2247, "episode_idx": 7, "frame_idx": 88, "global_frame_idx": 2247, "task_index": 1}, {"db_idx": 2248, "episode_idx": 7, "frame_idx": 89, "global_frame_idx": 2248, "task_index": 1}, {"db_idx": 2249, "episode_idx": 7, "frame_idx": 90, "global_frame_idx": 2249, "task_index": 1}, {"db_idx": 2250, "episode_idx": 7, "frame_idx": 91, 
"global_frame_idx": 2250, "task_index": 1}, {"db_idx": 2251, "episode_idx": 7, "frame_idx": 92, "global_frame_idx": 2251, "task_index": 1}, {"db_idx": 2252, "episode_idx": 7, "frame_idx": 93, "global_frame_idx": 2252, "task_index": 1}, {"db_idx": 2253, "episode_idx": 7, "frame_idx": 94, "global_frame_idx": 2253, "task_index": 1}, {"db_idx": 2254, "episode_idx": 7, "frame_idx": 95, "global_frame_idx": 2254, "task_index": 1}, {"db_idx": 2255, "episode_idx": 7, "frame_idx": 96, "global_frame_idx": 2255, "task_index": 1}, {"db_idx": 2256, "episode_idx": 7, "frame_idx": 97, "global_frame_idx": 2256, "task_index": 1}, {"db_idx": 2257, "episode_idx": 7, "frame_idx": 98, "global_frame_idx": 2257, "task_index": 1}, {"db_idx": 2258, "episode_idx": 7, "frame_idx": 99, "global_frame_idx": 2258, "task_index": 1}, {"db_idx": 2259, "episode_idx": 7, "frame_idx": 100, "global_frame_idx": 2259, "task_index": 1}, {"db_idx": 2260, "episode_idx": 7, "frame_idx": 101, "global_frame_idx": 2260, "task_index": 1}, {"db_idx": 2261, "episode_idx": 7, "frame_idx": 102, "global_frame_idx": 2261, "task_index": 1}, {"db_idx": 2262, "episode_idx": 7, "frame_idx": 103, "global_frame_idx": 2262, "task_index": 1}, {"db_idx": 2263, "episode_idx": 7, "frame_idx": 104, "global_frame_idx": 2263, "task_index": 1}, {"db_idx": 2264, "episode_idx": 7, "frame_idx": 105, "global_frame_idx": 2264, "task_index": 1}, {"db_idx": 2265, "episode_idx": 7, "frame_idx": 106, "global_frame_idx": 2265, "task_index": 1}, {"db_idx": 2266, "episode_idx": 7, "frame_idx": 107, "global_frame_idx": 2266, "task_index": 1}, {"db_idx": 2267, "episode_idx": 7, "frame_idx": 108, "global_frame_idx": 2267, "task_index": 1}, {"db_idx": 2268, "episode_idx": 7, "frame_idx": 109, "global_frame_idx": 2268, "task_index": 1}, {"db_idx": 2269, "episode_idx": 7, "frame_idx": 110, "global_frame_idx": 2269, "task_index": 1}, {"db_idx": 2270, "episode_idx": 7, "frame_idx": 111, "global_frame_idx": 2270, "task_index": 1}, {"db_idx": 2271, 
"episode_idx": 7, "frame_idx": 112, "global_frame_idx": 2271, "task_index": 1}, {"db_idx": 2272, "episode_idx": 7, "frame_idx": 113, "global_frame_idx": 2272, "task_index": 1}, {"db_idx": 2273, "episode_idx": 7, "frame_idx": 114, "global_frame_idx": 2273, "task_index": 1}, {"db_idx": 2274, "episode_idx": 7, "frame_idx": 115, "global_frame_idx": 2274, "task_index": 1}, {"db_idx": 2275, "episode_idx": 7, "frame_idx": 116, "global_frame_idx": 2275, "task_index": 1}, {"db_idx": 2276, "episode_idx": 7, "frame_idx": 117, "global_frame_idx": 2276, "task_index": 1}, {"db_idx": 2277, "episode_idx": 7, "frame_idx": 118, "global_frame_idx": 2277, "task_index": 1}, {"db_idx": 2278, "episode_idx": 7, "frame_idx": 119, "global_frame_idx": 2278, "task_index": 1}, {"db_idx": 2279, "episode_idx": 7, "frame_idx": 120, "global_frame_idx": 2279, "task_index": 1}, {"db_idx": 2280, "episode_idx": 7, "frame_idx": 121, "global_frame_idx": 2280, "task_index": 1}, {"db_idx": 2281, "episode_idx": 7, "frame_idx": 122, "global_frame_idx": 2281, "task_index": 1}, {"db_idx": 2282, "episode_idx": 7, "frame_idx": 123, "global_frame_idx": 2282, "task_index": 1}, {"db_idx": 2283, "episode_idx": 7, "frame_idx": 124, "global_frame_idx": 2283, "task_index": 1}, {"db_idx": 2284, "episode_idx": 7, "frame_idx": 125, "global_frame_idx": 2284, "task_index": 1}, {"db_idx": 2285, "episode_idx": 7, "frame_idx": 126, "global_frame_idx": 2285, "task_index": 1}, {"db_idx": 2286, "episode_idx": 7, "frame_idx": 127, "global_frame_idx": 2286, "task_index": 1}, {"db_idx": 2287, "episode_idx": 7, "frame_idx": 128, "global_frame_idx": 2287, "task_index": 1}, {"db_idx": 2288, "episode_idx": 7, "frame_idx": 129, "global_frame_idx": 2288, "task_index": 1}, {"db_idx": 2289, "episode_idx": 7, "frame_idx": 130, "global_frame_idx": 2289, "task_index": 1}, {"db_idx": 2290, "episode_idx": 7, "frame_idx": 131, "global_frame_idx": 2290, "task_index": 1}, {"db_idx": 2291, "episode_idx": 7, "frame_idx": 132, "global_frame_idx": 
2291, "task_index": 1}, {"db_idx": 2292, "episode_idx": 7, "frame_idx": 133, "global_frame_idx": 2292, "task_index": 1}, {"db_idx": 2293, "episode_idx": 7, "frame_idx": 134, "global_frame_idx": 2293, "task_index": 1}, {"db_idx": 2294, "episode_idx": 7, "frame_idx": 135, "global_frame_idx": 2294, "task_index": 1}, {"db_idx": 2295, "episode_idx": 7, "frame_idx": 136, "global_frame_idx": 2295, "task_index": 1}, {"db_idx": 2296, "episode_idx": 7, "frame_idx": 137, "global_frame_idx": 2296, "task_index": 1}, {"db_idx": 2297, "episode_idx": 7, "frame_idx": 138, "global_frame_idx": 2297, "task_index": 1}, {"db_idx": 2298, "episode_idx": 7, "frame_idx": 139, "global_frame_idx": 2298, "task_index": 1}, {"db_idx": 2299, "episode_idx": 7, "frame_idx": 140, "global_frame_idx": 2299, "task_index": 1}, {"db_idx": 2300, "episode_idx": 7, "frame_idx": 141, "global_frame_idx": 2300, "task_index": 1}, {"db_idx": 2301, "episode_idx": 7, "frame_idx": 142, "global_frame_idx": 2301, "task_index": 1}, {"db_idx": 2302, "episode_idx": 7, "frame_idx": 143, "global_frame_idx": 2302, "task_index": 1}, {"db_idx": 2303, "episode_idx": 7, "frame_idx": 144, "global_frame_idx": 2303, "task_index": 1}, {"db_idx": 2304, "episode_idx": 7, "frame_idx": 145, "global_frame_idx": 2304, "task_index": 1}, {"db_idx": 2305, "episode_idx": 7, "frame_idx": 146, "global_frame_idx": 2305, "task_index": 1}, {"db_idx": 2306, "episode_idx": 7, "frame_idx": 147, "global_frame_idx": 2306, "task_index": 1}, {"db_idx": 2307, "episode_idx": 7, "frame_idx": 148, "global_frame_idx": 2307, "task_index": 1}, {"db_idx": 2308, "episode_idx": 7, "frame_idx": 149, "global_frame_idx": 2308, "task_index": 1}, {"db_idx": 2309, "episode_idx": 7, "frame_idx": 150, "global_frame_idx": 2309, "task_index": 1}, {"db_idx": 2310, "episode_idx": 7, "frame_idx": 151, "global_frame_idx": 2310, "task_index": 1}, {"db_idx": 2311, "episode_idx": 7, "frame_idx": 152, "global_frame_idx": 2311, "task_index": 1}, {"db_idx": 2312, "episode_idx": 7, 
"frame_idx": 153, "global_frame_idx": 2312, "task_index": 1}, {"db_idx": 2313, "episode_idx": 7, "frame_idx": 154, "global_frame_idx": 2313, "task_index": 1}, {"db_idx": 2314, "episode_idx": 7, "frame_idx": 155, "global_frame_idx": 2314, "task_index": 1}, {"db_idx": 2315, "episode_idx": 7, "frame_idx": 156, "global_frame_idx": 2315, "task_index": 1}, {"db_idx": 2316, "episode_idx": 7, "frame_idx": 157, "global_frame_idx": 2316, "task_index": 1}, {"db_idx": 2317, "episode_idx": 7, "frame_idx": 158, "global_frame_idx": 2317, "task_index": 1}, {"db_idx": 2318, "episode_idx": 7, "frame_idx": 159, "global_frame_idx": 2318, "task_index": 1}, {"db_idx": 2319, "episode_idx": 7, "frame_idx": 160, "global_frame_idx": 2319, "task_index": 1}, {"db_idx": 2320, "episode_idx": 7, "frame_idx": 161, "global_frame_idx": 2320, "task_index": 1}, {"db_idx": 2321, "episode_idx": 7, "frame_idx": 162, "global_frame_idx": 2321, "task_index": 1}, {"db_idx": 2322, "episode_idx": 7, "frame_idx": 163, "global_frame_idx": 2322, "task_index": 1}, {"db_idx": 2323, "episode_idx": 7, "frame_idx": 164, "global_frame_idx": 2323, "task_index": 1}, {"db_idx": 2324, "episode_idx": 7, "frame_idx": 165, "global_frame_idx": 2324, "task_index": 1}, {"db_idx": 2325, "episode_idx": 7, "frame_idx": 166, "global_frame_idx": 2325, "task_index": 1}, {"db_idx": 2326, "episode_idx": 7, "frame_idx": 167, "global_frame_idx": 2326, "task_index": 1}, {"db_idx": 2327, "episode_idx": 7, "frame_idx": 168, "global_frame_idx": 2327, "task_index": 1}, {"db_idx": 2328, "episode_idx": 7, "frame_idx": 169, "global_frame_idx": 2328, "task_index": 1}, {"db_idx": 2329, "episode_idx": 7, "frame_idx": 170, "global_frame_idx": 2329, "task_index": 1}, {"db_idx": 2330, "episode_idx": 7, "frame_idx": 171, "global_frame_idx": 2330, "task_index": 1}, {"db_idx": 2331, "episode_idx": 7, "frame_idx": 172, "global_frame_idx": 2331, "task_index": 1}, {"db_idx": 2332, "episode_idx": 7, "frame_idx": 173, "global_frame_idx": 2332, "task_index": 
1}, {"db_idx": 2333, "episode_idx": 7, "frame_idx": 174, "global_frame_idx": 2333, "task_index": 1}, {"db_idx": 2334, "episode_idx": 7, "frame_idx": 175, "global_frame_idx": 2334, "task_index": 1}, {"db_idx": 2335, "episode_idx": 7, "frame_idx": 176, "global_frame_idx": 2335, "task_index": 1}, {"db_idx": 2336, "episode_idx": 7, "frame_idx": 177, "global_frame_idx": 2336, "task_index": 1}, {"db_idx": 2337, "episode_idx": 7, "frame_idx": 178, "global_frame_idx": 2337, "task_index": 1}, {"db_idx": 2338, "episode_idx": 7, "frame_idx": 179, "global_frame_idx": 2338, "task_index": 1}, {"db_idx": 2339, "episode_idx": 7, "frame_idx": 180, "global_frame_idx": 2339, "task_index": 1}, {"db_idx": 2340, "episode_idx": 7, "frame_idx": 181, "global_frame_idx": 2340, "task_index": 1}, {"db_idx": 2341, "episode_idx": 7, "frame_idx": 182, "global_frame_idx": 2341, "task_index": 1}, {"db_idx": 2342, "episode_idx": 7, "frame_idx": 183, "global_frame_idx": 2342, "task_index": 1}, {"db_idx": 2343, "episode_idx": 7, "frame_idx": 184, "global_frame_idx": 2343, "task_index": 1}, {"db_idx": 2344, "episode_idx": 7, "frame_idx": 185, "global_frame_idx": 2344, "task_index": 1}, {"db_idx": 2345, "episode_idx": 7, "frame_idx": 186, "global_frame_idx": 2345, "task_index": 1}, {"db_idx": 2346, "episode_idx": 7, "frame_idx": 187, "global_frame_idx": 2346, "task_index": 1}, {"db_idx": 2347, "episode_idx": 7, "frame_idx": 188, "global_frame_idx": 2347, "task_index": 1}, {"db_idx": 2348, "episode_idx": 7, "frame_idx": 189, "global_frame_idx": 2348, "task_index": 1}, {"db_idx": 2349, "episode_idx": 7, "frame_idx": 190, "global_frame_idx": 2349, "task_index": 1}, {"db_idx": 2350, "episode_idx": 7, "frame_idx": 191, "global_frame_idx": 2350, "task_index": 1}, {"db_idx": 2351, "episode_idx": 7, "frame_idx": 192, "global_frame_idx": 2351, "task_index": 1}, {"db_idx": 2352, "episode_idx": 7, "frame_idx": 193, "global_frame_idx": 2352, "task_index": 1}, {"db_idx": 2353, "episode_idx": 7, "frame_idx": 194, 
"global_frame_idx": 2353, "task_index": 1}, {"db_idx": 2354, "episode_idx": 7, "frame_idx": 195, "global_frame_idx": 2354, "task_index": 1}, {"db_idx": 2355, "episode_idx": 7, "frame_idx": 196, "global_frame_idx": 2355, "task_index": 1}, {"db_idx": 2356, "episode_idx": 7, "frame_idx": 197, "global_frame_idx": 2356, "task_index": 1}, {"db_idx": 2357, "episode_idx": 7, "frame_idx": 198, "global_frame_idx": 2357, "task_index": 1}, {"db_idx": 2358, "episode_idx": 7, "frame_idx": 199, "global_frame_idx": 2358, "task_index": 1}, {"db_idx": 2359, "episode_idx": 7, "frame_idx": 200, "global_frame_idx": 2359, "task_index": 1}, {"db_idx": 2360, "episode_idx": 7, "frame_idx": 201, "global_frame_idx": 2360, "task_index": 1}, {"db_idx": 2361, "episode_idx": 7, "frame_idx": 202, "global_frame_idx": 2361, "task_index": 1}, {"db_idx": 2362, "episode_idx": 7, "frame_idx": 203, "global_frame_idx": 2362, "task_index": 1}, {"db_idx": 2363, "episode_idx": 7, "frame_idx": 204, "global_frame_idx": 2363, "task_index": 1}, {"db_idx": 2364, "episode_idx": 7, "frame_idx": 205, "global_frame_idx": 2364, "task_index": 1}, {"db_idx": 2365, "episode_idx": 7, "frame_idx": 206, "global_frame_idx": 2365, "task_index": 1}, {"db_idx": 2366, "episode_idx": 7, "frame_idx": 207, "global_frame_idx": 2366, "task_index": 1}, {"db_idx": 2367, "episode_idx": 7, "frame_idx": 208, "global_frame_idx": 2367, "task_index": 1}, {"db_idx": 2368, "episode_idx": 7, "frame_idx": 209, "global_frame_idx": 2368, "task_index": 1}, {"db_idx": 2369, "episode_idx": 7, "frame_idx": 210, "global_frame_idx": 2369, "task_index": 1}, {"db_idx": 2370, "episode_idx": 7, "frame_idx": 211, "global_frame_idx": 2370, "task_index": 1}, {"db_idx": 2371, "episode_idx": 7, "frame_idx": 212, "global_frame_idx": 2371, "task_index": 1}, {"db_idx": 2372, "episode_idx": 7, "frame_idx": 213, "global_frame_idx": 2372, "task_index": 1}, {"db_idx": 2373, "episode_idx": 7, "frame_idx": 214, "global_frame_idx": 2373, "task_index": 1}, {"db_idx": 
2374, "episode_idx": 7, "frame_idx": 215, "global_frame_idx": 2374, "task_index": 1}, {"db_idx": 2375, "episode_idx": 7, "frame_idx": 216, "global_frame_idx": 2375, "task_index": 1}, {"db_idx": 2376, "episode_idx": 7, "frame_idx": 217, "global_frame_idx": 2376, "task_index": 1}, {"db_idx": 2377, "episode_idx": 7, "frame_idx": 218, "global_frame_idx": 2377, "task_index": 1}, {"db_idx": 2378, "episode_idx": 7, "frame_idx": 219, "global_frame_idx": 2378, "task_index": 1}, {"db_idx": 2379, "episode_idx": 7, "frame_idx": 220, "global_frame_idx": 2379, "task_index": 1}, {"db_idx": 2380, "episode_idx": 7, "frame_idx": 221, "global_frame_idx": 2380, "task_index": 1}, {"db_idx": 2381, "episode_idx": 7, "frame_idx": 222, "global_frame_idx": 2381, "task_index": 1}, {"db_idx": 2382, "episode_idx": 7, "frame_idx": 223, "global_frame_idx": 2382, "task_index": 1}, {"db_idx": 2383, "episode_idx": 7, "frame_idx": 224, "global_frame_idx": 2383, "task_index": 1}, {"db_idx": 2384, "episode_idx": 7, "frame_idx": 225, "global_frame_idx": 2384, "task_index": 1}, {"db_idx": 2385, "episode_idx": 7, "frame_idx": 226, "global_frame_idx": 2385, "task_index": 1}, {"db_idx": 2386, "episode_idx": 7, "frame_idx": 227, "global_frame_idx": 2386, "task_index": 1}, {"db_idx": 2387, "episode_idx": 7, "frame_idx": 228, "global_frame_idx": 2387, "task_index": 1}, {"db_idx": 2388, "episode_idx": 7, "frame_idx": 229, "global_frame_idx": 2388, "task_index": 1}, {"db_idx": 2389, "episode_idx": 7, "frame_idx": 230, "global_frame_idx": 2389, "task_index": 1}, {"db_idx": 2390, "episode_idx": 7, "frame_idx": 231, "global_frame_idx": 2390, "task_index": 1}, {"db_idx": 2391, "episode_idx": 7, "frame_idx": 232, "global_frame_idx": 2391, "task_index": 1}, {"db_idx": 2392, "episode_idx": 7, "frame_idx": 233, "global_frame_idx": 2392, "task_index": 1}, {"db_idx": 2393, "episode_idx": 7, "frame_idx": 234, "global_frame_idx": 2393, "task_index": 1}, {"db_idx": 2394, "episode_idx": 7, "frame_idx": 235, 
"global_frame_idx": 2394, "task_index": 1}, {"db_idx": 2395, "episode_idx": 7, "frame_idx": 236, "global_frame_idx": 2395, "task_index": 1}, {"db_idx": 2396, "episode_idx": 7, "frame_idx": 237, "global_frame_idx": 2396, "task_index": 1}, {"db_idx": 2397, "episode_idx": 7, "frame_idx": 238, "global_frame_idx": 2397, "task_index": 1}, {"db_idx": 2398, "episode_idx": 7, "frame_idx": 239, "global_frame_idx": 2398, "task_index": 1}, {"db_idx": 2399, "episode_idx": 7, "frame_idx": 240, "global_frame_idx": 2399, "task_index": 1}, {"db_idx": 2400, "episode_idx": 7, "frame_idx": 241, "global_frame_idx": 2400, "task_index": 1}, {"db_idx": 2401, "episode_idx": 7, "frame_idx": 242, "global_frame_idx": 2401, "task_index": 1}, {"db_idx": 2402, "episode_idx": 7, "frame_idx": 243, "global_frame_idx": 2402, "task_index": 1}, {"db_idx": 2403, "episode_idx": 7, "frame_idx": 244, "global_frame_idx": 2403, "task_index": 1}, {"db_idx": 2404, "episode_idx": 7, "frame_idx": 245, "global_frame_idx": 2404, "task_index": 1}, {"db_idx": 2405, "episode_idx": 7, "frame_idx": 246, "global_frame_idx": 2405, "task_index": 1}, {"db_idx": 2406, "episode_idx": 7, "frame_idx": 247, "global_frame_idx": 2406, "task_index": 1}, {"db_idx": 2407, "episode_idx": 7, "frame_idx": 248, "global_frame_idx": 2407, "task_index": 1}, {"db_idx": 2408, "episode_idx": 7, "frame_idx": 249, "global_frame_idx": 2408, "task_index": 1}, {"db_idx": 2409, "episode_idx": 7, "frame_idx": 250, "global_frame_idx": 2409, "task_index": 1}, {"db_idx": 2410, "episode_idx": 7, "frame_idx": 251, "global_frame_idx": 2410, "task_index": 1}, {"db_idx": 2411, "episode_idx": 7, "frame_idx": 252, "global_frame_idx": 2411, "task_index": 1}, {"db_idx": 2412, "episode_idx": 7, "frame_idx": 253, "global_frame_idx": 2412, "task_index": 1}, {"db_idx": 2413, "episode_idx": 7, "frame_idx": 254, "global_frame_idx": 2413, "task_index": 1}, {"db_idx": 2414, "episode_idx": 7, "frame_idx": 255, "global_frame_idx": 2414, "task_index": 1}, {"db_idx": 
2415, "episode_idx": 7, "frame_idx": 256, "global_frame_idx": 2415, "task_index": 1}, {"db_idx": 2416, "episode_idx": 7, "frame_idx": 257, "global_frame_idx": 2416, "task_index": 1}, {"db_idx": 2417, "episode_idx": 7, "frame_idx": 258, "global_frame_idx": 2417, "task_index": 1}, {"db_idx": 2418, "episode_idx": 7, "frame_idx": 259, "global_frame_idx": 2418, "task_index": 1}, {"db_idx": 2419, "episode_idx": 7, "frame_idx": 260, "global_frame_idx": 2419, "task_index": 1}, {"db_idx": 2420, "episode_idx": 7, "frame_idx": 261, "global_frame_idx": 2420, "task_index": 1}, {"db_idx": 2421, "episode_idx": 7, "frame_idx": 262, "global_frame_idx": 2421, "task_index": 1}, {"db_idx": 2422, "episode_idx": 7, "frame_idx": 263, "global_frame_idx": 2422, "task_index": 1}, {"db_idx": 2423, "episode_idx": 7, "frame_idx": 264, "global_frame_idx": 2423, "task_index": 1}, {"db_idx": 2424, "episode_idx": 7, "frame_idx": 265, "global_frame_idx": 2424, "task_index": 1}, {"db_idx": 2425, "episode_idx": 7, "frame_idx": 266, "global_frame_idx": 2425, "task_index": 1}, {"db_idx": 2426, "episode_idx": 7, "frame_idx": 267, "global_frame_idx": 2426, "task_index": 1}, {"db_idx": 2427, "episode_idx": 7, "frame_idx": 268, "global_frame_idx": 2427, "task_index": 1}, {"db_idx": 2428, "episode_idx": 7, "frame_idx": 269, "global_frame_idx": 2428, "task_index": 1}, {"db_idx": 2429, "episode_idx": 7, "frame_idx": 270, "global_frame_idx": 2429, "task_index": 1}, {"db_idx": 2430, "episode_idx": 7, "frame_idx": 271, "global_frame_idx": 2430, "task_index": 1}, {"db_idx": 2431, "episode_idx": 7, "frame_idx": 272, "global_frame_idx": 2431, "task_index": 1}, {"db_idx": 2432, "episode_idx": 7, "frame_idx": 273, "global_frame_idx": 2432, "task_index": 1}, {"db_idx": 2433, "episode_idx": 7, "frame_idx": 274, "global_frame_idx": 2433, "task_index": 1}, {"db_idx": 2434, "episode_idx": 7, "frame_idx": 275, "global_frame_idx": 2434, "task_index": 1}, {"db_idx": 2435, "episode_idx": 7, "frame_idx": 276, 
"global_frame_idx": 2435, "task_index": 1}, {"db_idx": 2436, "episode_idx": 7, "frame_idx": 277, "global_frame_idx": 2436, "task_index": 1}, {"db_idx": 2437, "episode_idx": 7, "frame_idx": 278, "global_frame_idx": 2437, "task_index": 1}, {"db_idx": 2438, "episode_idx": 7, "frame_idx": 279, "global_frame_idx": 2438, "task_index": 1}, {"db_idx": 2439, "episode_idx": 7, "frame_idx": 280, "global_frame_idx": 2439, "task_index": 1}, {"db_idx": 2440, "episode_idx": 7, "frame_idx": 281, "global_frame_idx": 2440, "task_index": 1}, {"db_idx": 2441, "episode_idx": 7, "frame_idx": 282, "global_frame_idx": 2441, "task_index": 1}, {"db_idx": 2442, "episode_idx": 7, "frame_idx": 283, "global_frame_idx": 2442, "task_index": 1}, {"db_idx": 2443, "episode_idx": 7, "frame_idx": 284, "global_frame_idx": 2443, "task_index": 1}, {"db_idx": 2444, "episode_idx": 7, "frame_idx": 285, "global_frame_idx": 2444, "task_index": 1}, {"db_idx": 2445, "episode_idx": 7, "frame_idx": 286, "global_frame_idx": 2445, "task_index": 1}, {"db_idx": 2446, "episode_idx": 7, "frame_idx": 287, "global_frame_idx": 2446, "task_index": 1}, {"db_idx": 2447, "episode_idx": 7, "frame_idx": 288, "global_frame_idx": 2447, "task_index": 1}, {"db_idx": 2448, "episode_idx": 7, "frame_idx": 289, "global_frame_idx": 2448, "task_index": 1}, {"db_idx": 2449, "episode_idx": 7, "frame_idx": 290, "global_frame_idx": 2449, "task_index": 1}, {"db_idx": 2450, "episode_idx": 7, "frame_idx": 291, "global_frame_idx": 2450, "task_index": 1}, {"db_idx": 2451, "episode_idx": 7, "frame_idx": 292, "global_frame_idx": 2451, "task_index": 1}, {"db_idx": 2452, "episode_idx": 7, "frame_idx": 293, "global_frame_idx": 2452, "task_index": 1}, {"db_idx": 2453, "episode_idx": 7, "frame_idx": 294, "global_frame_idx": 2453, "task_index": 1}, {"db_idx": 2454, "episode_idx": 7, "frame_idx": 295, "global_frame_idx": 2454, "task_index": 1}, {"db_idx": 2455, "episode_idx": 7, "frame_idx": 296, "global_frame_idx": 2455, "task_index": 1}, {"db_idx": 
2456, "episode_idx": 7, "frame_idx": 297, "global_frame_idx": 2456, "task_index": 1}, {"db_idx": 2457, "episode_idx": 7, "frame_idx": 298, "global_frame_idx": 2457, "task_index": 1}, {"db_idx": 2458, "episode_idx": 7, "frame_idx": 299, "global_frame_idx": 2458, "task_index": 1}, {"db_idx": 2459, "episode_idx": 7, "frame_idx": 300, "global_frame_idx": 2459, "task_index": 1}, {"db_idx": 2460, "episode_idx": 7, "frame_idx": 301, "global_frame_idx": 2460, "task_index": 1}, {"db_idx": 2461, "episode_idx": 7, "frame_idx": 302, "global_frame_idx": 2461, "task_index": 1}, {"db_idx": 2462, "episode_idx": 7, "frame_idx": 303, "global_frame_idx": 2462, "task_index": 1}, {"db_idx": 2463, "episode_idx": 7, "frame_idx": 304, "global_frame_idx": 2463, "task_index": 1}, {"db_idx": 2464, "episode_idx": 7, "frame_idx": 305, "global_frame_idx": 2464, "task_index": 1}, {"db_idx": 2465, "episode_idx": 7, "frame_idx": 306, "global_frame_idx": 2465, "task_index": 1}, {"db_idx": 2466, "episode_idx": 7, "frame_idx": 307, "global_frame_idx": 2466, "task_index": 1}, {"db_idx": 2467, "episode_idx": 7, "frame_idx": 308, "global_frame_idx": 2467, "task_index": 1}, {"db_idx": 2468, "episode_idx": 7, "frame_idx": 309, "global_frame_idx": 2468, "task_index": 1}, {"db_idx": 2469, "episode_idx": 7, "frame_idx": 310, "global_frame_idx": 2469, "task_index": 1}, {"db_idx": 2470, "episode_idx": 7, "frame_idx": 311, "global_frame_idx": 2470, "task_index": 1}, {"db_idx": 2471, "episode_idx": 7, "frame_idx": 312, "global_frame_idx": 2471, "task_index": 1}, {"db_idx": 2472, "episode_idx": 7, "frame_idx": 313, "global_frame_idx": 2472, "task_index": 1}, {"db_idx": 2473, "episode_idx": 7, "frame_idx": 314, "global_frame_idx": 2473, "task_index": 1}, {"db_idx": 2474, "episode_idx": 7, "frame_idx": 315, "global_frame_idx": 2474, "task_index": 1}, {"db_idx": 2475, "episode_idx": 7, "frame_idx": 316, "global_frame_idx": 2475, "task_index": 1}, {"db_idx": 2476, "episode_idx": 7, "frame_idx": 317, 
"global_frame_idx": 2476, "task_index": 1}, {"db_idx": 2477, "episode_idx": 7, "frame_idx": 318, "global_frame_idx": 2477, "task_index": 1}, {"db_idx": 2478, "episode_idx": 7, "frame_idx": 319, "global_frame_idx": 2478, "task_index": 1}, {"db_idx": 2479, "episode_idx": 7, "frame_idx": 320, "global_frame_idx": 2479, "task_index": 1}, {"db_idx": 2480, "episode_idx": 7, "frame_idx": 321, "global_frame_idx": 2480, "task_index": 1}, {"db_idx": 2481, "episode_idx": 7, "frame_idx": 322, "global_frame_idx": 2481, "task_index": 1}, {"db_idx": 2482, "episode_idx": 7, "frame_idx": 323, "global_frame_idx": 2482, "task_index": 1}, {"db_idx": 2483, "episode_idx": 7, "frame_idx": 324, "global_frame_idx": 2483, "task_index": 1}, {"db_idx": 2484, "episode_idx": 7, "frame_idx": 325, "global_frame_idx": 2484, "task_index": 1}, {"db_idx": 2485, "episode_idx": 7, "frame_idx": 326, "global_frame_idx": 2485, "task_index": 1}, {"db_idx": 2486, "episode_idx": 7, "frame_idx": 327, "global_frame_idx": 2486, "task_index": 1}, {"db_idx": 2487, "episode_idx": 7, "frame_idx": 328, "global_frame_idx": 2487, "task_index": 1}, {"db_idx": 2488, "episode_idx": 7, "frame_idx": 329, "global_frame_idx": 2488, "task_index": 1}, {"db_idx": 2489, "episode_idx": 7, "frame_idx": 330, "global_frame_idx": 2489, "task_index": 1}, {"db_idx": 2490, "episode_idx": 7, "frame_idx": 331, "global_frame_idx": 2490, "task_index": 1}, {"db_idx": 2491, "episode_idx": 7, "frame_idx": 332, "global_frame_idx": 2491, "task_index": 1}, {"db_idx": 2492, "episode_idx": 7, "frame_idx": 333, "global_frame_idx": 2492, "task_index": 1}, {"db_idx": 2493, "episode_idx": 7, "frame_idx": 334, "global_frame_idx": 2493, "task_index": 1}, {"db_idx": 2494, "episode_idx": 7, "frame_idx": 335, "global_frame_idx": 2494, "task_index": 1}, {"db_idx": 2495, "episode_idx": 7, "frame_idx": 336, "global_frame_idx": 2495, "task_index": 1}, {"db_idx": 2496, "episode_idx": 7, "frame_idx": 337, "global_frame_idx": 2496, "task_index": 1}, {"db_idx": 
2497, "episode_idx": 7, "frame_idx": 338, "global_frame_idx": 2497, "task_index": 1}, {"db_idx": 2498, "episode_idx": 7, "frame_idx": 339, "global_frame_idx": 2498, "task_index": 1}, {"db_idx": 2499, "episode_idx": 7, "frame_idx": 340, "global_frame_idx": 2499, "task_index": 1}, {"db_idx": 2500, "episode_idx": 7, "frame_idx": 341, "global_frame_idx": 2500, "task_index": 1}, {"db_idx": 2501, "episode_idx": 7, "frame_idx": 342, "global_frame_idx": 2501, "task_index": 1}, {"db_idx": 2502, "episode_idx": 7, "frame_idx": 343, "global_frame_idx": 2502, "task_index": 1}, {"db_idx": 2503, "episode_idx": 7, "frame_idx": 344, "global_frame_idx": 2503, "task_index": 1}, {"db_idx": 2504, "episode_idx": 7, "frame_idx": 345, "global_frame_idx": 2504, "task_index": 1}, {"db_idx": 2505, "episode_idx": 7, "frame_idx": 346, "global_frame_idx": 2505, "task_index": 1}, {"db_idx": 2506, "episode_idx": 7, "frame_idx": 347, "global_frame_idx": 2506, "task_index": 1}, {"db_idx": 2507, "episode_idx": 7, "frame_idx": 348, "global_frame_idx": 2507, "task_index": 1}, {"db_idx": 2508, "episode_idx": 7, "frame_idx": 349, "global_frame_idx": 2508, "task_index": 1}, {"db_idx": 2509, "episode_idx": 7, "frame_idx": 350, "global_frame_idx": 2509, "task_index": 1}, {"db_idx": 2510, "episode_idx": 7, "frame_idx": 351, "global_frame_idx": 2510, "task_index": 1}, {"db_idx": 2511, "episode_idx": 7, "frame_idx": 352, "global_frame_idx": 2511, "task_index": 1}, {"db_idx": 2512, "episode_idx": 7, "frame_idx": 353, "global_frame_idx": 2512, "task_index": 1}, {"db_idx": 2513, "episode_idx": 7, "frame_idx": 354, "global_frame_idx": 2513, "task_index": 1}, {"db_idx": 2514, "episode_idx": 7, "frame_idx": 355, "global_frame_idx": 2514, "task_index": 1}, {"db_idx": 2515, "episode_idx": 7, "frame_idx": 356, "global_frame_idx": 2515, "task_index": 1}, {"db_idx": 2516, "episode_idx": 7, "frame_idx": 357, "global_frame_idx": 2516, "task_index": 1}, {"db_idx": 2517, "episode_idx": 7, "frame_idx": 358, 
"global_frame_idx": 2517, "task_index": 1}, {"db_idx": 2518, "episode_idx": 7, "frame_idx": 359, "global_frame_idx": 2518, "task_index": 1}, {"db_idx": 2519, "episode_idx": 7, "frame_idx": 360, "global_frame_idx": 2519, "task_index": 1}, {"db_idx": 2520, "episode_idx": 7, "frame_idx": 361, "global_frame_idx": 2520, "task_index": 1}, {"db_idx": 2521, "episode_idx": 7, "frame_idx": 362, "global_frame_idx": 2521, "task_index": 1}, {"db_idx": 2522, "episode_idx": 7, "frame_idx": 363, "global_frame_idx": 2522, "task_index": 1}, {"db_idx": 2523, "episode_idx": 7, "frame_idx": 364, "global_frame_idx": 2523, "task_index": 1}, {"db_idx": 2524, "episode_idx": 7, "frame_idx": 365, "global_frame_idx": 2524, "task_index": 1}, {"db_idx": 2525, "episode_idx": 7, "frame_idx": 366, "global_frame_idx": 2525, "task_index": 1}, {"db_idx": 2526, "episode_idx": 7, "frame_idx": 367, "global_frame_idx": 2526, "task_index": 1}, {"db_idx": 2527, "episode_idx": 7, "frame_idx": 368, "global_frame_idx": 2527, "task_index": 1}, {"db_idx": 2528, "episode_idx": 7, "frame_idx": 369, "global_frame_idx": 2528, "task_index": 1}, {"db_idx": 2529, "episode_idx": 7, "frame_idx": 370, "global_frame_idx": 2529, "task_index": 1}, {"db_idx": 2530, "episode_idx": 7, "frame_idx": 371, "global_frame_idx": 2530, "task_index": 1}, {"db_idx": 2531, "episode_idx": 7, "frame_idx": 372, "global_frame_idx": 2531, "task_index": 1}, {"db_idx": 2532, "episode_idx": 7, "frame_idx": 373, "global_frame_idx": 2532, "task_index": 1}, {"db_idx": 2533, "episode_idx": 7, "frame_idx": 374, "global_frame_idx": 2533, "task_index": 1}, {"db_idx": 2534, "episode_idx": 7, "frame_idx": 375, "global_frame_idx": 2534, "task_index": 1}, {"db_idx": 2535, "episode_idx": 7, "frame_idx": 376, "global_frame_idx": 2535, "task_index": 1}, {"db_idx": 2536, "episode_idx": 7, "frame_idx": 377, "global_frame_idx": 2536, "task_index": 1}, {"db_idx": 2537, "episode_idx": 7, "frame_idx": 378, "global_frame_idx": 2537, "task_index": 1}, {"db_idx": 
2538, "episode_idx": 7, "frame_idx": 379, "global_frame_idx": 2538, "task_index": 1}, {"db_idx": 2539, "episode_idx": 7, "frame_idx": 380, "global_frame_idx": 2539, "task_index": 1}, {"db_idx": 2540, "episode_idx": 7, "frame_idx": 381, "global_frame_idx": 2540, "task_index": 1}, {"db_idx": 2541, "episode_idx": 7, "frame_idx": 382, "global_frame_idx": 2541, "task_index": 1}, {"db_idx": 2542, "episode_idx": 7, "frame_idx": 383, "global_frame_idx": 2542, "task_index": 1}, {"db_idx": 2543, "episode_idx": 7, "frame_idx": 384, "global_frame_idx": 2543, "task_index": 1}, {"db_idx": 2544, "episode_idx": 7, "frame_idx": 385, "global_frame_idx": 2544, "task_index": 1}, {"db_idx": 2545, "episode_idx": 7, "frame_idx": 386, "global_frame_idx": 2545, "task_index": 1}, {"db_idx": 2546, "episode_idx": 7, "frame_idx": 387, "global_frame_idx": 2546, "task_index": 1}, {"db_idx": 2547, "episode_idx": 7, "frame_idx": 388, "global_frame_idx": 2547, "task_index": 1}, {"db_idx": 2548, "episode_idx": 7, "frame_idx": 389, "global_frame_idx": 2548, "task_index": 1}, {"db_idx": 2549, "episode_idx": 7, "frame_idx": 390, "global_frame_idx": 2549, "task_index": 1}, {"db_idx": 2550, "episode_idx": 7, "frame_idx": 391, "global_frame_idx": 2550, "task_index": 1}, {"db_idx": 2551, "episode_idx": 7, "frame_idx": 392, "global_frame_idx": 2551, "task_index": 1}, {"db_idx": 2552, "episode_idx": 7, "frame_idx": 393, "global_frame_idx": 2552, "task_index": 1}, {"db_idx": 2553, "episode_idx": 7, "frame_idx": 394, "global_frame_idx": 2553, "task_index": 1}, {"db_idx": 2554, "episode_idx": 7, "frame_idx": 395, "global_frame_idx": 2554, "task_index": 1}, {"db_idx": 2555, "episode_idx": 7, "frame_idx": 396, "global_frame_idx": 2555, "task_index": 1}, {"db_idx": 2556, "episode_idx": 7, "frame_idx": 397, "global_frame_idx": 2556, "task_index": 1}, {"db_idx": 2557, "episode_idx": 7, "frame_idx": 398, "global_frame_idx": 2557, "task_index": 1}, {"db_idx": 2558, "episode_idx": 7, "frame_idx": 399, 
"global_frame_idx": 2558, "task_index": 1}, {"db_idx": 2559, "episode_idx": 7, "frame_idx": 400, "global_frame_idx": 2559, "task_index": 1}, {"db_idx": 2560, "episode_idx": 7, "frame_idx": 401, "global_frame_idx": 2560, "task_index": 1}, {"db_idx": 2561, "episode_idx": 7, "frame_idx": 402, "global_frame_idx": 2561, "task_index": 1}, {"db_idx": 2562, "episode_idx": 7, "frame_idx": 403, "global_frame_idx": 2562, "task_index": 1}, {"db_idx": 2563, "episode_idx": 7, "frame_idx": 404, "global_frame_idx": 2563, "task_index": 1}, {"db_idx": 2564, "episode_idx": 7, "frame_idx": 405, "global_frame_idx": 2564, "task_index": 1}, {"db_idx": 2565, "episode_idx": 7, "frame_idx": 406, "global_frame_idx": 2565, "task_index": 1}, {"db_idx": 2566, "episode_idx": 7, "frame_idx": 407, "global_frame_idx": 2566, "task_index": 1}, {"db_idx": 2567, "episode_idx": 7, "frame_idx": 408, "global_frame_idx": 2567, "task_index": 1}, {"db_idx": 2568, "episode_idx": 7, "frame_idx": 409, "global_frame_idx": 2568, "task_index": 1}, {"db_idx": 2569, "episode_idx": 7, "frame_idx": 410, "global_frame_idx": 2569, "task_index": 1}, {"db_idx": 2570, "episode_idx": 7, "frame_idx": 411, "global_frame_idx": 2570, "task_index": 1}, {"db_idx": 2571, "episode_idx": 7, "frame_idx": 412, "global_frame_idx": 2571, "task_index": 1}, {"db_idx": 2572, "episode_idx": 7, "frame_idx": 413, "global_frame_idx": 2572, "task_index": 1}, {"db_idx": 2573, "episode_idx": 7, "frame_idx": 414, "global_frame_idx": 2573, "task_index": 1}, {"db_idx": 2574, "episode_idx": 7, "frame_idx": 415, "global_frame_idx": 2574, "task_index": 1}, {"db_idx": 2575, "episode_idx": 7, "frame_idx": 416, "global_frame_idx": 2575, "task_index": 1}, {"db_idx": 2576, "episode_idx": 7, "frame_idx": 417, "global_frame_idx": 2576, "task_index": 1}, {"db_idx": 2577, "episode_idx": 7, "frame_idx": 418, "global_frame_idx": 2577, "task_index": 1}, {"db_idx": 2578, "episode_idx": 7, "frame_idx": 419, "global_frame_idx": 2578, "task_index": 1}, {"db_idx": 
2579, "episode_idx": 7, "frame_idx": 420, "global_frame_idx": 2579, "task_index": 1}, {"db_idx": 2580, "episode_idx": 7, "frame_idx": 421, "global_frame_idx": 2580, "task_index": 1}, {"db_idx": 2581, "episode_idx": 7, "frame_idx": 422, "global_frame_idx": 2581, "task_index": 1}, {"db_idx": 2582, "episode_idx": 7, "frame_idx": 423, "global_frame_idx": 2582, "task_index": 1}, {"db_idx": 2583, "episode_idx": 7, "frame_idx": 424, "global_frame_idx": 2583, "task_index": 1}, {"db_idx": 2584, "episode_idx": 7, "frame_idx": 425, "global_frame_idx": 2584, "task_index": 1}, {"db_idx": 2585, "episode_idx": 7, "frame_idx": 426, "global_frame_idx": 2585, "task_index": 1}, {"db_idx": 2586, "episode_idx": 7, "frame_idx": 427, "global_frame_idx": 2586, "task_index": 1}, {"db_idx": 2587, "episode_idx": 7, "frame_idx": 428, "global_frame_idx": 2587, "task_index": 1}, {"db_idx": 2588, "episode_idx": 7, "frame_idx": 429, "global_frame_idx": 2588, "task_index": 1}, {"db_idx": 2589, "episode_idx": 7, "frame_idx": 430, "global_frame_idx": 2589, "task_index": 1}, {"db_idx": 2590, "episode_idx": 7, "frame_idx": 431, "global_frame_idx": 2590, "task_index": 1}, {"db_idx": 2591, "episode_idx": 7, "frame_idx": 432, "global_frame_idx": 2591, "task_index": 1}, {"db_idx": 2592, "episode_idx": 7, "frame_idx": 433, "global_frame_idx": 2592, "task_index": 1}, {"db_idx": 2593, "episode_idx": 7, "frame_idx": 434, "global_frame_idx": 2593, "task_index": 1}, {"db_idx": 2594, "episode_idx": 7, "frame_idx": 435, "global_frame_idx": 2594, "task_index": 1}, {"db_idx": 2595, "episode_idx": 7, "frame_idx": 436, "global_frame_idx": 2595, "task_index": 1}, {"db_idx": 2596, "episode_idx": 7, "frame_idx": 437, "global_frame_idx": 2596, "task_index": 1}, {"db_idx": 2597, "episode_idx": 7, "frame_idx": 438, "global_frame_idx": 2597, "task_index": 1}, {"db_idx": 2598, "episode_idx": 7, "frame_idx": 439, "global_frame_idx": 2598, "task_index": 1}, {"db_idx": 2599, "episode_idx": 7, "frame_idx": 440, 
"global_frame_idx": 2599, "task_index": 1}, {"db_idx": 2600, "episode_idx": 7, "frame_idx": 441, "global_frame_idx": 2600, "task_index": 1}, {"db_idx": 2601, "episode_idx": 7, "frame_idx": 442, "global_frame_idx": 2601, "task_index": 1}, {"db_idx": 2602, "episode_idx": 7, "frame_idx": 443, "global_frame_idx": 2602, "task_index": 1}, {"db_idx": 2603, "episode_idx": 7, "frame_idx": 444, "global_frame_idx": 2603, "task_index": 1}, {"db_idx": 2604, "episode_idx": 7, "frame_idx": 445, "global_frame_idx": 2604, "task_index": 1}, {"db_idx": 2605, "episode_idx": 7, "frame_idx": 446, "global_frame_idx": 2605, "task_index": 1}, {"db_idx": 2606, "episode_idx": 7, "frame_idx": 447, "global_frame_idx": 2606, "task_index": 1}, {"db_idx": 2607, "episode_idx": 7, "frame_idx": 448, "global_frame_idx": 2607, "task_index": 1}, {"db_idx": 2608, "episode_idx": 7, "frame_idx": 449, "global_frame_idx": 2608, "task_index": 1}, {"db_idx": 2609, "episode_idx": 7, "frame_idx": 450, "global_frame_idx": 2609, "task_index": 1}, {"db_idx": 2610, "episode_idx": 7, "frame_idx": 451, "global_frame_idx": 2610, "task_index": 1}, {"db_idx": 2611, "episode_idx": 7, "frame_idx": 452, "global_frame_idx": 2611, "task_index": 1}, {"db_idx": 2612, "episode_idx": 7, "frame_idx": 453, "global_frame_idx": 2612, "task_index": 1}, {"db_idx": 2613, "episode_idx": 7, "frame_idx": 454, "global_frame_idx": 2613, "task_index": 1}, {"db_idx": 2614, "episode_idx": 7, "frame_idx": 455, "global_frame_idx": 2614, "task_index": 1}, {"db_idx": 2615, "episode_idx": 7, "frame_idx": 456, "global_frame_idx": 2615, "task_index": 1}, {"db_idx": 2616, "episode_idx": 7, "frame_idx": 457, "global_frame_idx": 2616, "task_index": 1}, {"db_idx": 2617, "episode_idx": 7, "frame_idx": 458, "global_frame_idx": 2617, "task_index": 1}, {"db_idx": 2618, "episode_idx": 7, "frame_idx": 459, "global_frame_idx": 2618, "task_index": 1}, {"db_idx": 2619, "episode_idx": 7, "frame_idx": 460, "global_frame_idx": 2619, "task_index": 1}, {"db_idx": 
2620, "episode_idx": 7, "frame_idx": 461, "global_frame_idx": 2620, "task_index": 1}, {"db_idx": 2621, "episode_idx": 7, "frame_idx": 462, "global_frame_idx": 2621, "task_index": 1}, {"db_idx": 2622, "episode_idx": 7, "frame_idx": 463, "global_frame_idx": 2622, "task_index": 1}, {"db_idx": 2623, "episode_idx": 7, "frame_idx": 464, "global_frame_idx": 2623, "task_index": 1}, {"db_idx": 2624, "episode_idx": 7, "frame_idx": 465, "global_frame_idx": 2624, "task_index": 1}, {"db_idx": 2625, "episode_idx": 8, "frame_idx": 0, "global_frame_idx": 2625, "task_index": 1}, {"db_idx": 2626, "episode_idx": 8, "frame_idx": 1, "global_frame_idx": 2626, "task_index": 1}, {"db_idx": 2627, "episode_idx": 8, "frame_idx": 2, "global_frame_idx": 2627, "task_index": 1}, {"db_idx": 2628, "episode_idx": 8, "frame_idx": 3, "global_frame_idx": 2628, "task_index": 1}, {"db_idx": 2629, "episode_idx": 8, "frame_idx": 4, "global_frame_idx": 2629, "task_index": 1}, {"db_idx": 2630, "episode_idx": 8, "frame_idx": 5, "global_frame_idx": 2630, "task_index": 1}, {"db_idx": 2631, "episode_idx": 8, "frame_idx": 6, "global_frame_idx": 2631, "task_index": 1}, {"db_idx": 2632, "episode_idx": 8, "frame_idx": 7, "global_frame_idx": 2632, "task_index": 1}, {"db_idx": 2633, "episode_idx": 8, "frame_idx": 8, "global_frame_idx": 2633, "task_index": 1}, {"db_idx": 2634, "episode_idx": 8, "frame_idx": 9, "global_frame_idx": 2634, "task_index": 1}, {"db_idx": 2635, "episode_idx": 8, "frame_idx": 10, "global_frame_idx": 2635, "task_index": 1}, {"db_idx": 2636, "episode_idx": 8, "frame_idx": 11, "global_frame_idx": 2636, "task_index": 1}, {"db_idx": 2637, "episode_idx": 8, "frame_idx": 12, "global_frame_idx": 2637, "task_index": 1}, {"db_idx": 2638, "episode_idx": 8, "frame_idx": 13, "global_frame_idx": 2638, "task_index": 1}, {"db_idx": 2639, "episode_idx": 8, "frame_idx": 14, "global_frame_idx": 2639, "task_index": 1}, {"db_idx": 2640, "episode_idx": 8, "frame_idx": 15, "global_frame_idx": 2640, "task_index": 1}, 
{"db_idx": 2641, "episode_idx": 8, "frame_idx": 16, "global_frame_idx": 2641, "task_index": 1}, {"db_idx": 2642, "episode_idx": 8, "frame_idx": 17, "global_frame_idx": 2642, "task_index": 1}, {"db_idx": 2643, "episode_idx": 8, "frame_idx": 18, "global_frame_idx": 2643, "task_index": 1}, {"db_idx": 2644, "episode_idx": 8, "frame_idx": 19, "global_frame_idx": 2644, "task_index": 1}, {"db_idx": 2645, "episode_idx": 8, "frame_idx": 20, "global_frame_idx": 2645, "task_index": 1}, {"db_idx": 2646, "episode_idx": 8, "frame_idx": 21, "global_frame_idx": 2646, "task_index": 1}, {"db_idx": 2647, "episode_idx": 8, "frame_idx": 22, "global_frame_idx": 2647, "task_index": 1}, {"db_idx": 2648, "episode_idx": 8, "frame_idx": 23, "global_frame_idx": 2648, "task_index": 1}, {"db_idx": 2649, "episode_idx": 8, "frame_idx": 24, "global_frame_idx": 2649, "task_index": 1}, {"db_idx": 2650, "episode_idx": 8, "frame_idx": 25, "global_frame_idx": 2650, "task_index": 1}, {"db_idx": 2651, "episode_idx": 8, "frame_idx": 26, "global_frame_idx": 2651, "task_index": 1}, {"db_idx": 2652, "episode_idx": 8, "frame_idx": 27, "global_frame_idx": 2652, "task_index": 1}, {"db_idx": 2653, "episode_idx": 8, "frame_idx": 28, "global_frame_idx": 2653, "task_index": 1}, {"db_idx": 2654, "episode_idx": 8, "frame_idx": 29, "global_frame_idx": 2654, "task_index": 1}, {"db_idx": 2655, "episode_idx": 8, "frame_idx": 30, "global_frame_idx": 2655, "task_index": 1}, {"db_idx": 2656, "episode_idx": 8, "frame_idx": 31, "global_frame_idx": 2656, "task_index": 1}, {"db_idx": 2657, "episode_idx": 8, "frame_idx": 32, "global_frame_idx": 2657, "task_index": 1}, {"db_idx": 2658, "episode_idx": 8, "frame_idx": 33, "global_frame_idx": 2658, "task_index": 1}, {"db_idx": 2659, "episode_idx": 8, "frame_idx": 34, "global_frame_idx": 2659, "task_index": 1}, {"db_idx": 2660, "episode_idx": 8, "frame_idx": 35, "global_frame_idx": 2660, "task_index": 1}, {"db_idx": 2661, "episode_idx": 8, "frame_idx": 36, "global_frame_idx": 2661, 
"task_index": 1}, {"db_idx": 2662, "episode_idx": 8, "frame_idx": 37, "global_frame_idx": 2662, "task_index": 1}, {"db_idx": 2663, "episode_idx": 8, "frame_idx": 38, "global_frame_idx": 2663, "task_index": 1}, {"db_idx": 2664, "episode_idx": 8, "frame_idx": 39, "global_frame_idx": 2664, "task_index": 1}, {"db_idx": 2665, "episode_idx": 8, "frame_idx": 40, "global_frame_idx": 2665, "task_index": 1}, {"db_idx": 2666, "episode_idx": 8, "frame_idx": 41, "global_frame_idx": 2666, "task_index": 1}, {"db_idx": 2667, "episode_idx": 8, "frame_idx": 42, "global_frame_idx": 2667, "task_index": 1}, {"db_idx": 2668, "episode_idx": 8, "frame_idx": 43, "global_frame_idx": 2668, "task_index": 1}, {"db_idx": 2669, "episode_idx": 8, "frame_idx": 44, "global_frame_idx": 2669, "task_index": 1}, {"db_idx": 2670, "episode_idx": 8, "frame_idx": 45, "global_frame_idx": 2670, "task_index": 1}, {"db_idx": 2671, "episode_idx": 8, "frame_idx": 46, "global_frame_idx": 2671, "task_index": 1}, {"db_idx": 2672, "episode_idx": 8, "frame_idx": 47, "global_frame_idx": 2672, "task_index": 1}, {"db_idx": 2673, "episode_idx": 8, "frame_idx": 48, "global_frame_idx": 2673, "task_index": 1}, {"db_idx": 2674, "episode_idx": 8, "frame_idx": 49, "global_frame_idx": 2674, "task_index": 1}, {"db_idx": 2675, "episode_idx": 8, "frame_idx": 50, "global_frame_idx": 2675, "task_index": 1}, {"db_idx": 2676, "episode_idx": 8, "frame_idx": 51, "global_frame_idx": 2676, "task_index": 1}, {"db_idx": 2677, "episode_idx": 8, "frame_idx": 52, "global_frame_idx": 2677, "task_index": 1}, {"db_idx": 2678, "episode_idx": 8, "frame_idx": 53, "global_frame_idx": 2678, "task_index": 1}, {"db_idx": 2679, "episode_idx": 8, "frame_idx": 54, "global_frame_idx": 2679, "task_index": 1}, {"db_idx": 2680, "episode_idx": 8, "frame_idx": 55, "global_frame_idx": 2680, "task_index": 1}, {"db_idx": 2681, "episode_idx": 8, "frame_idx": 56, "global_frame_idx": 2681, "task_index": 1}, {"db_idx": 2682, "episode_idx": 8, "frame_idx": 57, 
"global_frame_idx": 2682, "task_index": 1}, {"db_idx": 2683, "episode_idx": 8, "frame_idx": 58, "global_frame_idx": 2683, "task_index": 1}, {"db_idx": 2684, "episode_idx": 8, "frame_idx": 59, "global_frame_idx": 2684, "task_index": 1}, {"db_idx": 2685, "episode_idx": 8, "frame_idx": 60, "global_frame_idx": 2685, "task_index": 1}, {"db_idx": 2686, "episode_idx": 8, "frame_idx": 61, "global_frame_idx": 2686, "task_index": 1}, {"db_idx": 2687, "episode_idx": 8, "frame_idx": 62, "global_frame_idx": 2687, "task_index": 1}, {"db_idx": 2688, "episode_idx": 8, "frame_idx": 63, "global_frame_idx": 2688, "task_index": 1}, {"db_idx": 2689, "episode_idx": 8, "frame_idx": 64, "global_frame_idx": 2689, "task_index": 1}, {"db_idx": 2690, "episode_idx": 8, "frame_idx": 65, "global_frame_idx": 2690, "task_index": 1}, {"db_idx": 2691, "episode_idx": 8, "frame_idx": 66, "global_frame_idx": 2691, "task_index": 1}, {"db_idx": 2692, "episode_idx": 8, "frame_idx": 67, "global_frame_idx": 2692, "task_index": 1}, {"db_idx": 2693, "episode_idx": 8, "frame_idx": 68, "global_frame_idx": 2693, "task_index": 1}, {"db_idx": 2694, "episode_idx": 8, "frame_idx": 69, "global_frame_idx": 2694, "task_index": 1}, {"db_idx": 2695, "episode_idx": 8, "frame_idx": 70, "global_frame_idx": 2695, "task_index": 1}, {"db_idx": 2696, "episode_idx": 8, "frame_idx": 71, "global_frame_idx": 2696, "task_index": 1}, {"db_idx": 2697, "episode_idx": 8, "frame_idx": 72, "global_frame_idx": 2697, "task_index": 1}, {"db_idx": 2698, "episode_idx": 8, "frame_idx": 73, "global_frame_idx": 2698, "task_index": 1}, {"db_idx": 2699, "episode_idx": 8, "frame_idx": 74, "global_frame_idx": 2699, "task_index": 1}, {"db_idx": 2700, "episode_idx": 8, "frame_idx": 75, "global_frame_idx": 2700, "task_index": 1}, {"db_idx": 2701, "episode_idx": 8, "frame_idx": 76, "global_frame_idx": 2701, "task_index": 1}, {"db_idx": 2702, "episode_idx": 8, "frame_idx": 77, "global_frame_idx": 2702, "task_index": 1}, {"db_idx": 2703, "episode_idx": 8, 
"frame_idx": 78, "global_frame_idx": 2703, "task_index": 1}, {"db_idx": 2704, "episode_idx": 8, "frame_idx": 79, "global_frame_idx": 2704, "task_index": 1}, {"db_idx": 2705, "episode_idx": 8, "frame_idx": 80, "global_frame_idx": 2705, "task_index": 1}, {"db_idx": 2706, "episode_idx": 8, "frame_idx": 81, "global_frame_idx": 2706, "task_index": 1}, {"db_idx": 2707, "episode_idx": 8, "frame_idx": 82, "global_frame_idx": 2707, "task_index": 1}, {"db_idx": 2708, "episode_idx": 8, "frame_idx": 83, "global_frame_idx": 2708, "task_index": 1}, {"db_idx": 2709, "episode_idx": 8, "frame_idx": 84, "global_frame_idx": 2709, "task_index": 1}, {"db_idx": 2710, "episode_idx": 8, "frame_idx": 85, "global_frame_idx": 2710, "task_index": 1}, {"db_idx": 2711, "episode_idx": 8, "frame_idx": 86, "global_frame_idx": 2711, "task_index": 1}, {"db_idx": 2712, "episode_idx": 8, "frame_idx": 87, "global_frame_idx": 2712, "task_index": 1}, {"db_idx": 2713, "episode_idx": 8, "frame_idx": 88, "global_frame_idx": 2713, "task_index": 1}, {"db_idx": 2714, "episode_idx": 8, "frame_idx": 89, "global_frame_idx": 2714, "task_index": 1}, {"db_idx": 2715, "episode_idx": 8, "frame_idx": 90, "global_frame_idx": 2715, "task_index": 1}, {"db_idx": 2716, "episode_idx": 8, "frame_idx": 91, "global_frame_idx": 2716, "task_index": 1}, {"db_idx": 2717, "episode_idx": 8, "frame_idx": 92, "global_frame_idx": 2717, "task_index": 1}, {"db_idx": 2718, "episode_idx": 8, "frame_idx": 93, "global_frame_idx": 2718, "task_index": 1}, {"db_idx": 2719, "episode_idx": 8, "frame_idx": 94, "global_frame_idx": 2719, "task_index": 1}, {"db_idx": 2720, "episode_idx": 8, "frame_idx": 95, "global_frame_idx": 2720, "task_index": 1}, {"db_idx": 2721, "episode_idx": 8, "frame_idx": 96, "global_frame_idx": 2721, "task_index": 1}, {"db_idx": 2722, "episode_idx": 8, "frame_idx": 97, "global_frame_idx": 2722, "task_index": 1}, {"db_idx": 2723, "episode_idx": 8, "frame_idx": 98, "global_frame_idx": 2723, "task_index": 1}, {"db_idx": 2724, 
"episode_idx": 8, "frame_idx": 99, "global_frame_idx": 2724, "task_index": 1}, {"db_idx": 2725, "episode_idx": 8, "frame_idx": 100, "global_frame_idx": 2725, "task_index": 1}, {"db_idx": 2726, "episode_idx": 8, "frame_idx": 101, "global_frame_idx": 2726, "task_index": 1}, {"db_idx": 2727, "episode_idx": 8, "frame_idx": 102, "global_frame_idx": 2727, "task_index": 1}, {"db_idx": 2728, "episode_idx": 8, "frame_idx": 103, "global_frame_idx": 2728, "task_index": 1}, {"db_idx": 2729, "episode_idx": 8, "frame_idx": 104, "global_frame_idx": 2729, "task_index": 1}, {"db_idx": 2730, "episode_idx": 8, "frame_idx": 105, "global_frame_idx": 2730, "task_index": 1}, {"db_idx": 2731, "episode_idx": 8, "frame_idx": 106, "global_frame_idx": 2731, "task_index": 1}, {"db_idx": 2732, "episode_idx": 8, "frame_idx": 107, "global_frame_idx": 2732, "task_index": 1}, {"db_idx": 2733, "episode_idx": 8, "frame_idx": 108, "global_frame_idx": 2733, "task_index": 1}, {"db_idx": 2734, "episode_idx": 8, "frame_idx": 109, "global_frame_idx": 2734, "task_index": 1}, {"db_idx": 2735, "episode_idx": 8, "frame_idx": 110, "global_frame_idx": 2735, "task_index": 1}, {"db_idx": 2736, "episode_idx": 8, "frame_idx": 111, "global_frame_idx": 2736, "task_index": 1}, {"db_idx": 2737, "episode_idx": 8, "frame_idx": 112, "global_frame_idx": 2737, "task_index": 1}, {"db_idx": 2738, "episode_idx": 8, "frame_idx": 113, "global_frame_idx": 2738, "task_index": 1}, {"db_idx": 2739, "episode_idx": 8, "frame_idx": 114, "global_frame_idx": 2739, "task_index": 1}, {"db_idx": 2740, "episode_idx": 8, "frame_idx": 115, "global_frame_idx": 2740, "task_index": 1}, {"db_idx": 2741, "episode_idx": 8, "frame_idx": 116, "global_frame_idx": 2741, "task_index": 1}, {"db_idx": 2742, "episode_idx": 8, "frame_idx": 117, "global_frame_idx": 2742, "task_index": 1}, {"db_idx": 2743, "episode_idx": 8, "frame_idx": 118, "global_frame_idx": 2743, "task_index": 1}, {"db_idx": 2744, "episode_idx": 8, "frame_idx": 119, "global_frame_idx": 
2744, "task_index": 1}, {"db_idx": 2745, "episode_idx": 8, "frame_idx": 120, "global_frame_idx": 2745, "task_index": 1}, {"db_idx": 2746, "episode_idx": 8, "frame_idx": 121, "global_frame_idx": 2746, "task_index": 1}, {"db_idx": 2747, "episode_idx": 8, "frame_idx": 122, "global_frame_idx": 2747, "task_index": 1}, {"db_idx": 2748, "episode_idx": 8, "frame_idx": 123, "global_frame_idx": 2748, "task_index": 1}, {"db_idx": 2749, "episode_idx": 8, "frame_idx": 124, "global_frame_idx": 2749, "task_index": 1}, {"db_idx": 2750, "episode_idx": 8, "frame_idx": 125, "global_frame_idx": 2750, "task_index": 1}, {"db_idx": 2751, "episode_idx": 8, "frame_idx": 126, "global_frame_idx": 2751, "task_index": 1}, {"db_idx": 2752, "episode_idx": 8, "frame_idx": 127, "global_frame_idx": 2752, "task_index": 1}, {"db_idx": 2753, "episode_idx": 8, "frame_idx": 128, "global_frame_idx": 2753, "task_index": 1}, {"db_idx": 2754, "episode_idx": 8, "frame_idx": 129, "global_frame_idx": 2754, "task_index": 1}, {"db_idx": 2755, "episode_idx": 8, "frame_idx": 130, "global_frame_idx": 2755, "task_index": 1}, {"db_idx": 2756, "episode_idx": 8, "frame_idx": 131, "global_frame_idx": 2756, "task_index": 1}, {"db_idx": 2757, "episode_idx": 8, "frame_idx": 132, "global_frame_idx": 2757, "task_index": 1}, {"db_idx": 2758, "episode_idx": 8, "frame_idx": 133, "global_frame_idx": 2758, "task_index": 1}, {"db_idx": 2759, "episode_idx": 8, "frame_idx": 134, "global_frame_idx": 2759, "task_index": 1}, {"db_idx": 2760, "episode_idx": 8, "frame_idx": 135, "global_frame_idx": 2760, "task_index": 1}, {"db_idx": 2761, "episode_idx": 8, "frame_idx": 136, "global_frame_idx": 2761, "task_index": 1}, {"db_idx": 2762, "episode_idx": 8, "frame_idx": 137, "global_frame_idx": 2762, "task_index": 1}, {"db_idx": 2763, "episode_idx": 8, "frame_idx": 138, "global_frame_idx": 2763, "task_index": 1}, {"db_idx": 2764, "episode_idx": 8, "frame_idx": 139, "global_frame_idx": 2764, "task_index": 1}, {"db_idx": 2765, "episode_idx": 8, 
"frame_idx": 140, "global_frame_idx": 2765, "task_index": 1}, {"db_idx": 2766, "episode_idx": 8, "frame_idx": 141, "global_frame_idx": 2766, "task_index": 1}, {"db_idx": 2767, "episode_idx": 8, "frame_idx": 142, "global_frame_idx": 2767, "task_index": 1}, {"db_idx": 2768, "episode_idx": 8, "frame_idx": 143, "global_frame_idx": 2768, "task_index": 1}, {"db_idx": 2769, "episode_idx": 8, "frame_idx": 144, "global_frame_idx": 2769, "task_index": 1}, {"db_idx": 2770, "episode_idx": 8, "frame_idx": 145, "global_frame_idx": 2770, "task_index": 1}, {"db_idx": 2771, "episode_idx": 8, "frame_idx": 146, "global_frame_idx": 2771, "task_index": 1}, {"db_idx": 2772, "episode_idx": 8, "frame_idx": 147, "global_frame_idx": 2772, "task_index": 1}, {"db_idx": 2773, "episode_idx": 8, "frame_idx": 148, "global_frame_idx": 2773, "task_index": 1}, {"db_idx": 2774, "episode_idx": 8, "frame_idx": 149, "global_frame_idx": 2774, "task_index": 1}, {"db_idx": 2775, "episode_idx": 8, "frame_idx": 150, "global_frame_idx": 2775, "task_index": 1}, {"db_idx": 2776, "episode_idx": 8, "frame_idx": 151, "global_frame_idx": 2776, "task_index": 1}, {"db_idx": 2777, "episode_idx": 8, "frame_idx": 152, "global_frame_idx": 2777, "task_index": 1}, {"db_idx": 2778, "episode_idx": 8, "frame_idx": 153, "global_frame_idx": 2778, "task_index": 1}, {"db_idx": 2779, "episode_idx": 8, "frame_idx": 154, "global_frame_idx": 2779, "task_index": 1}, {"db_idx": 2780, "episode_idx": 8, "frame_idx": 155, "global_frame_idx": 2780, "task_index": 1}, {"db_idx": 2781, "episode_idx": 8, "frame_idx": 156, "global_frame_idx": 2781, "task_index": 1}, {"db_idx": 2782, "episode_idx": 8, "frame_idx": 157, "global_frame_idx": 2782, "task_index": 1}, {"db_idx": 2783, "episode_idx": 8, "frame_idx": 158, "global_frame_idx": 2783, "task_index": 1}, {"db_idx": 2784, "episode_idx": 8, "frame_idx": 159, "global_frame_idx": 2784, "task_index": 1}, {"db_idx": 2785, "episode_idx": 8, "frame_idx": 160, "global_frame_idx": 2785, "task_index": 
1}, {"db_idx": 2786, "episode_idx": 8, "frame_idx": 161, "global_frame_idx": 2786, "task_index": 1}, {"db_idx": 2787, "episode_idx": 8, "frame_idx": 162, "global_frame_idx": 2787, "task_index": 1}, {"db_idx": 2788, "episode_idx": 8, "frame_idx": 163, "global_frame_idx": 2788, "task_index": 1}, {"db_idx": 2789, "episode_idx": 8, "frame_idx": 164, "global_frame_idx": 2789, "task_index": 1}, {"db_idx": 2790, "episode_idx": 8, "frame_idx": 165, "global_frame_idx": 2790, "task_index": 1}, {"db_idx": 2791, "episode_idx": 8, "frame_idx": 166, "global_frame_idx": 2791, "task_index": 1}, {"db_idx": 2792, "episode_idx": 8, "frame_idx": 167, "global_frame_idx": 2792, "task_index": 1}, {"db_idx": 2793, "episode_idx": 8, "frame_idx": 168, "global_frame_idx": 2793, "task_index": 1}, {"db_idx": 2794, "episode_idx": 8, "frame_idx": 169, "global_frame_idx": 2794, "task_index": 1}, {"db_idx": 2795, "episode_idx": 8, "frame_idx": 170, "global_frame_idx": 2795, "task_index": 1}, {"db_idx": 2796, "episode_idx": 8, "frame_idx": 171, "global_frame_idx": 2796, "task_index": 1}, {"db_idx": 2797, "episode_idx": 8, "frame_idx": 172, "global_frame_idx": 2797, "task_index": 1}, {"db_idx": 2798, "episode_idx": 8, "frame_idx": 173, "global_frame_idx": 2798, "task_index": 1}, {"db_idx": 2799, "episode_idx": 8, "frame_idx": 174, "global_frame_idx": 2799, "task_index": 1}, {"db_idx": 2800, "episode_idx": 8, "frame_idx": 175, "global_frame_idx": 2800, "task_index": 1}, {"db_idx": 2801, "episode_idx": 8, "frame_idx": 176, "global_frame_idx": 2801, "task_index": 1}, {"db_idx": 2802, "episode_idx": 8, "frame_idx": 177, "global_frame_idx": 2802, "task_index": 1}, {"db_idx": 2803, "episode_idx": 8, "frame_idx": 178, "global_frame_idx": 2803, "task_index": 1}, {"db_idx": 2804, "episode_idx": 8, "frame_idx": 179, "global_frame_idx": 2804, "task_index": 1}, {"db_idx": 2805, "episode_idx": 8, "frame_idx": 180, "global_frame_idx": 2805, "task_index": 1}, {"db_idx": 2806, "episode_idx": 8, "frame_idx": 181, 
"global_frame_idx": 2806, "task_index": 1}, {"db_idx": 2807, "episode_idx": 8, "frame_idx": 182, "global_frame_idx": 2807, "task_index": 1}, {"db_idx": 2808, "episode_idx": 8, "frame_idx": 183, "global_frame_idx": 2808, "task_index": 1}, {"db_idx": 2809, "episode_idx": 8, "frame_idx": 184, "global_frame_idx": 2809, "task_index": 1}, {"db_idx": 2810, "episode_idx": 8, "frame_idx": 185, "global_frame_idx": 2810, "task_index": 1}, {"db_idx": 2811, "episode_idx": 8, "frame_idx": 186, "global_frame_idx": 2811, "task_index": 1}, {"db_idx": 2812, "episode_idx": 8, "frame_idx": 187, "global_frame_idx": 2812, "task_index": 1}, {"db_idx": 2813, "episode_idx": 8, "frame_idx": 188, "global_frame_idx": 2813, "task_index": 1}, {"db_idx": 2814, "episode_idx": 8, "frame_idx": 189, "global_frame_idx": 2814, "task_index": 1}, {"db_idx": 2815, "episode_idx": 8, "frame_idx": 190, "global_frame_idx": 2815, "task_index": 1}, {"db_idx": 2816, "episode_idx": 8, "frame_idx": 191, "global_frame_idx": 2816, "task_index": 1}, {"db_idx": 2817, "episode_idx": 8, "frame_idx": 192, "global_frame_idx": 2817, "task_index": 1}, {"db_idx": 2818, "episode_idx": 8, "frame_idx": 193, "global_frame_idx": 2818, "task_index": 1}, {"db_idx": 2819, "episode_idx": 8, "frame_idx": 194, "global_frame_idx": 2819, "task_index": 1}, {"db_idx": 2820, "episode_idx": 8, "frame_idx": 195, "global_frame_idx": 2820, "task_index": 1}, {"db_idx": 2821, "episode_idx": 8, "frame_idx": 196, "global_frame_idx": 2821, "task_index": 1}, {"db_idx": 2822, "episode_idx": 8, "frame_idx": 197, "global_frame_idx": 2822, "task_index": 1}, {"db_idx": 2823, "episode_idx": 8, "frame_idx": 198, "global_frame_idx": 2823, "task_index": 1}, {"db_idx": 2824, "episode_idx": 8, "frame_idx": 199, "global_frame_idx": 2824, "task_index": 1}, {"db_idx": 2825, "episode_idx": 8, "frame_idx": 200, "global_frame_idx": 2825, "task_index": 1}, {"db_idx": 2826, "episode_idx": 8, "frame_idx": 201, "global_frame_idx": 2826, "task_index": 1}, {"db_idx": 
2827, "episode_idx": 8, "frame_idx": 202, "global_frame_idx": 2827, "task_index": 1}, {"db_idx": 2828, "episode_idx": 8, "frame_idx": 203, "global_frame_idx": 2828, "task_index": 1}, {"db_idx": 2829, "episode_idx": 8, "frame_idx": 204, "global_frame_idx": 2829, "task_index": 1}, {"db_idx": 2830, "episode_idx": 8, "frame_idx": 205, "global_frame_idx": 2830, "task_index": 1}, {"db_idx": 2831, "episode_idx": 8, "frame_idx": 206, "global_frame_idx": 2831, "task_index": 1}, {"db_idx": 2832, "episode_idx": 8, "frame_idx": 207, "global_frame_idx": 2832, "task_index": 1}, {"db_idx": 2833, "episode_idx": 8, "frame_idx": 208, "global_frame_idx": 2833, "task_index": 1}, {"db_idx": 2834, "episode_idx": 8, "frame_idx": 209, "global_frame_idx": 2834, "task_index": 1}, {"db_idx": 2835, "episode_idx": 8, "frame_idx": 210, "global_frame_idx": 2835, "task_index": 1}, {"db_idx": 2836, "episode_idx": 8, "frame_idx": 211, "global_frame_idx": 2836, "task_index": 1}, {"db_idx": 2837, "episode_idx": 8, "frame_idx": 212, "global_frame_idx": 2837, "task_index": 1}, {"db_idx": 2838, "episode_idx": 8, "frame_idx": 213, "global_frame_idx": 2838, "task_index": 1}, {"db_idx": 2839, "episode_idx": 8, "frame_idx": 214, "global_frame_idx": 2839, "task_index": 1}, {"db_idx": 2840, "episode_idx": 8, "frame_idx": 215, "global_frame_idx": 2840, "task_index": 1}, {"db_idx": 2841, "episode_idx": 8, "frame_idx": 216, "global_frame_idx": 2841, "task_index": 1}, {"db_idx": 2842, "episode_idx": 8, "frame_idx": 217, "global_frame_idx": 2842, "task_index": 1}, {"db_idx": 2843, "episode_idx": 8, "frame_idx": 218, "global_frame_idx": 2843, "task_index": 1}, {"db_idx": 2844, "episode_idx": 8, "frame_idx": 219, "global_frame_idx": 2844, "task_index": 1}, {"db_idx": 2845, "episode_idx": 8, "frame_idx": 220, "global_frame_idx": 2845, "task_index": 1}, {"db_idx": 2846, "episode_idx": 8, "frame_idx": 221, "global_frame_idx": 2846, "task_index": 1}, {"db_idx": 2847, "episode_idx": 8, "frame_idx": 222, 
"global_frame_idx": 2847, "task_index": 1}, {"db_idx": 2848, "episode_idx": 8, "frame_idx": 223, "global_frame_idx": 2848, "task_index": 1}, {"db_idx": 2849, "episode_idx": 8, "frame_idx": 224, "global_frame_idx": 2849, "task_index": 1}, {"db_idx": 2850, "episode_idx": 8, "frame_idx": 225, "global_frame_idx": 2850, "task_index": 1}, {"db_idx": 2851, "episode_idx": 8, "frame_idx": 226, "global_frame_idx": 2851, "task_index": 1}, {"db_idx": 2852, "episode_idx": 8, "frame_idx": 227, "global_frame_idx": 2852, "task_index": 1}, {"db_idx": 2853, "episode_idx": 8, "frame_idx": 228, "global_frame_idx": 2853, "task_index": 1}, {"db_idx": 2854, "episode_idx": 8, "frame_idx": 229, "global_frame_idx": 2854, "task_index": 1}, {"db_idx": 2855, "episode_idx": 8, "frame_idx": 230, "global_frame_idx": 2855, "task_index": 1}, {"db_idx": 2856, "episode_idx": 8, "frame_idx": 231, "global_frame_idx": 2856, "task_index": 1}, {"db_idx": 2857, "episode_idx": 8, "frame_idx": 232, "global_frame_idx": 2857, "task_index": 1}, {"db_idx": 2858, "episode_idx": 8, "frame_idx": 233, "global_frame_idx": 2858, "task_index": 1}, {"db_idx": 2859, "episode_idx": 8, "frame_idx": 234, "global_frame_idx": 2859, "task_index": 1}, {"db_idx": 2860, "episode_idx": 8, "frame_idx": 235, "global_frame_idx": 2860, "task_index": 1}, {"db_idx": 2861, "episode_idx": 8, "frame_idx": 236, "global_frame_idx": 2861, "task_index": 1}, {"db_idx": 2862, "episode_idx": 8, "frame_idx": 237, "global_frame_idx": 2862, "task_index": 1}, {"db_idx": 2863, "episode_idx": 8, "frame_idx": 238, "global_frame_idx": 2863, "task_index": 1}, {"db_idx": 2864, "episode_idx": 8, "frame_idx": 239, "global_frame_idx": 2864, "task_index": 1}, {"db_idx": 2865, "episode_idx": 8, "frame_idx": 240, "global_frame_idx": 2865, "task_index": 1}, {"db_idx": 2866, "episode_idx": 8, "frame_idx": 241, "global_frame_idx": 2866, "task_index": 1}, {"db_idx": 2867, "episode_idx": 8, "frame_idx": 242, "global_frame_idx": 2867, "task_index": 1}, {"db_idx": 
2868, "episode_idx": 8, "frame_idx": 243, "global_frame_idx": 2868, "task_index": 1}, {"db_idx": 2869, "episode_idx": 8, "frame_idx": 244, "global_frame_idx": 2869, "task_index": 1}, {"db_idx": 2870, "episode_idx": 8, "frame_idx": 245, "global_frame_idx": 2870, "task_index": 1}, {"db_idx": 2871, "episode_idx": 8, "frame_idx": 246, "global_frame_idx": 2871, "task_index": 1}, {"db_idx": 2872, "episode_idx": 8, "frame_idx": 247, "global_frame_idx": 2872, "task_index": 1}, {"db_idx": 2873, "episode_idx": 8, "frame_idx": 248, "global_frame_idx": 2873, "task_index": 1}, {"db_idx": 2874, "episode_idx": 8, "frame_idx": 249, "global_frame_idx": 2874, "task_index": 1}, {"db_idx": 2875, "episode_idx": 8, "frame_idx": 250, "global_frame_idx": 2875, "task_index": 1}, {"db_idx": 2876, "episode_idx": 8, "frame_idx": 251, "global_frame_idx": 2876, "task_index": 1}, {"db_idx": 2877, "episode_idx": 8, "frame_idx": 252, "global_frame_idx": 2877, "task_index": 1}, {"db_idx": 2878, "episode_idx": 8, "frame_idx": 253, "global_frame_idx": 2878, "task_index": 1}, {"db_idx": 2879, "episode_idx": 8, "frame_idx": 254, "global_frame_idx": 2879, "task_index": 1}, {"db_idx": 2880, "episode_idx": 8, "frame_idx": 255, "global_frame_idx": 2880, "task_index": 1}, {"db_idx": 2881, "episode_idx": 8, "frame_idx": 256, "global_frame_idx": 2881, "task_index": 1}, {"db_idx": 2882, "episode_idx": 8, "frame_idx": 257, "global_frame_idx": 2882, "task_index": 1}, {"db_idx": 2883, "episode_idx": 8, "frame_idx": 258, "global_frame_idx": 2883, "task_index": 1}, {"db_idx": 2884, "episode_idx": 8, "frame_idx": 259, "global_frame_idx": 2884, "task_index": 1}, {"db_idx": 2885, "episode_idx": 8, "frame_idx": 260, "global_frame_idx": 2885, "task_index": 1}, {"db_idx": 2886, "episode_idx": 8, "frame_idx": 261, "global_frame_idx": 2886, "task_index": 1}, {"db_idx": 2887, "episode_idx": 8, "frame_idx": 262, "global_frame_idx": 2887, "task_index": 1}, {"db_idx": 2888, "episode_idx": 8, "frame_idx": 263, 
"global_frame_idx": 2888, "task_index": 1}, {"db_idx": 2889, "episode_idx": 8, "frame_idx": 264, "global_frame_idx": 2889, "task_index": 1}, {"db_idx": 2890, "episode_idx": 8, "frame_idx": 265, "global_frame_idx": 2890, "task_index": 1}, {"db_idx": 2891, "episode_idx": 8, "frame_idx": 266, "global_frame_idx": 2891, "task_index": 1}, {"db_idx": 2892, "episode_idx": 8, "frame_idx": 267, "global_frame_idx": 2892, "task_index": 1}, {"db_idx": 2893, "episode_idx": 8, "frame_idx": 268, "global_frame_idx": 2893, "task_index": 1}, {"db_idx": 2894, "episode_idx": 8, "frame_idx": 269, "global_frame_idx": 2894, "task_index": 1}, {"db_idx": 2895, "episode_idx": 8, "frame_idx": 270, "global_frame_idx": 2895, "task_index": 1}, {"db_idx": 2896, "episode_idx": 8, "frame_idx": 271, "global_frame_idx": 2896, "task_index": 1}, {"db_idx": 2897, "episode_idx": 8, "frame_idx": 272, "global_frame_idx": 2897, "task_index": 1}, {"db_idx": 2898, "episode_idx": 8, "frame_idx": 273, "global_frame_idx": 2898, "task_index": 1}, {"db_idx": 2899, "episode_idx": 8, "frame_idx": 274, "global_frame_idx": 2899, "task_index": 1}, {"db_idx": 2900, "episode_idx": 8, "frame_idx": 275, "global_frame_idx": 2900, "task_index": 1}, {"db_idx": 2901, "episode_idx": 8, "frame_idx": 276, "global_frame_idx": 2901, "task_index": 1}, {"db_idx": 2902, "episode_idx": 8, "frame_idx": 277, "global_frame_idx": 2902, "task_index": 1}, {"db_idx": 2903, "episode_idx": 8, "frame_idx": 278, "global_frame_idx": 2903, "task_index": 1}, {"db_idx": 2904, "episode_idx": 8, "frame_idx": 279, "global_frame_idx": 2904, "task_index": 1}, {"db_idx": 2905, "episode_idx": 8, "frame_idx": 280, "global_frame_idx": 2905, "task_index": 1}, {"db_idx": 2906, "episode_idx": 8, "frame_idx": 281, "global_frame_idx": 2906, "task_index": 1}, {"db_idx": 2907, "episode_idx": 8, "frame_idx": 282, "global_frame_idx": 2907, "task_index": 1}, {"db_idx": 2908, "episode_idx": 8, "frame_idx": 283, "global_frame_idx": 2908, "task_index": 1}, {"db_idx": 
2909, "episode_idx": 8, "frame_idx": 284, "global_frame_idx": 2909, "task_index": 1}, {"db_idx": 2910, "episode_idx": 8, "frame_idx": 285, "global_frame_idx": 2910, "task_index": 1}, {"db_idx": 2911, "episode_idx": 8, "frame_idx": 286, "global_frame_idx": 2911, "task_index": 1}, {"db_idx": 2912, "episode_idx": 8, "frame_idx": 287, "global_frame_idx": 2912, "task_index": 1}, {"db_idx": 2913, "episode_idx": 8, "frame_idx": 288, "global_frame_idx": 2913, "task_index": 1}, {"db_idx": 2914, "episode_idx": 8, "frame_idx": 289, "global_frame_idx": 2914, "task_index": 1}, {"db_idx": 2915, "episode_idx": 8, "frame_idx": 290, "global_frame_idx": 2915, "task_index": 1}, {"db_idx": 2916, "episode_idx": 8, "frame_idx": 291, "global_frame_idx": 2916, "task_index": 1}, {"db_idx": 2917, "episode_idx": 8, "frame_idx": 292, "global_frame_idx": 2917, "task_index": 1}, {"db_idx": 2918, "episode_idx": 8, "frame_idx": 293, "global_frame_idx": 2918, "task_index": 1}, {"db_idx": 2919, "episode_idx": 8, "frame_idx": 294, "global_frame_idx": 2919, "task_index": 1}, {"db_idx": 2920, "episode_idx": 8, "frame_idx": 295, "global_frame_idx": 2920, "task_index": 1}, {"db_idx": 2921, "episode_idx": 8, "frame_idx": 296, "global_frame_idx": 2921, "task_index": 1}, {"db_idx": 2922, "episode_idx": 8, "frame_idx": 297, "global_frame_idx": 2922, "task_index": 1}, {"db_idx": 2923, "episode_idx": 8, "frame_idx": 298, "global_frame_idx": 2923, "task_index": 1}, {"db_idx": 2924, "episode_idx": 8, "frame_idx": 299, "global_frame_idx": 2924, "task_index": 1}, {"db_idx": 2925, "episode_idx": 8, "frame_idx": 300, "global_frame_idx": 2925, "task_index": 1}, {"db_idx": 2926, "episode_idx": 8, "frame_idx": 301, "global_frame_idx": 2926, "task_index": 1}, {"db_idx": 2927, "episode_idx": 8, "frame_idx": 302, "global_frame_idx": 2927, "task_index": 1}, {"db_idx": 2928, "episode_idx": 8, "frame_idx": 303, "global_frame_idx": 2928, "task_index": 1}, {"db_idx": 2929, "episode_idx": 8, "frame_idx": 304, 
"global_frame_idx": 2929, "task_index": 1}, {"db_idx": 2930, "episode_idx": 8, "frame_idx": 305, "global_frame_idx": 2930, "task_index": 1}, {"db_idx": 2931, "episode_idx": 8, "frame_idx": 306, "global_frame_idx": 2931, "task_index": 1}, {"db_idx": 2932, "episode_idx": 8, "frame_idx": 307, "global_frame_idx": 2932, "task_index": 1}, {"db_idx": 2933, "episode_idx": 8, "frame_idx": 308, "global_frame_idx": 2933, "task_index": 1}, {"db_idx": 2934, "episode_idx": 8, "frame_idx": 309, "global_frame_idx": 2934, "task_index": 1}, {"db_idx": 2935, "episode_idx": 8, "frame_idx": 310, "global_frame_idx": 2935, "task_index": 1}, {"db_idx": 2936, "episode_idx": 8, "frame_idx": 311, "global_frame_idx": 2936, "task_index": 1}, {"db_idx": 2937, "episode_idx": 8, "frame_idx": 312, "global_frame_idx": 2937, "task_index": 1}, {"db_idx": 2938, "episode_idx": 8, "frame_idx": 313, "global_frame_idx": 2938, "task_index": 1}, {"db_idx": 2939, "episode_idx": 8, "frame_idx": 314, "global_frame_idx": 2939, "task_index": 1}, {"db_idx": 2940, "episode_idx": 8, "frame_idx": 315, "global_frame_idx": 2940, "task_index": 1}, {"db_idx": 2941, "episode_idx": 8, "frame_idx": 316, "global_frame_idx": 2941, "task_index": 1}, {"db_idx": 2942, "episode_idx": 8, "frame_idx": 317, "global_frame_idx": 2942, "task_index": 1}, {"db_idx": 2943, "episode_idx": 8, "frame_idx": 318, "global_frame_idx": 2943, "task_index": 1}, {"db_idx": 2944, "episode_idx": 8, "frame_idx": 319, "global_frame_idx": 2944, "task_index": 1}, {"db_idx": 2945, "episode_idx": 8, "frame_idx": 320, "global_frame_idx": 2945, "task_index": 1}, {"db_idx": 2946, "episode_idx": 8, "frame_idx": 321, "global_frame_idx": 2946, "task_index": 1}, {"db_idx": 2947, "episode_idx": 8, "frame_idx": 322, "global_frame_idx": 2947, "task_index": 1}, {"db_idx": 2948, "episode_idx": 8, "frame_idx": 323, "global_frame_idx": 2948, "task_index": 1}, {"db_idx": 2949, "episode_idx": 8, "frame_idx": 324, "global_frame_idx": 2949, "task_index": 1}, {"db_idx": 
2950, "episode_idx": 8, "frame_idx": 325, "global_frame_idx": 2950, "task_index": 1}, {"db_idx": 2951, "episode_idx": 8, "frame_idx": 326, "global_frame_idx": 2951, "task_index": 1}, {"db_idx": 2952, "episode_idx": 8, "frame_idx": 327, "global_frame_idx": 2952, "task_index": 1}, {"db_idx": 2953, "episode_idx": 8, "frame_idx": 328, "global_frame_idx": 2953, "task_index": 1}, {"db_idx": 2954, "episode_idx": 8, "frame_idx": 329, "global_frame_idx": 2954, "task_index": 1}, {"db_idx": 2955, "episode_idx": 8, "frame_idx": 330, "global_frame_idx": 2955, "task_index": 1}, {"db_idx": 2956, "episode_idx": 8, "frame_idx": 331, "global_frame_idx": 2956, "task_index": 1}, {"db_idx": 2957, "episode_idx": 8, "frame_idx": 332, "global_frame_idx": 2957, "task_index": 1}, {"db_idx": 2958, "episode_idx": 8, "frame_idx": 333, "global_frame_idx": 2958, "task_index": 1}, {"db_idx": 2959, "episode_idx": 8, "frame_idx": 334, "global_frame_idx": 2959, "task_index": 1}, {"db_idx": 2960, "episode_idx": 8, "frame_idx": 335, "global_frame_idx": 2960, "task_index": 1}, {"db_idx": 2961, "episode_idx": 8, "frame_idx": 336, "global_frame_idx": 2961, "task_index": 1}, {"db_idx": 2962, "episode_idx": 8, "frame_idx": 337, "global_frame_idx": 2962, "task_index": 1}, {"db_idx": 2963, "episode_idx": 8, "frame_idx": 338, "global_frame_idx": 2963, "task_index": 1}, {"db_idx": 2964, "episode_idx": 8, "frame_idx": 339, "global_frame_idx": 2964, "task_index": 1}, {"db_idx": 2965, "episode_idx": 8, "frame_idx": 340, "global_frame_idx": 2965, "task_index": 1}, {"db_idx": 2966, "episode_idx": 8, "frame_idx": 341, "global_frame_idx": 2966, "task_index": 1}, {"db_idx": 2967, "episode_idx": 8, "frame_idx": 342, "global_frame_idx": 2967, "task_index": 1}, {"db_idx": 2968, "episode_idx": 8, "frame_idx": 343, "global_frame_idx": 2968, "task_index": 1}, {"db_idx": 2969, "episode_idx": 8, "frame_idx": 344, "global_frame_idx": 2969, "task_index": 1}, {"db_idx": 2970, "episode_idx": 8, "frame_idx": 345, 
"global_frame_idx": 2970, "task_index": 1}, {"db_idx": 2971, "episode_idx": 8, "frame_idx": 346, "global_frame_idx": 2971, "task_index": 1}, {"db_idx": 2972, "episode_idx": 8, "frame_idx": 347, "global_frame_idx": 2972, "task_index": 1}, {"db_idx": 2973, "episode_idx": 8, "frame_idx": 348, "global_frame_idx": 2973, "task_index": 1}, {"db_idx": 2974, "episode_idx": 8, "frame_idx": 349, "global_frame_idx": 2974, "task_index": 1}, {"db_idx": 2975, "episode_idx": 8, "frame_idx": 350, "global_frame_idx": 2975, "task_index": 1}, {"db_idx": 2976, "episode_idx": 8, "frame_idx": 351, "global_frame_idx": 2976, "task_index": 1}, {"db_idx": 2977, "episode_idx": 8, "frame_idx": 352, "global_frame_idx": 2977, "task_index": 1}, {"db_idx": 2978, "episode_idx": 8, "frame_idx": 353, "global_frame_idx": 2978, "task_index": 1}, {"db_idx": 2979, "episode_idx": 8, "frame_idx": 354, "global_frame_idx": 2979, "task_index": 1}, {"db_idx": 2980, "episode_idx": 8, "frame_idx": 355, "global_frame_idx": 2980, "task_index": 1}, {"db_idx": 2981, "episode_idx": 8, "frame_idx": 356, "global_frame_idx": 2981, "task_index": 1}, {"db_idx": 2982, "episode_idx": 8, "frame_idx": 357, "global_frame_idx": 2982, "task_index": 1}, {"db_idx": 2983, "episode_idx": 8, "frame_idx": 358, "global_frame_idx": 2983, "task_index": 1}, {"db_idx": 2984, "episode_idx": 8, "frame_idx": 359, "global_frame_idx": 2984, "task_index": 1}, {"db_idx": 2985, "episode_idx": 8, "frame_idx": 360, "global_frame_idx": 2985, "task_index": 1}, {"db_idx": 2986, "episode_idx": 8, "frame_idx": 361, "global_frame_idx": 2986, "task_index": 1}, {"db_idx": 2987, "episode_idx": 9, "frame_idx": 0, "global_frame_idx": 2987, "task_index": 1}, {"db_idx": 2988, "episode_idx": 9, "frame_idx": 1, "global_frame_idx": 2988, "task_index": 1}, {"db_idx": 2989, "episode_idx": 9, "frame_idx": 2, "global_frame_idx": 2989, "task_index": 1}, {"db_idx": 2990, "episode_idx": 9, "frame_idx": 3, "global_frame_idx": 2990, "task_index": 1}, {"db_idx": 2991, 
"episode_idx": 9, "frame_idx": 4, "global_frame_idx": 2991, "task_index": 1}, {"db_idx": 2992, "episode_idx": 9, "frame_idx": 5, "global_frame_idx": 2992, "task_index": 1}, {"db_idx": 2993, "episode_idx": 9, "frame_idx": 6, "global_frame_idx": 2993, "task_index": 1}, {"db_idx": 2994, "episode_idx": 9, "frame_idx": 7, "global_frame_idx": 2994, "task_index": 1}, {"db_idx": 2995, "episode_idx": 9, "frame_idx": 8, "global_frame_idx": 2995, "task_index": 1}, {"db_idx": 2996, "episode_idx": 9, "frame_idx": 9, "global_frame_idx": 2996, "task_index": 1}, {"db_idx": 2997, "episode_idx": 9, "frame_idx": 10, "global_frame_idx": 2997, "task_index": 1}, {"db_idx": 2998, "episode_idx": 9, "frame_idx": 11, "global_frame_idx": 2998, "task_index": 1}, {"db_idx": 2999, "episode_idx": 9, "frame_idx": 12, "global_frame_idx": 2999, "task_index": 1}, {"db_idx": 3000, "episode_idx": 9, "frame_idx": 13, "global_frame_idx": 3000, "task_index": 1}, {"db_idx": 3001, "episode_idx": 9, "frame_idx": 14, "global_frame_idx": 3001, "task_index": 1}, {"db_idx": 3002, "episode_idx": 9, "frame_idx": 15, "global_frame_idx": 3002, "task_index": 1}, {"db_idx": 3003, "episode_idx": 9, "frame_idx": 16, "global_frame_idx": 3003, "task_index": 1}, {"db_idx": 3004, "episode_idx": 9, "frame_idx": 17, "global_frame_idx": 3004, "task_index": 1}, {"db_idx": 3005, "episode_idx": 9, "frame_idx": 18, "global_frame_idx": 3005, "task_index": 1}, {"db_idx": 3006, "episode_idx": 9, "frame_idx": 19, "global_frame_idx": 3006, "task_index": 1}, {"db_idx": 3007, "episode_idx": 9, "frame_idx": 20, "global_frame_idx": 3007, "task_index": 1}, {"db_idx": 3008, "episode_idx": 9, "frame_idx": 21, "global_frame_idx": 3008, "task_index": 1}, {"db_idx": 3009, "episode_idx": 9, "frame_idx": 22, "global_frame_idx": 3009, "task_index": 1}, {"db_idx": 3010, "episode_idx": 9, "frame_idx": 23, "global_frame_idx": 3010, "task_index": 1}, {"db_idx": 3011, "episode_idx": 9, "frame_idx": 24, "global_frame_idx": 3011, "task_index": 1}, 
{"db_idx": 3012, "episode_idx": 9, "frame_idx": 25, "global_frame_idx": 3012, "task_index": 1}, {"db_idx": 3013, "episode_idx": 9, "frame_idx": 26, "global_frame_idx": 3013, "task_index": 1}, {"db_idx": 3014, "episode_idx": 9, "frame_idx": 27, "global_frame_idx": 3014, "task_index": 1}, {"db_idx": 3015, "episode_idx": 9, "frame_idx": 28, "global_frame_idx": 3015, "task_index": 1}, {"db_idx": 3016, "episode_idx": 9, "frame_idx": 29, "global_frame_idx": 3016, "task_index": 1}, {"db_idx": 3017, "episode_idx": 9, "frame_idx": 30, "global_frame_idx": 3017, "task_index": 1}, {"db_idx": 3018, "episode_idx": 9, "frame_idx": 31, "global_frame_idx": 3018, "task_index": 1}, {"db_idx": 3019, "episode_idx": 9, "frame_idx": 32, "global_frame_idx": 3019, "task_index": 1}, {"db_idx": 3020, "episode_idx": 9, "frame_idx": 33, "global_frame_idx": 3020, "task_index": 1}, {"db_idx": 3021, "episode_idx": 9, "frame_idx": 34, "global_frame_idx": 3021, "task_index": 1}, {"db_idx": 3022, "episode_idx": 9, "frame_idx": 35, "global_frame_idx": 3022, "task_index": 1}, {"db_idx": 3023, "episode_idx": 9, "frame_idx": 36, "global_frame_idx": 3023, "task_index": 1}, {"db_idx": 3024, "episode_idx": 9, "frame_idx": 37, "global_frame_idx": 3024, "task_index": 1}, {"db_idx": 3025, "episode_idx": 9, "frame_idx": 38, "global_frame_idx": 3025, "task_index": 1}, {"db_idx": 3026, "episode_idx": 9, "frame_idx": 39, "global_frame_idx": 3026, "task_index": 1}, {"db_idx": 3027, "episode_idx": 9, "frame_idx": 40, "global_frame_idx": 3027, "task_index": 1}, {"db_idx": 3028, "episode_idx": 9, "frame_idx": 41, "global_frame_idx": 3028, "task_index": 1}, {"db_idx": 3029, "episode_idx": 9, "frame_idx": 42, "global_frame_idx": 3029, "task_index": 1}, {"db_idx": 3030, "episode_idx": 9, "frame_idx": 43, "global_frame_idx": 3030, "task_index": 1}, {"db_idx": 3031, "episode_idx": 9, "frame_idx": 44, "global_frame_idx": 3031, "task_index": 1}, {"db_idx": 3032, "episode_idx": 9, "frame_idx": 45, "global_frame_idx": 3032, 
"task_index": 1}, {"db_idx": 3033, "episode_idx": 9, "frame_idx": 46, "global_frame_idx": 3033, "task_index": 1}, {"db_idx": 3034, "episode_idx": 9, "frame_idx": 47, "global_frame_idx": 3034, "task_index": 1}, {"db_idx": 3035, "episode_idx": 9, "frame_idx": 48, "global_frame_idx": 3035, "task_index": 1}, {"db_idx": 3036, "episode_idx": 9, "frame_idx": 49, "global_frame_idx": 3036, "task_index": 1}, {"db_idx": 3037, "episode_idx": 9, "frame_idx": 50, "global_frame_idx": 3037, "task_index": 1}, {"db_idx": 3038, "episode_idx": 9, "frame_idx": 51, "global_frame_idx": 3038, "task_index": 1}, {"db_idx": 3039, "episode_idx": 9, "frame_idx": 52, "global_frame_idx": 3039, "task_index": 1}, {"db_idx": 3040, "episode_idx": 9, "frame_idx": 53, "global_frame_idx": 3040, "task_index": 1}, {"db_idx": 3041, "episode_idx": 9, "frame_idx": 54, "global_frame_idx": 3041, "task_index": 1}, {"db_idx": 3042, "episode_idx": 9, "frame_idx": 55, "global_frame_idx": 3042, "task_index": 1}, {"db_idx": 3043, "episode_idx": 9, "frame_idx": 56, "global_frame_idx": 3043, "task_index": 1}, {"db_idx": 3044, "episode_idx": 9, "frame_idx": 57, "global_frame_idx": 3044, "task_index": 1}, {"db_idx": 3045, "episode_idx": 9, "frame_idx": 58, "global_frame_idx": 3045, "task_index": 1}, {"db_idx": 3046, "episode_idx": 9, "frame_idx": 59, "global_frame_idx": 3046, "task_index": 1}, {"db_idx": 3047, "episode_idx": 9, "frame_idx": 60, "global_frame_idx": 3047, "task_index": 1}, {"db_idx": 3048, "episode_idx": 9, "frame_idx": 61, "global_frame_idx": 3048, "task_index": 1}, {"db_idx": 3049, "episode_idx": 9, "frame_idx": 62, "global_frame_idx": 3049, "task_index": 1}, {"db_idx": 3050, "episode_idx": 9, "frame_idx": 63, "global_frame_idx": 3050, "task_index": 1}, {"db_idx": 3051, "episode_idx": 9, "frame_idx": 64, "global_frame_idx": 3051, "task_index": 1}, {"db_idx": 3052, "episode_idx": 9, "frame_idx": 65, "global_frame_idx": 3052, "task_index": 1}, {"db_idx": 3053, "episode_idx": 9, "frame_idx": 66, 
"global_frame_idx": 3053, "task_index": 1}, {"db_idx": 3054, "episode_idx": 9, "frame_idx": 67, "global_frame_idx": 3054, "task_index": 1}, {"db_idx": 3055, "episode_idx": 9, "frame_idx": 68, "global_frame_idx": 3055, "task_index": 1}, {"db_idx": 3056, "episode_idx": 9, "frame_idx": 69, "global_frame_idx": 3056, "task_index": 1}, {"db_idx": 3057, "episode_idx": 9, "frame_idx": 70, "global_frame_idx": 3057, "task_index": 1}, {"db_idx": 3058, "episode_idx": 9, "frame_idx": 71, "global_frame_idx": 3058, "task_index": 1}, {"db_idx": 3059, "episode_idx": 9, "frame_idx": 72, "global_frame_idx": 3059, "task_index": 1}, {"db_idx": 3060, "episode_idx": 9, "frame_idx": 73, "global_frame_idx": 3060, "task_index": 1}, {"db_idx": 3061, "episode_idx": 9, "frame_idx": 74, "global_frame_idx": 3061, "task_index": 1}, {"db_idx": 3062, "episode_idx": 9, "frame_idx": 75, "global_frame_idx": 3062, "task_index": 1}, {"db_idx": 3063, "episode_idx": 9, "frame_idx": 76, "global_frame_idx": 3063, "task_index": 1}, {"db_idx": 3064, "episode_idx": 9, "frame_idx": 77, "global_frame_idx": 3064, "task_index": 1}, {"db_idx": 3065, "episode_idx": 9, "frame_idx": 78, "global_frame_idx": 3065, "task_index": 1}, {"db_idx": 3066, "episode_idx": 9, "frame_idx": 79, "global_frame_idx": 3066, "task_index": 1}, {"db_idx": 3067, "episode_idx": 9, "frame_idx": 80, "global_frame_idx": 3067, "task_index": 1}, {"db_idx": 3068, "episode_idx": 9, "frame_idx": 81, "global_frame_idx": 3068, "task_index": 1}, {"db_idx": 3069, "episode_idx": 9, "frame_idx": 82, "global_frame_idx": 3069, "task_index": 1}, {"db_idx": 3070, "episode_idx": 9, "frame_idx": 83, "global_frame_idx": 3070, "task_index": 1}, {"db_idx": 3071, "episode_idx": 9, "frame_idx": 84, "global_frame_idx": 3071, "task_index": 1}, {"db_idx": 3072, "episode_idx": 9, "frame_idx": 85, "global_frame_idx": 3072, "task_index": 1}, {"db_idx": 3073, "episode_idx": 9, "frame_idx": 86, "global_frame_idx": 3073, "task_index": 1}, {"db_idx": 3074, "episode_idx": 9, 
"frame_idx": 87, "global_frame_idx": 3074, "task_index": 1}, {"db_idx": 3075, "episode_idx": 9, "frame_idx": 88, "global_frame_idx": 3075, "task_index": 1}, {"db_idx": 3076, "episode_idx": 9, "frame_idx": 89, "global_frame_idx": 3076, "task_index": 1}, {"db_idx": 3077, "episode_idx": 9, "frame_idx": 90, "global_frame_idx": 3077, "task_index": 1}, {"db_idx": 3078, "episode_idx": 9, "frame_idx": 91, "global_frame_idx": 3078, "task_index": 1}, {"db_idx": 3079, "episode_idx": 9, "frame_idx": 92, "global_frame_idx": 3079, "task_index": 1}, {"db_idx": 3080, "episode_idx": 9, "frame_idx": 93, "global_frame_idx": 3080, "task_index": 1}, {"db_idx": 3081, "episode_idx": 9, "frame_idx": 94, "global_frame_idx": 3081, "task_index": 1}, {"db_idx": 3082, "episode_idx": 9, "frame_idx": 95, "global_frame_idx": 3082, "task_index": 1}, {"db_idx": 3083, "episode_idx": 9, "frame_idx": 96, "global_frame_idx": 3083, "task_index": 1}, {"db_idx": 3084, "episode_idx": 9, "frame_idx": 97, "global_frame_idx": 3084, "task_index": 1}, {"db_idx": 3085, "episode_idx": 9, "frame_idx": 98, "global_frame_idx": 3085, "task_index": 1}, {"db_idx": 3086, "episode_idx": 9, "frame_idx": 99, "global_frame_idx": 3086, "task_index": 1}, {"db_idx": 3087, "episode_idx": 9, "frame_idx": 100, "global_frame_idx": 3087, "task_index": 1}, {"db_idx": 3088, "episode_idx": 9, "frame_idx": 101, "global_frame_idx": 3088, "task_index": 1}, {"db_idx": 3089, "episode_idx": 9, "frame_idx": 102, "global_frame_idx": 3089, "task_index": 1}, {"db_idx": 3090, "episode_idx": 9, "frame_idx": 103, "global_frame_idx": 3090, "task_index": 1}, {"db_idx": 3091, "episode_idx": 9, "frame_idx": 104, "global_frame_idx": 3091, "task_index": 1}, {"db_idx": 3092, "episode_idx": 9, "frame_idx": 105, "global_frame_idx": 3092, "task_index": 1}, {"db_idx": 3093, "episode_idx": 9, "frame_idx": 106, "global_frame_idx": 3093, "task_index": 1}, {"db_idx": 3094, "episode_idx": 9, "frame_idx": 107, "global_frame_idx": 3094, "task_index": 1}, {"db_idx": 
3095, "episode_idx": 9, "frame_idx": 108, "global_frame_idx": 3095, "task_index": 1}, {"db_idx": 3096, "episode_idx": 9, "frame_idx": 109, "global_frame_idx": 3096, "task_index": 1}, {"db_idx": 3097, "episode_idx": 9, "frame_idx": 110, "global_frame_idx": 3097, "task_index": 1}, {"db_idx": 3098, "episode_idx": 9, "frame_idx": 111, "global_frame_idx": 3098, "task_index": 1}, {"db_idx": 3099, "episode_idx": 9, "frame_idx": 112, "global_frame_idx": 3099, "task_index": 1}, {"db_idx": 3100, "episode_idx": 9, "frame_idx": 113, "global_frame_idx": 3100, "task_index": 1}, {"db_idx": 3101, "episode_idx": 9, "frame_idx": 114, "global_frame_idx": 3101, "task_index": 1}, {"db_idx": 3102, "episode_idx": 9, "frame_idx": 115, "global_frame_idx": 3102, "task_index": 1}, {"db_idx": 3103, "episode_idx": 9, "frame_idx": 116, "global_frame_idx": 3103, "task_index": 1}, {"db_idx": 3104, "episode_idx": 9, "frame_idx": 117, "global_frame_idx": 3104, "task_index": 1}, {"db_idx": 3105, "episode_idx": 9, "frame_idx": 118, "global_frame_idx": 3105, "task_index": 1}, {"db_idx": 3106, "episode_idx": 9, "frame_idx": 119, "global_frame_idx": 3106, "task_index": 1}, {"db_idx": 3107, "episode_idx": 9, "frame_idx": 120, "global_frame_idx": 3107, "task_index": 1}, {"db_idx": 3108, "episode_idx": 9, "frame_idx": 121, "global_frame_idx": 3108, "task_index": 1}, {"db_idx": 3109, "episode_idx": 9, "frame_idx": 122, "global_frame_idx": 3109, "task_index": 1}, {"db_idx": 3110, "episode_idx": 9, "frame_idx": 123, "global_frame_idx": 3110, "task_index": 1}, {"db_idx": 3111, "episode_idx": 9, "frame_idx": 124, "global_frame_idx": 3111, "task_index": 1}, {"db_idx": 3112, "episode_idx": 9, "frame_idx": 125, "global_frame_idx": 3112, "task_index": 1}, {"db_idx": 3113, "episode_idx": 9, "frame_idx": 126, "global_frame_idx": 3113, "task_index": 1}, {"db_idx": 3114, "episode_idx": 9, "frame_idx": 127, "global_frame_idx": 3114, "task_index": 1}, {"db_idx": 3115, "episode_idx": 9, "frame_idx": 128, 
"global_frame_idx": 3115, "task_index": 1}, {"db_idx": 3116, "episode_idx": 9, "frame_idx": 129, "global_frame_idx": 3116, "task_index": 1}, {"db_idx": 3117, "episode_idx": 9, "frame_idx": 130, "global_frame_idx": 3117, "task_index": 1}, {"db_idx": 3118, "episode_idx": 9, "frame_idx": 131, "global_frame_idx": 3118, "task_index": 1}, {"db_idx": 3119, "episode_idx": 9, "frame_idx": 132, "global_frame_idx": 3119, "task_index": 1}, {"db_idx": 3120, "episode_idx": 9, "frame_idx": 133, "global_frame_idx": 3120, "task_index": 1}, {"db_idx": 3121, "episode_idx": 9, "frame_idx": 134, "global_frame_idx": 3121, "task_index": 1}, {"db_idx": 3122, "episode_idx": 9, "frame_idx": 135, "global_frame_idx": 3122, "task_index": 1}, {"db_idx": 3123, "episode_idx": 9, "frame_idx": 136, "global_frame_idx": 3123, "task_index": 1}, {"db_idx": 3124, "episode_idx": 9, "frame_idx": 137, "global_frame_idx": 3124, "task_index": 1}, {"db_idx": 3125, "episode_idx": 9, "frame_idx": 138, "global_frame_idx": 3125, "task_index": 1}, {"db_idx": 3126, "episode_idx": 9, "frame_idx": 139, "global_frame_idx": 3126, "task_index": 1}, {"db_idx": 3127, "episode_idx": 9, "frame_idx": 140, "global_frame_idx": 3127, "task_index": 1}, {"db_idx": 3128, "episode_idx": 9, "frame_idx": 141, "global_frame_idx": 3128, "task_index": 1}, {"db_idx": 3129, "episode_idx": 9, "frame_idx": 142, "global_frame_idx": 3129, "task_index": 1}, {"db_idx": 3130, "episode_idx": 9, "frame_idx": 143, "global_frame_idx": 3130, "task_index": 1}, {"db_idx": 3131, "episode_idx": 9, "frame_idx": 144, "global_frame_idx": 3131, "task_index": 1}, {"db_idx": 3132, "episode_idx": 9, "frame_idx": 145, "global_frame_idx": 3132, "task_index": 1}, {"db_idx": 3133, "episode_idx": 9, "frame_idx": 146, "global_frame_idx": 3133, "task_index": 1}, {"db_idx": 3134, "episode_idx": 9, "frame_idx": 147, "global_frame_idx": 3134, "task_index": 1}, {"db_idx": 3135, "episode_idx": 9, "frame_idx": 148, "global_frame_idx": 3135, "task_index": 1}, {"db_idx": 
3136, "episode_idx": 9, "frame_idx": 149, "global_frame_idx": 3136, "task_index": 1}, {"db_idx": 3137, "episode_idx": 9, "frame_idx": 150, "global_frame_idx": 3137, "task_index": 1}, {"db_idx": 3138, "episode_idx": 9, "frame_idx": 151, "global_frame_idx": 3138, "task_index": 1}, {"db_idx": 3139, "episode_idx": 9, "frame_idx": 152, "global_frame_idx": 3139, "task_index": 1}, {"db_idx": 3140, "episode_idx": 9, "frame_idx": 153, "global_frame_idx": 3140, "task_index": 1}, {"db_idx": 3141, "episode_idx": 9, "frame_idx": 154, "global_frame_idx": 3141, "task_index": 1}, {"db_idx": 3142, "episode_idx": 9, "frame_idx": 155, "global_frame_idx": 3142, "task_index": 1}, {"db_idx": 3143, "episode_idx": 9, "frame_idx": 156, "global_frame_idx": 3143, "task_index": 1}, {"db_idx": 3144, "episode_idx": 9, "frame_idx": 157, "global_frame_idx": 3144, "task_index": 1}, {"db_idx": 3145, "episode_idx": 9, "frame_idx": 158, "global_frame_idx": 3145, "task_index": 1}, {"db_idx": 3146, "episode_idx": 9, "frame_idx": 159, "global_frame_idx": 3146, "task_index": 1}, {"db_idx": 3147, "episode_idx": 9, "frame_idx": 160, "global_frame_idx": 3147, "task_index": 1}, {"db_idx": 3148, "episode_idx": 9, "frame_idx": 161, "global_frame_idx": 3148, "task_index": 1}, {"db_idx": 3149, "episode_idx": 9, "frame_idx": 162, "global_frame_idx": 3149, "task_index": 1}, {"db_idx": 3150, "episode_idx": 9, "frame_idx": 163, "global_frame_idx": 3150, "task_index": 1}, {"db_idx": 3151, "episode_idx": 9, "frame_idx": 164, "global_frame_idx": 3151, "task_index": 1}, {"db_idx": 3152, "episode_idx": 9, "frame_idx": 165, "global_frame_idx": 3152, "task_index": 1}, {"db_idx": 3153, "episode_idx": 9, "frame_idx": 166, "global_frame_idx": 3153, "task_index": 1}, {"db_idx": 3154, "episode_idx": 9, "frame_idx": 167, "global_frame_idx": 3154, "task_index": 1}, {"db_idx": 3155, "episode_idx": 9, "frame_idx": 168, "global_frame_idx": 3155, "task_index": 1}, {"db_idx": 3156, "episode_idx": 9, "frame_idx": 169, 
"global_frame_idx": 3156, "task_index": 1}, {"db_idx": 3157, "episode_idx": 9, "frame_idx": 170, "global_frame_idx": 3157, "task_index": 1}, {"db_idx": 3158, "episode_idx": 9, "frame_idx": 171, "global_frame_idx": 3158, "task_index": 1}, {"db_idx": 3159, "episode_idx": 9, "frame_idx": 172, "global_frame_idx": 3159, "task_index": 1}, {"db_idx": 3160, "episode_idx": 9, "frame_idx": 173, "global_frame_idx": 3160, "task_index": 1}, {"db_idx": 3161, "episode_idx": 9, "frame_idx": 174, "global_frame_idx": 3161, "task_index": 1}, {"db_idx": 3162, "episode_idx": 9, "frame_idx": 175, "global_frame_idx": 3162, "task_index": 1}, {"db_idx": 3163, "episode_idx": 9, "frame_idx": 176, "global_frame_idx": 3163, "task_index": 1}, {"db_idx": 3164, "episode_idx": 9, "frame_idx": 177, "global_frame_idx": 3164, "task_index": 1}, {"db_idx": 3165, "episode_idx": 9, "frame_idx": 178, "global_frame_idx": 3165, "task_index": 1}, {"db_idx": 3166, "episode_idx": 9, "frame_idx": 179, "global_frame_idx": 3166, "task_index": 1}, {"db_idx": 3167, "episode_idx": 9, "frame_idx": 180, "global_frame_idx": 3167, "task_index": 1}, {"db_idx": 3168, "episode_idx": 9, "frame_idx": 181, "global_frame_idx": 3168, "task_index": 1}, {"db_idx": 3169, "episode_idx": 9, "frame_idx": 182, "global_frame_idx": 3169, "task_index": 1}, {"db_idx": 3170, "episode_idx": 9, "frame_idx": 183, "global_frame_idx": 3170, "task_index": 1}, {"db_idx": 3171, "episode_idx": 9, "frame_idx": 184, "global_frame_idx": 3171, "task_index": 1}, {"db_idx": 3172, "episode_idx": 9, "frame_idx": 185, "global_frame_idx": 3172, "task_index": 1}, {"db_idx": 3173, "episode_idx": 9, "frame_idx": 186, "global_frame_idx": 3173, "task_index": 1}, {"db_idx": 3174, "episode_idx": 9, "frame_idx": 187, "global_frame_idx": 3174, "task_index": 1}, {"db_idx": 3175, "episode_idx": 9, "frame_idx": 188, "global_frame_idx": 3175, "task_index": 1}, {"db_idx": 3176, "episode_idx": 9, "frame_idx": 189, "global_frame_idx": 3176, "task_index": 1}, {"db_idx": 
3177, "episode_idx": 9, "frame_idx": 190, "global_frame_idx": 3177, "task_index": 1}, {"db_idx": 3178, "episode_idx": 9, "frame_idx": 191, "global_frame_idx": 3178, "task_index": 1}, {"db_idx": 3179, "episode_idx": 9, "frame_idx": 192, "global_frame_idx": 3179, "task_index": 1}, {"db_idx": 3180, "episode_idx": 9, "frame_idx": 193, "global_frame_idx": 3180, "task_index": 1}, {"db_idx": 3181, "episode_idx": 9, "frame_idx": 194, "global_frame_idx": 3181, "task_index": 1}, {"db_idx": 3182, "episode_idx": 9, "frame_idx": 195, "global_frame_idx": 3182, "task_index": 1}, {"db_idx": 3183, "episode_idx": 9, "frame_idx": 196, "global_frame_idx": 3183, "task_index": 1}, {"db_idx": 3184, "episode_idx": 9, "frame_idx": 197, "global_frame_idx": 3184, "task_index": 1}, {"db_idx": 3185, "episode_idx": 9, "frame_idx": 198, "global_frame_idx": 3185, "task_index": 1}, {"db_idx": 3186, "episode_idx": 9, "frame_idx": 199, "global_frame_idx": 3186, "task_index": 1}, {"db_idx": 3187, "episode_idx": 9, "frame_idx": 200, "global_frame_idx": 3187, "task_index": 1}, {"db_idx": 3188, "episode_idx": 9, "frame_idx": 201, "global_frame_idx": 3188, "task_index": 1}, {"db_idx": 3189, "episode_idx": 9, "frame_idx": 202, "global_frame_idx": 3189, "task_index": 1}, {"db_idx": 3190, "episode_idx": 9, "frame_idx": 203, "global_frame_idx": 3190, "task_index": 1}, {"db_idx": 3191, "episode_idx": 9, "frame_idx": 204, "global_frame_idx": 3191, "task_index": 1}, {"db_idx": 3192, "episode_idx": 9, "frame_idx": 205, "global_frame_idx": 3192, "task_index": 1}, {"db_idx": 3193, "episode_idx": 9, "frame_idx": 206, "global_frame_idx": 3193, "task_index": 1}, {"db_idx": 3194, "episode_idx": 9, "frame_idx": 207, "global_frame_idx": 3194, "task_index": 1}, {"db_idx": 3195, "episode_idx": 9, "frame_idx": 208, "global_frame_idx": 3195, "task_index": 1}, {"db_idx": 3196, "episode_idx": 9, "frame_idx": 209, "global_frame_idx": 3196, "task_index": 1}, {"db_idx": 3197, "episode_idx": 9, "frame_idx": 210, 
"global_frame_idx": 3197, "task_index": 1}, {"db_idx": 3198, "episode_idx": 9, "frame_idx": 211, "global_frame_idx": 3198, "task_index": 1}, {"db_idx": 3199, "episode_idx": 9, "frame_idx": 212, "global_frame_idx": 3199, "task_index": 1}, {"db_idx": 3200, "episode_idx": 9, "frame_idx": 213, "global_frame_idx": 3200, "task_index": 1}, {"db_idx": 3201, "episode_idx": 9, "frame_idx": 214, "global_frame_idx": 3201, "task_index": 1}, {"db_idx": 3202, "episode_idx": 9, "frame_idx": 215, "global_frame_idx": 3202, "task_index": 1}, {"db_idx": 3203, "episode_idx": 9, "frame_idx": 216, "global_frame_idx": 3203, "task_index": 1}, {"db_idx": 3204, "episode_idx": 9, "frame_idx": 217, "global_frame_idx": 3204, "task_index": 1}, {"db_idx": 3205, "episode_idx": 9, "frame_idx": 218, "global_frame_idx": 3205, "task_index": 1}, {"db_idx": 3206, "episode_idx": 9, "frame_idx": 219, "global_frame_idx": 3206, "task_index": 1}, {"db_idx": 3207, "episode_idx": 9, "frame_idx": 220, "global_frame_idx": 3207, "task_index": 1}, {"db_idx": 3208, "episode_idx": 9, "frame_idx": 221, "global_frame_idx": 3208, "task_index": 1}, {"db_idx": 3209, "episode_idx": 9, "frame_idx": 222, "global_frame_idx": 3209, "task_index": 1}, {"db_idx": 3210, "episode_idx": 9, "frame_idx": 223, "global_frame_idx": 3210, "task_index": 1}, {"db_idx": 3211, "episode_idx": 9, "frame_idx": 224, "global_frame_idx": 3211, "task_index": 1}, {"db_idx": 3212, "episode_idx": 9, "frame_idx": 225, "global_frame_idx": 3212, "task_index": 1}, {"db_idx": 3213, "episode_idx": 9, "frame_idx": 226, "global_frame_idx": 3213, "task_index": 1}, {"db_idx": 3214, "episode_idx": 9, "frame_idx": 227, "global_frame_idx": 3214, "task_index": 1}, {"db_idx": 3215, "episode_idx": 9, "frame_idx": 228, "global_frame_idx": 3215, "task_index": 1}, {"db_idx": 3216, "episode_idx": 9, "frame_idx": 229, "global_frame_idx": 3216, "task_index": 1}, {"db_idx": 3217, "episode_idx": 9, "frame_idx": 230, "global_frame_idx": 3217, "task_index": 1}, {"db_idx": 
3218, "episode_idx": 9, "frame_idx": 231, "global_frame_idx": 3218, "task_index": 1}, {"db_idx": 3219, "episode_idx": 9, "frame_idx": 232, "global_frame_idx": 3219, "task_index": 1}, {"db_idx": 3220, "episode_idx": 9, "frame_idx": 233, "global_frame_idx": 3220, "task_index": 1}, {"db_idx": 3221, "episode_idx": 9, "frame_idx": 234, "global_frame_idx": 3221, "task_index": 1}, {"db_idx": 3222, "episode_idx": 9, "frame_idx": 235, "global_frame_idx": 3222, "task_index": 1}, {"db_idx": 3223, "episode_idx": 9, "frame_idx": 236, "global_frame_idx": 3223, "task_index": 1}, {"db_idx": 3224, "episode_idx": 9, "frame_idx": 237, "global_frame_idx": 3224, "task_index": 1}, {"db_idx": 3225, "episode_idx": 9, "frame_idx": 238, "global_frame_idx": 3225, "task_index": 1}, {"db_idx": 3226, "episode_idx": 9, "frame_idx": 239, "global_frame_idx": 3226, "task_index": 1}, {"db_idx": 3227, "episode_idx": 9, "frame_idx": 240, "global_frame_idx": 3227, "task_index": 1}, {"db_idx": 3228, "episode_idx": 9, "frame_idx": 241, "global_frame_idx": 3228, "task_index": 1}, {"db_idx": 3229, "episode_idx": 9, "frame_idx": 242, "global_frame_idx": 3229, "task_index": 1}, {"db_idx": 3230, "episode_idx": 9, "frame_idx": 243, "global_frame_idx": 3230, "task_index": 1}, {"db_idx": 3231, "episode_idx": 9, "frame_idx": 244, "global_frame_idx": 3231, "task_index": 1}, {"db_idx": 3232, "episode_idx": 9, "frame_idx": 245, "global_frame_idx": 3232, "task_index": 1}, {"db_idx": 3233, "episode_idx": 9, "frame_idx": 246, "global_frame_idx": 3233, "task_index": 1}, {"db_idx": 3234, "episode_idx": 9, "frame_idx": 247, "global_frame_idx": 3234, "task_index": 1}, {"db_idx": 3235, "episode_idx": 9, "frame_idx": 248, "global_frame_idx": 3235, "task_index": 1}, {"db_idx": 3236, "episode_idx": 9, "frame_idx": 249, "global_frame_idx": 3236, "task_index": 1}, {"db_idx": 3237, "episode_idx": 9, "frame_idx": 250, "global_frame_idx": 3237, "task_index": 1}, {"db_idx": 3238, "episode_idx": 9, "frame_idx": 251, 
"global_frame_idx": 3238, "task_index": 1}, {"db_idx": 3239, "episode_idx": 9, "frame_idx": 252, "global_frame_idx": 3239, "task_index": 1}, {"db_idx": 3240, "episode_idx": 9, "frame_idx": 253, "global_frame_idx": 3240, "task_index": 1}, {"db_idx": 3241, "episode_idx": 9, "frame_idx": 254, "global_frame_idx": 3241, "task_index": 1}, {"db_idx": 3242, "episode_idx": 9, "frame_idx": 255, "global_frame_idx": 3242, "task_index": 1}, {"db_idx": 3243, "episode_idx": 9, "frame_idx": 256, "global_frame_idx": 3243, "task_index": 1}, {"db_idx": 3244, "episode_idx": 9, "frame_idx": 257, "global_frame_idx": 3244, "task_index": 1}, {"db_idx": 3245, "episode_idx": 9, "frame_idx": 258, "global_frame_idx": 3245, "task_index": 1}, {"db_idx": 3246, "episode_idx": 9, "frame_idx": 259, "global_frame_idx": 3246, "task_index": 1}, {"db_idx": 3247, "episode_idx": 9, "frame_idx": 260, "global_frame_idx": 3247, "task_index": 1}, {"db_idx": 3248, "episode_idx": 9, "frame_idx": 261, "global_frame_idx": 3248, "task_index": 1}, {"db_idx": 3249, "episode_idx": 9, "frame_idx": 262, "global_frame_idx": 3249, "task_index": 1}, {"db_idx": 3250, "episode_idx": 9, "frame_idx": 263, "global_frame_idx": 3250, "task_index": 1}, {"db_idx": 3251, "episode_idx": 9, "frame_idx": 264, "global_frame_idx": 3251, "task_index": 1}, {"db_idx": 3252, "episode_idx": 9, "frame_idx": 265, "global_frame_idx": 3252, "task_index": 1}, {"db_idx": 3253, "episode_idx": 9, "frame_idx": 266, "global_frame_idx": 3253, "task_index": 1}, {"db_idx": 3254, "episode_idx": 9, "frame_idx": 267, "global_frame_idx": 3254, "task_index": 1}, {"db_idx": 3255, "episode_idx": 9, "frame_idx": 268, "global_frame_idx": 3255, "task_index": 1}, {"db_idx": 3256, "episode_idx": 9, "frame_idx": 269, "global_frame_idx": 3256, "task_index": 1}, {"db_idx": 3257, "episode_idx": 9, "frame_idx": 270, "global_frame_idx": 3257, "task_index": 1}, {"db_idx": 3258, "episode_idx": 9, "frame_idx": 271, "global_frame_idx": 3258, "task_index": 1}, {"db_idx": 
3259, "episode_idx": 9, "frame_idx": 272, "global_frame_idx": 3259, "task_index": 1}, {"db_idx": 3260, "episode_idx": 9, "frame_idx": 273, "global_frame_idx": 3260, "task_index": 1}, {"db_idx": 3261, "episode_idx": 9, "frame_idx": 274, "global_frame_idx": 3261, "task_index": 1}, {"db_idx": 3262, "episode_idx": 9, "frame_idx": 275, "global_frame_idx": 3262, "task_index": 1}, {"db_idx": 3263, "episode_idx": 9, "frame_idx": 276, "global_frame_idx": 3263, "task_index": 1}, {"db_idx": 3264, "episode_idx": 9, "frame_idx": 277, "global_frame_idx": 3264, "task_index": 1}, {"db_idx": 3265, "episode_idx": 9, "frame_idx": 278, "global_frame_idx": 3265, "task_index": 1}, {"db_idx": 3266, "episode_idx": 9, "frame_idx": 279, "global_frame_idx": 3266, "task_index": 1}, {"db_idx": 3267, "episode_idx": 9, "frame_idx": 280, "global_frame_idx": 3267, "task_index": 1}, {"db_idx": 3268, "episode_idx": 9, "frame_idx": 281, "global_frame_idx": 3268, "task_index": 1}, {"db_idx": 3269, "episode_idx": 9, "frame_idx": 282, "global_frame_idx": 3269, "task_index": 1}, {"db_idx": 3270, "episode_idx": 9, "frame_idx": 283, "global_frame_idx": 3270, "task_index": 1}, {"db_idx": 3271, "episode_idx": 9, "frame_idx": 284, "global_frame_idx": 3271, "task_index": 1}, {"db_idx": 3272, "episode_idx": 9, "frame_idx": 285, "global_frame_idx": 3272, "task_index": 1}, {"db_idx": 3273, "episode_idx": 9, "frame_idx": 286, "global_frame_idx": 3273, "task_index": 1}, {"db_idx": 3274, "episode_idx": 9, "frame_idx": 287, "global_frame_idx": 3274, "task_index": 1}, {"db_idx": 3275, "episode_idx": 9, "frame_idx": 288, "global_frame_idx": 3275, "task_index": 1}, {"db_idx": 3276, "episode_idx": 9, "frame_idx": 289, "global_frame_idx": 3276, "task_index": 1}, {"db_idx": 3277, "episode_idx": 9, "frame_idx": 290, "global_frame_idx": 3277, "task_index": 1}, {"db_idx": 3278, "episode_idx": 9, "frame_idx": 291, "global_frame_idx": 3278, "task_index": 1}, {"db_idx": 3279, "episode_idx": 9, "frame_idx": 292, 
"global_frame_idx": 3279, "task_index": 1}, {"db_idx": 3280, "episode_idx": 9, "frame_idx": 293, "global_frame_idx": 3280, "task_index": 1}, {"db_idx": 3281, "episode_idx": 9, "frame_idx": 294, "global_frame_idx": 3281, "task_index": 1}, {"db_idx": 3282, "episode_idx": 9, "frame_idx": 295, "global_frame_idx": 3282, "task_index": 1}, {"db_idx": 3283, "episode_idx": 9, "frame_idx": 296, "global_frame_idx": 3283, "task_index": 1}, {"db_idx": 3284, "episode_idx": 9, "frame_idx": 297, "global_frame_idx": 3284, "task_index": 1}, {"db_idx": 3285, "episode_idx": 9, "frame_idx": 298, "global_frame_idx": 3285, "task_index": 1}, {"db_idx": 3286, "episode_idx": 9, "frame_idx": 299, "global_frame_idx": 3286, "task_index": 1}, {"db_idx": 3287, "episode_idx": 9, "frame_idx": 300, "global_frame_idx": 3287, "task_index": 1}, {"db_idx": 3288, "episode_idx": 9, "frame_idx": 301, "global_frame_idx": 3288, "task_index": 1}, {"db_idx": 3289, "episode_idx": 9, "frame_idx": 302, "global_frame_idx": 3289, "task_index": 1}, {"db_idx": 3290, "episode_idx": 9, "frame_idx": 303, "global_frame_idx": 3290, "task_index": 1}, {"db_idx": 3291, "episode_idx": 9, "frame_idx": 304, "global_frame_idx": 3291, "task_index": 1}, {"db_idx": 3292, "episode_idx": 9, "frame_idx": 305, "global_frame_idx": 3292, "task_index": 1}, {"db_idx": 3293, "episode_idx": 9, "frame_idx": 306, "global_frame_idx": 3293, "task_index": 1}, {"db_idx": 3294, "episode_idx": 9, "frame_idx": 307, "global_frame_idx": 3294, "task_index": 1}, {"db_idx": 3295, "episode_idx": 9, "frame_idx": 308, "global_frame_idx": 3295, "task_index": 1}, {"db_idx": 3296, "episode_idx": 9, "frame_idx": 309, "global_frame_idx": 3296, "task_index": 1}, {"db_idx": 3297, "episode_idx": 9, "frame_idx": 310, "global_frame_idx": 3297, "task_index": 1}, {"db_idx": 3298, "episode_idx": 9, "frame_idx": 311, "global_frame_idx": 3298, "task_index": 1}, {"db_idx": 3299, "episode_idx": 9, "frame_idx": 312, "global_frame_idx": 3299, "task_index": 1}, {"db_idx": 
3300, "episode_idx": 9, "frame_idx": 313, "global_frame_idx": 3300, "task_index": 1}, {"db_idx": 3301, "episode_idx": 9, "frame_idx": 314, "global_frame_idx": 3301, "task_index": 1}, {"db_idx": 3302, "episode_idx": 9, "frame_idx": 315, "global_frame_idx": 3302, "task_index": 1}, {"db_idx": 3303, "episode_idx": 9, "frame_idx": 316, "global_frame_idx": 3303, "task_index": 1}, {"db_idx": 3304, "episode_idx": 9, "frame_idx": 317, "global_frame_idx": 3304, "task_index": 1}, {"db_idx": 3305, "episode_idx": 9, "frame_idx": 318, "global_frame_idx": 3305, "task_index": 1}, {"db_idx": 3306, "episode_idx": 9, "frame_idx": 319, "global_frame_idx": 3306, "task_index": 1}, {"db_idx": 3307, "episode_idx": 9, "frame_idx": 320, "global_frame_idx": 3307, "task_index": 1}, {"db_idx": 3308, "episode_idx": 9, "frame_idx": 321, "global_frame_idx": 3308, "task_index": 1}, {"db_idx": 3309, "episode_idx": 9, "frame_idx": 322, "global_frame_idx": 3309, "task_index": 1}, {"db_idx": 3310, "episode_idx": 9, "frame_idx": 323, "global_frame_idx": 3310, "task_index": 1}, {"db_idx": 3311, "episode_idx": 9, "frame_idx": 324, "global_frame_idx": 3311, "task_index": 1}, {"db_idx": 3312, "episode_idx": 9, "frame_idx": 325, "global_frame_idx": 3312, "task_index": 1}, {"db_idx": 3313, "episode_idx": 9, "frame_idx": 326, "global_frame_idx": 3313, "task_index": 1}, {"db_idx": 3314, "episode_idx": 9, "frame_idx": 327, "global_frame_idx": 3314, "task_index": 1}, {"db_idx": 3315, "episode_idx": 9, "frame_idx": 328, "global_frame_idx": 3315, "task_index": 1}, {"db_idx": 3316, "episode_idx": 9, "frame_idx": 329, "global_frame_idx": 3316, "task_index": 1}, {"db_idx": 3317, "episode_idx": 9, "frame_idx": 330, "global_frame_idx": 3317, "task_index": 1}, {"db_idx": 3318, "episode_idx": 9, "frame_idx": 331, "global_frame_idx": 3318, "task_index": 1}, {"db_idx": 3319, "episode_idx": 9, "frame_idx": 332, "global_frame_idx": 3319, "task_index": 1}, {"db_idx": 3320, "episode_idx": 9, "frame_idx": 333, 
"global_frame_idx": 3320, "task_index": 1}, {"db_idx": 3321, "episode_idx": 9, "frame_idx": 334, "global_frame_idx": 3321, "task_index": 1}, {"db_idx": 3322, "episode_idx": 9, "frame_idx": 335, "global_frame_idx": 3322, "task_index": 1}, {"db_idx": 3323, "episode_idx": 9, "frame_idx": 336, "global_frame_idx": 3323, "task_index": 1}, {"db_idx": 3324, "episode_idx": 9, "frame_idx": 337, "global_frame_idx": 3324, "task_index": 1}, {"db_idx": 3325, "episode_idx": 9, "frame_idx": 338, "global_frame_idx": 3325, "task_index": 1}, {"db_idx": 3326, "episode_idx": 9, "frame_idx": 339, "global_frame_idx": 3326, "task_index": 1}, {"db_idx": 3327, "episode_idx": 9, "frame_idx": 340, "global_frame_idx": 3327, "task_index": 1}, {"db_idx": 3328, "episode_idx": 9, "frame_idx": 341, "global_frame_idx": 3328, "task_index": 1}, {"db_idx": 3329, "episode_idx": 9, "frame_idx": 342, "global_frame_idx": 3329, "task_index": 1}, {"db_idx": 3330, "episode_idx": 9, "frame_idx": 343, "global_frame_idx": 3330, "task_index": 1}, {"db_idx": 3331, "episode_idx": 9, "frame_idx": 344, "global_frame_idx": 3331, "task_index": 1}, {"db_idx": 3332, "episode_idx": 9, "frame_idx": 345, "global_frame_idx": 3332, "task_index": 1}, {"db_idx": 3333, "episode_idx": 9, "frame_idx": 346, "global_frame_idx": 3333, "task_index": 1}, {"db_idx": 3334, "episode_idx": 9, "frame_idx": 347, "global_frame_idx": 3334, "task_index": 1}, {"db_idx": 3335, "episode_idx": 9, "frame_idx": 348, "global_frame_idx": 3335, "task_index": 1}, {"db_idx": 3336, "episode_idx": 9, "frame_idx": 349, "global_frame_idx": 3336, "task_index": 1}, {"db_idx": 3337, "episode_idx": 9, "frame_idx": 350, "global_frame_idx": 3337, "task_index": 1}, {"db_idx": 3338, "episode_idx": 9, "frame_idx": 351, "global_frame_idx": 3338, "task_index": 1}, {"db_idx": 3339, "episode_idx": 9, "frame_idx": 352, "global_frame_idx": 3339, "task_index": 1}, {"db_idx": 3340, "episode_idx": 9, "frame_idx": 353, "global_frame_idx": 3340, "task_index": 1}, {"db_idx": 
3341, "episode_idx": 9, "frame_idx": 354, "global_frame_idx": 3341, "task_index": 1}, {"db_idx": 3342, "episode_idx": 9, "frame_idx": 355, "global_frame_idx": 3342, "task_index": 1}, {"db_idx": 3343, "episode_idx": 9, "frame_idx": 356, "global_frame_idx": 3343, "task_index": 1}, {"db_idx": 3344, "episode_idx": 9, "frame_idx": 357, "global_frame_idx": 3344, "task_index": 1}, {"db_idx": 3345, "episode_idx": 9, "frame_idx": 358, "global_frame_idx": 3345, "task_index": 1}, {"db_idx": 3346, "episode_idx": 9, "frame_idx": 359, "global_frame_idx": 3346, "task_index": 1}, {"db_idx": 3347, "episode_idx": 9, "frame_idx": 360, "global_frame_idx": 3347, "task_index": 1}, {"db_idx": 3348, "episode_idx": 9, "frame_idx": 361, "global_frame_idx": 3348, "task_index": 1}, {"db_idx": 3349, "episode_idx": 9, "frame_idx": 362, "global_frame_idx": 3349, "task_index": 1}, {"db_idx": 3350, "episode_idx": 9, "frame_idx": 363, "global_frame_idx": 3350, "task_index": 1}, {"db_idx": 3351, "episode_idx": 9, "frame_idx": 364, "global_frame_idx": 3351, "task_index": 1}, {"db_idx": 3352, "episode_idx": 9, "frame_idx": 365, "global_frame_idx": 3352, "task_index": 1}, {"db_idx": 3353, "episode_idx": 9, "frame_idx": 366, "global_frame_idx": 3353, "task_index": 1}, {"db_idx": 3354, "episode_idx": 9, "frame_idx": 367, "global_frame_idx": 3354, "task_index": 1}, {"db_idx": 3355, "episode_idx": 9, "frame_idx": 368, "global_frame_idx": 3355, "task_index": 1}, {"db_idx": 3356, "episode_idx": 9, "frame_idx": 369, "global_frame_idx": 3356, "task_index": 1}, {"db_idx": 3357, "episode_idx": 9, "frame_idx": 370, "global_frame_idx": 3357, "task_index": 1}, {"db_idx": 3358, "episode_idx": 9, "frame_idx": 371, "global_frame_idx": 3358, "task_index": 1}, {"db_idx": 3359, "episode_idx": 9, "frame_idx": 372, "global_frame_idx": 3359, "task_index": 1}, {"db_idx": 3360, "episode_idx": 9, "frame_idx": 373, "global_frame_idx": 3360, "task_index": 1}, {"db_idx": 3361, "episode_idx": 9, "frame_idx": 374, 
"global_frame_idx": 3361, "task_index": 1}, {"db_idx": 3362, "episode_idx": 9, "frame_idx": 375, "global_frame_idx": 3362, "task_index": 1}, {"db_idx": 3363, "episode_idx": 9, "frame_idx": 376, "global_frame_idx": 3363, "task_index": 1}, {"db_idx": 3364, "episode_idx": 9, "frame_idx": 377, "global_frame_idx": 3364, "task_index": 1}, {"db_idx": 3365, "episode_idx": 9, "frame_idx": 378, "global_frame_idx": 3365, "task_index": 1}, {"db_idx": 3366, "episode_idx": 9, "frame_idx": 379, "global_frame_idx": 3366, "task_index": 1}, {"db_idx": 3367, "episode_idx": 9, "frame_idx": 380, "global_frame_idx": 3367, "task_index": 1}, {"db_idx": 3368, "episode_idx": 9, "frame_idx": 381, "global_frame_idx": 3368, "task_index": 1}, {"db_idx": 3369, "episode_idx": 9, "frame_idx": 382, "global_frame_idx": 3369, "task_index": 1}, {"db_idx": 3370, "episode_idx": 9, "frame_idx": 383, "global_frame_idx": 3370, "task_index": 1}, {"db_idx": 3371, "episode_idx": 9, "frame_idx": 384, "global_frame_idx": 3371, "task_index": 1}, {"db_idx": 3372, "episode_idx": 9, "frame_idx": 385, "global_frame_idx": 3372, "task_index": 1}, {"db_idx": 3373, "episode_idx": 9, "frame_idx": 386, "global_frame_idx": 3373, "task_index": 1}, {"db_idx": 3374, "episode_idx": 9, "frame_idx": 387, "global_frame_idx": 3374, "task_index": 1}, {"db_idx": 3375, "episode_idx": 9, "frame_idx": 388, "global_frame_idx": 3375, "task_index": 1}, {"db_idx": 3376, "episode_idx": 9, "frame_idx": 389, "global_frame_idx": 3376, "task_index": 1}, {"db_idx": 3377, "episode_idx": 9, "frame_idx": 390, "global_frame_idx": 3377, "task_index": 1}, {"db_idx": 3378, "episode_idx": 9, "frame_idx": 391, "global_frame_idx": 3378, "task_index": 1}, {"db_idx": 3379, "episode_idx": 9, "frame_idx": 392, "global_frame_idx": 3379, "task_index": 1}, {"db_idx": 3380, "episode_idx": 9, "frame_idx": 393, "global_frame_idx": 3380, "task_index": 1}, {"db_idx": 3381, "episode_idx": 9, "frame_idx": 394, "global_frame_idx": 3381, "task_index": 1}, {"db_idx": 
3382, "episode_idx": 9, "frame_idx": 395, "global_frame_idx": 3382, "task_index": 1}, {"db_idx": 3383, "episode_idx": 9, "frame_idx": 396, "global_frame_idx": 3383, "task_index": 1}, {"db_idx": 3384, "episode_idx": 9, "frame_idx": 397, "global_frame_idx": 3384, "task_index": 1}, {"db_idx": 3385, "episode_idx": 9, "frame_idx": 398, "global_frame_idx": 3385, "task_index": 1}, {"db_idx": 3386, "episode_idx": 9, "frame_idx": 399, "global_frame_idx": 3386, "task_index": 1}, {"db_idx": 3387, "episode_idx": 9, "frame_idx": 400, "global_frame_idx": 3387, "task_index": 1}, {"db_idx": 3388, "episode_idx": 9, "frame_idx": 401, "global_frame_idx": 3388, "task_index": 1}, {"db_idx": 3389, "episode_idx": 9, "frame_idx": 402, "global_frame_idx": 3389, "task_index": 1}, {"db_idx": 3390, "episode_idx": 9, "frame_idx": 403, "global_frame_idx": 3390, "task_index": 1}, {"db_idx": 3391, "episode_idx": 9, "frame_idx": 404, "global_frame_idx": 3391, "task_index": 1}, {"db_idx": 3392, "episode_idx": 9, "frame_idx": 405, "global_frame_idx": 3392, "task_index": 1}, {"db_idx": 3393, "episode_idx": 9, "frame_idx": 406, "global_frame_idx": 3393, "task_index": 1}, {"db_idx": 3394, "episode_idx": 9, "frame_idx": 407, "global_frame_idx": 3394, "task_index": 1}, {"db_idx": 3395, "episode_idx": 9, "frame_idx": 408, "global_frame_idx": 3395, "task_index": 1}, {"db_idx": 3396, "episode_idx": 9, "frame_idx": 409, "global_frame_idx": 3396, "task_index": 1}, {"db_idx": 3397, "episode_idx": 9, "frame_idx": 410, "global_frame_idx": 3397, "task_index": 1}, {"db_idx": 3398, "episode_idx": 9, "frame_idx": 411, "global_frame_idx": 3398, "task_index": 1}, {"db_idx": 3399, "episode_idx": 9, "frame_idx": 412, "global_frame_idx": 3399, "task_index": 1}, {"db_idx": 3400, "episode_idx": 9, "frame_idx": 413, "global_frame_idx": 3400, "task_index": 1}, {"db_idx": 3401, "episode_idx": 9, "frame_idx": 414, "global_frame_idx": 3401, "task_index": 1}, {"db_idx": 3402, "episode_idx": 9, "frame_idx": 415, 
"global_frame_idx": 3402, "task_index": 1}, {"db_idx": 3403, "episode_idx": 9, "frame_idx": 416, "global_frame_idx": 3403, "task_index": 1}, {"db_idx": 3404, "episode_idx": 9, "frame_idx": 417, "global_frame_idx": 3404, "task_index": 1}, {"db_idx": 3405, "episode_idx": 9, "frame_idx": 418, "global_frame_idx": 3405, "task_index": 1}, {"db_idx": 3406, "episode_idx": 9, "frame_idx": 419, "global_frame_idx": 3406, "task_index": 1}, {"db_idx": 3407, "episode_idx": 9, "frame_idx": 420, "global_frame_idx": 3407, "task_index": 1}, {"db_idx": 3408, "episode_idx": 9, "frame_idx": 421, "global_frame_idx": 3408, "task_index": 1}, {"db_idx": 3409, "episode_idx": 9, "frame_idx": 422, "global_frame_idx": 3409, "task_index": 1}, {"db_idx": 3410, "episode_idx": 9, "frame_idx": 423, "global_frame_idx": 3410, "task_index": 1}, {"db_idx": 3411, "episode_idx": 9, "frame_idx": 424, "global_frame_idx": 3411, "task_index": 1}, {"db_idx": 3412, "episode_idx": 9, "frame_idx": 425, "global_frame_idx": 3412, "task_index": 1}, {"db_idx": 3413, "episode_idx": 9, "frame_idx": 426, "global_frame_idx": 3413, "task_index": 1}, {"db_idx": 3414, "episode_idx": 9, "frame_idx": 427, "global_frame_idx": 3414, "task_index": 1}, {"db_idx": 3415, "episode_idx": 9, "frame_idx": 428, "global_frame_idx": 3415, "task_index": 1}, {"db_idx": 3416, "episode_idx": 9, "frame_idx": 429, "global_frame_idx": 3416, "task_index": 1}, {"db_idx": 3417, "episode_idx": 9, "frame_idx": 430, "global_frame_idx": 3417, "task_index": 1}, {"db_idx": 3418, "episode_idx": 9, "frame_idx": 431, "global_frame_idx": 3418, "task_index": 1}, {"db_idx": 3419, "episode_idx": 9, "frame_idx": 432, "global_frame_idx": 3419, "task_index": 1}, {"db_idx": 3420, "episode_idx": 9, "frame_idx": 433, "global_frame_idx": 3420, "task_index": 1}, {"db_idx": 3421, "episode_idx": 9, "frame_idx": 434, "global_frame_idx": 3421, "task_index": 1}, {"db_idx": 3422, "episode_idx": 9, "frame_idx": 435, "global_frame_idx": 3422, "task_index": 1}, {"db_idx": 
3423, "episode_idx": 9, "frame_idx": 436, "global_frame_idx": 3423, "task_index": 1}, {"db_idx": 3424, "episode_idx": 9, "frame_idx": 437, "global_frame_idx": 3424, "task_index": 1}, {"db_idx": 3425, "episode_idx": 9, "frame_idx": 438, "global_frame_idx": 3425, "task_index": 1}, {"db_idx": 3426, "episode_idx": 9, "frame_idx": 439, "global_frame_idx": 3426, "task_index": 1}, {"db_idx": 3427, "episode_idx": 9, "frame_idx": 440, "global_frame_idx": 3427, "task_index": 1}, {"db_idx": 3428, "episode_idx": 9, "frame_idx": 441, "global_frame_idx": 3428, "task_index": 1}, {"db_idx": 3429, "episode_idx": 9, "frame_idx": 442, "global_frame_idx": 3429, "task_index": 1}, {"db_idx": 3430, "episode_idx": 9, "frame_idx": 443, "global_frame_idx": 3430, "task_index": 1}, {"db_idx": 3431, "episode_idx": 9, "frame_idx": 444, "global_frame_idx": 3431, "task_index": 1}, {"db_idx": 3432, "episode_idx": 9, "frame_idx": 445, "global_frame_idx": 3432, "task_index": 1}, {"db_idx": 3433, "episode_idx": 9, "frame_idx": 446, "global_frame_idx": 3433, "task_index": 1}, {"db_idx": 3434, "episode_idx": 9, "frame_idx": 447, "global_frame_idx": 3434, "task_index": 1}, {"db_idx": 3435, "episode_idx": 9, "frame_idx": 448, "global_frame_idx": 3435, "task_index": 1}, {"db_idx": 3436, "episode_idx": 9, "frame_idx": 449, "global_frame_idx": 3436, "task_index": 1}, {"db_idx": 3437, "episode_idx": 10, "frame_idx": 0, "global_frame_idx": 3437, "task_index": 2}, {"db_idx": 3438, "episode_idx": 10, "frame_idx": 1, "global_frame_idx": 3438, "task_index": 2}, {"db_idx": 3439, "episode_idx": 10, "frame_idx": 2, "global_frame_idx": 3439, "task_index": 2}, {"db_idx": 3440, "episode_idx": 10, "frame_idx": 3, "global_frame_idx": 3440, "task_index": 2}, {"db_idx": 3441, "episode_idx": 10, "frame_idx": 4, "global_frame_idx": 3441, "task_index": 2}, {"db_idx": 3442, "episode_idx": 10, "frame_idx": 5, "global_frame_idx": 3442, "task_index": 2}, {"db_idx": 3443, "episode_idx": 10, "frame_idx": 6, "global_frame_idx": 
3443, "task_index": 2}, {"db_idx": 3444, "episode_idx": 10, "frame_idx": 7, "global_frame_idx": 3444, "task_index": 2}, {"db_idx": 3445, "episode_idx": 10, "frame_idx": 8, "global_frame_idx": 3445, "task_index": 2}, {"db_idx": 3446, "episode_idx": 10, "frame_idx": 9, "global_frame_idx": 3446, "task_index": 2}, {"db_idx": 3447, "episode_idx": 10, "frame_idx": 10, "global_frame_idx": 3447, "task_index": 2}, {"db_idx": 3448, "episode_idx": 10, "frame_idx": 11, "global_frame_idx": 3448, "task_index": 2}, {"db_idx": 3449, "episode_idx": 10, "frame_idx": 12, "global_frame_idx": 3449, "task_index": 2}, {"db_idx": 3450, "episode_idx": 10, "frame_idx": 13, "global_frame_idx": 3450, "task_index": 2}, {"db_idx": 3451, "episode_idx": 10, "frame_idx": 14, "global_frame_idx": 3451, "task_index": 2}, {"db_idx": 3452, "episode_idx": 10, "frame_idx": 15, "global_frame_idx": 3452, "task_index": 2}, {"db_idx": 3453, "episode_idx": 10, "frame_idx": 16, "global_frame_idx": 3453, "task_index": 2}, {"db_idx": 3454, "episode_idx": 10, "frame_idx": 17, "global_frame_idx": 3454, "task_index": 2}, {"db_idx": 3455, "episode_idx": 10, "frame_idx": 18, "global_frame_idx": 3455, "task_index": 2}, {"db_idx": 3456, "episode_idx": 10, "frame_idx": 19, "global_frame_idx": 3456, "task_index": 2}, {"db_idx": 3457, "episode_idx": 10, "frame_idx": 20, "global_frame_idx": 3457, "task_index": 2}, {"db_idx": 3458, "episode_idx": 10, "frame_idx": 21, "global_frame_idx": 3458, "task_index": 2}, {"db_idx": 3459, "episode_idx": 10, "frame_idx": 22, "global_frame_idx": 3459, "task_index": 2}, {"db_idx": 3460, "episode_idx": 10, "frame_idx": 23, "global_frame_idx": 3460, "task_index": 2}, {"db_idx": 3461, "episode_idx": 10, "frame_idx": 24, "global_frame_idx": 3461, "task_index": 2}, {"db_idx": 3462, "episode_idx": 10, "frame_idx": 25, "global_frame_idx": 3462, "task_index": 2}, {"db_idx": 3463, "episode_idx": 10, "frame_idx": 26, "global_frame_idx": 3463, "task_index": 2}, {"db_idx": 3464, "episode_idx": 10, 
"frame_idx": 27, "global_frame_idx": 3464, "task_index": 2}, {"db_idx": 3465, "episode_idx": 10, "frame_idx": 28, "global_frame_idx": 3465, "task_index": 2}, {"db_idx": 3466, "episode_idx": 10, "frame_idx": 29, "global_frame_idx": 3466, "task_index": 2}, {"db_idx": 3467, "episode_idx": 10, "frame_idx": 30, "global_frame_idx": 3467, "task_index": 2}, {"db_idx": 3468, "episode_idx": 10, "frame_idx": 31, "global_frame_idx": 3468, "task_index": 2}, {"db_idx": 3469, "episode_idx": 10, "frame_idx": 32, "global_frame_idx": 3469, "task_index": 2}, {"db_idx": 3470, "episode_idx": 10, "frame_idx": 33, "global_frame_idx": 3470, "task_index": 2}, {"db_idx": 3471, "episode_idx": 10, "frame_idx": 34, "global_frame_idx": 3471, "task_index": 2}, {"db_idx": 3472, "episode_idx": 10, "frame_idx": 35, "global_frame_idx": 3472, "task_index": 2}, {"db_idx": 3473, "episode_idx": 10, "frame_idx": 36, "global_frame_idx": 3473, "task_index": 2}, {"db_idx": 3474, "episode_idx": 10, "frame_idx": 37, "global_frame_idx": 3474, "task_index": 2}, {"db_idx": 3475, "episode_idx": 10, "frame_idx": 38, "global_frame_idx": 3475, "task_index": 2}, {"db_idx": 3476, "episode_idx": 10, "frame_idx": 39, "global_frame_idx": 3476, "task_index": 2}, {"db_idx": 3477, "episode_idx": 10, "frame_idx": 40, "global_frame_idx": 3477, "task_index": 2}, {"db_idx": 3478, "episode_idx": 10, "frame_idx": 41, "global_frame_idx": 3478, "task_index": 2}, {"db_idx": 3479, "episode_idx": 10, "frame_idx": 42, "global_frame_idx": 3479, "task_index": 2}, {"db_idx": 3480, "episode_idx": 10, "frame_idx": 43, "global_frame_idx": 3480, "task_index": 2}, {"db_idx": 3481, "episode_idx": 10, "frame_idx": 44, "global_frame_idx": 3481, "task_index": 2}, {"db_idx": 3482, "episode_idx": 10, "frame_idx": 45, "global_frame_idx": 3482, "task_index": 2}, {"db_idx": 3483, "episode_idx": 10, "frame_idx": 46, "global_frame_idx": 3483, "task_index": 2}, {"db_idx": 3484, "episode_idx": 10, "frame_idx": 47, "global_frame_idx": 3484, "task_index": 
2}, {"db_idx": 3485, "episode_idx": 10, "frame_idx": 48, "global_frame_idx": 3485, "task_index": 2}, {"db_idx": 3486, "episode_idx": 10, "frame_idx": 49, "global_frame_idx": 3486, "task_index": 2}, {"db_idx": 3487, "episode_idx": 10, "frame_idx": 50, "global_frame_idx": 3487, "task_index": 2}, {"db_idx": 3488, "episode_idx": 10, "frame_idx": 51, "global_frame_idx": 3488, "task_index": 2}, {"db_idx": 3489, "episode_idx": 10, "frame_idx": 52, "global_frame_idx": 3489, "task_index": 2}, {"db_idx": 3490, "episode_idx": 10, "frame_idx": 53, "global_frame_idx": 3490, "task_index": 2}, {"db_idx": 3491, "episode_idx": 10, "frame_idx": 54, "global_frame_idx": 3491, "task_index": 2}, {"db_idx": 3492, "episode_idx": 10, "frame_idx": 55, "global_frame_idx": 3492, "task_index": 2}, {"db_idx": 3493, "episode_idx": 10, "frame_idx": 56, "global_frame_idx": 3493, "task_index": 2}, {"db_idx": 3494, "episode_idx": 10, "frame_idx": 57, "global_frame_idx": 3494, "task_index": 2}, {"db_idx": 3495, "episode_idx": 10, "frame_idx": 58, "global_frame_idx": 3495, "task_index": 2}, {"db_idx": 3496, "episode_idx": 10, "frame_idx": 59, "global_frame_idx": 3496, "task_index": 2}, {"db_idx": 3497, "episode_idx": 10, "frame_idx": 60, "global_frame_idx": 3497, "task_index": 2}, {"db_idx": 3498, "episode_idx": 10, "frame_idx": 61, "global_frame_idx": 3498, "task_index": 2}, {"db_idx": 3499, "episode_idx": 10, "frame_idx": 62, "global_frame_idx": 3499, "task_index": 2}, {"db_idx": 3500, "episode_idx": 10, "frame_idx": 63, "global_frame_idx": 3500, "task_index": 2}, {"db_idx": 3501, "episode_idx": 10, "frame_idx": 64, "global_frame_idx": 3501, "task_index": 2}, {"db_idx": 3502, "episode_idx": 10, "frame_idx": 65, "global_frame_idx": 3502, "task_index": 2}, {"db_idx": 3503, "episode_idx": 10, "frame_idx": 66, "global_frame_idx": 3503, "task_index": 2}, {"db_idx": 3504, "episode_idx": 10, "frame_idx": 67, "global_frame_idx": 3504, "task_index": 2}, {"db_idx": 3505, "episode_idx": 10, "frame_idx": 68, 
"global_frame_idx": 3505, "task_index": 2}, {"db_idx": 3506, "episode_idx": 10, "frame_idx": 69, "global_frame_idx": 3506, "task_index": 2}, {"db_idx": 3507, "episode_idx": 10, "frame_idx": 70, "global_frame_idx": 3507, "task_index": 2}, {"db_idx": 3508, "episode_idx": 10, "frame_idx": 71, "global_frame_idx": 3508, "task_index": 2}, {"db_idx": 3509, "episode_idx": 10, "frame_idx": 72, "global_frame_idx": 3509, "task_index": 2}, {"db_idx": 3510, "episode_idx": 10, "frame_idx": 73, "global_frame_idx": 3510, "task_index": 2}, {"db_idx": 3511, "episode_idx": 10, "frame_idx": 74, "global_frame_idx": 3511, "task_index": 2}, {"db_idx": 3512, "episode_idx": 10, "frame_idx": 75, "global_frame_idx": 3512, "task_index": 2}, {"db_idx": 3513, "episode_idx": 10, "frame_idx": 76, "global_frame_idx": 3513, "task_index": 2}, {"db_idx": 3514, "episode_idx": 10, "frame_idx": 77, "global_frame_idx": 3514, "task_index": 2}, {"db_idx": 3515, "episode_idx": 10, "frame_idx": 78, "global_frame_idx": 3515, "task_index": 2}, {"db_idx": 3516, "episode_idx": 10, "frame_idx": 79, "global_frame_idx": 3516, "task_index": 2}, {"db_idx": 3517, "episode_idx": 10, "frame_idx": 80, "global_frame_idx": 3517, "task_index": 2}, {"db_idx": 3518, "episode_idx": 10, "frame_idx": 81, "global_frame_idx": 3518, "task_index": 2}, {"db_idx": 3519, "episode_idx": 10, "frame_idx": 82, "global_frame_idx": 3519, "task_index": 2}, {"db_idx": 3520, "episode_idx": 10, "frame_idx": 83, "global_frame_idx": 3520, "task_index": 2}, {"db_idx": 3521, "episode_idx": 10, "frame_idx": 84, "global_frame_idx": 3521, "task_index": 2}, {"db_idx": 3522, "episode_idx": 10, "frame_idx": 85, "global_frame_idx": 3522, "task_index": 2}, {"db_idx": 3523, "episode_idx": 10, "frame_idx": 86, "global_frame_idx": 3523, "task_index": 2}, {"db_idx": 3524, "episode_idx": 10, "frame_idx": 87, "global_frame_idx": 3524, "task_index": 2}, {"db_idx": 3525, "episode_idx": 10, "frame_idx": 88, "global_frame_idx": 3525, "task_index": 2}, {"db_idx": 
3526, "episode_idx": 10, "frame_idx": 89, "global_frame_idx": 3526, "task_index": 2}, {"db_idx": 3527, "episode_idx": 10, "frame_idx": 90, "global_frame_idx": 3527, "task_index": 2}, {"db_idx": 3528, "episode_idx": 10, "frame_idx": 91, "global_frame_idx": 3528, "task_index": 2}, {"db_idx": 3529, "episode_idx": 10, "frame_idx": 92, "global_frame_idx": 3529, "task_index": 2}, {"db_idx": 3530, "episode_idx": 10, "frame_idx": 93, "global_frame_idx": 3530, "task_index": 2}, {"db_idx": 3531, "episode_idx": 10, "frame_idx": 94, "global_frame_idx": 3531, "task_index": 2}, {"db_idx": 3532, "episode_idx": 10, "frame_idx": 95, "global_frame_idx": 3532, "task_index": 2}, {"db_idx": 3533, "episode_idx": 10, "frame_idx": 96, "global_frame_idx": 3533, "task_index": 2}, {"db_idx": 3534, "episode_idx": 10, "frame_idx": 97, "global_frame_idx": 3534, "task_index": 2}, {"db_idx": 3535, "episode_idx": 10, "frame_idx": 98, "global_frame_idx": 3535, "task_index": 2}, {"db_idx": 3536, "episode_idx": 10, "frame_idx": 99, "global_frame_idx": 3536, "task_index": 2}, {"db_idx": 3537, "episode_idx": 10, "frame_idx": 100, "global_frame_idx": 3537, "task_index": 2}, {"db_idx": 3538, "episode_idx": 10, "frame_idx": 101, "global_frame_idx": 3538, "task_index": 2}, {"db_idx": 3539, "episode_idx": 10, "frame_idx": 102, "global_frame_idx": 3539, "task_index": 2}, {"db_idx": 3540, "episode_idx": 10, "frame_idx": 103, "global_frame_idx": 3540, "task_index": 2}, {"db_idx": 3541, "episode_idx": 10, "frame_idx": 104, "global_frame_idx": 3541, "task_index": 2}, {"db_idx": 3542, "episode_idx": 10, "frame_idx": 105, "global_frame_idx": 3542, "task_index": 2}, {"db_idx": 3543, "episode_idx": 10, "frame_idx": 106, "global_frame_idx": 3543, "task_index": 2}, {"db_idx": 3544, "episode_idx": 10, "frame_idx": 107, "global_frame_idx": 3544, "task_index": 2}, {"db_idx": 3545, "episode_idx": 10, "frame_idx": 108, "global_frame_idx": 3545, "task_index": 2}, {"db_idx": 3546, "episode_idx": 10, "frame_idx": 109, 
"global_frame_idx": 3546, "task_index": 2}, {"db_idx": 3547, "episode_idx": 10, "frame_idx": 110, "global_frame_idx": 3547, "task_index": 2}, {"db_idx": 3548, "episode_idx": 10, "frame_idx": 111, "global_frame_idx": 3548, "task_index": 2}, {"db_idx": 3549, "episode_idx": 10, "frame_idx": 112, "global_frame_idx": 3549, "task_index": 2}, {"db_idx": 3550, "episode_idx": 10, "frame_idx": 113, "global_frame_idx": 3550, "task_index": 2}, {"db_idx": 3551, "episode_idx": 10, "frame_idx": 114, "global_frame_idx": 3551, "task_index": 2}, {"db_idx": 3552, "episode_idx": 10, "frame_idx": 115, "global_frame_idx": 3552, "task_index": 2}, {"db_idx": 3553, "episode_idx": 10, "frame_idx": 116, "global_frame_idx": 3553, "task_index": 2}, {"db_idx": 3554, "episode_idx": 10, "frame_idx": 117, "global_frame_idx": 3554, "task_index": 2}, {"db_idx": 3555, "episode_idx": 10, "frame_idx": 118, "global_frame_idx": 3555, "task_index": 2}, {"db_idx": 3556, "episode_idx": 10, "frame_idx": 119, "global_frame_idx": 3556, "task_index": 2}, {"db_idx": 3557, "episode_idx": 10, "frame_idx": 120, "global_frame_idx": 3557, "task_index": 2}, {"db_idx": 3558, "episode_idx": 10, "frame_idx": 121, "global_frame_idx": 3558, "task_index": 2}, {"db_idx": 3559, "episode_idx": 10, "frame_idx": 122, "global_frame_idx": 3559, "task_index": 2}, {"db_idx": 3560, "episode_idx": 10, "frame_idx": 123, "global_frame_idx": 3560, "task_index": 2}, {"db_idx": 3561, "episode_idx": 10, "frame_idx": 124, "global_frame_idx": 3561, "task_index": 2}, {"db_idx": 3562, "episode_idx": 10, "frame_idx": 125, "global_frame_idx": 3562, "task_index": 2}, {"db_idx": 3563, "episode_idx": 10, "frame_idx": 126, "global_frame_idx": 3563, "task_index": 2}, {"db_idx": 3564, "episode_idx": 10, "frame_idx": 127, "global_frame_idx": 3564, "task_index": 2}, {"db_idx": 3565, "episode_idx": 10, "frame_idx": 128, "global_frame_idx": 3565, "task_index": 2}, {"db_idx": 3566, "episode_idx": 10, "frame_idx": 129, "global_frame_idx": 3566, "task_index": 
2}, {"db_idx": 3567, "episode_idx": 10, "frame_idx": 130, "global_frame_idx": 3567, "task_index": 2}, {"db_idx": 3568, "episode_idx": 10, "frame_idx": 131, "global_frame_idx": 3568, "task_index": 2}, {"db_idx": 3569, "episode_idx": 10, "frame_idx": 132, "global_frame_idx": 3569, "task_index": 2}, {"db_idx": 3570, "episode_idx": 10, "frame_idx": 133, "global_frame_idx": 3570, "task_index": 2}, {"db_idx": 3571, "episode_idx": 10, "frame_idx": 134, "global_frame_idx": 3571, "task_index": 2}, {"db_idx": 3572, "episode_idx": 10, "frame_idx": 135, "global_frame_idx": 3572, "task_index": 2}, {"db_idx": 3573, "episode_idx": 10, "frame_idx": 136, "global_frame_idx": 3573, "task_index": 2}, {"db_idx": 3574, "episode_idx": 10, "frame_idx": 137, "global_frame_idx": 3574, "task_index": 2}, {"db_idx": 3575, "episode_idx": 10, "frame_idx": 138, "global_frame_idx": 3575, "task_index": 2}, {"db_idx": 3576, "episode_idx": 10, "frame_idx": 139, "global_frame_idx": 3576, "task_index": 2}, {"db_idx": 3577, "episode_idx": 10, "frame_idx": 140, "global_frame_idx": 3577, "task_index": 2}, {"db_idx": 3578, "episode_idx": 10, "frame_idx": 141, "global_frame_idx": 3578, "task_index": 2}, {"db_idx": 3579, "episode_idx": 10, "frame_idx": 142, "global_frame_idx": 3579, "task_index": 2}, {"db_idx": 3580, "episode_idx": 10, "frame_idx": 143, "global_frame_idx": 3580, "task_index": 2}, {"db_idx": 3581, "episode_idx": 10, "frame_idx": 144, "global_frame_idx": 3581, "task_index": 2}, {"db_idx": 3582, "episode_idx": 10, "frame_idx": 145, "global_frame_idx": 3582, "task_index": 2}, {"db_idx": 3583, "episode_idx": 10, "frame_idx": 146, "global_frame_idx": 3583, "task_index": 2}, {"db_idx": 3584, "episode_idx": 10, "frame_idx": 147, "global_frame_idx": 3584, "task_index": 2}, {"db_idx": 3585, "episode_idx": 10, "frame_idx": 148, "global_frame_idx": 3585, "task_index": 2}, {"db_idx": 3586, "episode_idx": 10, "frame_idx": 149, "global_frame_idx": 3586, "task_index": 2}, {"db_idx": 3587, "episode_idx": 10, 
"frame_idx": 150, "global_frame_idx": 3587, "task_index": 2}, {"db_idx": 3588, "episode_idx": 10, "frame_idx": 151, "global_frame_idx": 3588, "task_index": 2}, {"db_idx": 3589, "episode_idx": 10, "frame_idx": 152, "global_frame_idx": 3589, "task_index": 2}, {"db_idx": 3590, "episode_idx": 10, "frame_idx": 153, "global_frame_idx": 3590, "task_index": 2}, {"db_idx": 3591, "episode_idx": 10, "frame_idx": 154, "global_frame_idx": 3591, "task_index": 2}, {"db_idx": 3592, "episode_idx": 10, "frame_idx": 155, "global_frame_idx": 3592, "task_index": 2}, {"db_idx": 3593, "episode_idx": 10, "frame_idx": 156, "global_frame_idx": 3593, "task_index": 2}, {"db_idx": 3594, "episode_idx": 10, "frame_idx": 157, "global_frame_idx": 3594, "task_index": 2}, {"db_idx": 3595, "episode_idx": 10, "frame_idx": 158, "global_frame_idx": 3595, "task_index": 2}, {"db_idx": 3596, "episode_idx": 10, "frame_idx": 159, "global_frame_idx": 3596, "task_index": 2}, {"db_idx": 3597, "episode_idx": 10, "frame_idx": 160, "global_frame_idx": 3597, "task_index": 2}, {"db_idx": 3598, "episode_idx": 10, "frame_idx": 161, "global_frame_idx": 3598, "task_index": 2}, {"db_idx": 3599, "episode_idx": 10, "frame_idx": 162, "global_frame_idx": 3599, "task_index": 2}, {"db_idx": 3600, "episode_idx": 10, "frame_idx": 163, "global_frame_idx": 3600, "task_index": 2}, {"db_idx": 3601, "episode_idx": 10, "frame_idx": 164, "global_frame_idx": 3601, "task_index": 2}, {"db_idx": 3602, "episode_idx": 10, "frame_idx": 165, "global_frame_idx": 3602, "task_index": 2}, {"db_idx": 3603, "episode_idx": 10, "frame_idx": 166, "global_frame_idx": 3603, "task_index": 2}, {"db_idx": 3604, "episode_idx": 10, "frame_idx": 167, "global_frame_idx": 3604, "task_index": 2}, {"db_idx": 3605, "episode_idx": 10, "frame_idx": 168, "global_frame_idx": 3605, "task_index": 2}, {"db_idx": 3606, "episode_idx": 10, "frame_idx": 169, "global_frame_idx": 3606, "task_index": 2}, {"db_idx": 3607, "episode_idx": 11, "frame_idx": 0, "global_frame_idx": 
3607, "task_index": 2}, {"db_idx": 3608, "episode_idx": 11, "frame_idx": 1, "global_frame_idx": 3608, "task_index": 2}, {"db_idx": 3609, "episode_idx": 11, "frame_idx": 2, "global_frame_idx": 3609, "task_index": 2}, {"db_idx": 3610, "episode_idx": 11, "frame_idx": 3, "global_frame_idx": 3610, "task_index": 2}, {"db_idx": 3611, "episode_idx": 11, "frame_idx": 4, "global_frame_idx": 3611, "task_index": 2}, {"db_idx": 3612, "episode_idx": 11, "frame_idx": 5, "global_frame_idx": 3612, "task_index": 2}, {"db_idx": 3613, "episode_idx": 11, "frame_idx": 6, "global_frame_idx": 3613, "task_index": 2}, {"db_idx": 3614, "episode_idx": 11, "frame_idx": 7, "global_frame_idx": 3614, "task_index": 2}, {"db_idx": 3615, "episode_idx": 11, "frame_idx": 8, "global_frame_idx": 3615, "task_index": 2}, {"db_idx": 3616, "episode_idx": 11, "frame_idx": 9, "global_frame_idx": 3616, "task_index": 2}, {"db_idx": 3617, "episode_idx": 11, "frame_idx": 10, "global_frame_idx": 3617, "task_index": 2}, {"db_idx": 3618, "episode_idx": 11, "frame_idx": 11, "global_frame_idx": 3618, "task_index": 2}, {"db_idx": 3619, "episode_idx": 11, "frame_idx": 12, "global_frame_idx": 3619, "task_index": 2}, {"db_idx": 3620, "episode_idx": 11, "frame_idx": 13, "global_frame_idx": 3620, "task_index": 2}, {"db_idx": 3621, "episode_idx": 11, "frame_idx": 14, "global_frame_idx": 3621, "task_index": 2}, {"db_idx": 3622, "episode_idx": 11, "frame_idx": 15, "global_frame_idx": 3622, "task_index": 2}, {"db_idx": 3623, "episode_idx": 11, "frame_idx": 16, "global_frame_idx": 3623, "task_index": 2}, {"db_idx": 3624, "episode_idx": 11, "frame_idx": 17, "global_frame_idx": 3624, "task_index": 2}, {"db_idx": 3625, "episode_idx": 11, "frame_idx": 18, "global_frame_idx": 3625, "task_index": 2}, {"db_idx": 3626, "episode_idx": 11, "frame_idx": 19, "global_frame_idx": 3626, "task_index": 2}, {"db_idx": 3627, "episode_idx": 11, "frame_idx": 20, "global_frame_idx": 3627, "task_index": 2}, {"db_idx": 3628, "episode_idx": 11, 
"frame_idx": 21, "global_frame_idx": 3628, "task_index": 2}, {"db_idx": 3629, "episode_idx": 11, "frame_idx": 22, "global_frame_idx": 3629, "task_index": 2}, {"db_idx": 3630, "episode_idx": 11, "frame_idx": 23, "global_frame_idx": 3630, "task_index": 2}, {"db_idx": 3631, "episode_idx": 11, "frame_idx": 24, "global_frame_idx": 3631, "task_index": 2}, {"db_idx": 3632, "episode_idx": 11, "frame_idx": 25, "global_frame_idx": 3632, "task_index": 2}, {"db_idx": 3633, "episode_idx": 11, "frame_idx": 26, "global_frame_idx": 3633, "task_index": 2}, {"db_idx": 3634, "episode_idx": 11, "frame_idx": 27, "global_frame_idx": 3634, "task_index": 2}, {"db_idx": 3635, "episode_idx": 11, "frame_idx": 28, "global_frame_idx": 3635, "task_index": 2}, {"db_idx": 3636, "episode_idx": 11, "frame_idx": 29, "global_frame_idx": 3636, "task_index": 2}, {"db_idx": 3637, "episode_idx": 11, "frame_idx": 30, "global_frame_idx": 3637, "task_index": 2}, {"db_idx": 3638, "episode_idx": 11, "frame_idx": 31, "global_frame_idx": 3638, "task_index": 2}, {"db_idx": 3639, "episode_idx": 11, "frame_idx": 32, "global_frame_idx": 3639, "task_index": 2}, {"db_idx": 3640, "episode_idx": 11, "frame_idx": 33, "global_frame_idx": 3640, "task_index": 2}, {"db_idx": 3641, "episode_idx": 11, "frame_idx": 34, "global_frame_idx": 3641, "task_index": 2}, {"db_idx": 3642, "episode_idx": 11, "frame_idx": 35, "global_frame_idx": 3642, "task_index": 2}, {"db_idx": 3643, "episode_idx": 11, "frame_idx": 36, "global_frame_idx": 3643, "task_index": 2}, {"db_idx": 3644, "episode_idx": 11, "frame_idx": 37, "global_frame_idx": 3644, "task_index": 2}, {"db_idx": 3645, "episode_idx": 11, "frame_idx": 38, "global_frame_idx": 3645, "task_index": 2}, {"db_idx": 3646, "episode_idx": 11, "frame_idx": 39, "global_frame_idx": 3646, "task_index": 2}, {"db_idx": 3647, "episode_idx": 11, "frame_idx": 40, "global_frame_idx": 3647, "task_index": 2}, {"db_idx": 3648, "episode_idx": 11, "frame_idx": 41, "global_frame_idx": 3648, "task_index": 
2}, {"db_idx": 3649, "episode_idx": 11, "frame_idx": 42, "global_frame_idx": 3649, "task_index": 2}, {"db_idx": 3650, "episode_idx": 11, "frame_idx": 43, "global_frame_idx": 3650, "task_index": 2}, {"db_idx": 3651, "episode_idx": 11, "frame_idx": 44, "global_frame_idx": 3651, "task_index": 2}, {"db_idx": 3652, "episode_idx": 11, "frame_idx": 45, "global_frame_idx": 3652, "task_index": 2}, {"db_idx": 3653, "episode_idx": 11, "frame_idx": 46, "global_frame_idx": 3653, "task_index": 2}, {"db_idx": 3654, "episode_idx": 11, "frame_idx": 47, "global_frame_idx": 3654, "task_index": 2}, {"db_idx": 3655, "episode_idx": 11, "frame_idx": 48, "global_frame_idx": 3655, "task_index": 2}, {"db_idx": 3656, "episode_idx": 11, "frame_idx": 49, "global_frame_idx": 3656, "task_index": 2}, {"db_idx": 3657, "episode_idx": 11, "frame_idx": 50, "global_frame_idx": 3657, "task_index": 2}, {"db_idx": 3658, "episode_idx": 11, "frame_idx": 51, "global_frame_idx": 3658, "task_index": 2}, {"db_idx": 3659, "episode_idx": 11, "frame_idx": 52, "global_frame_idx": 3659, "task_index": 2}, {"db_idx": 3660, "episode_idx": 11, "frame_idx": 53, "global_frame_idx": 3660, "task_index": 2}, {"db_idx": 3661, "episode_idx": 11, "frame_idx": 54, "global_frame_idx": 3661, "task_index": 2}, {"db_idx": 3662, "episode_idx": 11, "frame_idx": 55, "global_frame_idx": 3662, "task_index": 2}, {"db_idx": 3663, "episode_idx": 11, "frame_idx": 56, "global_frame_idx": 3663, "task_index": 2}, {"db_idx": 3664, "episode_idx": 11, "frame_idx": 57, "global_frame_idx": 3664, "task_index": 2}, {"db_idx": 3665, "episode_idx": 11, "frame_idx": 58, "global_frame_idx": 3665, "task_index": 2}, {"db_idx": 3666, "episode_idx": 11, "frame_idx": 59, "global_frame_idx": 3666, "task_index": 2}, {"db_idx": 3667, "episode_idx": 11, "frame_idx": 60, "global_frame_idx": 3667, "task_index": 2}, {"db_idx": 3668, "episode_idx": 11, "frame_idx": 61, "global_frame_idx": 3668, "task_index": 2}, {"db_idx": 3669, "episode_idx": 11, "frame_idx": 62, 
"global_frame_idx": 3669, "task_index": 2}, {"db_idx": 3670, "episode_idx": 11, "frame_idx": 63, "global_frame_idx": 3670, "task_index": 2}, {"db_idx": 3671, "episode_idx": 11, "frame_idx": 64, "global_frame_idx": 3671, "task_index": 2}, {"db_idx": 3672, "episode_idx": 11, "frame_idx": 65, "global_frame_idx": 3672, "task_index": 2}, {"db_idx": 3673, "episode_idx": 11, "frame_idx": 66, "global_frame_idx": 3673, "task_index": 2}, {"db_idx": 3674, "episode_idx": 11, "frame_idx": 67, "global_frame_idx": 3674, "task_index": 2}, {"db_idx": 3675, "episode_idx": 11, "frame_idx": 68, "global_frame_idx": 3675, "task_index": 2}, {"db_idx": 3676, "episode_idx": 11, "frame_idx": 69, "global_frame_idx": 3676, "task_index": 2}, {"db_idx": 3677, "episode_idx": 11, "frame_idx": 70, "global_frame_idx": 3677, "task_index": 2}, {"db_idx": 3678, "episode_idx": 11, "frame_idx": 71, "global_frame_idx": 3678, "task_index": 2}, {"db_idx": 3679, "episode_idx": 11, "frame_idx": 72, "global_frame_idx": 3679, "task_index": 2}, {"db_idx": 3680, "episode_idx": 11, "frame_idx": 73, "global_frame_idx": 3680, "task_index": 2}, {"db_idx": 3681, "episode_idx": 11, "frame_idx": 74, "global_frame_idx": 3681, "task_index": 2}, {"db_idx": 3682, "episode_idx": 11, "frame_idx": 75, "global_frame_idx": 3682, "task_index": 2}, {"db_idx": 3683, "episode_idx": 11, "frame_idx": 76, "global_frame_idx": 3683, "task_index": 2}, {"db_idx": 3684, "episode_idx": 11, "frame_idx": 77, "global_frame_idx": 3684, "task_index": 2}, {"db_idx": 3685, "episode_idx": 11, "frame_idx": 78, "global_frame_idx": 3685, "task_index": 2}, {"db_idx": 3686, "episode_idx": 11, "frame_idx": 79, "global_frame_idx": 3686, "task_index": 2}, {"db_idx": 3687, "episode_idx": 11, "frame_idx": 80, "global_frame_idx": 3687, "task_index": 2}, {"db_idx": 3688, "episode_idx": 11, "frame_idx": 81, "global_frame_idx": 3688, "task_index": 2}, {"db_idx": 3689, "episode_idx": 11, "frame_idx": 82, "global_frame_idx": 3689, "task_index": 2}, {"db_idx": 
3690, "episode_idx": 11, "frame_idx": 83, "global_frame_idx": 3690, "task_index": 2}, {"db_idx": 3691, "episode_idx": 11, "frame_idx": 84, "global_frame_idx": 3691, "task_index": 2}, {"db_idx": 3692, "episode_idx": 11, "frame_idx": 85, "global_frame_idx": 3692, "task_index": 2}, {"db_idx": 3693, "episode_idx": 11, "frame_idx": 86, "global_frame_idx": 3693, "task_index": 2}, {"db_idx": 3694, "episode_idx": 11, "frame_idx": 87, "global_frame_idx": 3694, "task_index": 2}, {"db_idx": 3695, "episode_idx": 11, "frame_idx": 88, "global_frame_idx": 3695, "task_index": 2}, {"db_idx": 3696, "episode_idx": 11, "frame_idx": 89, "global_frame_idx": 3696, "task_index": 2}, {"db_idx": 3697, "episode_idx": 11, "frame_idx": 90, "global_frame_idx": 3697, "task_index": 2}, {"db_idx": 3698, "episode_idx": 11, "frame_idx": 91, "global_frame_idx": 3698, "task_index": 2}, {"db_idx": 3699, "episode_idx": 11, "frame_idx": 92, "global_frame_idx": 3699, "task_index": 2}, {"db_idx": 3700, "episode_idx": 11, "frame_idx": 93, "global_frame_idx": 3700, "task_index": 2}, {"db_idx": 3701, "episode_idx": 11, "frame_idx": 94, "global_frame_idx": 3701, "task_index": 2}, {"db_idx": 3702, "episode_idx": 11, "frame_idx": 95, "global_frame_idx": 3702, "task_index": 2}, {"db_idx": 3703, "episode_idx": 11, "frame_idx": 96, "global_frame_idx": 3703, "task_index": 2}, {"db_idx": 3704, "episode_idx": 11, "frame_idx": 97, "global_frame_idx": 3704, "task_index": 2}, {"db_idx": 3705, "episode_idx": 11, "frame_idx": 98, "global_frame_idx": 3705, "task_index": 2}, {"db_idx": 3706, "episode_idx": 11, "frame_idx": 99, "global_frame_idx": 3706, "task_index": 2}, {"db_idx": 3707, "episode_idx": 11, "frame_idx": 100, "global_frame_idx": 3707, "task_index": 2}, {"db_idx": 3708, "episode_idx": 11, "frame_idx": 101, "global_frame_idx": 3708, "task_index": 2}, {"db_idx": 3709, "episode_idx": 11, "frame_idx": 102, "global_frame_idx": 3709, "task_index": 2}, {"db_idx": 3710, "episode_idx": 11, "frame_idx": 103, 
"global_frame_idx": 3710, "task_index": 2}, {"db_idx": 3711, "episode_idx": 11, "frame_idx": 104, "global_frame_idx": 3711, "task_index": 2}, {"db_idx": 3712, "episode_idx": 11, "frame_idx": 105, "global_frame_idx": 3712, "task_index": 2}, {"db_idx": 3713, "episode_idx": 11, "frame_idx": 106, "global_frame_idx": 3713, "task_index": 2}, {"db_idx": 3714, "episode_idx": 11, "frame_idx": 107, "global_frame_idx": 3714, "task_index": 2}, {"db_idx": 3715, "episode_idx": 11, "frame_idx": 108, "global_frame_idx": 3715, "task_index": 2}, {"db_idx": 3716, "episode_idx": 11, "frame_idx": 109, "global_frame_idx": 3716, "task_index": 2}, {"db_idx": 3717, "episode_idx": 11, "frame_idx": 110, "global_frame_idx": 3717, "task_index": 2}, {"db_idx": 3718, "episode_idx": 11, "frame_idx": 111, "global_frame_idx": 3718, "task_index": 2}, {"db_idx": 3719, "episode_idx": 11, "frame_idx": 112, "global_frame_idx": 3719, "task_index": 2}, {"db_idx": 3720, "episode_idx": 11, "frame_idx": 113, "global_frame_idx": 3720, "task_index": 2}, {"db_idx": 3721, "episode_idx": 11, "frame_idx": 114, "global_frame_idx": 3721, "task_index": 2}, {"db_idx": 3722, "episode_idx": 11, "frame_idx": 115, "global_frame_idx": 3722, "task_index": 2}, {"db_idx": 3723, "episode_idx": 11, "frame_idx": 116, "global_frame_idx": 3723, "task_index": 2}, {"db_idx": 3724, "episode_idx": 11, "frame_idx": 117, "global_frame_idx": 3724, "task_index": 2}, {"db_idx": 3725, "episode_idx": 11, "frame_idx": 118, "global_frame_idx": 3725, "task_index": 2}, {"db_idx": 3726, "episode_idx": 11, "frame_idx": 119, "global_frame_idx": 3726, "task_index": 2}, {"db_idx": 3727, "episode_idx": 11, "frame_idx": 120, "global_frame_idx": 3727, "task_index": 2}, {"db_idx": 3728, "episode_idx": 11, "frame_idx": 121, "global_frame_idx": 3728, "task_index": 2}, {"db_idx": 3729, "episode_idx": 11, "frame_idx": 122, "global_frame_idx": 3729, "task_index": 2}, {"db_idx": 3730, "episode_idx": 11, "frame_idx": 123, "global_frame_idx": 3730, "task_index": 
2}, {"db_idx": 3731, "episode_idx": 11, "frame_idx": 124, "global_frame_idx": 3731, "task_index": 2}, {"db_idx": 3732, "episode_idx": 11, "frame_idx": 125, "global_frame_idx": 3732, "task_index": 2}, {"db_idx": 3733, "episode_idx": 11, "frame_idx": 126, "global_frame_idx": 3733, "task_index": 2}, {"db_idx": 3734, "episode_idx": 11, "frame_idx": 127, "global_frame_idx": 3734, "task_index": 2}, {"db_idx": 3735, "episode_idx": 11, "frame_idx": 128, "global_frame_idx": 3735, "task_index": 2}, {"db_idx": 3736, "episode_idx": 11, "frame_idx": 129, "global_frame_idx": 3736, "task_index": 2}, {"db_idx": 3737, "episode_idx": 11, "frame_idx": 130, "global_frame_idx": 3737, "task_index": 2}, {"db_idx": 3738, "episode_idx": 11, "frame_idx": 131, "global_frame_idx": 3738, "task_index": 2}, {"db_idx": 3739, "episode_idx": 11, "frame_idx": 132, "global_frame_idx": 3739, "task_index": 2}, {"db_idx": 3740, "episode_idx": 11, "frame_idx": 133, "global_frame_idx": 3740, "task_index": 2}, {"db_idx": 3741, "episode_idx": 11, "frame_idx": 134, "global_frame_idx": 3741, "task_index": 2}, {"db_idx": 3742, "episode_idx": 11, "frame_idx": 135, "global_frame_idx": 3742, "task_index": 2}, {"db_idx": 3743, "episode_idx": 11, "frame_idx": 136, "global_frame_idx": 3743, "task_index": 2}, {"db_idx": 3744, "episode_idx": 11, "frame_idx": 137, "global_frame_idx": 3744, "task_index": 2}, {"db_idx": 3745, "episode_idx": 11, "frame_idx": 138, "global_frame_idx": 3745, "task_index": 2}, {"db_idx": 3746, "episode_idx": 11, "frame_idx": 139, "global_frame_idx": 3746, "task_index": 2}, {"db_idx": 3747, "episode_idx": 11, "frame_idx": 140, "global_frame_idx": 3747, "task_index": 2}, {"db_idx": 3748, "episode_idx": 11, "frame_idx": 141, "global_frame_idx": 3748, "task_index": 2}, {"db_idx": 3749, "episode_idx": 11, "frame_idx": 142, "global_frame_idx": 3749, "task_index": 2}, {"db_idx": 3750, "episode_idx": 11, "frame_idx": 143, "global_frame_idx": 3750, "task_index": 2}, {"db_idx": 3751, "episode_idx": 11, 
"frame_idx": 144, "global_frame_idx": 3751, "task_index": 2}, {"db_idx": 3752, "episode_idx": 11, "frame_idx": 145, "global_frame_idx": 3752, "task_index": 2}, {"db_idx": 3753, "episode_idx": 11, "frame_idx": 146, "global_frame_idx": 3753, "task_index": 2}, {"db_idx": 3754, "episode_idx": 11, "frame_idx": 147, "global_frame_idx": 3754, "task_index": 2}, {"db_idx": 3755, "episode_idx": 11, "frame_idx": 148, "global_frame_idx": 3755, "task_index": 2}, {"db_idx": 3756, "episode_idx": 11, "frame_idx": 149, "global_frame_idx": 3756, "task_index": 2}, {"db_idx": 3757, "episode_idx": 11, "frame_idx": 150, "global_frame_idx": 3757, "task_index": 2}, {"db_idx": 3758, "episode_idx": 11, "frame_idx": 151, "global_frame_idx": 3758, "task_index": 2}, {"db_idx": 3759, "episode_idx": 11, "frame_idx": 152, "global_frame_idx": 3759, "task_index": 2}, {"db_idx": 3760, "episode_idx": 11, "frame_idx": 153, "global_frame_idx": 3760, "task_index": 2}, {"db_idx": 3761, "episode_idx": 11, "frame_idx": 154, "global_frame_idx": 3761, "task_index": 2}, {"db_idx": 3762, "episode_idx": 11, "frame_idx": 155, "global_frame_idx": 3762, "task_index": 2}, {"db_idx": 3763, "episode_idx": 11, "frame_idx": 156, "global_frame_idx": 3763, "task_index": 2}, {"db_idx": 3764, "episode_idx": 11, "frame_idx": 157, "global_frame_idx": 3764, "task_index": 2}, {"db_idx": 3765, "episode_idx": 11, "frame_idx": 158, "global_frame_idx": 3765, "task_index": 2}, {"db_idx": 3766, "episode_idx": 11, "frame_idx": 159, "global_frame_idx": 3766, "task_index": 2}, {"db_idx": 3767, "episode_idx": 11, "frame_idx": 160, "global_frame_idx": 3767, "task_index": 2}, {"db_idx": 3768, "episode_idx": 11, "frame_idx": 161, "global_frame_idx": 3768, "task_index": 2}, {"db_idx": 3769, "episode_idx": 12, "frame_idx": 0, "global_frame_idx": 3769, "task_index": 2}, {"db_idx": 3770, "episode_idx": 12, "frame_idx": 1, "global_frame_idx": 3770, "task_index": 2}, {"db_idx": 3771, "episode_idx": 12, "frame_idx": 2, "global_frame_idx": 3771, 
"task_index": 2}, {"db_idx": 3772, "episode_idx": 12, "frame_idx": 3, "global_frame_idx": 3772, "task_index": 2}, {"db_idx": 3773, "episode_idx": 12, "frame_idx": 4, "global_frame_idx": 3773, "task_index": 2}, {"db_idx": 3774, "episode_idx": 12, "frame_idx": 5, "global_frame_idx": 3774, "task_index": 2}, {"db_idx": 3775, "episode_idx": 12, "frame_idx": 6, "global_frame_idx": 3775, "task_index": 2}, {"db_idx": 3776, "episode_idx": 12, "frame_idx": 7, "global_frame_idx": 3776, "task_index": 2}, {"db_idx": 3777, "episode_idx": 12, "frame_idx": 8, "global_frame_idx": 3777, "task_index": 2}, {"db_idx": 3778, "episode_idx": 12, "frame_idx": 9, "global_frame_idx": 3778, "task_index": 2}, {"db_idx": 3779, "episode_idx": 12, "frame_idx": 10, "global_frame_idx": 3779, "task_index": 2}, {"db_idx": 3780, "episode_idx": 12, "frame_idx": 11, "global_frame_idx": 3780, "task_index": 2}, {"db_idx": 3781, "episode_idx": 12, "frame_idx": 12, "global_frame_idx": 3781, "task_index": 2}, {"db_idx": 3782, "episode_idx": 12, "frame_idx": 13, "global_frame_idx": 3782, "task_index": 2}, {"db_idx": 3783, "episode_idx": 12, "frame_idx": 14, "global_frame_idx": 3783, "task_index": 2}, {"db_idx": 3784, "episode_idx": 12, "frame_idx": 15, "global_frame_idx": 3784, "task_index": 2}, {"db_idx": 3785, "episode_idx": 12, "frame_idx": 16, "global_frame_idx": 3785, "task_index": 2}, {"db_idx": 3786, "episode_idx": 12, "frame_idx": 17, "global_frame_idx": 3786, "task_index": 2}, {"db_idx": 3787, "episode_idx": 12, "frame_idx": 18, "global_frame_idx": 3787, "task_index": 2}, {"db_idx": 3788, "episode_idx": 12, "frame_idx": 19, "global_frame_idx": 3788, "task_index": 2}, {"db_idx": 3789, "episode_idx": 12, "frame_idx": 20, "global_frame_idx": 3789, "task_index": 2}, {"db_idx": 3790, "episode_idx": 12, "frame_idx": 21, "global_frame_idx": 3790, "task_index": 2}, {"db_idx": 3791, "episode_idx": 12, "frame_idx": 22, "global_frame_idx": 3791, "task_index": 2}, {"db_idx": 3792, "episode_idx": 12, "frame_idx": 
23, "global_frame_idx": 3792, "task_index": 2}, {"db_idx": 3793, "episode_idx": 12, "frame_idx": 24, "global_frame_idx": 3793, "task_index": 2}, {"db_idx": 3794, "episode_idx": 12, "frame_idx": 25, "global_frame_idx": 3794, "task_index": 2}, {"db_idx": 3795, "episode_idx": 12, "frame_idx": 26, "global_frame_idx": 3795, "task_index": 2}, {"db_idx": 3796, "episode_idx": 12, "frame_idx": 27, "global_frame_idx": 3796, "task_index": 2}, {"db_idx": 3797, "episode_idx": 12, "frame_idx": 28, "global_frame_idx": 3797, "task_index": 2}, {"db_idx": 3798, "episode_idx": 12, "frame_idx": 29, "global_frame_idx": 3798, "task_index": 2}, {"db_idx": 3799, "episode_idx": 12, "frame_idx": 30, "global_frame_idx": 3799, "task_index": 2}, {"db_idx": 3800, "episode_idx": 12, "frame_idx": 31, "global_frame_idx": 3800, "task_index": 2}, {"db_idx": 3801, "episode_idx": 12, "frame_idx": 32, "global_frame_idx": 3801, "task_index": 2}, {"db_idx": 3802, "episode_idx": 12, "frame_idx": 33, "global_frame_idx": 3802, "task_index": 2}, {"db_idx": 3803, "episode_idx": 12, "frame_idx": 34, "global_frame_idx": 3803, "task_index": 2}, {"db_idx": 3804, "episode_idx": 12, "frame_idx": 35, "global_frame_idx": 3804, "task_index": 2}, {"db_idx": 3805, "episode_idx": 12, "frame_idx": 36, "global_frame_idx": 3805, "task_index": 2}, {"db_idx": 3806, "episode_idx": 12, "frame_idx": 37, "global_frame_idx": 3806, "task_index": 2}, {"db_idx": 3807, "episode_idx": 12, "frame_idx": 38, "global_frame_idx": 3807, "task_index": 2}, {"db_idx": 3808, "episode_idx": 12, "frame_idx": 39, "global_frame_idx": 3808, "task_index": 2}, {"db_idx": 3809, "episode_idx": 12, "frame_idx": 40, "global_frame_idx": 3809, "task_index": 2}, {"db_idx": 3810, "episode_idx": 12, "frame_idx": 41, "global_frame_idx": 3810, "task_index": 2}, {"db_idx": 3811, "episode_idx": 12, "frame_idx": 42, "global_frame_idx": 3811, "task_index": 2}, {"db_idx": 3812, "episode_idx": 12, "frame_idx": 43, "global_frame_idx": 3812, "task_index": 2}, {"db_idx": 
3813, "episode_idx": 12, "frame_idx": 44, "global_frame_idx": 3813, "task_index": 2}, {"db_idx": 3814, "episode_idx": 12, "frame_idx": 45, "global_frame_idx": 3814, "task_index": 2}, {"db_idx": 3815, "episode_idx": 12, "frame_idx": 46, "global_frame_idx": 3815, "task_index": 2}, {"db_idx": 3816, "episode_idx": 12, "frame_idx": 47, "global_frame_idx": 3816, "task_index": 2}, {"db_idx": 3817, "episode_idx": 12, "frame_idx": 48, "global_frame_idx": 3817, "task_index": 2}, {"db_idx": 3818, "episode_idx": 12, "frame_idx": 49, "global_frame_idx": 3818, "task_index": 2}, {"db_idx": 3819, "episode_idx": 12, "frame_idx": 50, "global_frame_idx": 3819, "task_index": 2}, {"db_idx": 3820, "episode_idx": 12, "frame_idx": 51, "global_frame_idx": 3820, "task_index": 2}, {"db_idx": 3821, "episode_idx": 12, "frame_idx": 52, "global_frame_idx": 3821, "task_index": 2}, {"db_idx": 3822, "episode_idx": 12, "frame_idx": 53, "global_frame_idx": 3822, "task_index": 2}, {"db_idx": 3823, "episode_idx": 12, "frame_idx": 54, "global_frame_idx": 3823, "task_index": 2}, {"db_idx": 3824, "episode_idx": 12, "frame_idx": 55, "global_frame_idx": 3824, "task_index": 2}, {"db_idx": 3825, "episode_idx": 12, "frame_idx": 56, "global_frame_idx": 3825, "task_index": 2}, {"db_idx": 3826, "episode_idx": 12, "frame_idx": 57, "global_frame_idx": 3826, "task_index": 2}, {"db_idx": 3827, "episode_idx": 12, "frame_idx": 58, "global_frame_idx": 3827, "task_index": 2}, {"db_idx": 3828, "episode_idx": 12, "frame_idx": 59, "global_frame_idx": 3828, "task_index": 2}, {"db_idx": 3829, "episode_idx": 12, "frame_idx": 60, "global_frame_idx": 3829, "task_index": 2}, {"db_idx": 3830, "episode_idx": 12, "frame_idx": 61, "global_frame_idx": 3830, "task_index": 2}, {"db_idx": 3831, "episode_idx": 12, "frame_idx": 62, "global_frame_idx": 3831, "task_index": 2}, {"db_idx": 3832, "episode_idx": 12, "frame_idx": 63, "global_frame_idx": 3832, "task_index": 2}, {"db_idx": 3833, "episode_idx": 12, "frame_idx": 64, 
"global_frame_idx": 3833, "task_index": 2}, {"db_idx": 3834, "episode_idx": 12, "frame_idx": 65, "global_frame_idx": 3834, "task_index": 2}, {"db_idx": 3835, "episode_idx": 12, "frame_idx": 66, "global_frame_idx": 3835, "task_index": 2}, {"db_idx": 3836, "episode_idx": 12, "frame_idx": 67, "global_frame_idx": 3836, "task_index": 2}, {"db_idx": 3837, "episode_idx": 12, "frame_idx": 68, "global_frame_idx": 3837, "task_index": 2}, {"db_idx": 3838, "episode_idx": 12, "frame_idx": 69, "global_frame_idx": 3838, "task_index": 2}, {"db_idx": 3839, "episode_idx": 12, "frame_idx": 70, "global_frame_idx": 3839, "task_index": 2}, {"db_idx": 3840, "episode_idx": 12, "frame_idx": 71, "global_frame_idx": 3840, "task_index": 2}, {"db_idx": 3841, "episode_idx": 12, "frame_idx": 72, "global_frame_idx": 3841, "task_index": 2}, {"db_idx": 3842, "episode_idx": 12, "frame_idx": 73, "global_frame_idx": 3842, "task_index": 2}, {"db_idx": 3843, "episode_idx": 12, "frame_idx": 74, "global_frame_idx": 3843, "task_index": 2}, {"db_idx": 3844, "episode_idx": 12, "frame_idx": 75, "global_frame_idx": 3844, "task_index": 2}, {"db_idx": 3845, "episode_idx": 12, "frame_idx": 76, "global_frame_idx": 3845, "task_index": 2}, {"db_idx": 3846, "episode_idx": 12, "frame_idx": 77, "global_frame_idx": 3846, "task_index": 2}, {"db_idx": 3847, "episode_idx": 12, "frame_idx": 78, "global_frame_idx": 3847, "task_index": 2}, {"db_idx": 3848, "episode_idx": 12, "frame_idx": 79, "global_frame_idx": 3848, "task_index": 2}, {"db_idx": 3849, "episode_idx": 12, "frame_idx": 80, "global_frame_idx": 3849, "task_index": 2}, {"db_idx": 3850, "episode_idx": 12, "frame_idx": 81, "global_frame_idx": 3850, "task_index": 2}, {"db_idx": 3851, "episode_idx": 12, "frame_idx": 82, "global_frame_idx": 3851, "task_index": 2}, {"db_idx": 3852, "episode_idx": 12, "frame_idx": 83, "global_frame_idx": 3852, "task_index": 2}, {"db_idx": 3853, "episode_idx": 12, "frame_idx": 84, "global_frame_idx": 3853, "task_index": 2}, {"db_idx": 
3854, "episode_idx": 12, "frame_idx": 85, "global_frame_idx": 3854, "task_index": 2}, {"db_idx": 3855, "episode_idx": 12, "frame_idx": 86, "global_frame_idx": 3855, "task_index": 2}, {"db_idx": 3856, "episode_idx": 12, "frame_idx": 87, "global_frame_idx": 3856, "task_index": 2}, {"db_idx": 3857, "episode_idx": 12, "frame_idx": 88, "global_frame_idx": 3857, "task_index": 2}, {"db_idx": 3858, "episode_idx": 12, "frame_idx": 89, "global_frame_idx": 3858, "task_index": 2}, {"db_idx": 3859, "episode_idx": 12, "frame_idx": 90, "global_frame_idx": 3859, "task_index": 2}, {"db_idx": 3860, "episode_idx": 12, "frame_idx": 91, "global_frame_idx": 3860, "task_index": 2}, {"db_idx": 3861, "episode_idx": 12, "frame_idx": 92, "global_frame_idx": 3861, "task_index": 2}, {"db_idx": 3862, "episode_idx": 12, "frame_idx": 93, "global_frame_idx": 3862, "task_index": 2}, {"db_idx": 3863, "episode_idx": 12, "frame_idx": 94, "global_frame_idx": 3863, "task_index": 2}, {"db_idx": 3864, "episode_idx": 12, "frame_idx": 95, "global_frame_idx": 3864, "task_index": 2}, {"db_idx": 3865, "episode_idx": 12, "frame_idx": 96, "global_frame_idx": 3865, "task_index": 2}, {"db_idx": 3866, "episode_idx": 12, "frame_idx": 97, "global_frame_idx": 3866, "task_index": 2}, {"db_idx": 3867, "episode_idx": 12, "frame_idx": 98, "global_frame_idx": 3867, "task_index": 2}, {"db_idx": 3868, "episode_idx": 12, "frame_idx": 99, "global_frame_idx": 3868, "task_index": 2}, {"db_idx": 3869, "episode_idx": 12, "frame_idx": 100, "global_frame_idx": 3869, "task_index": 2}, {"db_idx": 3870, "episode_idx": 12, "frame_idx": 101, "global_frame_idx": 3870, "task_index": 2}, {"db_idx": 3871, "episode_idx": 12, "frame_idx": 102, "global_frame_idx": 3871, "task_index": 2}, {"db_idx": 3872, "episode_idx": 12, "frame_idx": 103, "global_frame_idx": 3872, "task_index": 2}, {"db_idx": 3873, "episode_idx": 12, "frame_idx": 104, "global_frame_idx": 3873, "task_index": 2}, {"db_idx": 3874, "episode_idx": 12, "frame_idx": 105, 
"global_frame_idx": 3874, "task_index": 2}, {"db_idx": 3875, "episode_idx": 12, "frame_idx": 106, "global_frame_idx": 3875, "task_index": 2}, {"db_idx": 3876, "episode_idx": 12, "frame_idx": 107, "global_frame_idx": 3876, "task_index": 2}, {"db_idx": 3877, "episode_idx": 12, "frame_idx": 108, "global_frame_idx": 3877, "task_index": 2}, {"db_idx": 3878, "episode_idx": 12, "frame_idx": 109, "global_frame_idx": 3878, "task_index": 2}, {"db_idx": 3879, "episode_idx": 12, "frame_idx": 110, "global_frame_idx": 3879, "task_index": 2}, {"db_idx": 3880, "episode_idx": 12, "frame_idx": 111, "global_frame_idx": 3880, "task_index": 2}, {"db_idx": 3881, "episode_idx": 12, "frame_idx": 112, "global_frame_idx": 3881, "task_index": 2}, {"db_idx": 3882, "episode_idx": 12, "frame_idx": 113, "global_frame_idx": 3882, "task_index": 2}, {"db_idx": 3883, "episode_idx": 12, "frame_idx": 114, "global_frame_idx": 3883, "task_index": 2}, {"db_idx": 3884, "episode_idx": 12, "frame_idx": 115, "global_frame_idx": 3884, "task_index": 2}, {"db_idx": 3885, "episode_idx": 12, "frame_idx": 116, "global_frame_idx": 3885, "task_index": 2}, {"db_idx": 3886, "episode_idx": 12, "frame_idx": 117, "global_frame_idx": 3886, "task_index": 2}, {"db_idx": 3887, "episode_idx": 12, "frame_idx": 118, "global_frame_idx": 3887, "task_index": 2}, {"db_idx": 3888, "episode_idx": 12, "frame_idx": 119, "global_frame_idx": 3888, "task_index": 2}, {"db_idx": 3889, "episode_idx": 12, "frame_idx": 120, "global_frame_idx": 3889, "task_index": 2}, {"db_idx": 3890, "episode_idx": 12, "frame_idx": 121, "global_frame_idx": 3890, "task_index": 2}, {"db_idx": 3891, "episode_idx": 12, "frame_idx": 122, "global_frame_idx": 3891, "task_index": 2}, {"db_idx": 3892, "episode_idx": 12, "frame_idx": 123, "global_frame_idx": 3892, "task_index": 2}, {"db_idx": 3893, "episode_idx": 12, "frame_idx": 124, "global_frame_idx": 3893, "task_index": 2}, {"db_idx": 3894, "episode_idx": 12, "frame_idx": 125, "global_frame_idx": 3894, "task_index": 
2}, {"db_idx": 3895, "episode_idx": 12, "frame_idx": 126, "global_frame_idx": 3895, "task_index": 2}, {"db_idx": 3896, "episode_idx": 12, "frame_idx": 127, "global_frame_idx": 3896, "task_index": 2}, {"db_idx": 3897, "episode_idx": 12, "frame_idx": 128, "global_frame_idx": 3897, "task_index": 2}, {"db_idx": 3898, "episode_idx": 12, "frame_idx": 129, "global_frame_idx": 3898, "task_index": 2}, {"db_idx": 3899, "episode_idx": 12, "frame_idx": 130, "global_frame_idx": 3899, "task_index": 2}, {"db_idx": 3900, "episode_idx": 12, "frame_idx": 131, "global_frame_idx": 3900, "task_index": 2}, {"db_idx": 3901, "episode_idx": 12, "frame_idx": 132, "global_frame_idx": 3901, "task_index": 2}, {"db_idx": 3902, "episode_idx": 12, "frame_idx": 133, "global_frame_idx": 3902, "task_index": 2}, {"db_idx": 3903, "episode_idx": 12, "frame_idx": 134, "global_frame_idx": 3903, "task_index": 2}, {"db_idx": 3904, "episode_idx": 12, "frame_idx": 135, "global_frame_idx": 3904, "task_index": 2}, {"db_idx": 3905, "episode_idx": 12, "frame_idx": 136, "global_frame_idx": 3905, "task_index": 2}, {"db_idx": 3906, "episode_idx": 12, "frame_idx": 137, "global_frame_idx": 3906, "task_index": 2}, {"db_idx": 3907, "episode_idx": 12, "frame_idx": 138, "global_frame_idx": 3907, "task_index": 2}, {"db_idx": 3908, "episode_idx": 12, "frame_idx": 139, "global_frame_idx": 3908, "task_index": 2}, {"db_idx": 3909, "episode_idx": 12, "frame_idx": 140, "global_frame_idx": 3909, "task_index": 2}, {"db_idx": 3910, "episode_idx": 12, "frame_idx": 141, "global_frame_idx": 3910, "task_index": 2}, {"db_idx": 3911, "episode_idx": 12, "frame_idx": 142, "global_frame_idx": 3911, "task_index": 2}, {"db_idx": 3912, "episode_idx": 12, "frame_idx": 143, "global_frame_idx": 3912, "task_index": 2}, {"db_idx": 3913, "episode_idx": 12, "frame_idx": 144, "global_frame_idx": 3913, "task_index": 2}, {"db_idx": 3914, "episode_idx": 12, "frame_idx": 145, "global_frame_idx": 3914, "task_index": 2}, {"db_idx": 3915, "episode_idx": 12, 
"frame_idx": 146, "global_frame_idx": 3915, "task_index": 2}, {"db_idx": 3916, "episode_idx": 12, "frame_idx": 147, "global_frame_idx": 3916, "task_index": 2}, {"db_idx": 3917, "episode_idx": 12, "frame_idx": 148, "global_frame_idx": 3917, "task_index": 2}, {"db_idx": 3918, "episode_idx": 12, "frame_idx": 149, "global_frame_idx": 3918, "task_index": 2}, {"db_idx": 3919, "episode_idx": 12, "frame_idx": 150, "global_frame_idx": 3919, "task_index": 2}, {"db_idx": 3920, "episode_idx": 12, "frame_idx": 151, "global_frame_idx": 3920, "task_index": 2}, {"db_idx": 3921, "episode_idx": 12, "frame_idx": 152, "global_frame_idx": 3921, "task_index": 2}, {"db_idx": 3922, "episode_idx": 12, "frame_idx": 153, "global_frame_idx": 3922, "task_index": 2}, {"db_idx": 3923, "episode_idx": 12, "frame_idx": 154, "global_frame_idx": 3923, "task_index": 2}, {"db_idx": 3924, "episode_idx": 12, "frame_idx": 155, "global_frame_idx": 3924, "task_index": 2}, {"db_idx": 3925, "episode_idx": 12, "frame_idx": 156, "global_frame_idx": 3925, "task_index": 2}, {"db_idx": 3926, "episode_idx": 12, "frame_idx": 157, "global_frame_idx": 3926, "task_index": 2}, {"db_idx": 3927, "episode_idx": 12, "frame_idx": 158, "global_frame_idx": 3927, "task_index": 2}, {"db_idx": 3928, "episode_idx": 12, "frame_idx": 159, "global_frame_idx": 3928, "task_index": 2}, {"db_idx": 3929, "episode_idx": 12, "frame_idx": 160, "global_frame_idx": 3929, "task_index": 2}, {"db_idx": 3930, "episode_idx": 12, "frame_idx": 161, "global_frame_idx": 3930, "task_index": 2}, {"db_idx": 3931, "episode_idx": 12, "frame_idx": 162, "global_frame_idx": 3931, "task_index": 2}, {"db_idx": 3932, "episode_idx": 12, "frame_idx": 163, "global_frame_idx": 3932, "task_index": 2}, {"db_idx": 3933, "episode_idx": 12, "frame_idx": 164, "global_frame_idx": 3933, "task_index": 2}, {"db_idx": 3934, "episode_idx": 12, "frame_idx": 165, "global_frame_idx": 3934, "task_index": 2}, {"db_idx": 3935, "episode_idx": 12, "frame_idx": 166, "global_frame_idx": 
3935, "task_index": 2}, {"db_idx": 3936, "episode_idx": 12, "frame_idx": 167, "global_frame_idx": 3936, "task_index": 2}, {"db_idx": 3937, "episode_idx": 12, "frame_idx": 168, "global_frame_idx": 3937, "task_index": 2}, {"db_idx": 3938, "episode_idx": 12, "frame_idx": 169, "global_frame_idx": 3938, "task_index": 2}, {"db_idx": 3939, "episode_idx": 12, "frame_idx": 170, "global_frame_idx": 3939, "task_index": 2}, {"db_idx": 3940, "episode_idx": 12, "frame_idx": 171, "global_frame_idx": 3940, "task_index": 2}, {"db_idx": 3941, "episode_idx": 12, "frame_idx": 172, "global_frame_idx": 3941, "task_index": 2}, {"db_idx": 3942, "episode_idx": 12, "frame_idx": 173, "global_frame_idx": 3942, "task_index": 2}, {"db_idx": 3943, "episode_idx": 12, "frame_idx": 174, "global_frame_idx": 3943, "task_index": 2}, {"db_idx": 3944, "episode_idx": 12, "frame_idx": 175, "global_frame_idx": 3944, "task_index": 2}, {"db_idx": 3945, "episode_idx": 12, "frame_idx": 176, "global_frame_idx": 3945, "task_index": 2}, {"db_idx": 3946, "episode_idx": 12, "frame_idx": 177, "global_frame_idx": 3946, "task_index": 2}, {"db_idx": 3947, "episode_idx": 12, "frame_idx": 178, "global_frame_idx": 3947, "task_index": 2}, {"db_idx": 3948, "episode_idx": 12, "frame_idx": 179, "global_frame_idx": 3948, "task_index": 2}, {"db_idx": 3949, "episode_idx": 12, "frame_idx": 180, "global_frame_idx": 3949, "task_index": 2}, {"db_idx": 3950, "episode_idx": 12, "frame_idx": 181, "global_frame_idx": 3950, "task_index": 2}, {"db_idx": 3951, "episode_idx": 12, "frame_idx": 182, "global_frame_idx": 3951, "task_index": 2}, {"db_idx": 3952, "episode_idx": 12, "frame_idx": 183, "global_frame_idx": 3952, "task_index": 2}, {"db_idx": 3953, "episode_idx": 12, "frame_idx": 184, "global_frame_idx": 3953, "task_index": 2}, {"db_idx": 3954, "episode_idx": 12, "frame_idx": 185, "global_frame_idx": 3954, "task_index": 2}, {"db_idx": 3955, "episode_idx": 12, "frame_idx": 186, "global_frame_idx": 3955, "task_index": 2}, {"db_idx": 
3956, "episode_idx": 12, "frame_idx": 187, "global_frame_idx": 3956, "task_index": 2}, {"db_idx": 3957, "episode_idx": 12, "frame_idx": 188, "global_frame_idx": 3957, "task_index": 2}, {"db_idx": 3958, "episode_idx": 13, "frame_idx": 0, "global_frame_idx": 3958, "task_index": 2}, {"db_idx": 3959, "episode_idx": 13, "frame_idx": 1, "global_frame_idx": 3959, "task_index": 2}, {"db_idx": 3960, "episode_idx": 13, "frame_idx": 2, "global_frame_idx": 3960, "task_index": 2}, {"db_idx": 3961, "episode_idx": 13, "frame_idx": 3, "global_frame_idx": 3961, "task_index": 2}, {"db_idx": 3962, "episode_idx": 13, "frame_idx": 4, "global_frame_idx": 3962, "task_index": 2}, {"db_idx": 3963, "episode_idx": 13, "frame_idx": 5, "global_frame_idx": 3963, "task_index": 2}, {"db_idx": 3964, "episode_idx": 13, "frame_idx": 6, "global_frame_idx": 3964, "task_index": 2}, {"db_idx": 3965, "episode_idx": 13, "frame_idx": 7, "global_frame_idx": 3965, "task_index": 2}, {"db_idx": 3966, "episode_idx": 13, "frame_idx": 8, "global_frame_idx": 3966, "task_index": 2}, {"db_idx": 3967, "episode_idx": 13, "frame_idx": 9, "global_frame_idx": 3967, "task_index": 2}, {"db_idx": 3968, "episode_idx": 13, "frame_idx": 10, "global_frame_idx": 3968, "task_index": 2}, {"db_idx": 3969, "episode_idx": 13, "frame_idx": 11, "global_frame_idx": 3969, "task_index": 2}, {"db_idx": 3970, "episode_idx": 13, "frame_idx": 12, "global_frame_idx": 3970, "task_index": 2}, {"db_idx": 3971, "episode_idx": 13, "frame_idx": 13, "global_frame_idx": 3971, "task_index": 2}, {"db_idx": 3972, "episode_idx": 13, "frame_idx": 14, "global_frame_idx": 3972, "task_index": 2}, {"db_idx": 3973, "episode_idx": 13, "frame_idx": 15, "global_frame_idx": 3973, "task_index": 2}, {"db_idx": 3974, "episode_idx": 13, "frame_idx": 16, "global_frame_idx": 3974, "task_index": 2}, {"db_idx": 3975, "episode_idx": 13, "frame_idx": 17, "global_frame_idx": 3975, "task_index": 2}, {"db_idx": 3976, "episode_idx": 13, "frame_idx": 18, "global_frame_idx": 3976, 
"task_index": 2}, {"db_idx": 3977, "episode_idx": 13, "frame_idx": 19, "global_frame_idx": 3977, "task_index": 2}, {"db_idx": 3978, "episode_idx": 13, "frame_idx": 20, "global_frame_idx": 3978, "task_index": 2}, {"db_idx": 3979, "episode_idx": 13, "frame_idx": 21, "global_frame_idx": 3979, "task_index": 2}, {"db_idx": 3980, "episode_idx": 13, "frame_idx": 22, "global_frame_idx": 3980, "task_index": 2}, {"db_idx": 3981, "episode_idx": 13, "frame_idx": 23, "global_frame_idx": 3981, "task_index": 2}, {"db_idx": 3982, "episode_idx": 13, "frame_idx": 24, "global_frame_idx": 3982, "task_index": 2}, {"db_idx": 3983, "episode_idx": 13, "frame_idx": 25, "global_frame_idx": 3983, "task_index": 2}, {"db_idx": 3984, "episode_idx": 13, "frame_idx": 26, "global_frame_idx": 3984, "task_index": 2}, {"db_idx": 3985, "episode_idx": 13, "frame_idx": 27, "global_frame_idx": 3985, "task_index": 2}, {"db_idx": 3986, "episode_idx": 13, "frame_idx": 28, "global_frame_idx": 3986, "task_index": 2}, {"db_idx": 3987, "episode_idx": 13, "frame_idx": 29, "global_frame_idx": 3987, "task_index": 2}, {"db_idx": 3988, "episode_idx": 13, "frame_idx": 30, "global_frame_idx": 3988, "task_index": 2}, {"db_idx": 3989, "episode_idx": 13, "frame_idx": 31, "global_frame_idx": 3989, "task_index": 2}, {"db_idx": 3990, "episode_idx": 13, "frame_idx": 32, "global_frame_idx": 3990, "task_index": 2}, {"db_idx": 3991, "episode_idx": 13, "frame_idx": 33, "global_frame_idx": 3991, "task_index": 2}, {"db_idx": 3992, "episode_idx": 13, "frame_idx": 34, "global_frame_idx": 3992, "task_index": 2}, {"db_idx": 3993, "episode_idx": 13, "frame_idx": 35, "global_frame_idx": 3993, "task_index": 2}, {"db_idx": 3994, "episode_idx": 13, "frame_idx": 36, "global_frame_idx": 3994, "task_index": 2}, {"db_idx": 3995, "episode_idx": 13, "frame_idx": 37, "global_frame_idx": 3995, "task_index": 2}, {"db_idx": 3996, "episode_idx": 13, "frame_idx": 38, "global_frame_idx": 3996, "task_index": 2}, {"db_idx": 3997, "episode_idx": 13, 
"frame_idx": 39, "global_frame_idx": 3997, "task_index": 2}, {"db_idx": 3998, "episode_idx": 13, "frame_idx": 40, "global_frame_idx": 3998, "task_index": 2}, {"db_idx": 3999, "episode_idx": 13, "frame_idx": 41, "global_frame_idx": 3999, "task_index": 2}, {"db_idx": 4000, "episode_idx": 13, "frame_idx": 42, "global_frame_idx": 4000, "task_index": 2}, {"db_idx": 4001, "episode_idx": 13, "frame_idx": 43, "global_frame_idx": 4001, "task_index": 2}, {"db_idx": 4002, "episode_idx": 13, "frame_idx": 44, "global_frame_idx": 4002, "task_index": 2}, {"db_idx": 4003, "episode_idx": 13, "frame_idx": 45, "global_frame_idx": 4003, "task_index": 2}, {"db_idx": 4004, "episode_idx": 13, "frame_idx": 46, "global_frame_idx": 4004, "task_index": 2}, {"db_idx": 4005, "episode_idx": 13, "frame_idx": 47, "global_frame_idx": 4005, "task_index": 2}, {"db_idx": 4006, "episode_idx": 13, "frame_idx": 48, "global_frame_idx": 4006, "task_index": 2}, {"db_idx": 4007, "episode_idx": 13, "frame_idx": 49, "global_frame_idx": 4007, "task_index": 2}, {"db_idx": 4008, "episode_idx": 13, "frame_idx": 50, "global_frame_idx": 4008, "task_index": 2}, {"db_idx": 4009, "episode_idx": 13, "frame_idx": 51, "global_frame_idx": 4009, "task_index": 2}, {"db_idx": 4010, "episode_idx": 13, "frame_idx": 52, "global_frame_idx": 4010, "task_index": 2}, {"db_idx": 4011, "episode_idx": 13, "frame_idx": 53, "global_frame_idx": 4011, "task_index": 2}, {"db_idx": 4012, "episode_idx": 13, "frame_idx": 54, "global_frame_idx": 4012, "task_index": 2}, {"db_idx": 4013, "episode_idx": 13, "frame_idx": 55, "global_frame_idx": 4013, "task_index": 2}, {"db_idx": 4014, "episode_idx": 13, "frame_idx": 56, "global_frame_idx": 4014, "task_index": 2}, {"db_idx": 4015, "episode_idx": 13, "frame_idx": 57, "global_frame_idx": 4015, "task_index": 2}, {"db_idx": 4016, "episode_idx": 13, "frame_idx": 58, "global_frame_idx": 4016, "task_index": 2}, {"db_idx": 4017, "episode_idx": 13, "frame_idx": 59, "global_frame_idx": 4017, "task_index": 
2}, {"db_idx": 4018, "episode_idx": 13, "frame_idx": 60, "global_frame_idx": 4018, "task_index": 2}, {"db_idx": 4019, "episode_idx": 13, "frame_idx": 61, "global_frame_idx": 4019, "task_index": 2}, {"db_idx": 4020, "episode_idx": 13, "frame_idx": 62, "global_frame_idx": 4020, "task_index": 2}, {"db_idx": 4021, "episode_idx": 13, "frame_idx": 63, "global_frame_idx": 4021, "task_index": 2}, {"db_idx": 4022, "episode_idx": 13, "frame_idx": 64, "global_frame_idx": 4022, "task_index": 2}, {"db_idx": 4023, "episode_idx": 13, "frame_idx": 65, "global_frame_idx": 4023, "task_index": 2}, {"db_idx": 4024, "episode_idx": 13, "frame_idx": 66, "global_frame_idx": 4024, "task_index": 2}, {"db_idx": 4025, "episode_idx": 13, "frame_idx": 67, "global_frame_idx": 4025, "task_index": 2}, {"db_idx": 4026, "episode_idx": 13, "frame_idx": 68, "global_frame_idx": 4026, "task_index": 2}, {"db_idx": 4027, "episode_idx": 13, "frame_idx": 69, "global_frame_idx": 4027, "task_index": 2}, {"db_idx": 4028, "episode_idx": 13, "frame_idx": 70, "global_frame_idx": 4028, "task_index": 2}, {"db_idx": 4029, "episode_idx": 13, "frame_idx": 71, "global_frame_idx": 4029, "task_index": 2}, {"db_idx": 4030, "episode_idx": 13, "frame_idx": 72, "global_frame_idx": 4030, "task_index": 2}, {"db_idx": 4031, "episode_idx": 13, "frame_idx": 73, "global_frame_idx": 4031, "task_index": 2}, {"db_idx": 4032, "episode_idx": 13, "frame_idx": 74, "global_frame_idx": 4032, "task_index": 2}, {"db_idx": 4033, "episode_idx": 13, "frame_idx": 75, "global_frame_idx": 4033, "task_index": 2}, {"db_idx": 4034, "episode_idx": 13, "frame_idx": 76, "global_frame_idx": 4034, "task_index": 2}, {"db_idx": 4035, "episode_idx": 13, "frame_idx": 77, "global_frame_idx": 4035, "task_index": 2}, {"db_idx": 4036, "episode_idx": 13, "frame_idx": 78, "global_frame_idx": 4036, "task_index": 2}, {"db_idx": 4037, "episode_idx": 13, "frame_idx": 79, "global_frame_idx": 4037, "task_index": 2}, {"db_idx": 4038, "episode_idx": 13, "frame_idx": 80, 
"global_frame_idx": 4038, "task_index": 2}, {"db_idx": 4039, "episode_idx": 13, "frame_idx": 81, "global_frame_idx": 4039, "task_index": 2}, {"db_idx": 4040, "episode_idx": 13, "frame_idx": 82, "global_frame_idx": 4040, "task_index": 2}, {"db_idx": 4041, "episode_idx": 13, "frame_idx": 83, "global_frame_idx": 4041, "task_index": 2}, {"db_idx": 4042, "episode_idx": 13, "frame_idx": 84, "global_frame_idx": 4042, "task_index": 2}, {"db_idx": 4043, "episode_idx": 13, "frame_idx": 85, "global_frame_idx": 4043, "task_index": 2}, {"db_idx": 4044, "episode_idx": 13, "frame_idx": 86, "global_frame_idx": 4044, "task_index": 2}, {"db_idx": 4045, "episode_idx": 13, "frame_idx": 87, "global_frame_idx": 4045, "task_index": 2}, {"db_idx": 4046, "episode_idx": 13, "frame_idx": 88, "global_frame_idx": 4046, "task_index": 2}, {"db_idx": 4047, "episode_idx": 13, "frame_idx": 89, "global_frame_idx": 4047, "task_index": 2}, {"db_idx": 4048, "episode_idx": 13, "frame_idx": 90, "global_frame_idx": 4048, "task_index": 2}, {"db_idx": 4049, "episode_idx": 13, "frame_idx": 91, "global_frame_idx": 4049, "task_index": 2}, {"db_idx": 4050, "episode_idx": 13, "frame_idx": 92, "global_frame_idx": 4050, "task_index": 2}, {"db_idx": 4051, "episode_idx": 13, "frame_idx": 93, "global_frame_idx": 4051, "task_index": 2}, {"db_idx": 4052, "episode_idx": 13, "frame_idx": 94, "global_frame_idx": 4052, "task_index": 2}, {"db_idx": 4053, "episode_idx": 13, "frame_idx": 95, "global_frame_idx": 4053, "task_index": 2}, {"db_idx": 4054, "episode_idx": 13, "frame_idx": 96, "global_frame_idx": 4054, "task_index": 2}, {"db_idx": 4055, "episode_idx": 13, "frame_idx": 97, "global_frame_idx": 4055, "task_index": 2}, {"db_idx": 4056, "episode_idx": 13, "frame_idx": 98, "global_frame_idx": 4056, "task_index": 2}, {"db_idx": 4057, "episode_idx": 13, "frame_idx": 99, "global_frame_idx": 4057, "task_index": 2}, {"db_idx": 4058, "episode_idx": 13, "frame_idx": 100, "global_frame_idx": 4058, "task_index": 2}, {"db_idx": 
4059, "episode_idx": 13, "frame_idx": 101, "global_frame_idx": 4059, "task_index": 2}, {"db_idx": 4060, "episode_idx": 13, "frame_idx": 102, "global_frame_idx": 4060, "task_index": 2}, {"db_idx": 4061, "episode_idx": 13, "frame_idx": 103, "global_frame_idx": 4061, "task_index": 2}, {"db_idx": 4062, "episode_idx": 13, "frame_idx": 104, "global_frame_idx": 4062, "task_index": 2}, {"db_idx": 4063, "episode_idx": 13, "frame_idx": 105, "global_frame_idx": 4063, "task_index": 2}, {"db_idx": 4064, "episode_idx": 13, "frame_idx": 106, "global_frame_idx": 4064, "task_index": 2}, {"db_idx": 4065, "episode_idx": 13, "frame_idx": 107, "global_frame_idx": 4065, "task_index": 2}, {"db_idx": 4066, "episode_idx": 13, "frame_idx": 108, "global_frame_idx": 4066, "task_index": 2}, {"db_idx": 4067, "episode_idx": 13, "frame_idx": 109, "global_frame_idx": 4067, "task_index": 2}, {"db_idx": 4068, "episode_idx": 13, "frame_idx": 110, "global_frame_idx": 4068, "task_index": 2}, {"db_idx": 4069, "episode_idx": 13, "frame_idx": 111, "global_frame_idx": 4069, "task_index": 2}, {"db_idx": 4070, "episode_idx": 13, "frame_idx": 112, "global_frame_idx": 4070, "task_index": 2}, {"db_idx": 4071, "episode_idx": 13, "frame_idx": 113, "global_frame_idx": 4071, "task_index": 2}, {"db_idx": 4072, "episode_idx": 13, "frame_idx": 114, "global_frame_idx": 4072, "task_index": 2}, {"db_idx": 4073, "episode_idx": 13, "frame_idx": 115, "global_frame_idx": 4073, "task_index": 2}, {"db_idx": 4074, "episode_idx": 13, "frame_idx": 116, "global_frame_idx": 4074, "task_index": 2}, {"db_idx": 4075, "episode_idx": 13, "frame_idx": 117, "global_frame_idx": 4075, "task_index": 2}, {"db_idx": 4076, "episode_idx": 13, "frame_idx": 118, "global_frame_idx": 4076, "task_index": 2}, {"db_idx": 4077, "episode_idx": 13, "frame_idx": 119, "global_frame_idx": 4077, "task_index": 2}, {"db_idx": 4078, "episode_idx": 13, "frame_idx": 120, "global_frame_idx": 4078, "task_index": 2}, {"db_idx": 4079, "episode_idx": 13, "frame_idx": 
121, "global_frame_idx": 4079, "task_index": 2}, {"db_idx": 4080, "episode_idx": 13, "frame_idx": 122, "global_frame_idx": 4080, "task_index": 2}, {"db_idx": 4081, "episode_idx": 13, "frame_idx": 123, "global_frame_idx": 4081, "task_index": 2}, {"db_idx": 4082, "episode_idx": 13, "frame_idx": 124, "global_frame_idx": 4082, "task_index": 2}, {"db_idx": 4083, "episode_idx": 13, "frame_idx": 125, "global_frame_idx": 4083, "task_index": 2}, {"db_idx": 4084, "episode_idx": 13, "frame_idx": 126, "global_frame_idx": 4084, "task_index": 2}, {"db_idx": 4085, "episode_idx": 13, "frame_idx": 127, "global_frame_idx": 4085, "task_index": 2}, {"db_idx": 4086, "episode_idx": 13, "frame_idx": 128, "global_frame_idx": 4086, "task_index": 2}, {"db_idx": 4087, "episode_idx": 13, "frame_idx": 129, "global_frame_idx": 4087, "task_index": 2}, {"db_idx": 4088, "episode_idx": 13, "frame_idx": 130, "global_frame_idx": 4088, "task_index": 2}, {"db_idx": 4089, "episode_idx": 13, "frame_idx": 131, "global_frame_idx": 4089, "task_index": 2}, {"db_idx": 4090, "episode_idx": 13, "frame_idx": 132, "global_frame_idx": 4090, "task_index": 2}, {"db_idx": 4091, "episode_idx": 13, "frame_idx": 133, "global_frame_idx": 4091, "task_index": 2}, {"db_idx": 4092, "episode_idx": 13, "frame_idx": 134, "global_frame_idx": 4092, "task_index": 2}, {"db_idx": 4093, "episode_idx": 13, "frame_idx": 135, "global_frame_idx": 4093, "task_index": 2}, {"db_idx": 4094, "episode_idx": 13, "frame_idx": 136, "global_frame_idx": 4094, "task_index": 2}, {"db_idx": 4095, "episode_idx": 13, "frame_idx": 137, "global_frame_idx": 4095, "task_index": 2}, {"db_idx": 4096, "episode_idx": 13, "frame_idx": 138, "global_frame_idx": 4096, "task_index": 2}, {"db_idx": 4097, "episode_idx": 13, "frame_idx": 139, "global_frame_idx": 4097, "task_index": 2}, {"db_idx": 4098, "episode_idx": 13, "frame_idx": 140, "global_frame_idx": 4098, "task_index": 2}, {"db_idx": 4099, "episode_idx": 13, "frame_idx": 141, "global_frame_idx": 4099, 
"task_index": 2}, {"db_idx": 4100, "episode_idx": 13, "frame_idx": 142, "global_frame_idx": 4100, "task_index": 2}, {"db_idx": 4101, "episode_idx": 13, "frame_idx": 143, "global_frame_idx": 4101, "task_index": 2}, {"db_idx": 4102, "episode_idx": 13, "frame_idx": 144, "global_frame_idx": 4102, "task_index": 2}, {"db_idx": 4103, "episode_idx": 13, "frame_idx": 145, "global_frame_idx": 4103, "task_index": 2}, {"db_idx": 4104, "episode_idx": 13, "frame_idx": 146, "global_frame_idx": 4104, "task_index": 2}, {"db_idx": 4105, "episode_idx": 13, "frame_idx": 147, "global_frame_idx": 4105, "task_index": 2}, {"db_idx": 4106, "episode_idx": 13, "frame_idx": 148, "global_frame_idx": 4106, "task_index": 2}, {"db_idx": 4107, "episode_idx": 13, "frame_idx": 149, "global_frame_idx": 4107, "task_index": 2}, {"db_idx": 4108, "episode_idx": 13, "frame_idx": 150, "global_frame_idx": 4108, "task_index": 2}, {"db_idx": 4109, "episode_idx": 13, "frame_idx": 151, "global_frame_idx": 4109, "task_index": 2}, {"db_idx": 4110, "episode_idx": 13, "frame_idx": 152, "global_frame_idx": 4110, "task_index": 2}, {"db_idx": 4111, "episode_idx": 13, "frame_idx": 153, "global_frame_idx": 4111, "task_index": 2}, {"db_idx": 4112, "episode_idx": 13, "frame_idx": 154, "global_frame_idx": 4112, "task_index": 2}, {"db_idx": 4113, "episode_idx": 13, "frame_idx": 155, "global_frame_idx": 4113, "task_index": 2}, {"db_idx": 4114, "episode_idx": 13, "frame_idx": 156, "global_frame_idx": 4114, "task_index": 2}, {"db_idx": 4115, "episode_idx": 13, "frame_idx": 157, "global_frame_idx": 4115, "task_index": 2}, {"db_idx": 4116, "episode_idx": 13, "frame_idx": 158, "global_frame_idx": 4116, "task_index": 2}, {"db_idx": 4117, "episode_idx": 13, "frame_idx": 159, "global_frame_idx": 4117, "task_index": 2}, {"db_idx": 4118, "episode_idx": 13, "frame_idx": 160, "global_frame_idx": 4118, "task_index": 2}, {"db_idx": 4119, "episode_idx": 13, "frame_idx": 161, "global_frame_idx": 4119, "task_index": 2}, {"db_idx": 4120, 
"episode_idx": 13, "frame_idx": 162, "global_frame_idx": 4120, "task_index": 2}, {"db_idx": 4121, "episode_idx": 13, "frame_idx": 163, "global_frame_idx": 4121, "task_index": 2}, {"db_idx": 4122, "episode_idx": 13, "frame_idx": 164, "global_frame_idx": 4122, "task_index": 2}, {"db_idx": 4123, "episode_idx": 13, "frame_idx": 165, "global_frame_idx": 4123, "task_index": 2}, {"db_idx": 4124, "episode_idx": 13, "frame_idx": 166, "global_frame_idx": 4124, "task_index": 2}, {"db_idx": 4125, "episode_idx": 13, "frame_idx": 167, "global_frame_idx": 4125, "task_index": 2}, {"db_idx": 4126, "episode_idx": 13, "frame_idx": 168, "global_frame_idx": 4126, "task_index": 2}, {"db_idx": 4127, "episode_idx": 13, "frame_idx": 169, "global_frame_idx": 4127, "task_index": 2}, {"db_idx": 4128, "episode_idx": 13, "frame_idx": 170, "global_frame_idx": 4128, "task_index": 2}, {"db_idx": 4129, "episode_idx": 13, "frame_idx": 171, "global_frame_idx": 4129, "task_index": 2}, {"db_idx": 4130, "episode_idx": 13, "frame_idx": 172, "global_frame_idx": 4130, "task_index": 2}, {"db_idx": 4131, "episode_idx": 13, "frame_idx": 173, "global_frame_idx": 4131, "task_index": 2}, {"db_idx": 4132, "episode_idx": 13, "frame_idx": 174, "global_frame_idx": 4132, "task_index": 2}, {"db_idx": 4133, "episode_idx": 13, "frame_idx": 175, "global_frame_idx": 4133, "task_index": 2}, {"db_idx": 4134, "episode_idx": 13, "frame_idx": 176, "global_frame_idx": 4134, "task_index": 2}, {"db_idx": 4135, "episode_idx": 13, "frame_idx": 177, "global_frame_idx": 4135, "task_index": 2}, {"db_idx": 4136, "episode_idx": 13, "frame_idx": 178, "global_frame_idx": 4136, "task_index": 2}, {"db_idx": 4137, "episode_idx": 13, "frame_idx": 179, "global_frame_idx": 4137, "task_index": 2}, {"db_idx": 4138, "episode_idx": 13, "frame_idx": 180, "global_frame_idx": 4138, "task_index": 2}, {"db_idx": 4139, "episode_idx": 13, "frame_idx": 181, "global_frame_idx": 4139, "task_index": 2}, {"db_idx": 4140, "episode_idx": 13, "frame_idx": 182, 
"global_frame_idx": 4140, "task_index": 2}, {"db_idx": 4141, "episode_idx": 13, "frame_idx": 183, "global_frame_idx": 4141, "task_index": 2}, {"db_idx": 4142, "episode_idx": 13, "frame_idx": 184, "global_frame_idx": 4142, "task_index": 2}, {"db_idx": 4143, "episode_idx": 13, "frame_idx": 185, "global_frame_idx": 4143, "task_index": 2}, {"db_idx": 4144, "episode_idx": 13, "frame_idx": 186, "global_frame_idx": 4144, "task_index": 2}, {"db_idx": 4145, "episode_idx": 13, "frame_idx": 187, "global_frame_idx": 4145, "task_index": 2}, {"db_idx": 4146, "episode_idx": 13, "frame_idx": 188, "global_frame_idx": 4146, "task_index": 2}, {"db_idx": 4147, "episode_idx": 13, "frame_idx": 189, "global_frame_idx": 4147, "task_index": 2}, {"db_idx": 4148, "episode_idx": 13, "frame_idx": 190, "global_frame_idx": 4148, "task_index": 2}, {"db_idx": 4149, "episode_idx": 13, "frame_idx": 191, "global_frame_idx": 4149, "task_index": 2}, {"db_idx": 4150, "episode_idx": 13, "frame_idx": 192, "global_frame_idx": 4150, "task_index": 2}, {"db_idx": 4151, "episode_idx": 13, "frame_idx": 193, "global_frame_idx": 4151, "task_index": 2}, {"db_idx": 4152, "episode_idx": 13, "frame_idx": 194, "global_frame_idx": 4152, "task_index": 2}, {"db_idx": 4153, "episode_idx": 13, "frame_idx": 195, "global_frame_idx": 4153, "task_index": 2}, {"db_idx": 4154, "episode_idx": 13, "frame_idx": 196, "global_frame_idx": 4154, "task_index": 2}, {"db_idx": 4155, "episode_idx": 13, "frame_idx": 197, "global_frame_idx": 4155, "task_index": 2}, {"db_idx": 4156, "episode_idx": 13, "frame_idx": 198, "global_frame_idx": 4156, "task_index": 2}, {"db_idx": 4157, "episode_idx": 13, "frame_idx": 199, "global_frame_idx": 4157, "task_index": 2}, {"db_idx": 4158, "episode_idx": 13, "frame_idx": 200, "global_frame_idx": 4158, "task_index": 2}, {"db_idx": 4159, "episode_idx": 13, "frame_idx": 201, "global_frame_idx": 4159, "task_index": 2}, {"db_idx": 4160, "episode_idx": 13, "frame_idx": 202, "global_frame_idx": 4160, "task_index": 
2}, {"db_idx": 4161, "episode_idx": 13, "frame_idx": 203, "global_frame_idx": 4161, "task_index": 2}, {"db_idx": 4162, "episode_idx": 13, "frame_idx": 204, "global_frame_idx": 4162, "task_index": 2}, {"db_idx": 4163, "episode_idx": 13, "frame_idx": 205, "global_frame_idx": 4163, "task_index": 2}, {"db_idx": 4164, "episode_idx": 13, "frame_idx": 206, "global_frame_idx": 4164, "task_index": 2}, {"db_idx": 4165, "episode_idx": 13, "frame_idx": 207, "global_frame_idx": 4165, "task_index": 2}, {"db_idx": 4166, "episode_idx": 13, "frame_idx": 208, "global_frame_idx": 4166, "task_index": 2}, {"db_idx": 4167, "episode_idx": 13, "frame_idx": 209, "global_frame_idx": 4167, "task_index": 2}, {"db_idx": 4168, "episode_idx": 13, "frame_idx": 210, "global_frame_idx": 4168, "task_index": 2}, {"db_idx": 4169, "episode_idx": 13, "frame_idx": 211, "global_frame_idx": 4169, "task_index": 2}, {"db_idx": 4170, "episode_idx": 13, "frame_idx": 212, "global_frame_idx": 4170, "task_index": 2}, {"db_idx": 4171, "episode_idx": 13, "frame_idx": 213, "global_frame_idx": 4171, "task_index": 2}, {"db_idx": 4172, "episode_idx": 13, "frame_idx": 214, "global_frame_idx": 4172, "task_index": 2}, {"db_idx": 4173, "episode_idx": 13, "frame_idx": 215, "global_frame_idx": 4173, "task_index": 2}, {"db_idx": 4174, "episode_idx": 13, "frame_idx": 216, "global_frame_idx": 4174, "task_index": 2}, {"db_idx": 4175, "episode_idx": 13, "frame_idx": 217, "global_frame_idx": 4175, "task_index": 2}, {"db_idx": 4176, "episode_idx": 13, "frame_idx": 218, "global_frame_idx": 4176, "task_index": 2}, {"db_idx": 4177, "episode_idx": 13, "frame_idx": 219, "global_frame_idx": 4177, "task_index": 2}, {"db_idx": 4178, "episode_idx": 13, "frame_idx": 220, "global_frame_idx": 4178, "task_index": 2}, {"db_idx": 4179, "episode_idx": 13, "frame_idx": 221, "global_frame_idx": 4179, "task_index": 2}, {"db_idx": 4180, "episode_idx": 13, "frame_idx": 222, "global_frame_idx": 4180, "task_index": 2}, {"db_idx": 4181, "episode_idx": 13, 
"frame_idx": 223, "global_frame_idx": 4181, "task_index": 2}, {"db_idx": 4182, "episode_idx": 13, "frame_idx": 224, "global_frame_idx": 4182, "task_index": 2}, {"db_idx": 4183, "episode_idx": 13, "frame_idx": 225, "global_frame_idx": 4183, "task_index": 2}, {"db_idx": 4184, "episode_idx": 13, "frame_idx": 226, "global_frame_idx": 4184, "task_index": 2}, {"db_idx": 4185, "episode_idx": 13, "frame_idx": 227, "global_frame_idx": 4185, "task_index": 2}, {"db_idx": 4186, "episode_idx": 13, "frame_idx": 228, "global_frame_idx": 4186, "task_index": 2}, {"db_idx": 4187, "episode_idx": 13, "frame_idx": 229, "global_frame_idx": 4187, "task_index": 2}, {"db_idx": 4188, "episode_idx": 13, "frame_idx": 230, "global_frame_idx": 4188, "task_index": 2}, {"db_idx": 4189, "episode_idx": 13, "frame_idx": 231, "global_frame_idx": 4189, "task_index": 2}, {"db_idx": 4190, "episode_idx": 13, "frame_idx": 232, "global_frame_idx": 4190, "task_index": 2}, {"db_idx": 4191, "episode_idx": 13, "frame_idx": 233, "global_frame_idx": 4191, "task_index": 2}, {"db_idx": 4192, "episode_idx": 13, "frame_idx": 234, "global_frame_idx": 4192, "task_index": 2}, {"db_idx": 4193, "episode_idx": 13, "frame_idx": 235, "global_frame_idx": 4193, "task_index": 2}, {"db_idx": 4194, "episode_idx": 13, "frame_idx": 236, "global_frame_idx": 4194, "task_index": 2}, {"db_idx": 4195, "episode_idx": 13, "frame_idx": 237, "global_frame_idx": 4195, "task_index": 2}, {"db_idx": 4196, "episode_idx": 13, "frame_idx": 238, "global_frame_idx": 4196, "task_index": 2}, {"db_idx": 4197, "episode_idx": 13, "frame_idx": 239, "global_frame_idx": 4197, "task_index": 2}, {"db_idx": 4198, "episode_idx": 13, "frame_idx": 240, "global_frame_idx": 4198, "task_index": 2}, {"db_idx": 4199, "episode_idx": 13, "frame_idx": 241, "global_frame_idx": 4199, "task_index": 2}, {"db_idx": 4200, "episode_idx": 13, "frame_idx": 242, "global_frame_idx": 4200, "task_index": 2}, {"db_idx": 4201, "episode_idx": 13, "frame_idx": 243, "global_frame_idx": 
4201, "task_index": 2}, {"db_idx": 4202, "episode_idx": 13, "frame_idx": 244, "global_frame_idx": 4202, "task_index": 2}, {"db_idx": 4203, "episode_idx": 13, "frame_idx": 245, "global_frame_idx": 4203, "task_index": 2}, {"db_idx": 4204, "episode_idx": 13, "frame_idx": 246, "global_frame_idx": 4204, "task_index": 2}, {"db_idx": 4205, "episode_idx": 13, "frame_idx": 247, "global_frame_idx": 4205, "task_index": 2}, {"db_idx": 4206, "episode_idx": 13, "frame_idx": 248, "global_frame_idx": 4206, "task_index": 2}, {"db_idx": 4207, "episode_idx": 13, "frame_idx": 249, "global_frame_idx": 4207, "task_index": 2}, {"db_idx": 4208, "episode_idx": 13, "frame_idx": 250, "global_frame_idx": 4208, "task_index": 2}, {"db_idx": 4209, "episode_idx": 13, "frame_idx": 251, "global_frame_idx": 4209, "task_index": 2}, {"db_idx": 4210, "episode_idx": 13, "frame_idx": 252, "global_frame_idx": 4210, "task_index": 2}, {"db_idx": 4211, "episode_idx": 13, "frame_idx": 253, "global_frame_idx": 4211, "task_index": 2}, {"db_idx": 4212, "episode_idx": 13, "frame_idx": 254, "global_frame_idx": 4212, "task_index": 2}, {"db_idx": 4213, "episode_idx": 13, "frame_idx": 255, "global_frame_idx": 4213, "task_index": 2}, {"db_idx": 4214, "episode_idx": 13, "frame_idx": 256, "global_frame_idx": 4214, "task_index": 2}, {"db_idx": 4215, "episode_idx": 13, "frame_idx": 257, "global_frame_idx": 4215, "task_index": 2}, {"db_idx": 4216, "episode_idx": 13, "frame_idx": 258, "global_frame_idx": 4216, "task_index": 2}, {"db_idx": 4217, "episode_idx": 14, "frame_idx": 0, "global_frame_idx": 4217, "task_index": 2}, {"db_idx": 4218, "episode_idx": 14, "frame_idx": 1, "global_frame_idx": 4218, "task_index": 2}, {"db_idx": 4219, "episode_idx": 14, "frame_idx": 2, "global_frame_idx": 4219, "task_index": 2}, {"db_idx": 4220, "episode_idx": 14, "frame_idx": 3, "global_frame_idx": 4220, "task_index": 2}, {"db_idx": 4221, "episode_idx": 14, "frame_idx": 4, "global_frame_idx": 4221, "task_index": 2}, {"db_idx": 4222, 
"episode_idx": 14, "frame_idx": 5, "global_frame_idx": 4222, "task_index": 2}, {"db_idx": 4223, "episode_idx": 14, "frame_idx": 6, "global_frame_idx": 4223, "task_index": 2}, {"db_idx": 4224, "episode_idx": 14, "frame_idx": 7, "global_frame_idx": 4224, "task_index": 2}, {"db_idx": 4225, "episode_idx": 14, "frame_idx": 8, "global_frame_idx": 4225, "task_index": 2}, {"db_idx": 4226, "episode_idx": 14, "frame_idx": 9, "global_frame_idx": 4226, "task_index": 2}, {"db_idx": 4227, "episode_idx": 14, "frame_idx": 10, "global_frame_idx": 4227, "task_index": 2}, {"db_idx": 4228, "episode_idx": 14, "frame_idx": 11, "global_frame_idx": 4228, "task_index": 2}, {"db_idx": 4229, "episode_idx": 14, "frame_idx": 12, "global_frame_idx": 4229, "task_index": 2}, {"db_idx": 4230, "episode_idx": 14, "frame_idx": 13, "global_frame_idx": 4230, "task_index": 2}, {"db_idx": 4231, "episode_idx": 14, "frame_idx": 14, "global_frame_idx": 4231, "task_index": 2}, {"db_idx": 4232, "episode_idx": 14, "frame_idx": 15, "global_frame_idx": 4232, "task_index": 2}, {"db_idx": 4233, "episode_idx": 14, "frame_idx": 16, "global_frame_idx": 4233, "task_index": 2}, {"db_idx": 4234, "episode_idx": 14, "frame_idx": 17, "global_frame_idx": 4234, "task_index": 2}, {"db_idx": 4235, "episode_idx": 14, "frame_idx": 18, "global_frame_idx": 4235, "task_index": 2}, {"db_idx": 4236, "episode_idx": 14, "frame_idx": 19, "global_frame_idx": 4236, "task_index": 2}, {"db_idx": 4237, "episode_idx": 14, "frame_idx": 20, "global_frame_idx": 4237, "task_index": 2}, {"db_idx": 4238, "episode_idx": 14, "frame_idx": 21, "global_frame_idx": 4238, "task_index": 2}, {"db_idx": 4239, "episode_idx": 14, "frame_idx": 22, "global_frame_idx": 4239, "task_index": 2}, {"db_idx": 4240, "episode_idx": 14, "frame_idx": 23, "global_frame_idx": 4240, "task_index": 2}, {"db_idx": 4241, "episode_idx": 14, "frame_idx": 24, "global_frame_idx": 4241, "task_index": 2}, {"db_idx": 4242, "episode_idx": 14, "frame_idx": 25, "global_frame_idx": 4242, 
"task_index": 2}, {"db_idx": 4243, "episode_idx": 14, "frame_idx": 26, "global_frame_idx": 4243, "task_index": 2}, {"db_idx": 4244, "episode_idx": 14, "frame_idx": 27, "global_frame_idx": 4244, "task_index": 2}, {"db_idx": 4245, "episode_idx": 14, "frame_idx": 28, "global_frame_idx": 4245, "task_index": 2}, {"db_idx": 4246, "episode_idx": 14, "frame_idx": 29, "global_frame_idx": 4246, "task_index": 2}, {"db_idx": 4247, "episode_idx": 14, "frame_idx": 30, "global_frame_idx": 4247, "task_index": 2}, {"db_idx": 4248, "episode_idx": 14, "frame_idx": 31, "global_frame_idx": 4248, "task_index": 2}, {"db_idx": 4249, "episode_idx": 14, "frame_idx": 32, "global_frame_idx": 4249, "task_index": 2}, {"db_idx": 4250, "episode_idx": 14, "frame_idx": 33, "global_frame_idx": 4250, "task_index": 2}, {"db_idx": 4251, "episode_idx": 14, "frame_idx": 34, "global_frame_idx": 4251, "task_index": 2}, {"db_idx": 4252, "episode_idx": 14, "frame_idx": 35, "global_frame_idx": 4252, "task_index": 2}, {"db_idx": 4253, "episode_idx": 14, "frame_idx": 36, "global_frame_idx": 4253, "task_index": 2}, {"db_idx": 4254, "episode_idx": 14, "frame_idx": 37, "global_frame_idx": 4254, "task_index": 2}, {"db_idx": 4255, "episode_idx": 14, "frame_idx": 38, "global_frame_idx": 4255, "task_index": 2}, {"db_idx": 4256, "episode_idx": 14, "frame_idx": 39, "global_frame_idx": 4256, "task_index": 2}, {"db_idx": 4257, "episode_idx": 14, "frame_idx": 40, "global_frame_idx": 4257, "task_index": 2}, {"db_idx": 4258, "episode_idx": 14, "frame_idx": 41, "global_frame_idx": 4258, "task_index": 2}, {"db_idx": 4259, "episode_idx": 14, "frame_idx": 42, "global_frame_idx": 4259, "task_index": 2}, {"db_idx": 4260, "episode_idx": 14, "frame_idx": 43, "global_frame_idx": 4260, "task_index": 2}, {"db_idx": 4261, "episode_idx": 14, "frame_idx": 44, "global_frame_idx": 4261, "task_index": 2}, {"db_idx": 4262, "episode_idx": 14, "frame_idx": 45, "global_frame_idx": 4262, "task_index": 2}, {"db_idx": 4263, "episode_idx": 14, 
"frame_idx": 46, "global_frame_idx": 4263, "task_index": 2}, {"db_idx": 4264, "episode_idx": 14, "frame_idx": 47, "global_frame_idx": 4264, "task_index": 2}, {"db_idx": 4265, "episode_idx": 14, "frame_idx": 48, "global_frame_idx": 4265, "task_index": 2}, {"db_idx": 4266, "episode_idx": 14, "frame_idx": 49, "global_frame_idx": 4266, "task_index": 2}, {"db_idx": 4267, "episode_idx": 14, "frame_idx": 50, "global_frame_idx": 4267, "task_index": 2}, {"db_idx": 4268, "episode_idx": 14, "frame_idx": 51, "global_frame_idx": 4268, "task_index": 2}, {"db_idx": 4269, "episode_idx": 14, "frame_idx": 52, "global_frame_idx": 4269, "task_index": 2}, {"db_idx": 4270, "episode_idx": 14, "frame_idx": 53, "global_frame_idx": 4270, "task_index": 2}, {"db_idx": 4271, "episode_idx": 14, "frame_idx": 54, "global_frame_idx": 4271, "task_index": 2}, {"db_idx": 4272, "episode_idx": 14, "frame_idx": 55, "global_frame_idx": 4272, "task_index": 2}, {"db_idx": 4273, "episode_idx": 14, "frame_idx": 56, "global_frame_idx": 4273, "task_index": 2}, {"db_idx": 4274, "episode_idx": 14, "frame_idx": 57, "global_frame_idx": 4274, "task_index": 2}, {"db_idx": 4275, "episode_idx": 14, "frame_idx": 58, "global_frame_idx": 4275, "task_index": 2}, {"db_idx": 4276, "episode_idx": 14, "frame_idx": 59, "global_frame_idx": 4276, "task_index": 2}, {"db_idx": 4277, "episode_idx": 14, "frame_idx": 60, "global_frame_idx": 4277, "task_index": 2}, {"db_idx": 4278, "episode_idx": 14, "frame_idx": 61, "global_frame_idx": 4278, "task_index": 2}, {"db_idx": 4279, "episode_idx": 14, "frame_idx": 62, "global_frame_idx": 4279, "task_index": 2}, {"db_idx": 4280, "episode_idx": 14, "frame_idx": 63, "global_frame_idx": 4280, "task_index": 2}, {"db_idx": 4281, "episode_idx": 14, "frame_idx": 64, "global_frame_idx": 4281, "task_index": 2}, {"db_idx": 4282, "episode_idx": 14, "frame_idx": 65, "global_frame_idx": 4282, "task_index": 2}, {"db_idx": 4283, "episode_idx": 14, "frame_idx": 66, "global_frame_idx": 4283, "task_index": 
2}, {"db_idx": 4284, "episode_idx": 14, "frame_idx": 67, "global_frame_idx": 4284, "task_index": 2}, {"db_idx": 4285, "episode_idx": 14, "frame_idx": 68, "global_frame_idx": 4285, "task_index": 2}, {"db_idx": 4286, "episode_idx": 14, "frame_idx": 69, "global_frame_idx": 4286, "task_index": 2}, {"db_idx": 4287, "episode_idx": 14, "frame_idx": 70, "global_frame_idx": 4287, "task_index": 2}, {"db_idx": 4288, "episode_idx": 14, "frame_idx": 71, "global_frame_idx": 4288, "task_index": 2}, {"db_idx": 4289, "episode_idx": 14, "frame_idx": 72, "global_frame_idx": 4289, "task_index": 2}, {"db_idx": 4290, "episode_idx": 14, "frame_idx": 73, "global_frame_idx": 4290, "task_index": 2}, {"db_idx": 4291, "episode_idx": 14, "frame_idx": 74, "global_frame_idx": 4291, "task_index": 2}, {"db_idx": 4292, "episode_idx": 14, "frame_idx": 75, "global_frame_idx": 4292, "task_index": 2}, {"db_idx": 4293, "episode_idx": 14, "frame_idx": 76, "global_frame_idx": 4293, "task_index": 2}, {"db_idx": 4294, "episode_idx": 14, "frame_idx": 77, "global_frame_idx": 4294, "task_index": 2}, {"db_idx": 4295, "episode_idx": 14, "frame_idx": 78, "global_frame_idx": 4295, "task_index": 2}, {"db_idx": 4296, "episode_idx": 14, "frame_idx": 79, "global_frame_idx": 4296, "task_index": 2}, {"db_idx": 4297, "episode_idx": 14, "frame_idx": 80, "global_frame_idx": 4297, "task_index": 2}, {"db_idx": 4298, "episode_idx": 14, "frame_idx": 81, "global_frame_idx": 4298, "task_index": 2}, {"db_idx": 4299, "episode_idx": 14, "frame_idx": 82, "global_frame_idx": 4299, "task_index": 2}, {"db_idx": 4300, "episode_idx": 14, "frame_idx": 83, "global_frame_idx": 4300, "task_index": 2}, {"db_idx": 4301, "episode_idx": 14, "frame_idx": 84, "global_frame_idx": 4301, "task_index": 2}, {"db_idx": 4302, "episode_idx": 14, "frame_idx": 85, "global_frame_idx": 4302, "task_index": 2}, {"db_idx": 4303, "episode_idx": 14, "frame_idx": 86, "global_frame_idx": 4303, "task_index": 2}, {"db_idx": 4304, "episode_idx": 14, "frame_idx": 87, 
"global_frame_idx": 4304, "task_index": 2}, {"db_idx": 4305, "episode_idx": 14, "frame_idx": 88, "global_frame_idx": 4305, "task_index": 2}, {"db_idx": 4306, "episode_idx": 14, "frame_idx": 89, "global_frame_idx": 4306, "task_index": 2}, {"db_idx": 4307, "episode_idx": 14, "frame_idx": 90, "global_frame_idx": 4307, "task_index": 2}, {"db_idx": 4308, "episode_idx": 14, "frame_idx": 91, "global_frame_idx": 4308, "task_index": 2}, {"db_idx": 4309, "episode_idx": 14, "frame_idx": 92, "global_frame_idx": 4309, "task_index": 2}, {"db_idx": 4310, "episode_idx": 14, "frame_idx": 93, "global_frame_idx": 4310, "task_index": 2}, {"db_idx": 4311, "episode_idx": 14, "frame_idx": 94, "global_frame_idx": 4311, "task_index": 2}, {"db_idx": 4312, "episode_idx": 14, "frame_idx": 95, "global_frame_idx": 4312, "task_index": 2}, {"db_idx": 4313, "episode_idx": 14, "frame_idx": 96, "global_frame_idx": 4313, "task_index": 2}, {"db_idx": 4314, "episode_idx": 14, "frame_idx": 97, "global_frame_idx": 4314, "task_index": 2}, {"db_idx": 4315, "episode_idx": 14, "frame_idx": 98, "global_frame_idx": 4315, "task_index": 2}, {"db_idx": 4316, "episode_idx": 14, "frame_idx": 99, "global_frame_idx": 4316, "task_index": 2}, {"db_idx": 4317, "episode_idx": 14, "frame_idx": 100, "global_frame_idx": 4317, "task_index": 2}, {"db_idx": 4318, "episode_idx": 14, "frame_idx": 101, "global_frame_idx": 4318, "task_index": 2}, {"db_idx": 4319, "episode_idx": 14, "frame_idx": 102, "global_frame_idx": 4319, "task_index": 2}, {"db_idx": 4320, "episode_idx": 14, "frame_idx": 103, "global_frame_idx": 4320, "task_index": 2}, {"db_idx": 4321, "episode_idx": 14, "frame_idx": 104, "global_frame_idx": 4321, "task_index": 2}, {"db_idx": 4322, "episode_idx": 14, "frame_idx": 105, "global_frame_idx": 4322, "task_index": 2}, {"db_idx": 4323, "episode_idx": 14, "frame_idx": 106, "global_frame_idx": 4323, "task_index": 2}, {"db_idx": 4324, "episode_idx": 14, "frame_idx": 107, "global_frame_idx": 4324, "task_index": 2}, 
{"db_idx": 4325, "episode_idx": 14, "frame_idx": 108, "global_frame_idx": 4325, "task_index": 2}, {"db_idx": 4326, "episode_idx": 14, "frame_idx": 109, "global_frame_idx": 4326, "task_index": 2}, {"db_idx": 4327, "episode_idx": 14, "frame_idx": 110, "global_frame_idx": 4327, "task_index": 2}, {"db_idx": 4328, "episode_idx": 14, "frame_idx": 111, "global_frame_idx": 4328, "task_index": 2}, {"db_idx": 4329, "episode_idx": 14, "frame_idx": 112, "global_frame_idx": 4329, "task_index": 2}, {"db_idx": 4330, "episode_idx": 14, "frame_idx": 113, "global_frame_idx": 4330, "task_index": 2}, {"db_idx": 4331, "episode_idx": 14, "frame_idx": 114, "global_frame_idx": 4331, "task_index": 2}, {"db_idx": 4332, "episode_idx": 14, "frame_idx": 115, "global_frame_idx": 4332, "task_index": 2}, {"db_idx": 4333, "episode_idx": 14, "frame_idx": 116, "global_frame_idx": 4333, "task_index": 2}, {"db_idx": 4334, "episode_idx": 14, "frame_idx": 117, "global_frame_idx": 4334, "task_index": 2}, {"db_idx": 4335, "episode_idx": 14, "frame_idx": 118, "global_frame_idx": 4335, "task_index": 2}, {"db_idx": 4336, "episode_idx": 14, "frame_idx": 119, "global_frame_idx": 4336, "task_index": 2}, {"db_idx": 4337, "episode_idx": 14, "frame_idx": 120, "global_frame_idx": 4337, "task_index": 2}, {"db_idx": 4338, "episode_idx": 14, "frame_idx": 121, "global_frame_idx": 4338, "task_index": 2}, {"db_idx": 4339, "episode_idx": 14, "frame_idx": 122, "global_frame_idx": 4339, "task_index": 2}, {"db_idx": 4340, "episode_idx": 14, "frame_idx": 123, "global_frame_idx": 4340, "task_index": 2}, {"db_idx": 4341, "episode_idx": 14, "frame_idx": 124, "global_frame_idx": 4341, "task_index": 2}, {"db_idx": 4342, "episode_idx": 14, "frame_idx": 125, "global_frame_idx": 4342, "task_index": 2}, {"db_idx": 4343, "episode_idx": 14, "frame_idx": 126, "global_frame_idx": 4343, "task_index": 2}, {"db_idx": 4344, "episode_idx": 14, "frame_idx": 127, "global_frame_idx": 4344, "task_index": 2}, {"db_idx": 4345, "episode_idx": 14, 
"frame_idx": 128, "global_frame_idx": 4345, "task_index": 2}, {"db_idx": 4346, "episode_idx": 14, "frame_idx": 129, "global_frame_idx": 4346, "task_index": 2}, {"db_idx": 4347, "episode_idx": 14, "frame_idx": 130, "global_frame_idx": 4347, "task_index": 2}, {"db_idx": 4348, "episode_idx": 14, "frame_idx": 131, "global_frame_idx": 4348, "task_index": 2}, {"db_idx": 4349, "episode_idx": 14, "frame_idx": 132, "global_frame_idx": 4349, "task_index": 2}, {"db_idx": 4350, "episode_idx": 14, "frame_idx": 133, "global_frame_idx": 4350, "task_index": 2}, {"db_idx": 4351, "episode_idx": 14, "frame_idx": 134, "global_frame_idx": 4351, "task_index": 2}, {"db_idx": 4352, "episode_idx": 14, "frame_idx": 135, "global_frame_idx": 4352, "task_index": 2}, {"db_idx": 4353, "episode_idx": 14, "frame_idx": 136, "global_frame_idx": 4353, "task_index": 2}, {"db_idx": 4354, "episode_idx": 14, "frame_idx": 137, "global_frame_idx": 4354, "task_index": 2}, {"db_idx": 4355, "episode_idx": 14, "frame_idx": 138, "global_frame_idx": 4355, "task_index": 2}, {"db_idx": 4356, "episode_idx": 14, "frame_idx": 139, "global_frame_idx": 4356, "task_index": 2}, {"db_idx": 4357, "episode_idx": 14, "frame_idx": 140, "global_frame_idx": 4357, "task_index": 2}, {"db_idx": 4358, "episode_idx": 14, "frame_idx": 141, "global_frame_idx": 4358, "task_index": 2}, {"db_idx": 4359, "episode_idx": 14, "frame_idx": 142, "global_frame_idx": 4359, "task_index": 2}, {"db_idx": 4360, "episode_idx": 14, "frame_idx": 143, "global_frame_idx": 4360, "task_index": 2}, {"db_idx": 4361, "episode_idx": 14, "frame_idx": 144, "global_frame_idx": 4361, "task_index": 2}, {"db_idx": 4362, "episode_idx": 14, "frame_idx": 145, "global_frame_idx": 4362, "task_index": 2}, {"db_idx": 4363, "episode_idx": 14, "frame_idx": 146, "global_frame_idx": 4363, "task_index": 2}, {"db_idx": 4364, "episode_idx": 14, "frame_idx": 147, "global_frame_idx": 4364, "task_index": 2}, {"db_idx": 4365, "episode_idx": 14, "frame_idx": 148, "global_frame_idx": 
4365, "task_index": 2}, {"db_idx": 4366, "episode_idx": 14, "frame_idx": 149, "global_frame_idx": 4366, "task_index": 2}, {"db_idx": 4367, "episode_idx": 14, "frame_idx": 150, "global_frame_idx": 4367, "task_index": 2}, {"db_idx": 4368, "episode_idx": 14, "frame_idx": 151, "global_frame_idx": 4368, "task_index": 2}, {"db_idx": 4369, "episode_idx": 14, "frame_idx": 152, "global_frame_idx": 4369, "task_index": 2}, {"db_idx": 4370, "episode_idx": 14, "frame_idx": 153, "global_frame_idx": 4370, "task_index": 2}, {"db_idx": 4371, "episode_idx": 14, "frame_idx": 154, "global_frame_idx": 4371, "task_index": 2}, {"db_idx": 4372, "episode_idx": 14, "frame_idx": 155, "global_frame_idx": 4372, "task_index": 2}, {"db_idx": 4373, "episode_idx": 14, "frame_idx": 156, "global_frame_idx": 4373, "task_index": 2}, {"db_idx": 4374, "episode_idx": 14, "frame_idx": 157, "global_frame_idx": 4374, "task_index": 2}, {"db_idx": 4375, "episode_idx": 14, "frame_idx": 158, "global_frame_idx": 4375, "task_index": 2}, {"db_idx": 4376, "episode_idx": 14, "frame_idx": 159, "global_frame_idx": 4376, "task_index": 2}, {"db_idx": 4377, "episode_idx": 14, "frame_idx": 160, "global_frame_idx": 4377, "task_index": 2}, {"db_idx": 4378, "episode_idx": 14, "frame_idx": 161, "global_frame_idx": 4378, "task_index": 2}, {"db_idx": 4379, "episode_idx": 14, "frame_idx": 162, "global_frame_idx": 4379, "task_index": 2}, {"db_idx": 4380, "episode_idx": 14, "frame_idx": 163, "global_frame_idx": 4380, "task_index": 2}, {"db_idx": 4381, "episode_idx": 14, "frame_idx": 164, "global_frame_idx": 4381, "task_index": 2}, {"db_idx": 4382, "episode_idx": 14, "frame_idx": 165, "global_frame_idx": 4382, "task_index": 2}, {"db_idx": 4383, "episode_idx": 14, "frame_idx": 166, "global_frame_idx": 4383, "task_index": 2}, {"db_idx": 4384, "episode_idx": 14, "frame_idx": 167, "global_frame_idx": 4384, "task_index": 2}, {"db_idx": 4385, "episode_idx": 14, "frame_idx": 168, "global_frame_idx": 4385, "task_index": 2}, {"db_idx": 
4386, "episode_idx": 14, "frame_idx": 169, "global_frame_idx": 4386, "task_index": 2}, {"db_idx": 4387, "episode_idx": 14, "frame_idx": 170, "global_frame_idx": 4387, "task_index": 2}, {"db_idx": 4388, "episode_idx": 14, "frame_idx": 171, "global_frame_idx": 4388, "task_index": 2}, {"db_idx": 4389, "episode_idx": 14, "frame_idx": 172, "global_frame_idx": 4389, "task_index": 2}, {"db_idx": 4390, "episode_idx": 14, "frame_idx": 173, "global_frame_idx": 4390, "task_index": 2}, {"db_idx": 4391, "episode_idx": 14, "frame_idx": 174, "global_frame_idx": 4391, "task_index": 2}, {"db_idx": 4392, "episode_idx": 14, "frame_idx": 175, "global_frame_idx": 4392, "task_index": 2}, {"db_idx": 4393, "episode_idx": 14, "frame_idx": 176, "global_frame_idx": 4393, "task_index": 2}, {"db_idx": 4394, "episode_idx": 14, "frame_idx": 177, "global_frame_idx": 4394, "task_index": 2}, {"db_idx": 4395, "episode_idx": 14, "frame_idx": 178, "global_frame_idx": 4395, "task_index": 2}, {"db_idx": 4396, "episode_idx": 14, "frame_idx": 179, "global_frame_idx": 4396, "task_index": 2}, {"db_idx": 4397, "episode_idx": 14, "frame_idx": 180, "global_frame_idx": 4397, "task_index": 2}, {"db_idx": 4398, "episode_idx": 14, "frame_idx": 181, "global_frame_idx": 4398, "task_index": 2}, {"db_idx": 4399, "episode_idx": 14, "frame_idx": 182, "global_frame_idx": 4399, "task_index": 2}, {"db_idx": 4400, "episode_idx": 14, "frame_idx": 183, "global_frame_idx": 4400, "task_index": 2}, {"db_idx": 4401, "episode_idx": 14, "frame_idx": 184, "global_frame_idx": 4401, "task_index": 2}, {"db_idx": 4402, "episode_idx": 14, "frame_idx": 185, "global_frame_idx": 4402, "task_index": 2}, {"db_idx": 4403, "episode_idx": 14, "frame_idx": 186, "global_frame_idx": 4403, "task_index": 2}, {"db_idx": 4404, "episode_idx": 14, "frame_idx": 187, "global_frame_idx": 4404, "task_index": 2}, {"db_idx": 4405, "episode_idx": 14, "frame_idx": 188, "global_frame_idx": 4405, "task_index": 2}, {"db_idx": 4406, "episode_idx": 14, "frame_idx": 
189, "global_frame_idx": 4406, "task_index": 2}, {"db_idx": 4407, "episode_idx": 14, "frame_idx": 190, "global_frame_idx": 4407, "task_index": 2}, {"db_idx": 4408, "episode_idx": 14, "frame_idx": 191, "global_frame_idx": 4408, "task_index": 2}, {"db_idx": 4409, "episode_idx": 14, "frame_idx": 192, "global_frame_idx": 4409, "task_index": 2}, {"db_idx": 4410, "episode_idx": 14, "frame_idx": 193, "global_frame_idx": 4410, "task_index": 2}, {"db_idx": 4411, "episode_idx": 14, "frame_idx": 194, "global_frame_idx": 4411, "task_index": 2}, {"db_idx": 4412, "episode_idx": 14, "frame_idx": 195, "global_frame_idx": 4412, "task_index": 2}, {"db_idx": 4413, "episode_idx": 14, "frame_idx": 196, "global_frame_idx": 4413, "task_index": 2}, {"db_idx": 4414, "episode_idx": 14, "frame_idx": 197, "global_frame_idx": 4414, "task_index": 2}, {"db_idx": 4415, "episode_idx": 14, "frame_idx": 198, "global_frame_idx": 4415, "task_index": 2}, {"db_idx": 4416, "episode_idx": 14, "frame_idx": 199, "global_frame_idx": 4416, "task_index": 2}, {"db_idx": 4417, "episode_idx": 14, "frame_idx": 200, "global_frame_idx": 4417, "task_index": 2}, {"db_idx": 4418, "episode_idx": 14, "frame_idx": 201, "global_frame_idx": 4418, "task_index": 2}, {"db_idx": 4419, "episode_idx": 14, "frame_idx": 202, "global_frame_idx": 4419, "task_index": 2}, {"db_idx": 4420, "episode_idx": 14, "frame_idx": 203, "global_frame_idx": 4420, "task_index": 2}, {"db_idx": 4421, "episode_idx": 14, "frame_idx": 204, "global_frame_idx": 4421, "task_index": 2}, {"db_idx": 4422, "episode_idx": 14, "frame_idx": 205, "global_frame_idx": 4422, "task_index": 2}, {"db_idx": 4423, "episode_idx": 14, "frame_idx": 206, "global_frame_idx": 4423, "task_index": 2}, {"db_idx": 4424, "episode_idx": 14, "frame_idx": 207, "global_frame_idx": 4424, "task_index": 2}, {"db_idx": 4425, "episode_idx": 14, "frame_idx": 208, "global_frame_idx": 4425, "task_index": 2}, {"db_idx": 4426, "episode_idx": 14, "frame_idx": 209, "global_frame_idx": 4426, 
"task_index": 2}, {"db_idx": 4427, "episode_idx": 14, "frame_idx": 210, "global_frame_idx": 4427, "task_index": 2}, {"db_idx": 4428, "episode_idx": 14, "frame_idx": 211, "global_frame_idx": 4428, "task_index": 2}, {"db_idx": 4429, "episode_idx": 14, "frame_idx": 212, "global_frame_idx": 4429, "task_index": 2}, {"db_idx": 4430, "episode_idx": 14, "frame_idx": 213, "global_frame_idx": 4430, "task_index": 2}, {"db_idx": 4431, "episode_idx": 14, "frame_idx": 214, "global_frame_idx": 4431, "task_index": 2}, {"db_idx": 4432, "episode_idx": 14, "frame_idx": 215, "global_frame_idx": 4432, "task_index": 2}, {"db_idx": 4433, "episode_idx": 14, "frame_idx": 216, "global_frame_idx": 4433, "task_index": 2}, {"db_idx": 4434, "episode_idx": 14, "frame_idx": 217, "global_frame_idx": 4434, "task_index": 2}, {"db_idx": 4435, "episode_idx": 14, "frame_idx": 218, "global_frame_idx": 4435, "task_index": 2}, {"db_idx": 4436, "episode_idx": 14, "frame_idx": 219, "global_frame_idx": 4436, "task_index": 2}, {"db_idx": 4437, "episode_idx": 14, "frame_idx": 220, "global_frame_idx": 4437, "task_index": 2}, {"db_idx": 4438, "episode_idx": 14, "frame_idx": 221, "global_frame_idx": 4438, "task_index": 2}, {"db_idx": 4439, "episode_idx": 14, "frame_idx": 222, "global_frame_idx": 4439, "task_index": 2}, {"db_idx": 4440, "episode_idx": 14, "frame_idx": 223, "global_frame_idx": 4440, "task_index": 2}, {"db_idx": 4441, "episode_idx": 14, "frame_idx": 224, "global_frame_idx": 4441, "task_index": 2}, {"db_idx": 4442, "episode_idx": 14, "frame_idx": 225, "global_frame_idx": 4442, "task_index": 2}, {"db_idx": 4443, "episode_idx": 14, "frame_idx": 226, "global_frame_idx": 4443, "task_index": 2}, {"db_idx": 4444, "episode_idx": 14, "frame_idx": 227, "global_frame_idx": 4444, "task_index": 2}, {"db_idx": 4445, "episode_idx": 14, "frame_idx": 228, "global_frame_idx": 4445, "task_index": 2}, {"db_idx": 4446, "episode_idx": 14, "frame_idx": 229, "global_frame_idx": 4446, "task_index": 2}, {"db_idx": 4447, 
"episode_idx": 14, "frame_idx": 230, "global_frame_idx": 4447, "task_index": 2}, {"db_idx": 4448, "episode_idx": 14, "frame_idx": 231, "global_frame_idx": 4448, "task_index": 2}, {"db_idx": 4449, "episode_idx": 14, "frame_idx": 232, "global_frame_idx": 4449, "task_index": 2}, {"db_idx": 4450, "episode_idx": 14, "frame_idx": 233, "global_frame_idx": 4450, "task_index": 2}, {"db_idx": 4451, "episode_idx": 14, "frame_idx": 234, "global_frame_idx": 4451, "task_index": 2}, {"db_idx": 4452, "episode_idx": 14, "frame_idx": 235, "global_frame_idx": 4452, "task_index": 2}, {"db_idx": 4453, "episode_idx": 14, "frame_idx": 236, "global_frame_idx": 4453, "task_index": 2}, {"db_idx": 4454, "episode_idx": 14, "frame_idx": 237, "global_frame_idx": 4454, "task_index": 2}, {"db_idx": 4455, "episode_idx": 14, "frame_idx": 238, "global_frame_idx": 4455, "task_index": 2}, {"db_idx": 4456, "episode_idx": 14, "frame_idx": 239, "global_frame_idx": 4456, "task_index": 2}, {"db_idx": 4457, "episode_idx": 14, "frame_idx": 240, "global_frame_idx": 4457, "task_index": 2}, {"db_idx": 4458, "episode_idx": 14, "frame_idx": 241, "global_frame_idx": 4458, "task_index": 2}, {"db_idx": 4459, "episode_idx": 14, "frame_idx": 242, "global_frame_idx": 4459, "task_index": 2}, {"db_idx": 4460, "episode_idx": 14, "frame_idx": 243, "global_frame_idx": 4460, "task_index": 2}, {"db_idx": 4461, "episode_idx": 15, "frame_idx": 0, "global_frame_idx": 4461, "task_index": 3}, {"db_idx": 4462, "episode_idx": 15, "frame_idx": 1, "global_frame_idx": 4462, "task_index": 3}, {"db_idx": 4463, "episode_idx": 15, "frame_idx": 2, "global_frame_idx": 4463, "task_index": 3}, {"db_idx": 4464, "episode_idx": 15, "frame_idx": 3, "global_frame_idx": 4464, "task_index": 3}, {"db_idx": 4465, "episode_idx": 15, "frame_idx": 4, "global_frame_idx": 4465, "task_index": 3}, {"db_idx": 4466, "episode_idx": 15, "frame_idx": 5, "global_frame_idx": 4466, "task_index": 3}, {"db_idx": 4467, "episode_idx": 15, "frame_idx": 6, 
"global_frame_idx": 4467, "task_index": 3}, {"db_idx": 4468, "episode_idx": 15, "frame_idx": 7, "global_frame_idx": 4468, "task_index": 3}, {"db_idx": 4469, "episode_idx": 15, "frame_idx": 8, "global_frame_idx": 4469, "task_index": 3}, {"db_idx": 4470, "episode_idx": 15, "frame_idx": 9, "global_frame_idx": 4470, "task_index": 3}, {"db_idx": 4471, "episode_idx": 15, "frame_idx": 10, "global_frame_idx": 4471, "task_index": 3}, {"db_idx": 4472, "episode_idx": 15, "frame_idx": 11, "global_frame_idx": 4472, "task_index": 3}, {"db_idx": 4473, "episode_idx": 15, "frame_idx": 12, "global_frame_idx": 4473, "task_index": 3}, {"db_idx": 4474, "episode_idx": 15, "frame_idx": 13, "global_frame_idx": 4474, "task_index": 3}, {"db_idx": 4475, "episode_idx": 15, "frame_idx": 14, "global_frame_idx": 4475, "task_index": 3}, {"db_idx": 4476, "episode_idx": 15, "frame_idx": 15, "global_frame_idx": 4476, "task_index": 3}, {"db_idx": 4477, "episode_idx": 15, "frame_idx": 16, "global_frame_idx": 4477, "task_index": 3}, {"db_idx": 4478, "episode_idx": 15, "frame_idx": 17, "global_frame_idx": 4478, "task_index": 3}, {"db_idx": 4479, "episode_idx": 15, "frame_idx": 18, "global_frame_idx": 4479, "task_index": 3}, {"db_idx": 4480, "episode_idx": 15, "frame_idx": 19, "global_frame_idx": 4480, "task_index": 3}, {"db_idx": 4481, "episode_idx": 15, "frame_idx": 20, "global_frame_idx": 4481, "task_index": 3}, {"db_idx": 4482, "episode_idx": 15, "frame_idx": 21, "global_frame_idx": 4482, "task_index": 3}, {"db_idx": 4483, "episode_idx": 15, "frame_idx": 22, "global_frame_idx": 4483, "task_index": 3}, {"db_idx": 4484, "episode_idx": 15, "frame_idx": 23, "global_frame_idx": 4484, "task_index": 3}, {"db_idx": 4485, "episode_idx": 15, "frame_idx": 24, "global_frame_idx": 4485, "task_index": 3}, {"db_idx": 4486, "episode_idx": 15, "frame_idx": 25, "global_frame_idx": 4486, "task_index": 3}, {"db_idx": 4487, "episode_idx": 15, "frame_idx": 26, "global_frame_idx": 4487, "task_index": 3}, {"db_idx": 4488, 
"episode_idx": 15, "frame_idx": 27, "global_frame_idx": 4488, "task_index": 3}, {"db_idx": 4489, "episode_idx": 15, "frame_idx": 28, "global_frame_idx": 4489, "task_index": 3}, {"db_idx": 4490, "episode_idx": 15, "frame_idx": 29, "global_frame_idx": 4490, "task_index": 3}, {"db_idx": 4491, "episode_idx": 15, "frame_idx": 30, "global_frame_idx": 4491, "task_index": 3}, {"db_idx": 4492, "episode_idx": 15, "frame_idx": 31, "global_frame_idx": 4492, "task_index": 3}, {"db_idx": 4493, "episode_idx": 15, "frame_idx": 32, "global_frame_idx": 4493, "task_index": 3}, {"db_idx": 4494, "episode_idx": 15, "frame_idx": 33, "global_frame_idx": 4494, "task_index": 3}, {"db_idx": 4495, "episode_idx": 15, "frame_idx": 34, "global_frame_idx": 4495, "task_index": 3}, {"db_idx": 4496, "episode_idx": 15, "frame_idx": 35, "global_frame_idx": 4496, "task_index": 3}, {"db_idx": 4497, "episode_idx": 15, "frame_idx": 36, "global_frame_idx": 4497, "task_index": 3}, {"db_idx": 4498, "episode_idx": 15, "frame_idx": 37, "global_frame_idx": 4498, "task_index": 3}, {"db_idx": 4499, "episode_idx": 15, "frame_idx": 38, "global_frame_idx": 4499, "task_index": 3}, {"db_idx": 4500, "episode_idx": 15, "frame_idx": 39, "global_frame_idx": 4500, "task_index": 3}, {"db_idx": 4501, "episode_idx": 15, "frame_idx": 40, "global_frame_idx": 4501, "task_index": 3}, {"db_idx": 4502, "episode_idx": 15, "frame_idx": 41, "global_frame_idx": 4502, "task_index": 3}, {"db_idx": 4503, "episode_idx": 15, "frame_idx": 42, "global_frame_idx": 4503, "task_index": 3}, {"db_idx": 4504, "episode_idx": 15, "frame_idx": 43, "global_frame_idx": 4504, "task_index": 3}, {"db_idx": 4505, "episode_idx": 15, "frame_idx": 44, "global_frame_idx": 4505, "task_index": 3}, {"db_idx": 4506, "episode_idx": 15, "frame_idx": 45, "global_frame_idx": 4506, "task_index": 3}, {"db_idx": 4507, "episode_idx": 15, "frame_idx": 46, "global_frame_idx": 4507, "task_index": 3}, {"db_idx": 4508, "episode_idx": 15, "frame_idx": 47, "global_frame_idx": 
4508, "task_index": 3}, {"db_idx": 4509, "episode_idx": 15, "frame_idx": 48, "global_frame_idx": 4509, "task_index": 3}, {"db_idx": 4510, "episode_idx": 15, "frame_idx": 49, "global_frame_idx": 4510, "task_index": 3}, {"db_idx": 4511, "episode_idx": 15, "frame_idx": 50, "global_frame_idx": 4511, "task_index": 3}, {"db_idx": 4512, "episode_idx": 15, "frame_idx": 51, "global_frame_idx": 4512, "task_index": 3}, {"db_idx": 4513, "episode_idx": 15, "frame_idx": 52, "global_frame_idx": 4513, "task_index": 3}, {"db_idx": 4514, "episode_idx": 15, "frame_idx": 53, "global_frame_idx": 4514, "task_index": 3}, {"db_idx": 4515, "episode_idx": 15, "frame_idx": 54, "global_frame_idx": 4515, "task_index": 3}, {"db_idx": 4516, "episode_idx": 15, "frame_idx": 55, "global_frame_idx": 4516, "task_index": 3}, {"db_idx": 4517, "episode_idx": 15, "frame_idx": 56, "global_frame_idx": 4517, "task_index": 3}, {"db_idx": 4518, "episode_idx": 15, "frame_idx": 57, "global_frame_idx": 4518, "task_index": 3}, {"db_idx": 4519, "episode_idx": 15, "frame_idx": 58, "global_frame_idx": 4519, "task_index": 3}, {"db_idx": 4520, "episode_idx": 15, "frame_idx": 59, "global_frame_idx": 4520, "task_index": 3}, {"db_idx": 4521, "episode_idx": 15, "frame_idx": 60, "global_frame_idx": 4521, "task_index": 3}, {"db_idx": 4522, "episode_idx": 15, "frame_idx": 61, "global_frame_idx": 4522, "task_index": 3}, {"db_idx": 4523, "episode_idx": 15, "frame_idx": 62, "global_frame_idx": 4523, "task_index": 3}, {"db_idx": 4524, "episode_idx": 15, "frame_idx": 63, "global_frame_idx": 4524, "task_index": 3}, {"db_idx": 4525, "episode_idx": 15, "frame_idx": 64, "global_frame_idx": 4525, "task_index": 3}, {"db_idx": 4526, "episode_idx": 15, "frame_idx": 65, "global_frame_idx": 4526, "task_index": 3}, {"db_idx": 4527, "episode_idx": 15, "frame_idx": 66, "global_frame_idx": 4527, "task_index": 3}, {"db_idx": 4528, "episode_idx": 15, "frame_idx": 67, "global_frame_idx": 4528, "task_index": 3}, {"db_idx": 4529, "episode_idx": 15, 
"frame_idx": 68, "global_frame_idx": 4529, "task_index": 3}, {"db_idx": 4530, "episode_idx": 15, "frame_idx": 69, "global_frame_idx": 4530, "task_index": 3}, {"db_idx": 4531, "episode_idx": 15, "frame_idx": 70, "global_frame_idx": 4531, "task_index": 3}, {"db_idx": 4532, "episode_idx": 15, "frame_idx": 71, "global_frame_idx": 4532, "task_index": 3}, {"db_idx": 4533, "episode_idx": 15, "frame_idx": 72, "global_frame_idx": 4533, "task_index": 3}, {"db_idx": 4534, "episode_idx": 15, "frame_idx": 73, "global_frame_idx": 4534, "task_index": 3}, {"db_idx": 4535, "episode_idx": 15, "frame_idx": 74, "global_frame_idx": 4535, "task_index": 3}, {"db_idx": 4536, "episode_idx": 15, "frame_idx": 75, "global_frame_idx": 4536, "task_index": 3}, {"db_idx": 4537, "episode_idx": 15, "frame_idx": 76, "global_frame_idx": 4537, "task_index": 3}, {"db_idx": 4538, "episode_idx": 15, "frame_idx": 77, "global_frame_idx": 4538, "task_index": 3}, {"db_idx": 4539, "episode_idx": 15, "frame_idx": 78, "global_frame_idx": 4539, "task_index": 3}, {"db_idx": 4540, "episode_idx": 15, "frame_idx": 79, "global_frame_idx": 4540, "task_index": 3}, {"db_idx": 4541, "episode_idx": 15, "frame_idx": 80, "global_frame_idx": 4541, "task_index": 3}, {"db_idx": 4542, "episode_idx": 15, "frame_idx": 81, "global_frame_idx": 4542, "task_index": 3}, {"db_idx": 4543, "episode_idx": 15, "frame_idx": 82, "global_frame_idx": 4543, "task_index": 3}, {"db_idx": 4544, "episode_idx": 15, "frame_idx": 83, "global_frame_idx": 4544, "task_index": 3}, {"db_idx": 4545, "episode_idx": 15, "frame_idx": 84, "global_frame_idx": 4545, "task_index": 3}, {"db_idx": 4546, "episode_idx": 15, "frame_idx": 85, "global_frame_idx": 4546, "task_index": 3}, {"db_idx": 4547, "episode_idx": 15, "frame_idx": 86, "global_frame_idx": 4547, "task_index": 3}, {"db_idx": 4548, "episode_idx": 15, "frame_idx": 87, "global_frame_idx": 4548, "task_index": 3}, {"db_idx": 4549, "episode_idx": 15, "frame_idx": 88, "global_frame_idx": 4549, "task_index": 
3}, {"db_idx": 4550, "episode_idx": 15, "frame_idx": 89, "global_frame_idx": 4550, "task_index": 3}, {"db_idx": 4551, "episode_idx": 15, "frame_idx": 90, "global_frame_idx": 4551, "task_index": 3}, {"db_idx": 4552, "episode_idx": 15, "frame_idx": 91, "global_frame_idx": 4552, "task_index": 3}, {"db_idx": 4553, "episode_idx": 15, "frame_idx": 92, "global_frame_idx": 4553, "task_index": 3}, {"db_idx": 4554, "episode_idx": 15, "frame_idx": 93, "global_frame_idx": 4554, "task_index": 3}, {"db_idx": 4555, "episode_idx": 15, "frame_idx": 94, "global_frame_idx": 4555, "task_index": 3}, {"db_idx": 4556, "episode_idx": 15, "frame_idx": 95, "global_frame_idx": 4556, "task_index": 3}, {"db_idx": 4557, "episode_idx": 15, "frame_idx": 96, "global_frame_idx": 4557, "task_index": 3}, {"db_idx": 4558, "episode_idx": 15, "frame_idx": 97, "global_frame_idx": 4558, "task_index": 3}, {"db_idx": 4559, "episode_idx": 15, "frame_idx": 98, "global_frame_idx": 4559, "task_index": 3}, {"db_idx": 4560, "episode_idx": 15, "frame_idx": 99, "global_frame_idx": 4560, "task_index": 3}, {"db_idx": 4561, "episode_idx": 15, "frame_idx": 100, "global_frame_idx": 4561, "task_index": 3}, {"db_idx": 4562, "episode_idx": 15, "frame_idx": 101, "global_frame_idx": 4562, "task_index": 3}, {"db_idx": 4563, "episode_idx": 15, "frame_idx": 102, "global_frame_idx": 4563, "task_index": 3}, {"db_idx": 4564, "episode_idx": 15, "frame_idx": 103, "global_frame_idx": 4564, "task_index": 3}, {"db_idx": 4565, "episode_idx": 15, "frame_idx": 104, "global_frame_idx": 4565, "task_index": 3}, {"db_idx": 4566, "episode_idx": 15, "frame_idx": 105, "global_frame_idx": 4566, "task_index": 3}, {"db_idx": 4567, "episode_idx": 15, "frame_idx": 106, "global_frame_idx": 4567, "task_index": 3}, {"db_idx": 4568, "episode_idx": 15, "frame_idx": 107, "global_frame_idx": 4568, "task_index": 3}, {"db_idx": 4569, "episode_idx": 15, "frame_idx": 108, "global_frame_idx": 4569, "task_index": 3}, {"db_idx": 4570, "episode_idx": 15, 
"frame_idx": 109, "global_frame_idx": 4570, "task_index": 3}, {"db_idx": 4571, "episode_idx": 15, "frame_idx": 110, "global_frame_idx": 4571, "task_index": 3}, {"db_idx": 4572, "episode_idx": 15, "frame_idx": 111, "global_frame_idx": 4572, "task_index": 3}, {"db_idx": 4573, "episode_idx": 15, "frame_idx": 112, "global_frame_idx": 4573, "task_index": 3}, {"db_idx": 4574, "episode_idx": 15, "frame_idx": 113, "global_frame_idx": 4574, "task_index": 3}, {"db_idx": 4575, "episode_idx": 15, "frame_idx": 114, "global_frame_idx": 4575, "task_index": 3}, {"db_idx": 4576, "episode_idx": 15, "frame_idx": 115, "global_frame_idx": 4576, "task_index": 3}, {"db_idx": 4577, "episode_idx": 15, "frame_idx": 116, "global_frame_idx": 4577, "task_index": 3}, {"db_idx": 4578, "episode_idx": 15, "frame_idx": 117, "global_frame_idx": 4578, "task_index": 3}, {"db_idx": 4579, "episode_idx": 15, "frame_idx": 118, "global_frame_idx": 4579, "task_index": 3}, {"db_idx": 4580, "episode_idx": 15, "frame_idx": 119, "global_frame_idx": 4580, "task_index": 3}, {"db_idx": 4581, "episode_idx": 15, "frame_idx": 120, "global_frame_idx": 4581, "task_index": 3}, {"db_idx": 4582, "episode_idx": 15, "frame_idx": 121, "global_frame_idx": 4582, "task_index": 3}, {"db_idx": 4583, "episode_idx": 15, "frame_idx": 122, "global_frame_idx": 4583, "task_index": 3}, {"db_idx": 4584, "episode_idx": 15, "frame_idx": 123, "global_frame_idx": 4584, "task_index": 3}, {"db_idx": 4585, "episode_idx": 15, "frame_idx": 124, "global_frame_idx": 4585, "task_index": 3}, {"db_idx": 4586, "episode_idx": 15, "frame_idx": 125, "global_frame_idx": 4586, "task_index": 3}, {"db_idx": 4587, "episode_idx": 15, "frame_idx": 126, "global_frame_idx": 4587, "task_index": 3}, {"db_idx": 4588, "episode_idx": 15, "frame_idx": 127, "global_frame_idx": 4588, "task_index": 3}, {"db_idx": 4589, "episode_idx": 15, "frame_idx": 128, "global_frame_idx": 4589, "task_index": 3}, {"db_idx": 4590, "episode_idx": 15, "frame_idx": 129, "global_frame_idx": 
4590, "task_index": 3}, {"db_idx": 4591, "episode_idx": 15, "frame_idx": 130, "global_frame_idx": 4591, "task_index": 3}, {"db_idx": 4592, "episode_idx": 15, "frame_idx": 131, "global_frame_idx": 4592, "task_index": 3}, {"db_idx": 4593, "episode_idx": 15, "frame_idx": 132, "global_frame_idx": 4593, "task_index": 3}, {"db_idx": 4594, "episode_idx": 15, "frame_idx": 133, "global_frame_idx": 4594, "task_index": 3}, {"db_idx": 4595, "episode_idx": 15, "frame_idx": 134, "global_frame_idx": 4595, "task_index": 3}, {"db_idx": 4596, "episode_idx": 15, "frame_idx": 135, "global_frame_idx": 4596, "task_index": 3}, {"db_idx": 4597, "episode_idx": 15, "frame_idx": 136, "global_frame_idx": 4597, "task_index": 3}, {"db_idx": 4598, "episode_idx": 15, "frame_idx": 137, "global_frame_idx": 4598, "task_index": 3}, {"db_idx": 4599, "episode_idx": 15, "frame_idx": 138, "global_frame_idx": 4599, "task_index": 3}, {"db_idx": 4600, "episode_idx": 15, "frame_idx": 139, "global_frame_idx": 4600, "task_index": 3}, {"db_idx": 4601, "episode_idx": 15, "frame_idx": 140, "global_frame_idx": 4601, "task_index": 3}, {"db_idx": 4602, "episode_idx": 15, "frame_idx": 141, "global_frame_idx": 4602, "task_index": 3}, {"db_idx": 4603, "episode_idx": 15, "frame_idx": 142, "global_frame_idx": 4603, "task_index": 3}, {"db_idx": 4604, "episode_idx": 15, "frame_idx": 143, "global_frame_idx": 4604, "task_index": 3}, {"db_idx": 4605, "episode_idx": 15, "frame_idx": 144, "global_frame_idx": 4605, "task_index": 3}, {"db_idx": 4606, "episode_idx": 15, "frame_idx": 145, "global_frame_idx": 4606, "task_index": 3}, {"db_idx": 4607, "episode_idx": 15, "frame_idx": 146, "global_frame_idx": 4607, "task_index": 3}, {"db_idx": 4608, "episode_idx": 15, "frame_idx": 147, "global_frame_idx": 4608, "task_index": 3}, {"db_idx": 4609, "episode_idx": 15, "frame_idx": 148, "global_frame_idx": 4609, "task_index": 3}, {"db_idx": 4610, "episode_idx": 15, "frame_idx": 149, "global_frame_idx": 4610, "task_index": 3}, {"db_idx": 
4611, "episode_idx": 15, "frame_idx": 150, "global_frame_idx": 4611, "task_index": 3}, {"db_idx": 4612, "episode_idx": 15, "frame_idx": 151, "global_frame_idx": 4612, "task_index": 3}, {"db_idx": 4613, "episode_idx": 15, "frame_idx": 152, "global_frame_idx": 4613, "task_index": 3}, {"db_idx": 4614, "episode_idx": 15, "frame_idx": 153, "global_frame_idx": 4614, "task_index": 3}, {"db_idx": 4615, "episode_idx": 15, "frame_idx": 154, "global_frame_idx": 4615, "task_index": 3}, {"db_idx": 4616, "episode_idx": 15, "frame_idx": 155, "global_frame_idx": 4616, "task_index": 3}, {"db_idx": 4617, "episode_idx": 15, "frame_idx": 156, "global_frame_idx": 4617, "task_index": 3}, {"db_idx": 4618, "episode_idx": 15, "frame_idx": 157, "global_frame_idx": 4618, "task_index": 3}, {"db_idx": 4619, "episode_idx": 15, "frame_idx": 158, "global_frame_idx": 4619, "task_index": 3}, {"db_idx": 4620, "episode_idx": 15, "frame_idx": 159, "global_frame_idx": 4620, "task_index": 3}, {"db_idx": 4621, "episode_idx": 15, "frame_idx": 160, "global_frame_idx": 4621, "task_index": 3}, {"db_idx": 4622, "episode_idx": 15, "frame_idx": 161, "global_frame_idx": 4622, "task_index": 3}, {"db_idx": 4623, "episode_idx": 15, "frame_idx": 162, "global_frame_idx": 4623, "task_index": 3}, {"db_idx": 4624, "episode_idx": 15, "frame_idx": 163, "global_frame_idx": 4624, "task_index": 3}, {"db_idx": 4625, "episode_idx": 15, "frame_idx": 164, "global_frame_idx": 4625, "task_index": 3}, {"db_idx": 4626, "episode_idx": 15, "frame_idx": 165, "global_frame_idx": 4626, "task_index": 3}, {"db_idx": 4627, "episode_idx": 15, "frame_idx": 166, "global_frame_idx": 4627, "task_index": 3}, {"db_idx": 4628, "episode_idx": 15, "frame_idx": 167, "global_frame_idx": 4628, "task_index": 3}, {"db_idx": 4629, "episode_idx": 15, "frame_idx": 168, "global_frame_idx": 4629, "task_index": 3}, {"db_idx": 4630, "episode_idx": 15, "frame_idx": 169, "global_frame_idx": 4630, "task_index": 3}, {"db_idx": 4631, "episode_idx": 15, "frame_idx": 
170, "global_frame_idx": 4631, "task_index": 3}, {"db_idx": 4632, "episode_idx": 15, "frame_idx": 171, "global_frame_idx": 4632, "task_index": 3}, {"db_idx": 4633, "episode_idx": 15, "frame_idx": 172, "global_frame_idx": 4633, "task_index": 3}, {"db_idx": 4634, "episode_idx": 15, "frame_idx": 173, "global_frame_idx": 4634, "task_index": 3}, {"db_idx": 4635, "episode_idx": 15, "frame_idx": 174, "global_frame_idx": 4635, "task_index": 3}, {"db_idx": 4636, "episode_idx": 15, "frame_idx": 175, "global_frame_idx": 4636, "task_index": 3}, {"db_idx": 4637, "episode_idx": 15, "frame_idx": 176, "global_frame_idx": 4637, "task_index": 3}, {"db_idx": 4638, "episode_idx": 15, "frame_idx": 177, "global_frame_idx": 4638, "task_index": 3}, {"db_idx": 4639, "episode_idx": 15, "frame_idx": 178, "global_frame_idx": 4639, "task_index": 3}, {"db_idx": 4640, "episode_idx": 15, "frame_idx": 179, "global_frame_idx": 4640, "task_index": 3}, {"db_idx": 4641, "episode_idx": 15, "frame_idx": 180, "global_frame_idx": 4641, "task_index": 3}, {"db_idx": 4642, "episode_idx": 15, "frame_idx": 181, "global_frame_idx": 4642, "task_index": 3}, {"db_idx": 4643, "episode_idx": 15, "frame_idx": 182, "global_frame_idx": 4643, "task_index": 3}, {"db_idx": 4644, "episode_idx": 15, "frame_idx": 183, "global_frame_idx": 4644, "task_index": 3}, {"db_idx": 4645, "episode_idx": 15, "frame_idx": 184, "global_frame_idx": 4645, "task_index": 3}, {"db_idx": 4646, "episode_idx": 15, "frame_idx": 185, "global_frame_idx": 4646, "task_index": 3}, {"db_idx": 4647, "episode_idx": 15, "frame_idx": 186, "global_frame_idx": 4647, "task_index": 3}, {"db_idx": 4648, "episode_idx": 15, "frame_idx": 187, "global_frame_idx": 4648, "task_index": 3}, {"db_idx": 4649, "episode_idx": 15, "frame_idx": 188, "global_frame_idx": 4649, "task_index": 3}, {"db_idx": 4650, "episode_idx": 15, "frame_idx": 189, "global_frame_idx": 4650, "task_index": 3}, {"db_idx": 4651, "episode_idx": 15, "frame_idx": 190, "global_frame_idx": 4651, 
"task_index": 3}, {"db_idx": 4652, "episode_idx": 15, "frame_idx": 191, "global_frame_idx": 4652, "task_index": 3}, {"db_idx": 4653, "episode_idx": 15, "frame_idx": 192, "global_frame_idx": 4653, "task_index": 3}, {"db_idx": 4654, "episode_idx": 15, "frame_idx": 193, "global_frame_idx": 4654, "task_index": 3}, {"db_idx": 4655, "episode_idx": 15, "frame_idx": 194, "global_frame_idx": 4655, "task_index": 3}, {"db_idx": 4656, "episode_idx": 15, "frame_idx": 195, "global_frame_idx": 4656, "task_index": 3}, {"db_idx": 4657, "episode_idx": 15, "frame_idx": 196, "global_frame_idx": 4657, "task_index": 3}, {"db_idx": 4658, "episode_idx": 15, "frame_idx": 197, "global_frame_idx": 4658, "task_index": 3}, {"db_idx": 4659, "episode_idx": 15, "frame_idx": 198, "global_frame_idx": 4659, "task_index": 3}, {"db_idx": 4660, "episode_idx": 15, "frame_idx": 199, "global_frame_idx": 4660, "task_index": 3}, {"db_idx": 4661, "episode_idx": 15, "frame_idx": 200, "global_frame_idx": 4661, "task_index": 3}, {"db_idx": 4662, "episode_idx": 15, "frame_idx": 201, "global_frame_idx": 4662, "task_index": 3}, {"db_idx": 4663, "episode_idx": 15, "frame_idx": 202, "global_frame_idx": 4663, "task_index": 3}, {"db_idx": 4664, "episode_idx": 15, "frame_idx": 203, "global_frame_idx": 4664, "task_index": 3}, {"db_idx": 4665, "episode_idx": 15, "frame_idx": 204, "global_frame_idx": 4665, "task_index": 3}, {"db_idx": 4666, "episode_idx": 15, "frame_idx": 205, "global_frame_idx": 4666, "task_index": 3}, {"db_idx": 4667, "episode_idx": 15, "frame_idx": 206, "global_frame_idx": 4667, "task_index": 3}, {"db_idx": 4668, "episode_idx": 15, "frame_idx": 207, "global_frame_idx": 4668, "task_index": 3}, {"db_idx": 4669, "episode_idx": 15, "frame_idx": 208, "global_frame_idx": 4669, "task_index": 3}, {"db_idx": 4670, "episode_idx": 15, "frame_idx": 209, "global_frame_idx": 4670, "task_index": 3}, {"db_idx": 4671, "episode_idx": 15, "frame_idx": 210, "global_frame_idx": 4671, "task_index": 3}, {"db_idx": 4672, 
"episode_idx": 15, "frame_idx": 211, "global_frame_idx": 4672, "task_index": 3}, {"db_idx": 4673, "episode_idx": 15, "frame_idx": 212, "global_frame_idx": 4673, "task_index": 3}, {"db_idx": 4674, "episode_idx": 15, "frame_idx": 213, "global_frame_idx": 4674, "task_index": 3}, {"db_idx": 4675, "episode_idx": 15, "frame_idx": 214, "global_frame_idx": 4675, "task_index": 3}, {"db_idx": 4676, "episode_idx": 15, "frame_idx": 215, "global_frame_idx": 4676, "task_index": 3}, {"db_idx": 4677, "episode_idx": 15, "frame_idx": 216, "global_frame_idx": 4677, "task_index": 3}, {"db_idx": 4678, "episode_idx": 15, "frame_idx": 217, "global_frame_idx": 4678, "task_index": 3}, {"db_idx": 4679, "episode_idx": 15, "frame_idx": 218, "global_frame_idx": 4679, "task_index": 3}, {"db_idx": 4680, "episode_idx": 15, "frame_idx": 219, "global_frame_idx": 4680, "task_index": 3}, {"db_idx": 4681, "episode_idx": 15, "frame_idx": 220, "global_frame_idx": 4681, "task_index": 3}, {"db_idx": 4682, "episode_idx": 15, "frame_idx": 221, "global_frame_idx": 4682, "task_index": 3}, {"db_idx": 4683, "episode_idx": 15, "frame_idx": 222, "global_frame_idx": 4683, "task_index": 3}, {"db_idx": 4684, "episode_idx": 15, "frame_idx": 223, "global_frame_idx": 4684, "task_index": 3}, {"db_idx": 4685, "episode_idx": 15, "frame_idx": 224, "global_frame_idx": 4685, "task_index": 3}, {"db_idx": 4686, "episode_idx": 15, "frame_idx": 225, "global_frame_idx": 4686, "task_index": 3}, {"db_idx": 4687, "episode_idx": 15, "frame_idx": 226, "global_frame_idx": 4687, "task_index": 3}, {"db_idx": 4688, "episode_idx": 15, "frame_idx": 227, "global_frame_idx": 4688, "task_index": 3}, {"db_idx": 4689, "episode_idx": 15, "frame_idx": 228, "global_frame_idx": 4689, "task_index": 3}, {"db_idx": 4690, "episode_idx": 15, "frame_idx": 229, "global_frame_idx": 4690, "task_index": 3}, {"db_idx": 4691, "episode_idx": 15, "frame_idx": 230, "global_frame_idx": 4691, "task_index": 3}, {"db_idx": 4692, "episode_idx": 15, "frame_idx": 231, 
"global_frame_idx": 4692, "task_index": 3}, {"db_idx": 4693, "episode_idx": 15, "frame_idx": 232, "global_frame_idx": 4693, "task_index": 3}, {"db_idx": 4694, "episode_idx": 15, "frame_idx": 233, "global_frame_idx": 4694, "task_index": 3}, {"db_idx": 4695, "episode_idx": 15, "frame_idx": 234, "global_frame_idx": 4695, "task_index": 3}, {"db_idx": 4696, "episode_idx": 15, "frame_idx": 235, "global_frame_idx": 4696, "task_index": 3}, {"db_idx": 4697, "episode_idx": 15, "frame_idx": 236, "global_frame_idx": 4697, "task_index": 3}, {"db_idx": 4698, "episode_idx": 15, "frame_idx": 237, "global_frame_idx": 4698, "task_index": 3}, {"db_idx": 4699, "episode_idx": 15, "frame_idx": 238, "global_frame_idx": 4699, "task_index": 3}, {"db_idx": 4700, "episode_idx": 15, "frame_idx": 239, "global_frame_idx": 4700, "task_index": 3}, {"db_idx": 4701, "episode_idx": 15, "frame_idx": 240, "global_frame_idx": 4701, "task_index": 3}, {"db_idx": 4702, "episode_idx": 15, "frame_idx": 241, "global_frame_idx": 4702, "task_index": 3}, {"db_idx": 4703, "episode_idx": 15, "frame_idx": 242, "global_frame_idx": 4703, "task_index": 3}, {"db_idx": 4704, "episode_idx": 15, "frame_idx": 243, "global_frame_idx": 4704, "task_index": 3}, {"db_idx": 4705, "episode_idx": 15, "frame_idx": 244, "global_frame_idx": 4705, "task_index": 3}, {"db_idx": 4706, "episode_idx": 15, "frame_idx": 245, "global_frame_idx": 4706, "task_index": 3}, {"db_idx": 4707, "episode_idx": 15, "frame_idx": 246, "global_frame_idx": 4707, "task_index": 3}, {"db_idx": 4708, "episode_idx": 16, "frame_idx": 0, "global_frame_idx": 4708, "task_index": 3}, {"db_idx": 4709, "episode_idx": 16, "frame_idx": 1, "global_frame_idx": 4709, "task_index": 3}, {"db_idx": 4710, "episode_idx": 16, "frame_idx": 2, "global_frame_idx": 4710, "task_index": 3}, {"db_idx": 4711, "episode_idx": 16, "frame_idx": 3, "global_frame_idx": 4711, "task_index": 3}, {"db_idx": 4712, "episode_idx": 16, "frame_idx": 4, "global_frame_idx": 4712, "task_index": 3}, 
{"db_idx": 4713, "episode_idx": 16, "frame_idx": 5, "global_frame_idx": 4713, "task_index": 3}, {"db_idx": 4714, "episode_idx": 16, "frame_idx": 6, "global_frame_idx": 4714, "task_index": 3}, {"db_idx": 4715, "episode_idx": 16, "frame_idx": 7, "global_frame_idx": 4715, "task_index": 3}, {"db_idx": 4716, "episode_idx": 16, "frame_idx": 8, "global_frame_idx": 4716, "task_index": 3}, {"db_idx": 4717, "episode_idx": 16, "frame_idx": 9, "global_frame_idx": 4717, "task_index": 3}, {"db_idx": 4718, "episode_idx": 16, "frame_idx": 10, "global_frame_idx": 4718, "task_index": 3}, {"db_idx": 4719, "episode_idx": 16, "frame_idx": 11, "global_frame_idx": 4719, "task_index": 3}, {"db_idx": 4720, "episode_idx": 16, "frame_idx": 12, "global_frame_idx": 4720, "task_index": 3}, {"db_idx": 4721, "episode_idx": 16, "frame_idx": 13, "global_frame_idx": 4721, "task_index": 3}, {"db_idx": 4722, "episode_idx": 16, "frame_idx": 14, "global_frame_idx": 4722, "task_index": 3}, {"db_idx": 4723, "episode_idx": 16, "frame_idx": 15, "global_frame_idx": 4723, "task_index": 3}, {"db_idx": 4724, "episode_idx": 16, "frame_idx": 16, "global_frame_idx": 4724, "task_index": 3}, {"db_idx": 4725, "episode_idx": 16, "frame_idx": 17, "global_frame_idx": 4725, "task_index": 3}, {"db_idx": 4726, "episode_idx": 16, "frame_idx": 18, "global_frame_idx": 4726, "task_index": 3}, {"db_idx": 4727, "episode_idx": 16, "frame_idx": 19, "global_frame_idx": 4727, "task_index": 3}, {"db_idx": 4728, "episode_idx": 16, "frame_idx": 20, "global_frame_idx": 4728, "task_index": 3}, {"db_idx": 4729, "episode_idx": 16, "frame_idx": 21, "global_frame_idx": 4729, "task_index": 3}, {"db_idx": 4730, "episode_idx": 16, "frame_idx": 22, "global_frame_idx": 4730, "task_index": 3}, {"db_idx": 4731, "episode_idx": 16, "frame_idx": 23, "global_frame_idx": 4731, "task_index": 3}, {"db_idx": 4732, "episode_idx": 16, "frame_idx": 24, "global_frame_idx": 4732, "task_index": 3}, {"db_idx": 4733, "episode_idx": 16, "frame_idx": 25, 
"global_frame_idx": 4733, "task_index": 3}, {"db_idx": 4734, "episode_idx": 16, "frame_idx": 26, "global_frame_idx": 4734, "task_index": 3}, {"db_idx": 4735, "episode_idx": 16, "frame_idx": 27, "global_frame_idx": 4735, "task_index": 3}, {"db_idx": 4736, "episode_idx": 16, "frame_idx": 28, "global_frame_idx": 4736, "task_index": 3}, {"db_idx": 4737, "episode_idx": 16, "frame_idx": 29, "global_frame_idx": 4737, "task_index": 3}, {"db_idx": 4738, "episode_idx": 16, "frame_idx": 30, "global_frame_idx": 4738, "task_index": 3}, {"db_idx": 4739, "episode_idx": 16, "frame_idx": 31, "global_frame_idx": 4739, "task_index": 3}, {"db_idx": 4740, "episode_idx": 16, "frame_idx": 32, "global_frame_idx": 4740, "task_index": 3}, {"db_idx": 4741, "episode_idx": 16, "frame_idx": 33, "global_frame_idx": 4741, "task_index": 3}, {"db_idx": 4742, "episode_idx": 16, "frame_idx": 34, "global_frame_idx": 4742, "task_index": 3}, {"db_idx": 4743, "episode_idx": 16, "frame_idx": 35, "global_frame_idx": 4743, "task_index": 3}, {"db_idx": 4744, "episode_idx": 16, "frame_idx": 36, "global_frame_idx": 4744, "task_index": 3}, {"db_idx": 4745, "episode_idx": 16, "frame_idx": 37, "global_frame_idx": 4745, "task_index": 3}, {"db_idx": 4746, "episode_idx": 16, "frame_idx": 38, "global_frame_idx": 4746, "task_index": 3}, {"db_idx": 4747, "episode_idx": 16, "frame_idx": 39, "global_frame_idx": 4747, "task_index": 3}, {"db_idx": 4748, "episode_idx": 16, "frame_idx": 40, "global_frame_idx": 4748, "task_index": 3}, {"db_idx": 4749, "episode_idx": 16, "frame_idx": 41, "global_frame_idx": 4749, "task_index": 3}, {"db_idx": 4750, "episode_idx": 16, "frame_idx": 42, "global_frame_idx": 4750, "task_index": 3}, {"db_idx": 4751, "episode_idx": 16, "frame_idx": 43, "global_frame_idx": 4751, "task_index": 3}, {"db_idx": 4752, "episode_idx": 16, "frame_idx": 44, "global_frame_idx": 4752, "task_index": 3}, {"db_idx": 4753, "episode_idx": 16, "frame_idx": 45, "global_frame_idx": 4753, "task_index": 3}, {"db_idx": 
4754, "episode_idx": 16, "frame_idx": 46, "global_frame_idx": 4754, "task_index": 3}, {"db_idx": 4755, "episode_idx": 16, "frame_idx": 47, "global_frame_idx": 4755, "task_index": 3}, {"db_idx": 4756, "episode_idx": 16, "frame_idx": 48, "global_frame_idx": 4756, "task_index": 3}, {"db_idx": 4757, "episode_idx": 16, "frame_idx": 49, "global_frame_idx": 4757, "task_index": 3}, {"db_idx": 4758, "episode_idx": 16, "frame_idx": 50, "global_frame_idx": 4758, "task_index": 3}, {"db_idx": 4759, "episode_idx": 16, "frame_idx": 51, "global_frame_idx": 4759, "task_index": 3}, {"db_idx": 4760, "episode_idx": 16, "frame_idx": 52, "global_frame_idx": 4760, "task_index": 3}, {"db_idx": 4761, "episode_idx": 16, "frame_idx": 53, "global_frame_idx": 4761, "task_index": 3}, {"db_idx": 4762, "episode_idx": 16, "frame_idx": 54, "global_frame_idx": 4762, "task_index": 3}, {"db_idx": 4763, "episode_idx": 16, "frame_idx": 55, "global_frame_idx": 4763, "task_index": 3}, {"db_idx": 4764, "episode_idx": 16, "frame_idx": 56, "global_frame_idx": 4764, "task_index": 3}, {"db_idx": 4765, "episode_idx": 16, "frame_idx": 57, "global_frame_idx": 4765, "task_index": 3}, {"db_idx": 4766, "episode_idx": 16, "frame_idx": 58, "global_frame_idx": 4766, "task_index": 3}, {"db_idx": 4767, "episode_idx": 16, "frame_idx": 59, "global_frame_idx": 4767, "task_index": 3}, {"db_idx": 4768, "episode_idx": 16, "frame_idx": 60, "global_frame_idx": 4768, "task_index": 3}, {"db_idx": 4769, "episode_idx": 16, "frame_idx": 61, "global_frame_idx": 4769, "task_index": 3}, {"db_idx": 4770, "episode_idx": 16, "frame_idx": 62, "global_frame_idx": 4770, "task_index": 3}, {"db_idx": 4771, "episode_idx": 16, "frame_idx": 63, "global_frame_idx": 4771, "task_index": 3}, {"db_idx": 4772, "episode_idx": 16, "frame_idx": 64, "global_frame_idx": 4772, "task_index": 3}, {"db_idx": 4773, "episode_idx": 16, "frame_idx": 65, "global_frame_idx": 4773, "task_index": 3}, {"db_idx": 4774, "episode_idx": 16, "frame_idx": 66, 
"global_frame_idx": 4774, "task_index": 3}, {"db_idx": 4775, "episode_idx": 16, "frame_idx": 67, "global_frame_idx": 4775, "task_index": 3}, {"db_idx": 4776, "episode_idx": 16, "frame_idx": 68, "global_frame_idx": 4776, "task_index": 3}, {"db_idx": 4777, "episode_idx": 16, "frame_idx": 69, "global_frame_idx": 4777, "task_index": 3}, {"db_idx": 4778, "episode_idx": 16, "frame_idx": 70, "global_frame_idx": 4778, "task_index": 3}, {"db_idx": 4779, "episode_idx": 16, "frame_idx": 71, "global_frame_idx": 4779, "task_index": 3}, {"db_idx": 4780, "episode_idx": 16, "frame_idx": 72, "global_frame_idx": 4780, "task_index": 3}, {"db_idx": 4781, "episode_idx": 16, "frame_idx": 73, "global_frame_idx": 4781, "task_index": 3}, {"db_idx": 4782, "episode_idx": 16, "frame_idx": 74, "global_frame_idx": 4782, "task_index": 3}, {"db_idx": 4783, "episode_idx": 16, "frame_idx": 75, "global_frame_idx": 4783, "task_index": 3}, {"db_idx": 4784, "episode_idx": 16, "frame_idx": 76, "global_frame_idx": 4784, "task_index": 3}, {"db_idx": 4785, "episode_idx": 16, "frame_idx": 77, "global_frame_idx": 4785, "task_index": 3}, {"db_idx": 4786, "episode_idx": 16, "frame_idx": 78, "global_frame_idx": 4786, "task_index": 3}, {"db_idx": 4787, "episode_idx": 16, "frame_idx": 79, "global_frame_idx": 4787, "task_index": 3}, {"db_idx": 4788, "episode_idx": 16, "frame_idx": 80, "global_frame_idx": 4788, "task_index": 3}, {"db_idx": 4789, "episode_idx": 16, "frame_idx": 81, "global_frame_idx": 4789, "task_index": 3}, {"db_idx": 4790, "episode_idx": 16, "frame_idx": 82, "global_frame_idx": 4790, "task_index": 3}, {"db_idx": 4791, "episode_idx": 16, "frame_idx": 83, "global_frame_idx": 4791, "task_index": 3}, {"db_idx": 4792, "episode_idx": 16, "frame_idx": 84, "global_frame_idx": 4792, "task_index": 3}, {"db_idx": 4793, "episode_idx": 16, "frame_idx": 85, "global_frame_idx": 4793, "task_index": 3}, {"db_idx": 4794, "episode_idx": 16, "frame_idx": 86, "global_frame_idx": 4794, "task_index": 3}, {"db_idx": 
4795, "episode_idx": 16, "frame_idx": 87, "global_frame_idx": 4795, "task_index": 3}, {"db_idx": 4796, "episode_idx": 16, "frame_idx": 88, "global_frame_idx": 4796, "task_index": 3}, {"db_idx": 4797, "episode_idx": 16, "frame_idx": 89, "global_frame_idx": 4797, "task_index": 3}, {"db_idx": 4798, "episode_idx": 16, "frame_idx": 90, "global_frame_idx": 4798, "task_index": 3}, {"db_idx": 4799, "episode_idx": 16, "frame_idx": 91, "global_frame_idx": 4799, "task_index": 3}, {"db_idx": 4800, "episode_idx": 16, "frame_idx": 92, "global_frame_idx": 4800, "task_index": 3}, {"db_idx": 4801, "episode_idx": 16, "frame_idx": 93, "global_frame_idx": 4801, "task_index": 3}, {"db_idx": 4802, "episode_idx": 16, "frame_idx": 94, "global_frame_idx": 4802, "task_index": 3}, {"db_idx": 4803, "episode_idx": 16, "frame_idx": 95, "global_frame_idx": 4803, "task_index": 3}, {"db_idx": 4804, "episode_idx": 16, "frame_idx": 96, "global_frame_idx": 4804, "task_index": 3}, {"db_idx": 4805, "episode_idx": 16, "frame_idx": 97, "global_frame_idx": 4805, "task_index": 3}, {"db_idx": 4806, "episode_idx": 16, "frame_idx": 98, "global_frame_idx": 4806, "task_index": 3}, {"db_idx": 4807, "episode_idx": 16, "frame_idx": 99, "global_frame_idx": 4807, "task_index": 3}, {"db_idx": 4808, "episode_idx": 16, "frame_idx": 100, "global_frame_idx": 4808, "task_index": 3}, {"db_idx": 4809, "episode_idx": 16, "frame_idx": 101, "global_frame_idx": 4809, "task_index": 3}, {"db_idx": 4810, "episode_idx": 16, "frame_idx": 102, "global_frame_idx": 4810, "task_index": 3}, {"db_idx": 4811, "episode_idx": 16, "frame_idx": 103, "global_frame_idx": 4811, "task_index": 3}, {"db_idx": 4812, "episode_idx": 16, "frame_idx": 104, "global_frame_idx": 4812, "task_index": 3}, {"db_idx": 4813, "episode_idx": 16, "frame_idx": 105, "global_frame_idx": 4813, "task_index": 3}, {"db_idx": 4814, "episode_idx": 16, "frame_idx": 106, "global_frame_idx": 4814, "task_index": 3}, {"db_idx": 4815, "episode_idx": 16, "frame_idx": 107, 
"global_frame_idx": 4815, "task_index": 3}, {"db_idx": 4816, "episode_idx": 16, "frame_idx": 108, "global_frame_idx": 4816, "task_index": 3}, {"db_idx": 4817, "episode_idx": 16, "frame_idx": 109, "global_frame_idx": 4817, "task_index": 3}, {"db_idx": 4818, "episode_idx": 16, "frame_idx": 110, "global_frame_idx": 4818, "task_index": 3}, {"db_idx": 4819, "episode_idx": 16, "frame_idx": 111, "global_frame_idx": 4819, "task_index": 3}, {"db_idx": 4820, "episode_idx": 16, "frame_idx": 112, "global_frame_idx": 4820, "task_index": 3}, {"db_idx": 4821, "episode_idx": 16, "frame_idx": 113, "global_frame_idx": 4821, "task_index": 3}, {"db_idx": 4822, "episode_idx": 16, "frame_idx": 114, "global_frame_idx": 4822, "task_index": 3}, {"db_idx": 4823, "episode_idx": 16, "frame_idx": 115, "global_frame_idx": 4823, "task_index": 3}, {"db_idx": 4824, "episode_idx": 16, "frame_idx": 116, "global_frame_idx": 4824, "task_index": 3}, {"db_idx": 4825, "episode_idx": 16, "frame_idx": 117, "global_frame_idx": 4825, "task_index": 3}, {"db_idx": 4826, "episode_idx": 16, "frame_idx": 118, "global_frame_idx": 4826, "task_index": 3}, {"db_idx": 4827, "episode_idx": 16, "frame_idx": 119, "global_frame_idx": 4827, "task_index": 3}, {"db_idx": 4828, "episode_idx": 16, "frame_idx": 120, "global_frame_idx": 4828, "task_index": 3}, {"db_idx": 4829, "episode_idx": 16, "frame_idx": 121, "global_frame_idx": 4829, "task_index": 3}, {"db_idx": 4830, "episode_idx": 16, "frame_idx": 122, "global_frame_idx": 4830, "task_index": 3}, {"db_idx": 4831, "episode_idx": 16, "frame_idx": 123, "global_frame_idx": 4831, "task_index": 3}, {"db_idx": 4832, "episode_idx": 16, "frame_idx": 124, "global_frame_idx": 4832, "task_index": 3}, {"db_idx": 4833, "episode_idx": 16, "frame_idx": 125, "global_frame_idx": 4833, "task_index": 3}, {"db_idx": 4834, "episode_idx": 16, "frame_idx": 126, "global_frame_idx": 4834, "task_index": 3}, {"db_idx": 4835, "episode_idx": 16, "frame_idx": 127, "global_frame_idx": 4835, "task_index": 
3}, {"db_idx": 4836, "episode_idx": 16, "frame_idx": 128, "global_frame_idx": 4836, "task_index": 3}, {"db_idx": 4837, "episode_idx": 16, "frame_idx": 129, "global_frame_idx": 4837, "task_index": 3}, {"db_idx": 4838, "episode_idx": 16, "frame_idx": 130, "global_frame_idx": 4838, "task_index": 3}, {"db_idx": 4839, "episode_idx": 16, "frame_idx": 131, "global_frame_idx": 4839, "task_index": 3}, {"db_idx": 4840, "episode_idx": 16, "frame_idx": 132, "global_frame_idx": 4840, "task_index": 3}, {"db_idx": 4841, "episode_idx": 16, "frame_idx": 133, "global_frame_idx": 4841, "task_index": 3}, {"db_idx": 4842, "episode_idx": 16, "frame_idx": 134, "global_frame_idx": 4842, "task_index": 3}, {"db_idx": 4843, "episode_idx": 16, "frame_idx": 135, "global_frame_idx": 4843, "task_index": 3}, {"db_idx": 4844, "episode_idx": 16, "frame_idx": 136, "global_frame_idx": 4844, "task_index": 3}, {"db_idx": 4845, "episode_idx": 16, "frame_idx": 137, "global_frame_idx": 4845, "task_index": 3}, {"db_idx": 4846, "episode_idx": 16, "frame_idx": 138, "global_frame_idx": 4846, "task_index": 3}, {"db_idx": 4847, "episode_idx": 16, "frame_idx": 139, "global_frame_idx": 4847, "task_index": 3}, {"db_idx": 4848, "episode_idx": 16, "frame_idx": 140, "global_frame_idx": 4848, "task_index": 3}, {"db_idx": 4849, "episode_idx": 16, "frame_idx": 141, "global_frame_idx": 4849, "task_index": 3}, {"db_idx": 4850, "episode_idx": 16, "frame_idx": 142, "global_frame_idx": 4850, "task_index": 3}, {"db_idx": 4851, "episode_idx": 16, "frame_idx": 143, "global_frame_idx": 4851, "task_index": 3}, {"db_idx": 4852, "episode_idx": 16, "frame_idx": 144, "global_frame_idx": 4852, "task_index": 3}, {"db_idx": 4853, "episode_idx": 16, "frame_idx": 145, "global_frame_idx": 4853, "task_index": 3}, {"db_idx": 4854, "episode_idx": 16, "frame_idx": 146, "global_frame_idx": 4854, "task_index": 3}, {"db_idx": 4855, "episode_idx": 16, "frame_idx": 147, "global_frame_idx": 4855, "task_index": 3}, {"db_idx": 4856, "episode_idx": 16, 
"frame_idx": 148, "global_frame_idx": 4856, "task_index": 3}, {"db_idx": 4857, "episode_idx": 16, "frame_idx": 149, "global_frame_idx": 4857, "task_index": 3}, {"db_idx": 4858, "episode_idx": 16, "frame_idx": 150, "global_frame_idx": 4858, "task_index": 3}, {"db_idx": 4859, "episode_idx": 16, "frame_idx": 151, "global_frame_idx": 4859, "task_index": 3}, {"db_idx": 4860, "episode_idx": 16, "frame_idx": 152, "global_frame_idx": 4860, "task_index": 3}, {"db_idx": 4861, "episode_idx": 16, "frame_idx": 153, "global_frame_idx": 4861, "task_index": 3}, {"db_idx": 4862, "episode_idx": 16, "frame_idx": 154, "global_frame_idx": 4862, "task_index": 3}, {"db_idx": 4863, "episode_idx": 16, "frame_idx": 155, "global_frame_idx": 4863, "task_index": 3}, {"db_idx": 4864, "episode_idx": 16, "frame_idx": 156, "global_frame_idx": 4864, "task_index": 3}, {"db_idx": 4865, "episode_idx": 16, "frame_idx": 157, "global_frame_idx": 4865, "task_index": 3}, {"db_idx": 4866, "episode_idx": 16, "frame_idx": 158, "global_frame_idx": 4866, "task_index": 3}, {"db_idx": 4867, "episode_idx": 16, "frame_idx": 159, "global_frame_idx": 4867, "task_index": 3}, {"db_idx": 4868, "episode_idx": 16, "frame_idx": 160, "global_frame_idx": 4868, "task_index": 3}, {"db_idx": 4869, "episode_idx": 16, "frame_idx": 161, "global_frame_idx": 4869, "task_index": 3}, {"db_idx": 4870, "episode_idx": 16, "frame_idx": 162, "global_frame_idx": 4870, "task_index": 3}, {"db_idx": 4871, "episode_idx": 16, "frame_idx": 163, "global_frame_idx": 4871, "task_index": 3}, {"db_idx": 4872, "episode_idx": 16, "frame_idx": 164, "global_frame_idx": 4872, "task_index": 3}, {"db_idx": 4873, "episode_idx": 16, "frame_idx": 165, "global_frame_idx": 4873, "task_index": 3}, {"db_idx": 4874, "episode_idx": 16, "frame_idx": 166, "global_frame_idx": 4874, "task_index": 3}, {"db_idx": 4875, "episode_idx": 16, "frame_idx": 167, "global_frame_idx": 4875, "task_index": 3}, {"db_idx": 4876, "episode_idx": 16, "frame_idx": 168, "global_frame_idx": 
4876, "task_index": 3}, {"db_idx": 4877, "episode_idx": 16, "frame_idx": 169, "global_frame_idx": 4877, "task_index": 3}, {"db_idx": 4878, "episode_idx": 16, "frame_idx": 170, "global_frame_idx": 4878, "task_index": 3}, {"db_idx": 4879, "episode_idx": 16, "frame_idx": 171, "global_frame_idx": 4879, "task_index": 3}, {"db_idx": 4880, "episode_idx": 16, "frame_idx": 172, "global_frame_idx": 4880, "task_index": 3}, {"db_idx": 4881, "episode_idx": 16, "frame_idx": 173, "global_frame_idx": 4881, "task_index": 3}, {"db_idx": 4882, "episode_idx": 16, "frame_idx": 174, "global_frame_idx": 4882, "task_index": 3}, {"db_idx": 4883, "episode_idx": 16, "frame_idx": 175, "global_frame_idx": 4883, "task_index": 3}, {"db_idx": 4884, "episode_idx": 16, "frame_idx": 176, "global_frame_idx": 4884, "task_index": 3}, {"db_idx": 4885, "episode_idx": 16, "frame_idx": 177, "global_frame_idx": 4885, "task_index": 3}, {"db_idx": 4886, "episode_idx": 16, "frame_idx": 178, "global_frame_idx": 4886, "task_index": 3}, {"db_idx": 4887, "episode_idx": 16, "frame_idx": 179, "global_frame_idx": 4887, "task_index": 3}, {"db_idx": 4888, "episode_idx": 16, "frame_idx": 180, "global_frame_idx": 4888, "task_index": 3}, {"db_idx": 4889, "episode_idx": 16, "frame_idx": 181, "global_frame_idx": 4889, "task_index": 3}, {"db_idx": 4890, "episode_idx": 16, "frame_idx": 182, "global_frame_idx": 4890, "task_index": 3}, {"db_idx": 4891, "episode_idx": 16, "frame_idx": 183, "global_frame_idx": 4891, "task_index": 3}, {"db_idx": 4892, "episode_idx": 16, "frame_idx": 184, "global_frame_idx": 4892, "task_index": 3}, {"db_idx": 4893, "episode_idx": 16, "frame_idx": 185, "global_frame_idx": 4893, "task_index": 3}, {"db_idx": 4894, "episode_idx": 16, "frame_idx": 186, "global_frame_idx": 4894, "task_index": 3}, {"db_idx": 4895, "episode_idx": 16, "frame_idx": 187, "global_frame_idx": 4895, "task_index": 3}, {"db_idx": 4896, "episode_idx": 16, "frame_idx": 188, "global_frame_idx": 4896, "task_index": 3}, {"db_idx": 
4897, "episode_idx": 16, "frame_idx": 189, "global_frame_idx": 4897, "task_index": 3}, {"db_idx": 4898, "episode_idx": 16, "frame_idx": 190, "global_frame_idx": 4898, "task_index": 3}, {"db_idx": 4899, "episode_idx": 16, "frame_idx": 191, "global_frame_idx": 4899, "task_index": 3}, {"db_idx": 4900, "episode_idx": 16, "frame_idx": 192, "global_frame_idx": 4900, "task_index": 3}, {"db_idx": 4901, "episode_idx": 16, "frame_idx": 193, "global_frame_idx": 4901, "task_index": 3}, {"db_idx": 4902, "episode_idx": 16, "frame_idx": 194, "global_frame_idx": 4902, "task_index": 3}, {"db_idx": 4903, "episode_idx": 16, "frame_idx": 195, "global_frame_idx": 4903, "task_index": 3}, {"db_idx": 4904, "episode_idx": 16, "frame_idx": 196, "global_frame_idx": 4904, "task_index": 3}, {"db_idx": 4905, "episode_idx": 16, "frame_idx": 197, "global_frame_idx": 4905, "task_index": 3}, {"db_idx": 4906, "episode_idx": 16, "frame_idx": 198, "global_frame_idx": 4906, "task_index": 3}, {"db_idx": 4907, "episode_idx": 16, "frame_idx": 199, "global_frame_idx": 4907, "task_index": 3}, {"db_idx": 4908, "episode_idx": 16, "frame_idx": 200, "global_frame_idx": 4908, "task_index": 3}, {"db_idx": 4909, "episode_idx": 16, "frame_idx": 201, "global_frame_idx": 4909, "task_index": 3}, {"db_idx": 4910, "episode_idx": 16, "frame_idx": 202, "global_frame_idx": 4910, "task_index": 3}, {"db_idx": 4911, "episode_idx": 16, "frame_idx": 203, "global_frame_idx": 4911, "task_index": 3}, {"db_idx": 4912, "episode_idx": 16, "frame_idx": 204, "global_frame_idx": 4912, "task_index": 3}, {"db_idx": 4913, "episode_idx": 16, "frame_idx": 205, "global_frame_idx": 4913, "task_index": 3}, {"db_idx": 4914, "episode_idx": 16, "frame_idx": 206, "global_frame_idx": 4914, "task_index": 3}, {"db_idx": 4915, "episode_idx": 16, "frame_idx": 207, "global_frame_idx": 4915, "task_index": 3}, {"db_idx": 4916, "episode_idx": 16, "frame_idx": 208, "global_frame_idx": 4916, "task_index": 3}, {"db_idx": 4917, "episode_idx": 16, "frame_idx": 
209, "global_frame_idx": 4917, "task_index": 3}, {"db_idx": 4918, "episode_idx": 16, "frame_idx": 210, "global_frame_idx": 4918, "task_index": 3}, {"db_idx": 4919, "episode_idx": 16, "frame_idx": 211, "global_frame_idx": 4919, "task_index": 3}, {"db_idx": 4920, "episode_idx": 16, "frame_idx": 212, "global_frame_idx": 4920, "task_index": 3}, {"db_idx": 4921, "episode_idx": 16, "frame_idx": 213, "global_frame_idx": 4921, "task_index": 3}, {"db_idx": 4922, "episode_idx": 16, "frame_idx": 214, "global_frame_idx": 4922, "task_index": 3}, {"db_idx": 4923, "episode_idx": 16, "frame_idx": 215, "global_frame_idx": 4923, "task_index": 3}, {"db_idx": 4924, "episode_idx": 16, "frame_idx": 216, "global_frame_idx": 4924, "task_index": 3}, {"db_idx": 4925, "episode_idx": 16, "frame_idx": 217, "global_frame_idx": 4925, "task_index": 3}, {"db_idx": 4926, "episode_idx": 16, "frame_idx": 218, "global_frame_idx": 4926, "task_index": 3}, {"db_idx": 4927, "episode_idx": 16, "frame_idx": 219, "global_frame_idx": 4927, "task_index": 3}, {"db_idx": 4928, "episode_idx": 16, "frame_idx": 220, "global_frame_idx": 4928, "task_index": 3}, {"db_idx": 4929, "episode_idx": 16, "frame_idx": 221, "global_frame_idx": 4929, "task_index": 3}, {"db_idx": 4930, "episode_idx": 16, "frame_idx": 222, "global_frame_idx": 4930, "task_index": 3}, {"db_idx": 4931, "episode_idx": 16, "frame_idx": 223, "global_frame_idx": 4931, "task_index": 3}, {"db_idx": 4932, "episode_idx": 16, "frame_idx": 224, "global_frame_idx": 4932, "task_index": 3}, {"db_idx": 4933, "episode_idx": 16, "frame_idx": 225, "global_frame_idx": 4933, "task_index": 3}, {"db_idx": 4934, "episode_idx": 16, "frame_idx": 226, "global_frame_idx": 4934, "task_index": 3}, {"db_idx": 4935, "episode_idx": 16, "frame_idx": 227, "global_frame_idx": 4935, "task_index": 3}, {"db_idx": 4936, "episode_idx": 16, "frame_idx": 228, "global_frame_idx": 4936, "task_index": 3}, {"db_idx": 4937, "episode_idx": 16, "frame_idx": 229, "global_frame_idx": 4937, 
"task_index": 3}, {"db_idx": 4938, "episode_idx": 16, "frame_idx": 230, "global_frame_idx": 4938, "task_index": 3}, {"db_idx": 4939, "episode_idx": 16, "frame_idx": 231, "global_frame_idx": 4939, "task_index": 3}, {"db_idx": 4940, "episode_idx": 16, "frame_idx": 232, "global_frame_idx": 4940, "task_index": 3}, {"db_idx": 4941, "episode_idx": 16, "frame_idx": 233, "global_frame_idx": 4941, "task_index": 3}, {"db_idx": 4942, "episode_idx": 16, "frame_idx": 234, "global_frame_idx": 4942, "task_index": 3}, {"db_idx": 4943, "episode_idx": 16, "frame_idx": 235, "global_frame_idx": 4943, "task_index": 3}, {"db_idx": 4944, "episode_idx": 16, "frame_idx": 236, "global_frame_idx": 4944, "task_index": 3}, {"db_idx": 4945, "episode_idx": 16, "frame_idx": 237, "global_frame_idx": 4945, "task_index": 3}, {"db_idx": 4946, "episode_idx": 16, "frame_idx": 238, "global_frame_idx": 4946, "task_index": 3}, {"db_idx": 4947, "episode_idx": 16, "frame_idx": 239, "global_frame_idx": 4947, "task_index": 3}, {"db_idx": 4948, "episode_idx": 16, "frame_idx": 240, "global_frame_idx": 4948, "task_index": 3}, {"db_idx": 4949, "episode_idx": 16, "frame_idx": 241, "global_frame_idx": 4949, "task_index": 3}, {"db_idx": 4950, "episode_idx": 16, "frame_idx": 242, "global_frame_idx": 4950, "task_index": 3}, {"db_idx": 4951, "episode_idx": 16, "frame_idx": 243, "global_frame_idx": 4951, "task_index": 3}, {"db_idx": 4952, "episode_idx": 16, "frame_idx": 244, "global_frame_idx": 4952, "task_index": 3}, {"db_idx": 4953, "episode_idx": 16, "frame_idx": 245, "global_frame_idx": 4953, "task_index": 3}, {"db_idx": 4954, "episode_idx": 16, "frame_idx": 246, "global_frame_idx": 4954, "task_index": 3}, {"db_idx": 4955, "episode_idx": 16, "frame_idx": 247, "global_frame_idx": 4955, "task_index": 3}, {"db_idx": 4956, "episode_idx": 17, "frame_idx": 0, "global_frame_idx": 4956, "task_index": 3}, {"db_idx": 4957, "episode_idx": 17, "frame_idx": 1, "global_frame_idx": 4957, "task_index": 3}, {"db_idx": 4958, 
"episode_idx": 17, "frame_idx": 2, "global_frame_idx": 4958, "task_index": 3}, {"db_idx": 4959, "episode_idx": 17, "frame_idx": 3, "global_frame_idx": 4959, "task_index": 3}, {"db_idx": 4960, "episode_idx": 17, "frame_idx": 4, "global_frame_idx": 4960, "task_index": 3}, {"db_idx": 4961, "episode_idx": 17, "frame_idx": 5, "global_frame_idx": 4961, "task_index": 3}, {"db_idx": 4962, "episode_idx": 17, "frame_idx": 6, "global_frame_idx": 4962, "task_index": 3}, {"db_idx": 4963, "episode_idx": 17, "frame_idx": 7, "global_frame_idx": 4963, "task_index": 3}, {"db_idx": 4964, "episode_idx": 17, "frame_idx": 8, "global_frame_idx": 4964, "task_index": 3}, {"db_idx": 4965, "episode_idx": 17, "frame_idx": 9, "global_frame_idx": 4965, "task_index": 3}, {"db_idx": 4966, "episode_idx": 17, "frame_idx": 10, "global_frame_idx": 4966, "task_index": 3}, {"db_idx": 4967, "episode_idx": 17, "frame_idx": 11, "global_frame_idx": 4967, "task_index": 3}, {"db_idx": 4968, "episode_idx": 17, "frame_idx": 12, "global_frame_idx": 4968, "task_index": 3}, {"db_idx": 4969, "episode_idx": 17, "frame_idx": 13, "global_frame_idx": 4969, "task_index": 3}, {"db_idx": 4970, "episode_idx": 17, "frame_idx": 14, "global_frame_idx": 4970, "task_index": 3}, {"db_idx": 4971, "episode_idx": 17, "frame_idx": 15, "global_frame_idx": 4971, "task_index": 3}, {"db_idx": 4972, "episode_idx": 17, "frame_idx": 16, "global_frame_idx": 4972, "task_index": 3}, {"db_idx": 4973, "episode_idx": 17, "frame_idx": 17, "global_frame_idx": 4973, "task_index": 3}, {"db_idx": 4974, "episode_idx": 17, "frame_idx": 18, "global_frame_idx": 4974, "task_index": 3}, {"db_idx": 4975, "episode_idx": 17, "frame_idx": 19, "global_frame_idx": 4975, "task_index": 3}, {"db_idx": 4976, "episode_idx": 17, "frame_idx": 20, "global_frame_idx": 4976, "task_index": 3}, {"db_idx": 4977, "episode_idx": 17, "frame_idx": 21, "global_frame_idx": 4977, "task_index": 3}, {"db_idx": 4978, "episode_idx": 17, "frame_idx": 22, "global_frame_idx": 4978, 
"task_index": 3}, {"db_idx": 4979, "episode_idx": 17, "frame_idx": 23, "global_frame_idx": 4979, "task_index": 3}, {"db_idx": 4980, "episode_idx": 17, "frame_idx": 24, "global_frame_idx": 4980, "task_index": 3}, {"db_idx": 4981, "episode_idx": 17, "frame_idx": 25, "global_frame_idx": 4981, "task_index": 3}, {"db_idx": 4982, "episode_idx": 17, "frame_idx": 26, "global_frame_idx": 4982, "task_index": 3}, {"db_idx": 4983, "episode_idx": 17, "frame_idx": 27, "global_frame_idx": 4983, "task_index": 3}, {"db_idx": 4984, "episode_idx": 17, "frame_idx": 28, "global_frame_idx": 4984, "task_index": 3}, {"db_idx": 4985, "episode_idx": 17, "frame_idx": 29, "global_frame_idx": 4985, "task_index": 3}, {"db_idx": 4986, "episode_idx": 17, "frame_idx": 30, "global_frame_idx": 4986, "task_index": 3}, {"db_idx": 4987, "episode_idx": 17, "frame_idx": 31, "global_frame_idx": 4987, "task_index": 3}, {"db_idx": 4988, "episode_idx": 17, "frame_idx": 32, "global_frame_idx": 4988, "task_index": 3}, {"db_idx": 4989, "episode_idx": 17, "frame_idx": 33, "global_frame_idx": 4989, "task_index": 3}, {"db_idx": 4990, "episode_idx": 17, "frame_idx": 34, "global_frame_idx": 4990, "task_index": 3}, {"db_idx": 4991, "episode_idx": 17, "frame_idx": 35, "global_frame_idx": 4991, "task_index": 3}, {"db_idx": 4992, "episode_idx": 17, "frame_idx": 36, "global_frame_idx": 4992, "task_index": 3}, {"db_idx": 4993, "episode_idx": 17, "frame_idx": 37, "global_frame_idx": 4993, "task_index": 3}, {"db_idx": 4994, "episode_idx": 17, "frame_idx": 38, "global_frame_idx": 4994, "task_index": 3}, {"db_idx": 4995, "episode_idx": 17, "frame_idx": 39, "global_frame_idx": 4995, "task_index": 3}, {"db_idx": 4996, "episode_idx": 17, "frame_idx": 40, "global_frame_idx": 4996, "task_index": 3}, {"db_idx": 4997, "episode_idx": 17, "frame_idx": 41, "global_frame_idx": 4997, "task_index": 3}, {"db_idx": 4998, "episode_idx": 17, "frame_idx": 42, "global_frame_idx": 4998, "task_index": 3}, {"db_idx": 4999, "episode_idx": 17, 
"frame_idx": 43, "global_frame_idx": 4999, "task_index": 3}, {"db_idx": 5000, "episode_idx": 17, "frame_idx": 44, "global_frame_idx": 5000, "task_index": 3}, {"db_idx": 5001, "episode_idx": 17, "frame_idx": 45, "global_frame_idx": 5001, "task_index": 3}, {"db_idx": 5002, "episode_idx": 17, "frame_idx": 46, "global_frame_idx": 5002, "task_index": 3}, {"db_idx": 5003, "episode_idx": 17, "frame_idx": 47, "global_frame_idx": 5003, "task_index": 3}, {"db_idx": 5004, "episode_idx": 17, "frame_idx": 48, "global_frame_idx": 5004, "task_index": 3}, {"db_idx": 5005, "episode_idx": 17, "frame_idx": 49, "global_frame_idx": 5005, "task_index": 3}, {"db_idx": 5006, "episode_idx": 17, "frame_idx": 50, "global_frame_idx": 5006, "task_index": 3}, {"db_idx": 5007, "episode_idx": 17, "frame_idx": 51, "global_frame_idx": 5007, "task_index": 3}, {"db_idx": 5008, "episode_idx": 17, "frame_idx": 52, "global_frame_idx": 5008, "task_index": 3}, {"db_idx": 5009, "episode_idx": 17, "frame_idx": 53, "global_frame_idx": 5009, "task_index": 3}, {"db_idx": 5010, "episode_idx": 17, "frame_idx": 54, "global_frame_idx": 5010, "task_index": 3}, {"db_idx": 5011, "episode_idx": 17, "frame_idx": 55, "global_frame_idx": 5011, "task_index": 3}, {"db_idx": 5012, "episode_idx": 17, "frame_idx": 56, "global_frame_idx": 5012, "task_index": 3}, {"db_idx": 5013, "episode_idx": 17, "frame_idx": 57, "global_frame_idx": 5013, "task_index": 3}, {"db_idx": 5014, "episode_idx": 17, "frame_idx": 58, "global_frame_idx": 5014, "task_index": 3}, {"db_idx": 5015, "episode_idx": 17, "frame_idx": 59, "global_frame_idx": 5015, "task_index": 3}, {"db_idx": 5016, "episode_idx": 17, "frame_idx": 60, "global_frame_idx": 5016, "task_index": 3}, {"db_idx": 5017, "episode_idx": 17, "frame_idx": 61, "global_frame_idx": 5017, "task_index": 3}, {"db_idx": 5018, "episode_idx": 17, "frame_idx": 62, "global_frame_idx": 5018, "task_index": 3}, {"db_idx": 5019, "episode_idx": 17, "frame_idx": 63, "global_frame_idx": 5019, "task_index": 
3}, {"db_idx": 5020, "episode_idx": 17, "frame_idx": 64, "global_frame_idx": 5020, "task_index": 3}, {"db_idx": 5021, "episode_idx": 17, "frame_idx": 65, "global_frame_idx": 5021, "task_index": 3}, {"db_idx": 5022, "episode_idx": 17, "frame_idx": 66, "global_frame_idx": 5022, "task_index": 3}, {"db_idx": 5023, "episode_idx": 17, "frame_idx": 67, "global_frame_idx": 5023, "task_index": 3}, {"db_idx": 5024, "episode_idx": 17, "frame_idx": 68, "global_frame_idx": 5024, "task_index": 3}, {"db_idx": 5025, "episode_idx": 17, "frame_idx": 69, "global_frame_idx": 5025, "task_index": 3}, {"db_idx": 5026, "episode_idx": 17, "frame_idx": 70, "global_frame_idx": 5026, "task_index": 3}, {"db_idx": 5027, "episode_idx": 17, "frame_idx": 71, "global_frame_idx": 5027, "task_index": 3}, {"db_idx": 5028, "episode_idx": 17, "frame_idx": 72, "global_frame_idx": 5028, "task_index": 3}, {"db_idx": 5029, "episode_idx": 17, "frame_idx": 73, "global_frame_idx": 5029, "task_index": 3}, {"db_idx": 5030, "episode_idx": 17, "frame_idx": 74, "global_frame_idx": 5030, "task_index": 3}, {"db_idx": 5031, "episode_idx": 17, "frame_idx": 75, "global_frame_idx": 5031, "task_index": 3}, {"db_idx": 5032, "episode_idx": 17, "frame_idx": 76, "global_frame_idx": 5032, "task_index": 3}, {"db_idx": 5033, "episode_idx": 17, "frame_idx": 77, "global_frame_idx": 5033, "task_index": 3}, {"db_idx": 5034, "episode_idx": 17, "frame_idx": 78, "global_frame_idx": 5034, "task_index": 3}, {"db_idx": 5035, "episode_idx": 17, "frame_idx": 79, "global_frame_idx": 5035, "task_index": 3}, {"db_idx": 5036, "episode_idx": 17, "frame_idx": 80, "global_frame_idx": 5036, "task_index": 3}, {"db_idx": 5037, "episode_idx": 17, "frame_idx": 81, "global_frame_idx": 5037, "task_index": 3}, {"db_idx": 5038, "episode_idx": 17, "frame_idx": 82, "global_frame_idx": 5038, "task_index": 3}, {"db_idx": 5039, "episode_idx": 17, "frame_idx": 83, "global_frame_idx": 5039, "task_index": 3}, {"db_idx": 5040, "episode_idx": 17, "frame_idx": 84, 
"global_frame_idx": 5040, "task_index": 3}, {"db_idx": 5041, "episode_idx": 17, "frame_idx": 85, "global_frame_idx": 5041, "task_index": 3}, {"db_idx": 5042, "episode_idx": 17, "frame_idx": 86, "global_frame_idx": 5042, "task_index": 3}, {"db_idx": 5043, "episode_idx": 17, "frame_idx": 87, "global_frame_idx": 5043, "task_index": 3}, {"db_idx": 5044, "episode_idx": 17, "frame_idx": 88, "global_frame_idx": 5044, "task_index": 3}, {"db_idx": 5045, "episode_idx": 17, "frame_idx": 89, "global_frame_idx": 5045, "task_index": 3}, {"db_idx": 5046, "episode_idx": 17, "frame_idx": 90, "global_frame_idx": 5046, "task_index": 3}, {"db_idx": 5047, "episode_idx": 17, "frame_idx": 91, "global_frame_idx": 5047, "task_index": 3}, {"db_idx": 5048, "episode_idx": 17, "frame_idx": 92, "global_frame_idx": 5048, "task_index": 3}, {"db_idx": 5049, "episode_idx": 17, "frame_idx": 93, "global_frame_idx": 5049, "task_index": 3}, {"db_idx": 5050, "episode_idx": 17, "frame_idx": 94, "global_frame_idx": 5050, "task_index": 3}, {"db_idx": 5051, "episode_idx": 17, "frame_idx": 95, "global_frame_idx": 5051, "task_index": 3}, {"db_idx": 5052, "episode_idx": 17, "frame_idx": 96, "global_frame_idx": 5052, "task_index": 3}, {"db_idx": 5053, "episode_idx": 17, "frame_idx": 97, "global_frame_idx": 5053, "task_index": 3}, {"db_idx": 5054, "episode_idx": 17, "frame_idx": 98, "global_frame_idx": 5054, "task_index": 3}, {"db_idx": 5055, "episode_idx": 17, "frame_idx": 99, "global_frame_idx": 5055, "task_index": 3}, {"db_idx": 5056, "episode_idx": 17, "frame_idx": 100, "global_frame_idx": 5056, "task_index": 3}, {"db_idx": 5057, "episode_idx": 17, "frame_idx": 101, "global_frame_idx": 5057, "task_index": 3}, {"db_idx": 5058, "episode_idx": 17, "frame_idx": 102, "global_frame_idx": 5058, "task_index": 3}, {"db_idx": 5059, "episode_idx": 17, "frame_idx": 103, "global_frame_idx": 5059, "task_index": 3}, {"db_idx": 5060, "episode_idx": 17, "frame_idx": 104, "global_frame_idx": 5060, "task_index": 3}, {"db_idx": 
5061, "episode_idx": 17, "frame_idx": 105, "global_frame_idx": 5061, "task_index": 3}, {"db_idx": 5062, "episode_idx": 17, "frame_idx": 106, "global_frame_idx": 5062, "task_index": 3}, {"db_idx": 5063, "episode_idx": 17, "frame_idx": 107, "global_frame_idx": 5063, "task_index": 3}, {"db_idx": 5064, "episode_idx": 17, "frame_idx": 108, "global_frame_idx": 5064, "task_index": 3}, {"db_idx": 5065, "episode_idx": 17, "frame_idx": 109, "global_frame_idx": 5065, "task_index": 3}, {"db_idx": 5066, "episode_idx": 17, "frame_idx": 110, "global_frame_idx": 5066, "task_index": 3}, {"db_idx": 5067, "episode_idx": 17, "frame_idx": 111, "global_frame_idx": 5067, "task_index": 3}, {"db_idx": 5068, "episode_idx": 17, "frame_idx": 112, "global_frame_idx": 5068, "task_index": 3}, {"db_idx": 5069, "episode_idx": 17, "frame_idx": 113, "global_frame_idx": 5069, "task_index": 3}, {"db_idx": 5070, "episode_idx": 17, "frame_idx": 114, "global_frame_idx": 5070, "task_index": 3}, {"db_idx": 5071, "episode_idx": 17, "frame_idx": 115, "global_frame_idx": 5071, "task_index": 3}, {"db_idx": 5072, "episode_idx": 17, "frame_idx": 116, "global_frame_idx": 5072, "task_index": 3}, {"db_idx": 5073, "episode_idx": 17, "frame_idx": 117, "global_frame_idx": 5073, "task_index": 3}, {"db_idx": 5074, "episode_idx": 17, "frame_idx": 118, "global_frame_idx": 5074, "task_index": 3}, {"db_idx": 5075, "episode_idx": 17, "frame_idx": 119, "global_frame_idx": 5075, "task_index": 3}, {"db_idx": 5076, "episode_idx": 17, "frame_idx": 120, "global_frame_idx": 5076, "task_index": 3}, {"db_idx": 5077, "episode_idx": 17, "frame_idx": 121, "global_frame_idx": 5077, "task_index": 3}, {"db_idx": 5078, "episode_idx": 17, "frame_idx": 122, "global_frame_idx": 5078, "task_index": 3}, {"db_idx": 5079, "episode_idx": 17, "frame_idx": 123, "global_frame_idx": 5079, "task_index": 3}, {"db_idx": 5080, "episode_idx": 17, "frame_idx": 124, "global_frame_idx": 5080, "task_index": 3}, {"db_idx": 5081, "episode_idx": 17, "frame_idx": 
125, "global_frame_idx": 5081, "task_index": 3}, {"db_idx": 5082, "episode_idx": 17, "frame_idx": 126, "global_frame_idx": 5082, "task_index": 3}, {"db_idx": 5083, "episode_idx": 17, "frame_idx": 127, "global_frame_idx": 5083, "task_index": 3}, {"db_idx": 5084, "episode_idx": 17, "frame_idx": 128, "global_frame_idx": 5084, "task_index": 3}, {"db_idx": 5085, "episode_idx": 17, "frame_idx": 129, "global_frame_idx": 5085, "task_index": 3}, {"db_idx": 5086, "episode_idx": 17, "frame_idx": 130, "global_frame_idx": 5086, "task_index": 3}, {"db_idx": 5087, "episode_idx": 17, "frame_idx": 131, "global_frame_idx": 5087, "task_index": 3}, {"db_idx": 5088, "episode_idx": 17, "frame_idx": 132, "global_frame_idx": 5088, "task_index": 3}, {"db_idx": 5089, "episode_idx": 17, "frame_idx": 133, "global_frame_idx": 5089, "task_index": 3}, {"db_idx": 5090, "episode_idx": 17, "frame_idx": 134, "global_frame_idx": 5090, "task_index": 3}, {"db_idx": 5091, "episode_idx": 17, "frame_idx": 135, "global_frame_idx": 5091, "task_index": 3}, {"db_idx": 5092, "episode_idx": 17, "frame_idx": 136, "global_frame_idx": 5092, "task_index": 3}, {"db_idx": 5093, "episode_idx": 17, "frame_idx": 137, "global_frame_idx": 5093, "task_index": 3}, {"db_idx": 5094, "episode_idx": 17, "frame_idx": 138, "global_frame_idx": 5094, "task_index": 3}, {"db_idx": 5095, "episode_idx": 17, "frame_idx": 139, "global_frame_idx": 5095, "task_index": 3}, {"db_idx": 5096, "episode_idx": 17, "frame_idx": 140, "global_frame_idx": 5096, "task_index": 3}, {"db_idx": 5097, "episode_idx": 17, "frame_idx": 141, "global_frame_idx": 5097, "task_index": 3}, {"db_idx": 5098, "episode_idx": 17, "frame_idx": 142, "global_frame_idx": 5098, "task_index": 3}, {"db_idx": 5099, "episode_idx": 17, "frame_idx": 143, "global_frame_idx": 5099, "task_index": 3}, {"db_idx": 5100, "episode_idx": 17, "frame_idx": 144, "global_frame_idx": 5100, "task_index": 3}, {"db_idx": 5101, "episode_idx": 17, "frame_idx": 145, "global_frame_idx": 5101, 
"task_index": 3}, {"db_idx": 5102, "episode_idx": 17, "frame_idx": 146, "global_frame_idx": 5102, "task_index": 3}, {"db_idx": 5103, "episode_idx": 17, "frame_idx": 147, "global_frame_idx": 5103, "task_index": 3}, {"db_idx": 5104, "episode_idx": 17, "frame_idx": 148, "global_frame_idx": 5104, "task_index": 3}, {"db_idx": 5105, "episode_idx": 17, "frame_idx": 149, "global_frame_idx": 5105, "task_index": 3}, {"db_idx": 5106, "episode_idx": 17, "frame_idx": 150, "global_frame_idx": 5106, "task_index": 3}, {"db_idx": 5107, "episode_idx": 17, "frame_idx": 151, "global_frame_idx": 5107, "task_index": 3}, {"db_idx": 5108, "episode_idx": 17, "frame_idx": 152, "global_frame_idx": 5108, "task_index": 3}, {"db_idx": 5109, "episode_idx": 17, "frame_idx": 153, "global_frame_idx": 5109, "task_index": 3}, {"db_idx": 5110, "episode_idx": 17, "frame_idx": 154, "global_frame_idx": 5110, "task_index": 3}, {"db_idx": 5111, "episode_idx": 17, "frame_idx": 155, "global_frame_idx": 5111, "task_index": 3}, {"db_idx": 5112, "episode_idx": 17, "frame_idx": 156, "global_frame_idx": 5112, "task_index": 3}, {"db_idx": 5113, "episode_idx": 17, "frame_idx": 157, "global_frame_idx": 5113, "task_index": 3}, {"db_idx": 5114, "episode_idx": 17, "frame_idx": 158, "global_frame_idx": 5114, "task_index": 3}, {"db_idx": 5115, "episode_idx": 17, "frame_idx": 159, "global_frame_idx": 5115, "task_index": 3}, {"db_idx": 5116, "episode_idx": 17, "frame_idx": 160, "global_frame_idx": 5116, "task_index": 3}, {"db_idx": 5117, "episode_idx": 17, "frame_idx": 161, "global_frame_idx": 5117, "task_index": 3}, {"db_idx": 5118, "episode_idx": 17, "frame_idx": 162, "global_frame_idx": 5118, "task_index": 3}, {"db_idx": 5119, "episode_idx": 17, "frame_idx": 163, "global_frame_idx": 5119, "task_index": 3}, {"db_idx": 5120, "episode_idx": 17, "frame_idx": 164, "global_frame_idx": 5120, "task_index": 3}, {"db_idx": 5121, "episode_idx": 17, "frame_idx": 165, "global_frame_idx": 5121, "task_index": 3}, {"db_idx": 5122, 
"episode_idx": 17, "frame_idx": 166, "global_frame_idx": 5122, "task_index": 3}, {"db_idx": 5123, "episode_idx": 17, "frame_idx": 167, "global_frame_idx": 5123, "task_index": 3}, {"db_idx": 5124, "episode_idx": 17, "frame_idx": 168, "global_frame_idx": 5124, "task_index": 3}, {"db_idx": 5125, "episode_idx": 17, "frame_idx": 169, "global_frame_idx": 5125, "task_index": 3}, {"db_idx": 5126, "episode_idx": 17, "frame_idx": 170, "global_frame_idx": 5126, "task_index": 3}, {"db_idx": 5127, "episode_idx": 17, "frame_idx": 171, "global_frame_idx": 5127, "task_index": 3}, {"db_idx": 5128, "episode_idx": 17, "frame_idx": 172, "global_frame_idx": 5128, "task_index": 3}, {"db_idx": 5129, "episode_idx": 17, "frame_idx": 173, "global_frame_idx": 5129, "task_index": 3}, {"db_idx": 5130, "episode_idx": 17, "frame_idx": 174, "global_frame_idx": 5130, "task_index": 3}, {"db_idx": 5131, "episode_idx": 17, "frame_idx": 175, "global_frame_idx": 5131, "task_index": 3}, {"db_idx": 5132, "episode_idx": 17, "frame_idx": 176, "global_frame_idx": 5132, "task_index": 3}, {"db_idx": 5133, "episode_idx": 17, "frame_idx": 177, "global_frame_idx": 5133, "task_index": 3}, {"db_idx": 5134, "episode_idx": 17, "frame_idx": 178, "global_frame_idx": 5134, "task_index": 3}, {"db_idx": 5135, "episode_idx": 17, "frame_idx": 179, "global_frame_idx": 5135, "task_index": 3}, {"db_idx": 5136, "episode_idx": 17, "frame_idx": 180, "global_frame_idx": 5136, "task_index": 3}, {"db_idx": 5137, "episode_idx": 17, "frame_idx": 181, "global_frame_idx": 5137, "task_index": 3}, {"db_idx": 5138, "episode_idx": 17, "frame_idx": 182, "global_frame_idx": 5138, "task_index": 3}, {"db_idx": 5139, "episode_idx": 17, "frame_idx": 183, "global_frame_idx": 5139, "task_index": 3}, {"db_idx": 5140, "episode_idx": 17, "frame_idx": 184, "global_frame_idx": 5140, "task_index": 3}, {"db_idx": 5141, "episode_idx": 17, "frame_idx": 185, "global_frame_idx": 5141, "task_index": 3}, {"db_idx": 5142, "episode_idx": 17, "frame_idx": 186, 
"global_frame_idx": 5142, "task_index": 3}, {"db_idx": 5143, "episode_idx": 17, "frame_idx": 187, "global_frame_idx": 5143, "task_index": 3}, {"db_idx": 5144, "episode_idx": 17, "frame_idx": 188, "global_frame_idx": 5144, "task_index": 3}, {"db_idx": 5145, "episode_idx": 17, "frame_idx": 189, "global_frame_idx": 5145, "task_index": 3}, {"db_idx": 5146, "episode_idx": 17, "frame_idx": 190, "global_frame_idx": 5146, "task_index": 3}, {"db_idx": 5147, "episode_idx": 17, "frame_idx": 191, "global_frame_idx": 5147, "task_index": 3}, {"db_idx": 5148, "episode_idx": 17, "frame_idx": 192, "global_frame_idx": 5148, "task_index": 3}, {"db_idx": 5149, "episode_idx": 17, "frame_idx": 193, "global_frame_idx": 5149, "task_index": 3}, {"db_idx": 5150, "episode_idx": 17, "frame_idx": 194, "global_frame_idx": 5150, "task_index": 3}, {"db_idx": 5151, "episode_idx": 17, "frame_idx": 195, "global_frame_idx": 5151, "task_index": 3}, {"db_idx": 5152, "episode_idx": 17, "frame_idx": 196, "global_frame_idx": 5152, "task_index": 3}, {"db_idx": 5153, "episode_idx": 17, "frame_idx": 197, "global_frame_idx": 5153, "task_index": 3}, {"db_idx": 5154, "episode_idx": 17, "frame_idx": 198, "global_frame_idx": 5154, "task_index": 3}, {"db_idx": 5155, "episode_idx": 17, "frame_idx": 199, "global_frame_idx": 5155, "task_index": 3}, {"db_idx": 5156, "episode_idx": 17, "frame_idx": 200, "global_frame_idx": 5156, "task_index": 3}, {"db_idx": 5157, "episode_idx": 17, "frame_idx": 201, "global_frame_idx": 5157, "task_index": 3}, {"db_idx": 5158, "episode_idx": 17, "frame_idx": 202, "global_frame_idx": 5158, "task_index": 3}, {"db_idx": 5159, "episode_idx": 17, "frame_idx": 203, "global_frame_idx": 5159, "task_index": 3}, {"db_idx": 5160, "episode_idx": 17, "frame_idx": 204, "global_frame_idx": 5160, "task_index": 3}, {"db_idx": 5161, "episode_idx": 17, "frame_idx": 205, "global_frame_idx": 5161, "task_index": 3}, {"db_idx": 5162, "episode_idx": 17, "frame_idx": 206, "global_frame_idx": 5162, "task_index": 
3}, {"db_idx": 5163, "episode_idx": 17, "frame_idx": 207, "global_frame_idx": 5163, "task_index": 3}, {"db_idx": 5164, "episode_idx": 17, "frame_idx": 208, "global_frame_idx": 5164, "task_index": 3}, {"db_idx": 5165, "episode_idx": 17, "frame_idx": 209, "global_frame_idx": 5165, "task_index": 3}, {"db_idx": 5166, "episode_idx": 17, "frame_idx": 210, "global_frame_idx": 5166, "task_index": 3}, {"db_idx": 5167, "episode_idx": 17, "frame_idx": 211, "global_frame_idx": 5167, "task_index": 3}, {"db_idx": 5168, "episode_idx": 17, "frame_idx": 212, "global_frame_idx": 5168, "task_index": 3}, {"db_idx": 5169, "episode_idx": 17, "frame_idx": 213, "global_frame_idx": 5169, "task_index": 3}, {"db_idx": 5170, "episode_idx": 17, "frame_idx": 214, "global_frame_idx": 5170, "task_index": 3}, {"db_idx": 5171, "episode_idx": 17, "frame_idx": 215, "global_frame_idx": 5171, "task_index": 3}, {"db_idx": 5172, "episode_idx": 17, "frame_idx": 216, "global_frame_idx": 5172, "task_index": 3}, {"db_idx": 5173, "episode_idx": 17, "frame_idx": 217, "global_frame_idx": 5173, "task_index": 3}, {"db_idx": 5174, "episode_idx": 17, "frame_idx": 218, "global_frame_idx": 5174, "task_index": 3}, {"db_idx": 5175, "episode_idx": 17, "frame_idx": 219, "global_frame_idx": 5175, "task_index": 3}, {"db_idx": 5176, "episode_idx": 17, "frame_idx": 220, "global_frame_idx": 5176, "task_index": 3}, {"db_idx": 5177, "episode_idx": 17, "frame_idx": 221, "global_frame_idx": 5177, "task_index": 3}, {"db_idx": 5178, "episode_idx": 17, "frame_idx": 222, "global_frame_idx": 5178, "task_index": 3}, {"db_idx": 5179, "episode_idx": 17, "frame_idx": 223, "global_frame_idx": 5179, "task_index": 3}, {"db_idx": 5180, "episode_idx": 17, "frame_idx": 224, "global_frame_idx": 5180, "task_index": 3}, {"db_idx": 5181, "episode_idx": 17, "frame_idx": 225, "global_frame_idx": 5181, "task_index": 3}, {"db_idx": 5182, "episode_idx": 17, "frame_idx": 226, "global_frame_idx": 5182, "task_index": 3}, {"db_idx": 5183, "episode_idx": 17, 
"frame_idx": 227, "global_frame_idx": 5183, "task_index": 3}, {"db_idx": 5184, "episode_idx": 17, "frame_idx": 228, "global_frame_idx": 5184, "task_index": 3}, {"db_idx": 5185, "episode_idx": 17, "frame_idx": 229, "global_frame_idx": 5185, "task_index": 3}, {"db_idx": 5186, "episode_idx": 17, "frame_idx": 230, "global_frame_idx": 5186, "task_index": 3}, {"db_idx": 5187, "episode_idx": 17, "frame_idx": 231, "global_frame_idx": 5187, "task_index": 3}, {"db_idx": 5188, "episode_idx": 17, "frame_idx": 232, "global_frame_idx": 5188, "task_index": 3}, {"db_idx": 5189, "episode_idx": 17, "frame_idx": 233, "global_frame_idx": 5189, "task_index": 3}, {"db_idx": 5190, "episode_idx": 17, "frame_idx": 234, "global_frame_idx": 5190, "task_index": 3}, {"db_idx": 5191, "episode_idx": 18, "frame_idx": 0, "global_frame_idx": 5191, "task_index": 3}, {"db_idx": 5192, "episode_idx": 18, "frame_idx": 1, "global_frame_idx": 5192, "task_index": 3}, {"db_idx": 5193, "episode_idx": 18, "frame_idx": 2, "global_frame_idx": 5193, "task_index": 3}, {"db_idx": 5194, "episode_idx": 18, "frame_idx": 3, "global_frame_idx": 5194, "task_index": 3}, {"db_idx": 5195, "episode_idx": 18, "frame_idx": 4, "global_frame_idx": 5195, "task_index": 3}, {"db_idx": 5196, "episode_idx": 18, "frame_idx": 5, "global_frame_idx": 5196, "task_index": 3}, {"db_idx": 5197, "episode_idx": 18, "frame_idx": 6, "global_frame_idx": 5197, "task_index": 3}, {"db_idx": 5198, "episode_idx": 18, "frame_idx": 7, "global_frame_idx": 5198, "task_index": 3}, {"db_idx": 5199, "episode_idx": 18, "frame_idx": 8, "global_frame_idx": 5199, "task_index": 3}, {"db_idx": 5200, "episode_idx": 18, "frame_idx": 9, "global_frame_idx": 5200, "task_index": 3}, {"db_idx": 5201, "episode_idx": 18, "frame_idx": 10, "global_frame_idx": 5201, "task_index": 3}, {"db_idx": 5202, "episode_idx": 18, "frame_idx": 11, "global_frame_idx": 5202, "task_index": 3}, {"db_idx": 5203, "episode_idx": 18, "frame_idx": 12, "global_frame_idx": 5203, "task_index": 3}, 
{"db_idx": 5204, "episode_idx": 18, "frame_idx": 13, "global_frame_idx": 5204, "task_index": 3}, {"db_idx": 5205, "episode_idx": 18, "frame_idx": 14, "global_frame_idx": 5205, "task_index": 3}, {"db_idx": 5206, "episode_idx": 18, "frame_idx": 15, "global_frame_idx": 5206, "task_index": 3}, {"db_idx": 5207, "episode_idx": 18, "frame_idx": 16, "global_frame_idx": 5207, "task_index": 3}, {"db_idx": 5208, "episode_idx": 18, "frame_idx": 17, "global_frame_idx": 5208, "task_index": 3}, {"db_idx": 5209, "episode_idx": 18, "frame_idx": 18, "global_frame_idx": 5209, "task_index": 3}, {"db_idx": 5210, "episode_idx": 18, "frame_idx": 19, "global_frame_idx": 5210, "task_index": 3}, {"db_idx": 5211, "episode_idx": 18, "frame_idx": 20, "global_frame_idx": 5211, "task_index": 3}, {"db_idx": 5212, "episode_idx": 18, "frame_idx": 21, "global_frame_idx": 5212, "task_index": 3}, {"db_idx": 5213, "episode_idx": 18, "frame_idx": 22, "global_frame_idx": 5213, "task_index": 3}, {"db_idx": 5214, "episode_idx": 18, "frame_idx": 23, "global_frame_idx": 5214, "task_index": 3}, {"db_idx": 5215, "episode_idx": 18, "frame_idx": 24, "global_frame_idx": 5215, "task_index": 3}, {"db_idx": 5216, "episode_idx": 18, "frame_idx": 25, "global_frame_idx": 5216, "task_index": 3}, {"db_idx": 5217, "episode_idx": 18, "frame_idx": 26, "global_frame_idx": 5217, "task_index": 3}, {"db_idx": 5218, "episode_idx": 18, "frame_idx": 27, "global_frame_idx": 5218, "task_index": 3}, {"db_idx": 5219, "episode_idx": 18, "frame_idx": 28, "global_frame_idx": 5219, "task_index": 3}, {"db_idx": 5220, "episode_idx": 18, "frame_idx": 29, "global_frame_idx": 5220, "task_index": 3}, {"db_idx": 5221, "episode_idx": 18, "frame_idx": 30, "global_frame_idx": 5221, "task_index": 3}, {"db_idx": 5222, "episode_idx": 18, "frame_idx": 31, "global_frame_idx": 5222, "task_index": 3}, {"db_idx": 5223, "episode_idx": 18, "frame_idx": 32, "global_frame_idx": 5223, "task_index": 3}, {"db_idx": 5224, "episode_idx": 18, "frame_idx": 33, 
"global_frame_idx": 5224, "task_index": 3}, {"db_idx": 5225, "episode_idx": 18, "frame_idx": 34, "global_frame_idx": 5225, "task_index": 3}, {"db_idx": 5226, "episode_idx": 18, "frame_idx": 35, "global_frame_idx": 5226, "task_index": 3}, {"db_idx": 5227, "episode_idx": 18, "frame_idx": 36, "global_frame_idx": 5227, "task_index": 3}, {"db_idx": 5228, "episode_idx": 18, "frame_idx": 37, "global_frame_idx": 5228, "task_index": 3}, {"db_idx": 5229, "episode_idx": 18, "frame_idx": 38, "global_frame_idx": 5229, "task_index": 3}, {"db_idx": 5230, "episode_idx": 18, "frame_idx": 39, "global_frame_idx": 5230, "task_index": 3}, {"db_idx": 5231, "episode_idx": 18, "frame_idx": 40, "global_frame_idx": 5231, "task_index": 3}, {"db_idx": 5232, "episode_idx": 18, "frame_idx": 41, "global_frame_idx": 5232, "task_index": 3}, {"db_idx": 5233, "episode_idx": 18, "frame_idx": 42, "global_frame_idx": 5233, "task_index": 3}, {"db_idx": 5234, "episode_idx": 18, "frame_idx": 43, "global_frame_idx": 5234, "task_index": 3}, {"db_idx": 5235, "episode_idx": 18, "frame_idx": 44, "global_frame_idx": 5235, "task_index": 3}, {"db_idx": 5236, "episode_idx": 18, "frame_idx": 45, "global_frame_idx": 5236, "task_index": 3}, {"db_idx": 5237, "episode_idx": 18, "frame_idx": 46, "global_frame_idx": 5237, "task_index": 3}, {"db_idx": 5238, "episode_idx": 18, "frame_idx": 47, "global_frame_idx": 5238, "task_index": 3}, {"db_idx": 5239, "episode_idx": 18, "frame_idx": 48, "global_frame_idx": 5239, "task_index": 3}, {"db_idx": 5240, "episode_idx": 18, "frame_idx": 49, "global_frame_idx": 5240, "task_index": 3}, {"db_idx": 5241, "episode_idx": 18, "frame_idx": 50, "global_frame_idx": 5241, "task_index": 3}, {"db_idx": 5242, "episode_idx": 18, "frame_idx": 51, "global_frame_idx": 5242, "task_index": 3}, {"db_idx": 5243, "episode_idx": 18, "frame_idx": 52, "global_frame_idx": 5243, "task_index": 3}, {"db_idx": 5244, "episode_idx": 18, "frame_idx": 53, "global_frame_idx": 5244, "task_index": 3}, {"db_idx": 
5245, "episode_idx": 18, "frame_idx": 54, "global_frame_idx": 5245, "task_index": 3}, {"db_idx": 5246, "episode_idx": 18, "frame_idx": 55, "global_frame_idx": 5246, "task_index": 3}, {"db_idx": 5247, "episode_idx": 18, "frame_idx": 56, "global_frame_idx": 5247, "task_index": 3}, {"db_idx": 5248, "episode_idx": 18, "frame_idx": 57, "global_frame_idx": 5248, "task_index": 3}, {"db_idx": 5249, "episode_idx": 18, "frame_idx": 58, "global_frame_idx": 5249, "task_index": 3}, {"db_idx": 5250, "episode_idx": 18, "frame_idx": 59, "global_frame_idx": 5250, "task_index": 3}, {"db_idx": 5251, "episode_idx": 18, "frame_idx": 60, "global_frame_idx": 5251, "task_index": 3}, {"db_idx": 5252, "episode_idx": 18, "frame_idx": 61, "global_frame_idx": 5252, "task_index": 3}, {"db_idx": 5253, "episode_idx": 18, "frame_idx": 62, "global_frame_idx": 5253, "task_index": 3}, {"db_idx": 5254, "episode_idx": 18, "frame_idx": 63, "global_frame_idx": 5254, "task_index": 3}, {"db_idx": 5255, "episode_idx": 18, "frame_idx": 64, "global_frame_idx": 5255, "task_index": 3}, {"db_idx": 5256, "episode_idx": 18, "frame_idx": 65, "global_frame_idx": 5256, "task_index": 3}, {"db_idx": 5257, "episode_idx": 18, "frame_idx": 66, "global_frame_idx": 5257, "task_index": 3}, {"db_idx": 5258, "episode_idx": 18, "frame_idx": 67, "global_frame_idx": 5258, "task_index": 3}, {"db_idx": 5259, "episode_idx": 18, "frame_idx": 68, "global_frame_idx": 5259, "task_index": 3}, {"db_idx": 5260, "episode_idx": 18, "frame_idx": 69, "global_frame_idx": 5260, "task_index": 3}, {"db_idx": 5261, "episode_idx": 18, "frame_idx": 70, "global_frame_idx": 5261, "task_index": 3}, {"db_idx": 5262, "episode_idx": 18, "frame_idx": 71, "global_frame_idx": 5262, "task_index": 3}, {"db_idx": 5263, "episode_idx": 18, "frame_idx": 72, "global_frame_idx": 5263, "task_index": 3}, {"db_idx": 5264, "episode_idx": 18, "frame_idx": 73, "global_frame_idx": 5264, "task_index": 3}, {"db_idx": 5265, "episode_idx": 18, "frame_idx": 74, 
"global_frame_idx": 5265, "task_index": 3}, {"db_idx": 5266, "episode_idx": 18, "frame_idx": 75, "global_frame_idx": 5266, "task_index": 3}, {"db_idx": 5267, "episode_idx": 18, "frame_idx": 76, "global_frame_idx": 5267, "task_index": 3}, {"db_idx": 5268, "episode_idx": 18, "frame_idx": 77, "global_frame_idx": 5268, "task_index": 3}, {"db_idx": 5269, "episode_idx": 18, "frame_idx": 78, "global_frame_idx": 5269, "task_index": 3}, {"db_idx": 5270, "episode_idx": 18, "frame_idx": 79, "global_frame_idx": 5270, "task_index": 3}, {"db_idx": 5271, "episode_idx": 18, "frame_idx": 80, "global_frame_idx": 5271, "task_index": 3}, {"db_idx": 5272, "episode_idx": 18, "frame_idx": 81, "global_frame_idx": 5272, "task_index": 3}, {"db_idx": 5273, "episode_idx": 18, "frame_idx": 82, "global_frame_idx": 5273, "task_index": 3}, {"db_idx": 5274, "episode_idx": 18, "frame_idx": 83, "global_frame_idx": 5274, "task_index": 3}, {"db_idx": 5275, "episode_idx": 18, "frame_idx": 84, "global_frame_idx": 5275, "task_index": 3}, {"db_idx": 5276, "episode_idx": 18, "frame_idx": 85, "global_frame_idx": 5276, "task_index": 3}, {"db_idx": 5277, "episode_idx": 18, "frame_idx": 86, "global_frame_idx": 5277, "task_index": 3}, {"db_idx": 5278, "episode_idx": 18, "frame_idx": 87, "global_frame_idx": 5278, "task_index": 3}, {"db_idx": 5279, "episode_idx": 18, "frame_idx": 88, "global_frame_idx": 5279, "task_index": 3}, {"db_idx": 5280, "episode_idx": 18, "frame_idx": 89, "global_frame_idx": 5280, "task_index": 3}, {"db_idx": 5281, "episode_idx": 18, "frame_idx": 90, "global_frame_idx": 5281, "task_index": 3}, {"db_idx": 5282, "episode_idx": 18, "frame_idx": 91, "global_frame_idx": 5282, "task_index": 3}, {"db_idx": 5283, "episode_idx": 18, "frame_idx": 92, "global_frame_idx": 5283, "task_index": 3}, {"db_idx": 5284, "episode_idx": 18, "frame_idx": 93, "global_frame_idx": 5284, "task_index": 3}, {"db_idx": 5285, "episode_idx": 18, "frame_idx": 94, "global_frame_idx": 5285, "task_index": 3}, {"db_idx": 
5286, "episode_idx": 18, "frame_idx": 95, "global_frame_idx": 5286, "task_index": 3}, {"db_idx": 5287, "episode_idx": 18, "frame_idx": 96, "global_frame_idx": 5287, "task_index": 3}, {"db_idx": 5288, "episode_idx": 18, "frame_idx": 97, "global_frame_idx": 5288, "task_index": 3}, {"db_idx": 5289, "episode_idx": 18, "frame_idx": 98, "global_frame_idx": 5289, "task_index": 3}, {"db_idx": 5290, "episode_idx": 18, "frame_idx": 99, "global_frame_idx": 5290, "task_index": 3}, {"db_idx": 5291, "episode_idx": 18, "frame_idx": 100, "global_frame_idx": 5291, "task_index": 3}, {"db_idx": 5292, "episode_idx": 18, "frame_idx": 101, "global_frame_idx": 5292, "task_index": 3}, {"db_idx": 5293, "episode_idx": 18, "frame_idx": 102, "global_frame_idx": 5293, "task_index": 3}, {"db_idx": 5294, "episode_idx": 18, "frame_idx": 103, "global_frame_idx": 5294, "task_index": 3}, {"db_idx": 5295, "episode_idx": 18, "frame_idx": 104, "global_frame_idx": 5295, "task_index": 3}, {"db_idx": 5296, "episode_idx": 18, "frame_idx": 105, "global_frame_idx": 5296, "task_index": 3}, {"db_idx": 5297, "episode_idx": 18, "frame_idx": 106, "global_frame_idx": 5297, "task_index": 3}, {"db_idx": 5298, "episode_idx": 18, "frame_idx": 107, "global_frame_idx": 5298, "task_index": 3}, {"db_idx": 5299, "episode_idx": 18, "frame_idx": 108, "global_frame_idx": 5299, "task_index": 3}, {"db_idx": 5300, "episode_idx": 18, "frame_idx": 109, "global_frame_idx": 5300, "task_index": 3}, {"db_idx": 5301, "episode_idx": 18, "frame_idx": 110, "global_frame_idx": 5301, "task_index": 3}, {"db_idx": 5302, "episode_idx": 18, "frame_idx": 111, "global_frame_idx": 5302, "task_index": 3}, {"db_idx": 5303, "episode_idx": 18, "frame_idx": 112, "global_frame_idx": 5303, "task_index": 3}, {"db_idx": 5304, "episode_idx": 18, "frame_idx": 113, "global_frame_idx": 5304, "task_index": 3}, {"db_idx": 5305, "episode_idx": 18, "frame_idx": 114, "global_frame_idx": 5305, "task_index": 3}, {"db_idx": 5306, "episode_idx": 18, "frame_idx": 115, 
"global_frame_idx": 5306, "task_index": 3}, {"db_idx": 5307, "episode_idx": 18, "frame_idx": 116, "global_frame_idx": 5307, "task_index": 3}, {"db_idx": 5308, "episode_idx": 18, "frame_idx": 117, "global_frame_idx": 5308, "task_index": 3}, {"db_idx": 5309, "episode_idx": 18, "frame_idx": 118, "global_frame_idx": 5309, "task_index": 3}, {"db_idx": 5310, "episode_idx": 18, "frame_idx": 119, "global_frame_idx": 5310, "task_index": 3}, {"db_idx": 5311, "episode_idx": 18, "frame_idx": 120, "global_frame_idx": 5311, "task_index": 3}, {"db_idx": 5312, "episode_idx": 18, "frame_idx": 121, "global_frame_idx": 5312, "task_index": 3}, {"db_idx": 5313, "episode_idx": 18, "frame_idx": 122, "global_frame_idx": 5313, "task_index": 3}, {"db_idx": 5314, "episode_idx": 18, "frame_idx": 123, "global_frame_idx": 5314, "task_index": 3}, {"db_idx": 5315, "episode_idx": 18, "frame_idx": 124, "global_frame_idx": 5315, "task_index": 3}, {"db_idx": 5316, "episode_idx": 18, "frame_idx": 125, "global_frame_idx": 5316, "task_index": 3}, {"db_idx": 5317, "episode_idx": 18, "frame_idx": 126, "global_frame_idx": 5317, "task_index": 3}, {"db_idx": 5318, "episode_idx": 18, "frame_idx": 127, "global_frame_idx": 5318, "task_index": 3}, {"db_idx": 5319, "episode_idx": 18, "frame_idx": 128, "global_frame_idx": 5319, "task_index": 3}, {"db_idx": 5320, "episode_idx": 18, "frame_idx": 129, "global_frame_idx": 5320, "task_index": 3}, {"db_idx": 5321, "episode_idx": 18, "frame_idx": 130, "global_frame_idx": 5321, "task_index": 3}, {"db_idx": 5322, "episode_idx": 18, "frame_idx": 131, "global_frame_idx": 5322, "task_index": 3}, {"db_idx": 5323, "episode_idx": 18, "frame_idx": 132, "global_frame_idx": 5323, "task_index": 3}, {"db_idx": 5324, "episode_idx": 18, "frame_idx": 133, "global_frame_idx": 5324, "task_index": 3}, {"db_idx": 5325, "episode_idx": 18, "frame_idx": 134, "global_frame_idx": 5325, "task_index": 3}, {"db_idx": 5326, "episode_idx": 18, "frame_idx": 135, "global_frame_idx": 5326, "task_index": 
3}, {"db_idx": 5327, "episode_idx": 18, "frame_idx": 136, "global_frame_idx": 5327, "task_index": 3}, {"db_idx": 5328, "episode_idx": 18, "frame_idx": 137, "global_frame_idx": 5328, "task_index": 3}, {"db_idx": 5329, "episode_idx": 18, "frame_idx": 138, "global_frame_idx": 5329, "task_index": 3}, {"db_idx": 5330, "episode_idx": 18, "frame_idx": 139, "global_frame_idx": 5330, "task_index": 3}, {"db_idx": 5331, "episode_idx": 18, "frame_idx": 140, "global_frame_idx": 5331, "task_index": 3}, {"db_idx": 5332, "episode_idx": 18, "frame_idx": 141, "global_frame_idx": 5332, "task_index": 3}, {"db_idx": 5333, "episode_idx": 18, "frame_idx": 142, "global_frame_idx": 5333, "task_index": 3}, {"db_idx": 5334, "episode_idx": 18, "frame_idx": 143, "global_frame_idx": 5334, "task_index": 3}, {"db_idx": 5335, "episode_idx": 18, "frame_idx": 144, "global_frame_idx": 5335, "task_index": 3}, {"db_idx": 5336, "episode_idx": 18, "frame_idx": 145, "global_frame_idx": 5336, "task_index": 3}, {"db_idx": 5337, "episode_idx": 18, "frame_idx": 146, "global_frame_idx": 5337, "task_index": 3}, {"db_idx": 5338, "episode_idx": 18, "frame_idx": 147, "global_frame_idx": 5338, "task_index": 3}, {"db_idx": 5339, "episode_idx": 18, "frame_idx": 148, "global_frame_idx": 5339, "task_index": 3}, {"db_idx": 5340, "episode_idx": 18, "frame_idx": 149, "global_frame_idx": 5340, "task_index": 3}, {"db_idx": 5341, "episode_idx": 18, "frame_idx": 150, "global_frame_idx": 5341, "task_index": 3}, {"db_idx": 5342, "episode_idx": 18, "frame_idx": 151, "global_frame_idx": 5342, "task_index": 3}, {"db_idx": 5343, "episode_idx": 18, "frame_idx": 152, "global_frame_idx": 5343, "task_index": 3}, {"db_idx": 5344, "episode_idx": 18, "frame_idx": 153, "global_frame_idx": 5344, "task_index": 3}, {"db_idx": 5345, "episode_idx": 18, "frame_idx": 154, "global_frame_idx": 5345, "task_index": 3}, {"db_idx": 5346, "episode_idx": 18, "frame_idx": 155, "global_frame_idx": 5346, "task_index": 3}, {"db_idx": 5347, "episode_idx": 18, 
"frame_idx": 156, "global_frame_idx": 5347, "task_index": 3}, {"db_idx": 5348, "episode_idx": 18, "frame_idx": 157, "global_frame_idx": 5348, "task_index": 3}, {"db_idx": 5349, "episode_idx": 18, "frame_idx": 158, "global_frame_idx": 5349, "task_index": 3}, {"db_idx": 5350, "episode_idx": 18, "frame_idx": 159, "global_frame_idx": 5350, "task_index": 3}, {"db_idx": 5351, "episode_idx": 18, "frame_idx": 160, "global_frame_idx": 5351, "task_index": 3}, {"db_idx": 5352, "episode_idx": 18, "frame_idx": 161, "global_frame_idx": 5352, "task_index": 3}, {"db_idx": 5353, "episode_idx": 18, "frame_idx": 162, "global_frame_idx": 5353, "task_index": 3}, {"db_idx": 5354, "episode_idx": 18, "frame_idx": 163, "global_frame_idx": 5354, "task_index": 3}, {"db_idx": 5355, "episode_idx": 18, "frame_idx": 164, "global_frame_idx": 5355, "task_index": 3}, {"db_idx": 5356, "episode_idx": 18, "frame_idx": 165, "global_frame_idx": 5356, "task_index": 3}, {"db_idx": 5357, "episode_idx": 18, "frame_idx": 166, "global_frame_idx": 5357, "task_index": 3}, {"db_idx": 5358, "episode_idx": 18, "frame_idx": 167, "global_frame_idx": 5358, "task_index": 3}, {"db_idx": 5359, "episode_idx": 18, "frame_idx": 168, "global_frame_idx": 5359, "task_index": 3}, {"db_idx": 5360, "episode_idx": 18, "frame_idx": 169, "global_frame_idx": 5360, "task_index": 3}, {"db_idx": 5361, "episode_idx": 18, "frame_idx": 170, "global_frame_idx": 5361, "task_index": 3}, {"db_idx": 5362, "episode_idx": 18, "frame_idx": 171, "global_frame_idx": 5362, "task_index": 3}, {"db_idx": 5363, "episode_idx": 18, "frame_idx": 172, "global_frame_idx": 5363, "task_index": 3}, {"db_idx": 5364, "episode_idx": 18, "frame_idx": 173, "global_frame_idx": 5364, "task_index": 3}, {"db_idx": 5365, "episode_idx": 18, "frame_idx": 174, "global_frame_idx": 5365, "task_index": 3}, {"db_idx": 5366, "episode_idx": 18, "frame_idx": 175, "global_frame_idx": 5366, "task_index": 3}, {"db_idx": 5367, "episode_idx": 18, "frame_idx": 176, "global_frame_idx": 
5367, "task_index": 3}, {"db_idx": 5368, "episode_idx": 18, "frame_idx": 177, "global_frame_idx": 5368, "task_index": 3}, {"db_idx": 5369, "episode_idx": 18, "frame_idx": 178, "global_frame_idx": 5369, "task_index": 3}, {"db_idx": 5370, "episode_idx": 18, "frame_idx": 179, "global_frame_idx": 5370, "task_index": 3}, {"db_idx": 5371, "episode_idx": 18, "frame_idx": 180, "global_frame_idx": 5371, "task_index": 3}, {"db_idx": 5372, "episode_idx": 18, "frame_idx": 181, "global_frame_idx": 5372, "task_index": 3}, {"db_idx": 5373, "episode_idx": 18, "frame_idx": 182, "global_frame_idx": 5373, "task_index": 3}, {"db_idx": 5374, "episode_idx": 18, "frame_idx": 183, "global_frame_idx": 5374, "task_index": 3}, {"db_idx": 5375, "episode_idx": 18, "frame_idx": 184, "global_frame_idx": 5375, "task_index": 3}, {"db_idx": 5376, "episode_idx": 18, "frame_idx": 185, "global_frame_idx": 5376, "task_index": 3}, {"db_idx": 5377, "episode_idx": 18, "frame_idx": 186, "global_frame_idx": 5377, "task_index": 3}, {"db_idx": 5378, "episode_idx": 18, "frame_idx": 187, "global_frame_idx": 5378, "task_index": 3}, {"db_idx": 5379, "episode_idx": 18, "frame_idx": 188, "global_frame_idx": 5379, "task_index": 3}, {"db_idx": 5380, "episode_idx": 18, "frame_idx": 189, "global_frame_idx": 5380, "task_index": 3}, {"db_idx": 5381, "episode_idx": 18, "frame_idx": 190, "global_frame_idx": 5381, "task_index": 3}, {"db_idx": 5382, "episode_idx": 18, "frame_idx": 191, "global_frame_idx": 5382, "task_index": 3}, {"db_idx": 5383, "episode_idx": 18, "frame_idx": 192, "global_frame_idx": 5383, "task_index": 3}, {"db_idx": 5384, "episode_idx": 18, "frame_idx": 193, "global_frame_idx": 5384, "task_index": 3}, {"db_idx": 5385, "episode_idx": 18, "frame_idx": 194, "global_frame_idx": 5385, "task_index": 3}, {"db_idx": 5386, "episode_idx": 18, "frame_idx": 195, "global_frame_idx": 5386, "task_index": 3}, {"db_idx": 5387, "episode_idx": 18, "frame_idx": 196, "global_frame_idx": 5387, "task_index": 3}, {"db_idx": 
5388, "episode_idx": 18, "frame_idx": 197, "global_frame_idx": 5388, "task_index": 3}, {"db_idx": 5389, "episode_idx": 18, "frame_idx": 198, "global_frame_idx": 5389, "task_index": 3}, {"db_idx": 5390, "episode_idx": 18, "frame_idx": 199, "global_frame_idx": 5390, "task_index": 3}, {"db_idx": 5391, "episode_idx": 18, "frame_idx": 200, "global_frame_idx": 5391, "task_index": 3}, {"db_idx": 5392, "episode_idx": 18, "frame_idx": 201, "global_frame_idx": 5392, "task_index": 3}, {"db_idx": 5393, "episode_idx": 18, "frame_idx": 202, "global_frame_idx": 5393, "task_index": 3}, {"db_idx": 5394, "episode_idx": 18, "frame_idx": 203, "global_frame_idx": 5394, "task_index": 3}, {"db_idx": 5395, "episode_idx": 18, "frame_idx": 204, "global_frame_idx": 5395, "task_index": 3}, {"db_idx": 5396, "episode_idx": 18, "frame_idx": 205, "global_frame_idx": 5396, "task_index": 3}, {"db_idx": 5397, "episode_idx": 18, "frame_idx": 206, "global_frame_idx": 5397, "task_index": 3}, {"db_idx": 5398, "episode_idx": 18, "frame_idx": 207, "global_frame_idx": 5398, "task_index": 3}, {"db_idx": 5399, "episode_idx": 18, "frame_idx": 208, "global_frame_idx": 5399, "task_index": 3}, {"db_idx": 5400, "episode_idx": 18, "frame_idx": 209, "global_frame_idx": 5400, "task_index": 3}, {"db_idx": 5401, "episode_idx": 18, "frame_idx": 210, "global_frame_idx": 5401, "task_index": 3}, {"db_idx": 5402, "episode_idx": 18, "frame_idx": 211, "global_frame_idx": 5402, "task_index": 3}, {"db_idx": 5403, "episode_idx": 18, "frame_idx": 212, "global_frame_idx": 5403, "task_index": 3}, {"db_idx": 5404, "episode_idx": 18, "frame_idx": 213, "global_frame_idx": 5404, "task_index": 3}, {"db_idx": 5405, "episode_idx": 18, "frame_idx": 214, "global_frame_idx": 5405, "task_index": 3}, {"db_idx": 5406, "episode_idx": 18, "frame_idx": 215, "global_frame_idx": 5406, "task_index": 3}, {"db_idx": 5407, "episode_idx": 18, "frame_idx": 216, "global_frame_idx": 5407, "task_index": 3}, {"db_idx": 5408, "episode_idx": 18, "frame_idx": 
217, "global_frame_idx": 5408, "task_index": 3}, {"db_idx": 5409, "episode_idx": 18, "frame_idx": 218, "global_frame_idx": 5409, "task_index": 3}, {"db_idx": 5410, "episode_idx": 18, "frame_idx": 219, "global_frame_idx": 5410, "task_index": 3}, {"db_idx": 5411, "episode_idx": 18, "frame_idx": 220, "global_frame_idx": 5411, "task_index": 3}, {"db_idx": 5412, "episode_idx": 18, "frame_idx": 221, "global_frame_idx": 5412, "task_index": 3}, {"db_idx": 5413, "episode_idx": 18, "frame_idx": 222, "global_frame_idx": 5413, "task_index": 3}, {"db_idx": 5414, "episode_idx": 18, "frame_idx": 223, "global_frame_idx": 5414, "task_index": 3}, {"db_idx": 5415, "episode_idx": 18, "frame_idx": 224, "global_frame_idx": 5415, "task_index": 3}, {"db_idx": 5416, "episode_idx": 18, "frame_idx": 225, "global_frame_idx": 5416, "task_index": 3}, {"db_idx": 5417, "episode_idx": 18, "frame_idx": 226, "global_frame_idx": 5417, "task_index": 3}, {"db_idx": 5418, "episode_idx": 18, "frame_idx": 227, "global_frame_idx": 5418, "task_index": 3}, {"db_idx": 5419, "episode_idx": 18, "frame_idx": 228, "global_frame_idx": 5419, "task_index": 3}, {"db_idx": 5420, "episode_idx": 18, "frame_idx": 229, "global_frame_idx": 5420, "task_index": 3}, {"db_idx": 5421, "episode_idx": 18, "frame_idx": 230, "global_frame_idx": 5421, "task_index": 3}, {"db_idx": 5422, "episode_idx": 19, "frame_idx": 0, "global_frame_idx": 5422, "task_index": 3}, {"db_idx": 5423, "episode_idx": 19, "frame_idx": 1, "global_frame_idx": 5423, "task_index": 3}, {"db_idx": 5424, "episode_idx": 19, "frame_idx": 2, "global_frame_idx": 5424, "task_index": 3}, {"db_idx": 5425, "episode_idx": 19, "frame_idx": 3, "global_frame_idx": 5425, "task_index": 3}, {"db_idx": 5426, "episode_idx": 19, "frame_idx": 4, "global_frame_idx": 5426, "task_index": 3}, {"db_idx": 5427, "episode_idx": 19, "frame_idx": 5, "global_frame_idx": 5427, "task_index": 3}, {"db_idx": 5428, "episode_idx": 19, "frame_idx": 6, "global_frame_idx": 5428, "task_index": 3}, 
{"db_idx": 5429, "episode_idx": 19, "frame_idx": 7, "global_frame_idx": 5429, "task_index": 3}, {"db_idx": 5430, "episode_idx": 19, "frame_idx": 8, "global_frame_idx": 5430, "task_index": 3}, {"db_idx": 5431, "episode_idx": 19, "frame_idx": 9, "global_frame_idx": 5431, "task_index": 3}, {"db_idx": 5432, "episode_idx": 19, "frame_idx": 10, "global_frame_idx": 5432, "task_index": 3}, {"db_idx": 5433, "episode_idx": 19, "frame_idx": 11, "global_frame_idx": 5433, "task_index": 3}, {"db_idx": 5434, "episode_idx": 19, "frame_idx": 12, "global_frame_idx": 5434, "task_index": 3}, {"db_idx": 5435, "episode_idx": 19, "frame_idx": 13, "global_frame_idx": 5435, "task_index": 3}, {"db_idx": 5436, "episode_idx": 19, "frame_idx": 14, "global_frame_idx": 5436, "task_index": 3}, {"db_idx": 5437, "episode_idx": 19, "frame_idx": 15, "global_frame_idx": 5437, "task_index": 3}, {"db_idx": 5438, "episode_idx": 19, "frame_idx": 16, "global_frame_idx": 5438, "task_index": 3}, {"db_idx": 5439, "episode_idx": 19, "frame_idx": 17, "global_frame_idx": 5439, "task_index": 3}, {"db_idx": 5440, "episode_idx": 19, "frame_idx": 18, "global_frame_idx": 5440, "task_index": 3}, {"db_idx": 5441, "episode_idx": 19, "frame_idx": 19, "global_frame_idx": 5441, "task_index": 3}, {"db_idx": 5442, "episode_idx": 19, "frame_idx": 20, "global_frame_idx": 5442, "task_index": 3}, {"db_idx": 5443, "episode_idx": 19, "frame_idx": 21, "global_frame_idx": 5443, "task_index": 3}, {"db_idx": 5444, "episode_idx": 19, "frame_idx": 22, "global_frame_idx": 5444, "task_index": 3}, {"db_idx": 5445, "episode_idx": 19, "frame_idx": 23, "global_frame_idx": 5445, "task_index": 3}, {"db_idx": 5446, "episode_idx": 19, "frame_idx": 24, "global_frame_idx": 5446, "task_index": 3}, {"db_idx": 5447, "episode_idx": 19, "frame_idx": 25, "global_frame_idx": 5447, "task_index": 3}, {"db_idx": 5448, "episode_idx": 19, "frame_idx": 26, "global_frame_idx": 5448, "task_index": 3}, {"db_idx": 5449, "episode_idx": 19, "frame_idx": 27, 
"global_frame_idx": 5449, "task_index": 3}, {"db_idx": 5450, "episode_idx": 19, "frame_idx": 28, "global_frame_idx": 5450, "task_index": 3}, {"db_idx": 5451, "episode_idx": 19, "frame_idx": 29, "global_frame_idx": 5451, "task_index": 3}, {"db_idx": 5452, "episode_idx": 19, "frame_idx": 30, "global_frame_idx": 5452, "task_index": 3}, {"db_idx": 5453, "episode_idx": 19, "frame_idx": 31, "global_frame_idx": 5453, "task_index": 3}, {"db_idx": 5454, "episode_idx": 19, "frame_idx": 32, "global_frame_idx": 5454, "task_index": 3}, {"db_idx": 5455, "episode_idx": 19, "frame_idx": 33, "global_frame_idx": 5455, "task_index": 3}, {"db_idx": 5456, "episode_idx": 19, "frame_idx": 34, "global_frame_idx": 5456, "task_index": 3}, {"db_idx": 5457, "episode_idx": 19, "frame_idx": 35, "global_frame_idx": 5457, "task_index": 3}, {"db_idx": 5458, "episode_idx": 19, "frame_idx": 36, "global_frame_idx": 5458, "task_index": 3}, {"db_idx": 5459, "episode_idx": 19, "frame_idx": 37, "global_frame_idx": 5459, "task_index": 3}, {"db_idx": 5460, "episode_idx": 19, "frame_idx": 38, "global_frame_idx": 5460, "task_index": 3}, {"db_idx": 5461, "episode_idx": 19, "frame_idx": 39, "global_frame_idx": 5461, "task_index": 3}, {"db_idx": 5462, "episode_idx": 19, "frame_idx": 40, "global_frame_idx": 5462, "task_index": 3}, {"db_idx": 5463, "episode_idx": 19, "frame_idx": 41, "global_frame_idx": 5463, "task_index": 3}, {"db_idx": 5464, "episode_idx": 19, "frame_idx": 42, "global_frame_idx": 5464, "task_index": 3}, {"db_idx": 5465, "episode_idx": 19, "frame_idx": 43, "global_frame_idx": 5465, "task_index": 3}, {"db_idx": 5466, "episode_idx": 19, "frame_idx": 44, "global_frame_idx": 5466, "task_index": 3}, {"db_idx": 5467, "episode_idx": 19, "frame_idx": 45, "global_frame_idx": 5467, "task_index": 3}, {"db_idx": 5468, "episode_idx": 19, "frame_idx": 46, "global_frame_idx": 5468, "task_index": 3}, {"db_idx": 5469, "episode_idx": 19, "frame_idx": 47, "global_frame_idx": 5469, "task_index": 3}, {"db_idx": 
5470, "episode_idx": 19, "frame_idx": 48, "global_frame_idx": 5470, "task_index": 3}, {"db_idx": 5471, "episode_idx": 19, "frame_idx": 49, "global_frame_idx": 5471, "task_index": 3}, {"db_idx": 5472, "episode_idx": 19, "frame_idx": 50, "global_frame_idx": 5472, "task_index": 3}, {"db_idx": 5473, "episode_idx": 19, "frame_idx": 51, "global_frame_idx": 5473, "task_index": 3}, {"db_idx": 5474, "episode_idx": 19, "frame_idx": 52, "global_frame_idx": 5474, "task_index": 3}, {"db_idx": 5475, "episode_idx": 19, "frame_idx": 53, "global_frame_idx": 5475, "task_index": 3}, {"db_idx": 5476, "episode_idx": 19, "frame_idx": 54, "global_frame_idx": 5476, "task_index": 3}, {"db_idx": 5477, "episode_idx": 19, "frame_idx": 55, "global_frame_idx": 5477, "task_index": 3}, {"db_idx": 5478, "episode_idx": 19, "frame_idx": 56, "global_frame_idx": 5478, "task_index": 3}, {"db_idx": 5479, "episode_idx": 19, "frame_idx": 57, "global_frame_idx": 5479, "task_index": 3}, {"db_idx": 5480, "episode_idx": 19, "frame_idx": 58, "global_frame_idx": 5480, "task_index": 3}, {"db_idx": 5481, "episode_idx": 19, "frame_idx": 59, "global_frame_idx": 5481, "task_index": 3}, {"db_idx": 5482, "episode_idx": 19, "frame_idx": 60, "global_frame_idx": 5482, "task_index": 3}, {"db_idx": 5483, "episode_idx": 19, "frame_idx": 61, "global_frame_idx": 5483, "task_index": 3}, {"db_idx": 5484, "episode_idx": 19, "frame_idx": 62, "global_frame_idx": 5484, "task_index": 3}, {"db_idx": 5485, "episode_idx": 19, "frame_idx": 63, "global_frame_idx": 5485, "task_index": 3}, {"db_idx": 5486, "episode_idx": 19, "frame_idx": 64, "global_frame_idx": 5486, "task_index": 3}, {"db_idx": 5487, "episode_idx": 19, "frame_idx": 65, "global_frame_idx": 5487, "task_index": 3}, {"db_idx": 5488, "episode_idx": 19, "frame_idx": 66, "global_frame_idx": 5488, "task_index": 3}, {"db_idx": 5489, "episode_idx": 19, "frame_idx": 67, "global_frame_idx": 5489, "task_index": 3}, {"db_idx": 5490, "episode_idx": 19, "frame_idx": 68, 
"global_frame_idx": 5490, "task_index": 3}, {"db_idx": 5491, "episode_idx": 19, "frame_idx": 69, "global_frame_idx": 5491, "task_index": 3}, {"db_idx": 5492, "episode_idx": 19, "frame_idx": 70, "global_frame_idx": 5492, "task_index": 3}, {"db_idx": 5493, "episode_idx": 19, "frame_idx": 71, "global_frame_idx": 5493, "task_index": 3}, {"db_idx": 5494, "episode_idx": 19, "frame_idx": 72, "global_frame_idx": 5494, "task_index": 3}, {"db_idx": 5495, "episode_idx": 19, "frame_idx": 73, "global_frame_idx": 5495, "task_index": 3}, {"db_idx": 5496, "episode_idx": 19, "frame_idx": 74, "global_frame_idx": 5496, "task_index": 3}, {"db_idx": 5497, "episode_idx": 19, "frame_idx": 75, "global_frame_idx": 5497, "task_index": 3}, {"db_idx": 5498, "episode_idx": 19, "frame_idx": 76, "global_frame_idx": 5498, "task_index": 3}, {"db_idx": 5499, "episode_idx": 19, "frame_idx": 77, "global_frame_idx": 5499, "task_index": 3}, {"db_idx": 5500, "episode_idx": 19, "frame_idx": 78, "global_frame_idx": 5500, "task_index": 3}, {"db_idx": 5501, "episode_idx": 19, "frame_idx": 79, "global_frame_idx": 5501, "task_index": 3}, {"db_idx": 5502, "episode_idx": 19, "frame_idx": 80, "global_frame_idx": 5502, "task_index": 3}, {"db_idx": 5503, "episode_idx": 19, "frame_idx": 81, "global_frame_idx": 5503, "task_index": 3}, {"db_idx": 5504, "episode_idx": 19, "frame_idx": 82, "global_frame_idx": 5504, "task_index": 3}, {"db_idx": 5505, "episode_idx": 19, "frame_idx": 83, "global_frame_idx": 5505, "task_index": 3}, {"db_idx": 5506, "episode_idx": 19, "frame_idx": 84, "global_frame_idx": 5506, "task_index": 3}, {"db_idx": 5507, "episode_idx": 19, "frame_idx": 85, "global_frame_idx": 5507, "task_index": 3}, {"db_idx": 5508, "episode_idx": 19, "frame_idx": 86, "global_frame_idx": 5508, "task_index": 3}, {"db_idx": 5509, "episode_idx": 19, "frame_idx": 87, "global_frame_idx": 5509, "task_index": 3}, {"db_idx": 5510, "episode_idx": 19, "frame_idx": 88, "global_frame_idx": 5510, "task_index": 3}, {"db_idx": 
5511, "episode_idx": 19, "frame_idx": 89, "global_frame_idx": 5511, "task_index": 3}, {"db_idx": 5512, "episode_idx": 19, "frame_idx": 90, "global_frame_idx": 5512, "task_index": 3}, {"db_idx": 5513, "episode_idx": 19, "frame_idx": 91, "global_frame_idx": 5513, "task_index": 3}, {"db_idx": 5514, "episode_idx": 19, "frame_idx": 92, "global_frame_idx": 5514, "task_index": 3}, {"db_idx": 5515, "episode_idx": 19, "frame_idx": 93, "global_frame_idx": 5515, "task_index": 3}, {"db_idx": 5516, "episode_idx": 19, "frame_idx": 94, "global_frame_idx": 5516, "task_index": 3}, {"db_idx": 5517, "episode_idx": 19, "frame_idx": 95, "global_frame_idx": 5517, "task_index": 3}, {"db_idx": 5518, "episode_idx": 19, "frame_idx": 96, "global_frame_idx": 5518, "task_index": 3}, {"db_idx": 5519, "episode_idx": 19, "frame_idx": 97, "global_frame_idx": 5519, "task_index": 3}, {"db_idx": 5520, "episode_idx": 19, "frame_idx": 98, "global_frame_idx": 5520, "task_index": 3}, {"db_idx": 5521, "episode_idx": 19, "frame_idx": 99, "global_frame_idx": 5521, "task_index": 3}, {"db_idx": 5522, "episode_idx": 19, "frame_idx": 100, "global_frame_idx": 5522, "task_index": 3}, {"db_idx": 5523, "episode_idx": 19, "frame_idx": 101, "global_frame_idx": 5523, "task_index": 3}, {"db_idx": 5524, "episode_idx": 19, "frame_idx": 102, "global_frame_idx": 5524, "task_index": 3}, {"db_idx": 5525, "episode_idx": 19, "frame_idx": 103, "global_frame_idx": 5525, "task_index": 3}, {"db_idx": 5526, "episode_idx": 19, "frame_idx": 104, "global_frame_idx": 5526, "task_index": 3}, {"db_idx": 5527, "episode_idx": 19, "frame_idx": 105, "global_frame_idx": 5527, "task_index": 3}, {"db_idx": 5528, "episode_idx": 19, "frame_idx": 106, "global_frame_idx": 5528, "task_index": 3}, {"db_idx": 5529, "episode_idx": 19, "frame_idx": 107, "global_frame_idx": 5529, "task_index": 3}, {"db_idx": 5530, "episode_idx": 19, "frame_idx": 108, "global_frame_idx": 5530, "task_index": 3}, {"db_idx": 5531, "episode_idx": 19, "frame_idx": 109, 
"global_frame_idx": 5531, "task_index": 3}, {"db_idx": 5532, "episode_idx": 19, "frame_idx": 110, "global_frame_idx": 5532, "task_index": 3}, {"db_idx": 5533, "episode_idx": 19, "frame_idx": 111, "global_frame_idx": 5533, "task_index": 3}, {"db_idx": 5534, "episode_idx": 19, "frame_idx": 112, "global_frame_idx": 5534, "task_index": 3}, {"db_idx": 5535, "episode_idx": 19, "frame_idx": 113, "global_frame_idx": 5535, "task_index": 3}, {"db_idx": 5536, "episode_idx": 19, "frame_idx": 114, "global_frame_idx": 5536, "task_index": 3}, {"db_idx": 5537, "episode_idx": 19, "frame_idx": 115, "global_frame_idx": 5537, "task_index": 3}, {"db_idx": 5538, "episode_idx": 19, "frame_idx": 116, "global_frame_idx": 5538, "task_index": 3}, {"db_idx": 5539, "episode_idx": 19, "frame_idx": 117, "global_frame_idx": 5539, "task_index": 3}, {"db_idx": 5540, "episode_idx": 19, "frame_idx": 118, "global_frame_idx": 5540, "task_index": 3}, {"db_idx": 5541, "episode_idx": 19, "frame_idx": 119, "global_frame_idx": 5541, "task_index": 3}, {"db_idx": 5542, "episode_idx": 19, "frame_idx": 120, "global_frame_idx": 5542, "task_index": 3}, {"db_idx": 5543, "episode_idx": 19, "frame_idx": 121, "global_frame_idx": 5543, "task_index": 3}, {"db_idx": 5544, "episode_idx": 19, "frame_idx": 122, "global_frame_idx": 5544, "task_index": 3}, {"db_idx": 5545, "episode_idx": 19, "frame_idx": 123, "global_frame_idx": 5545, "task_index": 3}, {"db_idx": 5546, "episode_idx": 19, "frame_idx": 124, "global_frame_idx": 5546, "task_index": 3}, {"db_idx": 5547, "episode_idx": 19, "frame_idx": 125, "global_frame_idx": 5547, "task_index": 3}, {"db_idx": 5548, "episode_idx": 19, "frame_idx": 126, "global_frame_idx": 5548, "task_index": 3}, {"db_idx": 5549, "episode_idx": 19, "frame_idx": 127, "global_frame_idx": 5549, "task_index": 3}, {"db_idx": 5550, "episode_idx": 19, "frame_idx": 128, "global_frame_idx": 5550, "task_index": 3}, {"db_idx": 5551, "episode_idx": 19, "frame_idx": 129, "global_frame_idx": 5551, "task_index": 
3}, {"db_idx": 5552, "episode_idx": 19, "frame_idx": 130, "global_frame_idx": 5552, "task_index": 3}, {"db_idx": 5553, "episode_idx": 19, "frame_idx": 131, "global_frame_idx": 5553, "task_index": 3}, {"db_idx": 5554, "episode_idx": 19, "frame_idx": 132, "global_frame_idx": 5554, "task_index": 3}, {"db_idx": 5555, "episode_idx": 19, "frame_idx": 133, "global_frame_idx": 5555, "task_index": 3}, {"db_idx": 5556, "episode_idx": 19, "frame_idx": 134, "global_frame_idx": 5556, "task_index": 3}, {"db_idx": 5557, "episode_idx": 19, "frame_idx": 135, "global_frame_idx": 5557, "task_index": 3}, {"db_idx": 5558, "episode_idx": 19, "frame_idx": 136, "global_frame_idx": 5558, "task_index": 3}, {"db_idx": 5559, "episode_idx": 19, "frame_idx": 137, "global_frame_idx": 5559, "task_index": 3}, {"db_idx": 5560, "episode_idx": 19, "frame_idx": 138, "global_frame_idx": 5560, "task_index": 3}, {"db_idx": 5561, "episode_idx": 19, "frame_idx": 139, "global_frame_idx": 5561, "task_index": 3}, {"db_idx": 5562, "episode_idx": 19, "frame_idx": 140, "global_frame_idx": 5562, "task_index": 3}, {"db_idx": 5563, "episode_idx": 19, "frame_idx": 141, "global_frame_idx": 5563, "task_index": 3}, {"db_idx": 5564, "episode_idx": 19, "frame_idx": 142, "global_frame_idx": 5564, "task_index": 3}, {"db_idx": 5565, "episode_idx": 19, "frame_idx": 143, "global_frame_idx": 5565, "task_index": 3}, {"db_idx": 5566, "episode_idx": 19, "frame_idx": 144, "global_frame_idx": 5566, "task_index": 3}, {"db_idx": 5567, "episode_idx": 19, "frame_idx": 145, "global_frame_idx": 5567, "task_index": 3}, {"db_idx": 5568, "episode_idx": 19, "frame_idx": 146, "global_frame_idx": 5568, "task_index": 3}, {"db_idx": 5569, "episode_idx": 19, "frame_idx": 147, "global_frame_idx": 5569, "task_index": 3}, {"db_idx": 5570, "episode_idx": 19, "frame_idx": 148, "global_frame_idx": 5570, "task_index": 3}, {"db_idx": 5571, "episode_idx": 19, "frame_idx": 149, "global_frame_idx": 5571, "task_index": 3}, {"db_idx": 5572, "episode_idx": 19, 
"frame_idx": 150, "global_frame_idx": 5572, "task_index": 3}, {"db_idx": 5573, "episode_idx": 19, "frame_idx": 151, "global_frame_idx": 5573, "task_index": 3}, {"db_idx": 5574, "episode_idx": 19, "frame_idx": 152, "global_frame_idx": 5574, "task_index": 3}, {"db_idx": 5575, "episode_idx": 19, "frame_idx": 153, "global_frame_idx": 5575, "task_index": 3}, {"db_idx": 5576, "episode_idx": 19, "frame_idx": 154, "global_frame_idx": 5576, "task_index": 3}, {"db_idx": 5577, "episode_idx": 19, "frame_idx": 155, "global_frame_idx": 5577, "task_index": 3}, {"db_idx": 5578, "episode_idx": 19, "frame_idx": 156, "global_frame_idx": 5578, "task_index": 3}, {"db_idx": 5579, "episode_idx": 19, "frame_idx": 157, "global_frame_idx": 5579, "task_index": 3}, {"db_idx": 5580, "episode_idx": 19, "frame_idx": 158, "global_frame_idx": 5580, "task_index": 3}, {"db_idx": 5581, "episode_idx": 19, "frame_idx": 159, "global_frame_idx": 5581, "task_index": 3}, {"db_idx": 5582, "episode_idx": 19, "frame_idx": 160, "global_frame_idx": 5582, "task_index": 3}, {"db_idx": 5583, "episode_idx": 19, "frame_idx": 161, "global_frame_idx": 5583, "task_index": 3}, {"db_idx": 5584, "episode_idx": 19, "frame_idx": 162, "global_frame_idx": 5584, "task_index": 3}, {"db_idx": 5585, "episode_idx": 19, "frame_idx": 163, "global_frame_idx": 5585, "task_index": 3}, {"db_idx": 5586, "episode_idx": 19, "frame_idx": 164, "global_frame_idx": 5586, "task_index": 3}, {"db_idx": 5587, "episode_idx": 19, "frame_idx": 165, "global_frame_idx": 5587, "task_index": 3}, {"db_idx": 5588, "episode_idx": 19, "frame_idx": 166, "global_frame_idx": 5588, "task_index": 3}, {"db_idx": 5589, "episode_idx": 19, "frame_idx": 167, "global_frame_idx": 5589, "task_index": 3}, {"db_idx": 5590, "episode_idx": 19, "frame_idx": 168, "global_frame_idx": 5590, "task_index": 3}, {"db_idx": 5591, "episode_idx": 19, "frame_idx": 169, "global_frame_idx": 5591, "task_index": 3}, {"db_idx": 5592, "episode_idx": 19, "frame_idx": 170, "global_frame_idx": 
5592, "task_index": 3}, {"db_idx": 5593, "episode_idx": 19, "frame_idx": 171, "global_frame_idx": 5593, "task_index": 3}, {"db_idx": 5594, "episode_idx": 19, "frame_idx": 172, "global_frame_idx": 5594, "task_index": 3}, {"db_idx": 5595, "episode_idx": 19, "frame_idx": 173, "global_frame_idx": 5595, "task_index": 3}, {"db_idx": 5596, "episode_idx": 19, "frame_idx": 174, "global_frame_idx": 5596, "task_index": 3}, {"db_idx": 5597, "episode_idx": 19, "frame_idx": 175, "global_frame_idx": 5597, "task_index": 3}, {"db_idx": 5598, "episode_idx": 19, "frame_idx": 176, "global_frame_idx": 5598, "task_index": 3}, {"db_idx": 5599, "episode_idx": 19, "frame_idx": 177, "global_frame_idx": 5599, "task_index": 3}, {"db_idx": 5600, "episode_idx": 19, "frame_idx": 178, "global_frame_idx": 5600, "task_index": 3}, {"db_idx": 5601, "episode_idx": 19, "frame_idx": 179, "global_frame_idx": 5601, "task_index": 3}, {"db_idx": 5602, "episode_idx": 19, "frame_idx": 180, "global_frame_idx": 5602, "task_index": 3}, {"db_idx": 5603, "episode_idx": 19, "frame_idx": 181, "global_frame_idx": 5603, "task_index": 3}, {"db_idx": 5604, "episode_idx": 19, "frame_idx": 182, "global_frame_idx": 5604, "task_index": 3}, {"db_idx": 5605, "episode_idx": 19, "frame_idx": 183, "global_frame_idx": 5605, "task_index": 3}, {"db_idx": 5606, "episode_idx": 19, "frame_idx": 184, "global_frame_idx": 5606, "task_index": 3}, {"db_idx": 5607, "episode_idx": 19, "frame_idx": 185, "global_frame_idx": 5607, "task_index": 3}, {"db_idx": 5608, "episode_idx": 19, "frame_idx": 186, "global_frame_idx": 5608, "task_index": 3}, {"db_idx": 5609, "episode_idx": 19, "frame_idx": 187, "global_frame_idx": 5609, "task_index": 3}, {"db_idx": 5610, "episode_idx": 19, "frame_idx": 188, "global_frame_idx": 5610, "task_index": 3}, {"db_idx": 5611, "episode_idx": 19, "frame_idx": 189, "global_frame_idx": 5611, "task_index": 3}, {"db_idx": 5612, "episode_idx": 19, "frame_idx": 190, "global_frame_idx": 5612, "task_index": 3}, {"db_idx": 
5613, "episode_idx": 19, "frame_idx": 191, "global_frame_idx": 5613, "task_index": 3}, {"db_idx": 5614, "episode_idx": 19, "frame_idx": 192, "global_frame_idx": 5614, "task_index": 3}, {"db_idx": 5615, "episode_idx": 19, "frame_idx": 193, "global_frame_idx": 5615, "task_index": 3}, {"db_idx": 5616, "episode_idx": 19, "frame_idx": 194, "global_frame_idx": 5616, "task_index": 3}, {"db_idx": 5617, "episode_idx": 19, "frame_idx": 195, "global_frame_idx": 5617, "task_index": 3}, {"db_idx": 5618, "episode_idx": 19, "frame_idx": 196, "global_frame_idx": 5618, "task_index": 3}, {"db_idx": 5619, "episode_idx": 19, "frame_idx": 197, "global_frame_idx": 5619, "task_index": 3}, {"db_idx": 5620, "episode_idx": 19, "frame_idx": 198, "global_frame_idx": 5620, "task_index": 3}, {"db_idx": 5621, "episode_idx": 19, "frame_idx": 199, "global_frame_idx": 5621, "task_index": 3}, {"db_idx": 5622, "episode_idx": 19, "frame_idx": 200, "global_frame_idx": 5622, "task_index": 3}, {"db_idx": 5623, "episode_idx": 19, "frame_idx": 201, "global_frame_idx": 5623, "task_index": 3}, {"db_idx": 5624, "episode_idx": 19, "frame_idx": 202, "global_frame_idx": 5624, "task_index": 3}, {"db_idx": 5625, "episode_idx": 19, "frame_idx": 203, "global_frame_idx": 5625, "task_index": 3}, {"db_idx": 5626, "episode_idx": 19, "frame_idx": 204, "global_frame_idx": 5626, "task_index": 3}, {"db_idx": 5627, "episode_idx": 19, "frame_idx": 205, "global_frame_idx": 5627, "task_index": 3}, {"db_idx": 5628, "episode_idx": 19, "frame_idx": 206, "global_frame_idx": 5628, "task_index": 3}, {"db_idx": 5629, "episode_idx": 19, "frame_idx": 207, "global_frame_idx": 5629, "task_index": 3}, {"db_idx": 5630, "episode_idx": 19, "frame_idx": 208, "global_frame_idx": 5630, "task_index": 3}, {"db_idx": 5631, "episode_idx": 19, "frame_idx": 209, "global_frame_idx": 5631, "task_index": 3}, {"db_idx": 5632, "episode_idx": 19, "frame_idx": 210, "global_frame_idx": 5632, "task_index": 3}, {"db_idx": 5633, "episode_idx": 19, "frame_idx": 
211, "global_frame_idx": 5633, "task_index": 3}, {"db_idx": 5634, "episode_idx": 19, "frame_idx": 212, "global_frame_idx": 5634, "task_index": 3}, {"db_idx": 5635, "episode_idx": 19, "frame_idx": 213, "global_frame_idx": 5635, "task_index": 3}, {"db_idx": 5636, "episode_idx": 19, "frame_idx": 214, "global_frame_idx": 5636, "task_index": 3}, {"db_idx": 5637, "episode_idx": 19, "frame_idx": 215, "global_frame_idx": 5637, "task_index": 3}, {"db_idx": 5638, "episode_idx": 19, "frame_idx": 216, "global_frame_idx": 5638, "task_index": 3}, {"db_idx": 5639, "episode_idx": 19, "frame_idx": 217, "global_frame_idx": 5639, "task_index": 3}, {"db_idx": 5640, "episode_idx": 19, "frame_idx": 218, "global_frame_idx": 5640, "task_index": 3}, {"db_idx": 5641, "episode_idx": 19, "frame_idx": 219, "global_frame_idx": 5641, "task_index": 3}, {"db_idx": 5642, "episode_idx": 19, "frame_idx": 220, "global_frame_idx": 5642, "task_index": 3}, {"db_idx": 5643, "episode_idx": 19, "frame_idx": 221, "global_frame_idx": 5643, "task_index": 3}, {"db_idx": 5644, "episode_idx": 19, "frame_idx": 222, "global_frame_idx": 5644, "task_index": 3}, {"db_idx": 5645, "episode_idx": 20, "frame_idx": 0, "global_frame_idx": 5645, "task_index": 4}, {"db_idx": 5646, "episode_idx": 20, "frame_idx": 1, "global_frame_idx": 5646, "task_index": 4}, {"db_idx": 5647, "episode_idx": 20, "frame_idx": 2, "global_frame_idx": 5647, "task_index": 4}, {"db_idx": 5648, "episode_idx": 20, "frame_idx": 3, "global_frame_idx": 5648, "task_index": 4}, {"db_idx": 5649, "episode_idx": 20, "frame_idx": 4, "global_frame_idx": 5649, "task_index": 4}, {"db_idx": 5650, "episode_idx": 20, "frame_idx": 5, "global_frame_idx": 5650, "task_index": 4}, {"db_idx": 5651, "episode_idx": 20, "frame_idx": 6, "global_frame_idx": 5651, "task_index": 4}, {"db_idx": 5652, "episode_idx": 20, "frame_idx": 7, "global_frame_idx": 5652, "task_index": 4}, {"db_idx": 5653, "episode_idx": 20, "frame_idx": 8, "global_frame_idx": 5653, "task_index": 4}, 
{"db_idx": 5654, "episode_idx": 20, "frame_idx": 9, "global_frame_idx": 5654, "task_index": 4}, {"db_idx": 5655, "episode_idx": 20, "frame_idx": 10, "global_frame_idx": 5655, "task_index": 4}, {"db_idx": 5656, "episode_idx": 20, "frame_idx": 11, "global_frame_idx": 5656, "task_index": 4}, {"db_idx": 5657, "episode_idx": 20, "frame_idx": 12, "global_frame_idx": 5657, "task_index": 4}, {"db_idx": 5658, "episode_idx": 20, "frame_idx": 13, "global_frame_idx": 5658, "task_index": 4}, {"db_idx": 5659, "episode_idx": 20, "frame_idx": 14, "global_frame_idx": 5659, "task_index": 4}, {"db_idx": 5660, "episode_idx": 20, "frame_idx": 15, "global_frame_idx": 5660, "task_index": 4}, {"db_idx": 5661, "episode_idx": 20, "frame_idx": 16, "global_frame_idx": 5661, "task_index": 4}, {"db_idx": 5662, "episode_idx": 20, "frame_idx": 17, "global_frame_idx": 5662, "task_index": 4}, {"db_idx": 5663, "episode_idx": 20, "frame_idx": 18, "global_frame_idx": 5663, "task_index": 4}, {"db_idx": 5664, "episode_idx": 20, "frame_idx": 19, "global_frame_idx": 5664, "task_index": 4}, {"db_idx": 5665, "episode_idx": 20, "frame_idx": 20, "global_frame_idx": 5665, "task_index": 4}, {"db_idx": 5666, "episode_idx": 20, "frame_idx": 21, "global_frame_idx": 5666, "task_index": 4}, {"db_idx": 5667, "episode_idx": 20, "frame_idx": 22, "global_frame_idx": 5667, "task_index": 4}, {"db_idx": 5668, "episode_idx": 20, "frame_idx": 23, "global_frame_idx": 5668, "task_index": 4}, {"db_idx": 5669, "episode_idx": 20, "frame_idx": 24, "global_frame_idx": 5669, "task_index": 4}, {"db_idx": 5670, "episode_idx": 20, "frame_idx": 25, "global_frame_idx": 5670, "task_index": 4}, {"db_idx": 5671, "episode_idx": 20, "frame_idx": 26, "global_frame_idx": 5671, "task_index": 4}, {"db_idx": 5672, "episode_idx": 20, "frame_idx": 27, "global_frame_idx": 5672, "task_index": 4}, {"db_idx": 5673, "episode_idx": 20, "frame_idx": 28, "global_frame_idx": 5673, "task_index": 4}, {"db_idx": 5674, "episode_idx": 20, "frame_idx": 29, 
"global_frame_idx": 5674, "task_index": 4}, {"db_idx": 5675, "episode_idx": 20, "frame_idx": 30, "global_frame_idx": 5675, "task_index": 4}, {"db_idx": 5676, "episode_idx": 20, "frame_idx": 31, "global_frame_idx": 5676, "task_index": 4}, {"db_idx": 5677, "episode_idx": 20, "frame_idx": 32, "global_frame_idx": 5677, "task_index": 4}, {"db_idx": 5678, "episode_idx": 20, "frame_idx": 33, "global_frame_idx": 5678, "task_index": 4}, {"db_idx": 5679, "episode_idx": 20, "frame_idx": 34, "global_frame_idx": 5679, "task_index": 4}, {"db_idx": 5680, "episode_idx": 20, "frame_idx": 35, "global_frame_idx": 5680, "task_index": 4}, {"db_idx": 5681, "episode_idx": 20, "frame_idx": 36, "global_frame_idx": 5681, "task_index": 4}, {"db_idx": 5682, "episode_idx": 20, "frame_idx": 37, "global_frame_idx": 5682, "task_index": 4}, {"db_idx": 5683, "episode_idx": 20, "frame_idx": 38, "global_frame_idx": 5683, "task_index": 4}, {"db_idx": 5684, "episode_idx": 20, "frame_idx": 39, "global_frame_idx": 5684, "task_index": 4}, {"db_idx": 5685, "episode_idx": 20, "frame_idx": 40, "global_frame_idx": 5685, "task_index": 4}, {"db_idx": 5686, "episode_idx": 20, "frame_idx": 41, "global_frame_idx": 5686, "task_index": 4}, {"db_idx": 5687, "episode_idx": 20, "frame_idx": 42, "global_frame_idx": 5687, "task_index": 4}, {"db_idx": 5688, "episode_idx": 20, "frame_idx": 43, "global_frame_idx": 5688, "task_index": 4}, {"db_idx": 5689, "episode_idx": 20, "frame_idx": 44, "global_frame_idx": 5689, "task_index": 4}, {"db_idx": 5690, "episode_idx": 20, "frame_idx": 45, "global_frame_idx": 5690, "task_index": 4}, {"db_idx": 5691, "episode_idx": 20, "frame_idx": 46, "global_frame_idx": 5691, "task_index": 4}, {"db_idx": 5692, "episode_idx": 20, "frame_idx": 47, "global_frame_idx": 5692, "task_index": 4}, {"db_idx": 5693, "episode_idx": 20, "frame_idx": 48, "global_frame_idx": 5693, "task_index": 4}, {"db_idx": 5694, "episode_idx": 20, "frame_idx": 49, "global_frame_idx": 5694, "task_index": 4}, {"db_idx": 
5695, "episode_idx": 20, "frame_idx": 50, "global_frame_idx": 5695, "task_index": 4}, {"db_idx": 5696, "episode_idx": 20, "frame_idx": 51, "global_frame_idx": 5696, "task_index": 4}, {"db_idx": 5697, "episode_idx": 20, "frame_idx": 52, "global_frame_idx": 5697, "task_index": 4}, {"db_idx": 5698, "episode_idx": 20, "frame_idx": 53, "global_frame_idx": 5698, "task_index": 4}, {"db_idx": 5699, "episode_idx": 20, "frame_idx": 54, "global_frame_idx": 5699, "task_index": 4}, {"db_idx": 5700, "episode_idx": 20, "frame_idx": 55, "global_frame_idx": 5700, "task_index": 4}, {"db_idx": 5701, "episode_idx": 20, "frame_idx": 56, "global_frame_idx": 5701, "task_index": 4}, {"db_idx": 5702, "episode_idx": 20, "frame_idx": 57, "global_frame_idx": 5702, "task_index": 4}, {"db_idx": 5703, "episode_idx": 20, "frame_idx": 58, "global_frame_idx": 5703, "task_index": 4}, {"db_idx": 5704, "episode_idx": 20, "frame_idx": 59, "global_frame_idx": 5704, "task_index": 4}, {"db_idx": 5705, "episode_idx": 20, "frame_idx": 60, "global_frame_idx": 5705, "task_index": 4}, {"db_idx": 5706, "episode_idx": 20, "frame_idx": 61, "global_frame_idx": 5706, "task_index": 4}, {"db_idx": 5707, "episode_idx": 20, "frame_idx": 62, "global_frame_idx": 5707, "task_index": 4}, {"db_idx": 5708, "episode_idx": 20, "frame_idx": 63, "global_frame_idx": 5708, "task_index": 4}, {"db_idx": 5709, "episode_idx": 20, "frame_idx": 64, "global_frame_idx": 5709, "task_index": 4}, {"db_idx": 5710, "episode_idx": 20, "frame_idx": 65, "global_frame_idx": 5710, "task_index": 4}, {"db_idx": 5711, "episode_idx": 20, "frame_idx": 66, "global_frame_idx": 5711, "task_index": 4}, {"db_idx": 5712, "episode_idx": 20, "frame_idx": 67, "global_frame_idx": 5712, "task_index": 4}, {"db_idx": 5713, "episode_idx": 20, "frame_idx": 68, "global_frame_idx": 5713, "task_index": 4}, {"db_idx": 5714, "episode_idx": 20, "frame_idx": 69, "global_frame_idx": 5714, "task_index": 4}, {"db_idx": 5715, "episode_idx": 20, "frame_idx": 70, 
"global_frame_idx": 5715, "task_index": 4}, {"db_idx": 5716, "episode_idx": 20, "frame_idx": 71, "global_frame_idx": 5716, "task_index": 4}, {"db_idx": 5717, "episode_idx": 20, "frame_idx": 72, "global_frame_idx": 5717, "task_index": 4}, {"db_idx": 5718, "episode_idx": 20, "frame_idx": 73, "global_frame_idx": 5718, "task_index": 4}, {"db_idx": 5719, "episode_idx": 20, "frame_idx": 74, "global_frame_idx": 5719, "task_index": 4}, {"db_idx": 5720, "episode_idx": 20, "frame_idx": 75, "global_frame_idx": 5720, "task_index": 4}, {"db_idx": 5721, "episode_idx": 20, "frame_idx": 76, "global_frame_idx": 5721, "task_index": 4}, {"db_idx": 5722, "episode_idx": 20, "frame_idx": 77, "global_frame_idx": 5722, "task_index": 4}, {"db_idx": 5723, "episode_idx": 20, "frame_idx": 78, "global_frame_idx": 5723, "task_index": 4}, {"db_idx": 5724, "episode_idx": 20, "frame_idx": 79, "global_frame_idx": 5724, "task_index": 4}, {"db_idx": 5725, "episode_idx": 20, "frame_idx": 80, "global_frame_idx": 5725, "task_index": 4}, {"db_idx": 5726, "episode_idx": 20, "frame_idx": 81, "global_frame_idx": 5726, "task_index": 4}, {"db_idx": 5727, "episode_idx": 20, "frame_idx": 82, "global_frame_idx": 5727, "task_index": 4}, {"db_idx": 5728, "episode_idx": 20, "frame_idx": 83, "global_frame_idx": 5728, "task_index": 4}, {"db_idx": 5729, "episode_idx": 20, "frame_idx": 84, "global_frame_idx": 5729, "task_index": 4}, {"db_idx": 5730, "episode_idx": 20, "frame_idx": 85, "global_frame_idx": 5730, "task_index": 4}, {"db_idx": 5731, "episode_idx": 20, "frame_idx": 86, "global_frame_idx": 5731, "task_index": 4}, {"db_idx": 5732, "episode_idx": 20, "frame_idx": 87, "global_frame_idx": 5732, "task_index": 4}, {"db_idx": 5733, "episode_idx": 20, "frame_idx": 88, "global_frame_idx": 5733, "task_index": 4}, {"db_idx": 5734, "episode_idx": 20, "frame_idx": 89, "global_frame_idx": 5734, "task_index": 4}, {"db_idx": 5735, "episode_idx": 20, "frame_idx": 90, "global_frame_idx": 5735, "task_index": 4}, {"db_idx": 
5736, "episode_idx": 20, "frame_idx": 91, "global_frame_idx": 5736, "task_index": 4}, {"db_idx": 5737, "episode_idx": 20, "frame_idx": 92, "global_frame_idx": 5737, "task_index": 4}, {"db_idx": 5738, "episode_idx": 20, "frame_idx": 93, "global_frame_idx": 5738, "task_index": 4}, {"db_idx": 5739, "episode_idx": 20, "frame_idx": 94, "global_frame_idx": 5739, "task_index": 4}, {"db_idx": 5740, "episode_idx": 20, "frame_idx": 95, "global_frame_idx": 5740, "task_index": 4}, {"db_idx": 5741, "episode_idx": 20, "frame_idx": 96, "global_frame_idx": 5741, "task_index": 4}, {"db_idx": 5742, "episode_idx": 20, "frame_idx": 97, "global_frame_idx": 5742, "task_index": 4}, {"db_idx": 5743, "episode_idx": 20, "frame_idx": 98, "global_frame_idx": 5743, "task_index": 4}, {"db_idx": 5744, "episode_idx": 20, "frame_idx": 99, "global_frame_idx": 5744, "task_index": 4}, {"db_idx": 5745, "episode_idx": 20, "frame_idx": 100, "global_frame_idx": 5745, "task_index": 4}, {"db_idx": 5746, "episode_idx": 20, "frame_idx": 101, "global_frame_idx": 5746, "task_index": 4}, {"db_idx": 5747, "episode_idx": 20, "frame_idx": 102, "global_frame_idx": 5747, "task_index": 4}, {"db_idx": 5748, "episode_idx": 20, "frame_idx": 103, "global_frame_idx": 5748, "task_index": 4}, {"db_idx": 5749, "episode_idx": 20, "frame_idx": 104, "global_frame_idx": 5749, "task_index": 4}, {"db_idx": 5750, "episode_idx": 20, "frame_idx": 105, "global_frame_idx": 5750, "task_index": 4}, {"db_idx": 5751, "episode_idx": 20, "frame_idx": 106, "global_frame_idx": 5751, "task_index": 4}, {"db_idx": 5752, "episode_idx": 20, "frame_idx": 107, "global_frame_idx": 5752, "task_index": 4}, {"db_idx": 5753, "episode_idx": 20, "frame_idx": 108, "global_frame_idx": 5753, "task_index": 4}, {"db_idx": 5754, "episode_idx": 20, "frame_idx": 109, "global_frame_idx": 5754, "task_index": 4}, {"db_idx": 5755, "episode_idx": 20, "frame_idx": 110, "global_frame_idx": 5755, "task_index": 4}, {"db_idx": 5756, "episode_idx": 20, "frame_idx": 111, 
"global_frame_idx": 5756, "task_index": 4}, {"db_idx": 5757, "episode_idx": 20, "frame_idx": 112, "global_frame_idx": 5757, "task_index": 4}, {"db_idx": 5758, "episode_idx": 20, "frame_idx": 113, "global_frame_idx": 5758, "task_index": 4}, {"db_idx": 5759, "episode_idx": 20, "frame_idx": 114, "global_frame_idx": 5759, "task_index": 4}, {"db_idx": 5760, "episode_idx": 20, "frame_idx": 115, "global_frame_idx": 5760, "task_index": 4}, {"db_idx": 5761, "episode_idx": 20, "frame_idx": 116, "global_frame_idx": 5761, "task_index": 4}, {"db_idx": 5762, "episode_idx": 20, "frame_idx": 117, "global_frame_idx": 5762, "task_index": 4}, {"db_idx": 5763, "episode_idx": 20, "frame_idx": 118, "global_frame_idx": 5763, "task_index": 4}, {"db_idx": 5764, "episode_idx": 20, "frame_idx": 119, "global_frame_idx": 5764, "task_index": 4}, {"db_idx": 5765, "episode_idx": 20, "frame_idx": 120, "global_frame_idx": 5765, "task_index": 4}, {"db_idx": 5766, "episode_idx": 20, "frame_idx": 121, "global_frame_idx": 5766, "task_index": 4}, {"db_idx": 5767, "episode_idx": 20, "frame_idx": 122, "global_frame_idx": 5767, "task_index": 4}, {"db_idx": 5768, "episode_idx": 20, "frame_idx": 123, "global_frame_idx": 5768, "task_index": 4}, {"db_idx": 5769, "episode_idx": 20, "frame_idx": 124, "global_frame_idx": 5769, "task_index": 4}, {"db_idx": 5770, "episode_idx": 20, "frame_idx": 125, "global_frame_idx": 5770, "task_index": 4}, {"db_idx": 5771, "episode_idx": 20, "frame_idx": 126, "global_frame_idx": 5771, "task_index": 4}, {"db_idx": 5772, "episode_idx": 20, "frame_idx": 127, "global_frame_idx": 5772, "task_index": 4}, {"db_idx": 5773, "episode_idx": 20, "frame_idx": 128, "global_frame_idx": 5773, "task_index": 4}, {"db_idx": 5774, "episode_idx": 20, "frame_idx": 129, "global_frame_idx": 5774, "task_index": 4}, {"db_idx": 5775, "episode_idx": 20, "frame_idx": 130, "global_frame_idx": 5775, "task_index": 4}, {"db_idx": 5776, "episode_idx": 20, "frame_idx": 131, "global_frame_idx": 5776, "task_index": 
4}, {"db_idx": 5777, "episode_idx": 20, "frame_idx": 132, "global_frame_idx": 5777, "task_index": 4}, {"db_idx": 5778, "episode_idx": 20, "frame_idx": 133, "global_frame_idx": 5778, "task_index": 4}, {"db_idx": 5779, "episode_idx": 20, "frame_idx": 134, "global_frame_idx": 5779, "task_index": 4}, {"db_idx": 5780, "episode_idx": 20, "frame_idx": 135, "global_frame_idx": 5780, "task_index": 4}, {"db_idx": 5781, "episode_idx": 20, "frame_idx": 136, "global_frame_idx": 5781, "task_index": 4}, {"db_idx": 5782, "episode_idx": 20, "frame_idx": 137, "global_frame_idx": 5782, "task_index": 4}, {"db_idx": 5783, "episode_idx": 20, "frame_idx": 138, "global_frame_idx": 5783, "task_index": 4}, {"db_idx": 5784, "episode_idx": 20, "frame_idx": 139, "global_frame_idx": 5784, "task_index": 4}, {"db_idx": 5785, "episode_idx": 20, "frame_idx": 140, "global_frame_idx": 5785, "task_index": 4}, {"db_idx": 5786, "episode_idx": 20, "frame_idx": 141, "global_frame_idx": 5786, "task_index": 4}, {"db_idx": 5787, "episode_idx": 20, "frame_idx": 142, "global_frame_idx": 5787, "task_index": 4}, {"db_idx": 5788, "episode_idx": 20, "frame_idx": 143, "global_frame_idx": 5788, "task_index": 4}, {"db_idx": 5789, "episode_idx": 20, "frame_idx": 144, "global_frame_idx": 5789, "task_index": 4}, {"db_idx": 5790, "episode_idx": 20, "frame_idx": 145, "global_frame_idx": 5790, "task_index": 4}, {"db_idx": 5791, "episode_idx": 20, "frame_idx": 146, "global_frame_idx": 5791, "task_index": 4}, {"db_idx": 5792, "episode_idx": 20, "frame_idx": 147, "global_frame_idx": 5792, "task_index": 4}, {"db_idx": 5793, "episode_idx": 20, "frame_idx": 148, "global_frame_idx": 5793, "task_index": 4}, {"db_idx": 5794, "episode_idx": 20, "frame_idx": 149, "global_frame_idx": 5794, "task_index": 4}, {"db_idx": 5795, "episode_idx": 20, "frame_idx": 150, "global_frame_idx": 5795, "task_index": 4}, {"db_idx": 5796, "episode_idx": 20, "frame_idx": 151, "global_frame_idx": 5796, "task_index": 4}, {"db_idx": 5797, "episode_idx": 20, 
"frame_idx": 152, "global_frame_idx": 5797, "task_index": 4}, {"db_idx": 5798, "episode_idx": 20, "frame_idx": 153, "global_frame_idx": 5798, "task_index": 4}, {"db_idx": 5799, "episode_idx": 20, "frame_idx": 154, "global_frame_idx": 5799, "task_index": 4}, {"db_idx": 5800, "episode_idx": 20, "frame_idx": 155, "global_frame_idx": 5800, "task_index": 4}, {"db_idx": 5801, "episode_idx": 20, "frame_idx": 156, "global_frame_idx": 5801, "task_index": 4}, {"db_idx": 5802, "episode_idx": 20, "frame_idx": 157, "global_frame_idx": 5802, "task_index": 4}, {"db_idx": 5803, "episode_idx": 20, "frame_idx": 158, "global_frame_idx": 5803, "task_index": 4}, {"db_idx": 5804, "episode_idx": 20, "frame_idx": 159, "global_frame_idx": 5804, "task_index": 4}, {"db_idx": 5805, "episode_idx": 20, "frame_idx": 160, "global_frame_idx": 5805, "task_index": 4}, {"db_idx": 5806, "episode_idx": 20, "frame_idx": 161, "global_frame_idx": 5806, "task_index": 4}, {"db_idx": 5807, "episode_idx": 20, "frame_idx": 162, "global_frame_idx": 5807, "task_index": 4}, {"db_idx": 5808, "episode_idx": 20, "frame_idx": 163, "global_frame_idx": 5808, "task_index": 4}, {"db_idx": 5809, "episode_idx": 20, "frame_idx": 164, "global_frame_idx": 5809, "task_index": 4}, {"db_idx": 5810, "episode_idx": 20, "frame_idx": 165, "global_frame_idx": 5810, "task_index": 4}, {"db_idx": 5811, "episode_idx": 20, "frame_idx": 166, "global_frame_idx": 5811, "task_index": 4}, {"db_idx": 5812, "episode_idx": 20, "frame_idx": 167, "global_frame_idx": 5812, "task_index": 4}, {"db_idx": 5813, "episode_idx": 20, "frame_idx": 168, "global_frame_idx": 5813, "task_index": 4}, {"db_idx": 5814, "episode_idx": 20, "frame_idx": 169, "global_frame_idx": 5814, "task_index": 4}, {"db_idx": 5815, "episode_idx": 20, "frame_idx": 170, "global_frame_idx": 5815, "task_index": 4}, {"db_idx": 5816, "episode_idx": 20, "frame_idx": 171, "global_frame_idx": 5816, "task_index": 4}, {"db_idx": 5817, "episode_idx": 20, "frame_idx": 172, "global_frame_idx": 
5817, "task_index": 4}, {"db_idx": 5818, "episode_idx": 20, "frame_idx": 173, "global_frame_idx": 5818, "task_index": 4}, {"db_idx": 5819, "episode_idx": 20, "frame_idx": 174, "global_frame_idx": 5819, "task_index": 4}, {"db_idx": 5820, "episode_idx": 20, "frame_idx": 175, "global_frame_idx": 5820, "task_index": 4}, {"db_idx": 5821, "episode_idx": 20, "frame_idx": 176, "global_frame_idx": 5821, "task_index": 4}, {"db_idx": 5822, "episode_idx": 20, "frame_idx": 177, "global_frame_idx": 5822, "task_index": 4}, {"db_idx": 5823, "episode_idx": 20, "frame_idx": 178, "global_frame_idx": 5823, "task_index": 4}, {"db_idx": 5824, "episode_idx": 20, "frame_idx": 179, "global_frame_idx": 5824, "task_index": 4}, {"db_idx": 5825, "episode_idx": 20, "frame_idx": 180, "global_frame_idx": 5825, "task_index": 4}, {"db_idx": 5826, "episode_idx": 20, "frame_idx": 181, "global_frame_idx": 5826, "task_index": 4}, {"db_idx": 5827, "episode_idx": 20, "frame_idx": 182, "global_frame_idx": 5827, "task_index": 4}, {"db_idx": 5828, "episode_idx": 20, "frame_idx": 183, "global_frame_idx": 5828, "task_index": 4}, {"db_idx": 5829, "episode_idx": 20, "frame_idx": 184, "global_frame_idx": 5829, "task_index": 4}, {"db_idx": 5830, "episode_idx": 20, "frame_idx": 185, "global_frame_idx": 5830, "task_index": 4}, {"db_idx": 5831, "episode_idx": 20, "frame_idx": 186, "global_frame_idx": 5831, "task_index": 4}, {"db_idx": 5832, "episode_idx": 20, "frame_idx": 187, "global_frame_idx": 5832, "task_index": 4}, {"db_idx": 5833, "episode_idx": 20, "frame_idx": 188, "global_frame_idx": 5833, "task_index": 4}, {"db_idx": 5834, "episode_idx": 20, "frame_idx": 189, "global_frame_idx": 5834, "task_index": 4}, {"db_idx": 5835, "episode_idx": 20, "frame_idx": 190, "global_frame_idx": 5835, "task_index": 4}, {"db_idx": 5836, "episode_idx": 20, "frame_idx": 191, "global_frame_idx": 5836, "task_index": 4}, {"db_idx": 5837, "episode_idx": 20, "frame_idx": 192, "global_frame_idx": 5837, "task_index": 4}, {"db_idx": 
5838, "episode_idx": 20, "frame_idx": 193, "global_frame_idx": 5838, "task_index": 4}, {"db_idx": 5839, "episode_idx": 20, "frame_idx": 194, "global_frame_idx": 5839, "task_index": 4}, {"db_idx": 5840, "episode_idx": 20, "frame_idx": 195, "global_frame_idx": 5840, "task_index": 4}, {"db_idx": 5841, "episode_idx": 20, "frame_idx": 196, "global_frame_idx": 5841, "task_index": 4}, {"db_idx": 5842, "episode_idx": 20, "frame_idx": 197, "global_frame_idx": 5842, "task_index": 4}, {"db_idx": 5843, "episode_idx": 20, "frame_idx": 198, "global_frame_idx": 5843, "task_index": 4}, {"db_idx": 5844, "episode_idx": 20, "frame_idx": 199, "global_frame_idx": 5844, "task_index": 4}, {"db_idx": 5845, "episode_idx": 20, "frame_idx": 200, "global_frame_idx": 5845, "task_index": 4}, {"db_idx": 5846, "episode_idx": 20, "frame_idx": 201, "global_frame_idx": 5846, "task_index": 4}, {"db_idx": 5847, "episode_idx": 20, "frame_idx": 202, "global_frame_idx": 5847, "task_index": 4}, {"db_idx": 5848, "episode_idx": 20, "frame_idx": 203, "global_frame_idx": 5848, "task_index": 4}, {"db_idx": 5849, "episode_idx": 20, "frame_idx": 204, "global_frame_idx": 5849, "task_index": 4}, {"db_idx": 5850, "episode_idx": 20, "frame_idx": 205, "global_frame_idx": 5850, "task_index": 4}, {"db_idx": 5851, "episode_idx": 20, "frame_idx": 206, "global_frame_idx": 5851, "task_index": 4}, {"db_idx": 5852, "episode_idx": 20, "frame_idx": 207, "global_frame_idx": 5852, "task_index": 4}, {"db_idx": 5853, "episode_idx": 20, "frame_idx": 208, "global_frame_idx": 5853, "task_index": 4}, {"db_idx": 5854, "episode_idx": 20, "frame_idx": 209, "global_frame_idx": 5854, "task_index": 4}, {"db_idx": 5855, "episode_idx": 20, "frame_idx": 210, "global_frame_idx": 5855, "task_index": 4}, {"db_idx": 5856, "episode_idx": 20, "frame_idx": 211, "global_frame_idx": 5856, "task_index": 4}, {"db_idx": 5857, "episode_idx": 20, "frame_idx": 212, "global_frame_idx": 5857, "task_index": 4}, {"db_idx": 5858, "episode_idx": 20, "frame_idx": 
213, "global_frame_idx": 5858, "task_index": 4}, {"db_idx": 5859, "episode_idx": 20, "frame_idx": 214, "global_frame_idx": 5859, "task_index": 4}, {"db_idx": 5860, "episode_idx": 20, "frame_idx": 215, "global_frame_idx": 5860, "task_index": 4}, {"db_idx": 5861, "episode_idx": 20, "frame_idx": 216, "global_frame_idx": 5861, "task_index": 4}, {"db_idx": 5862, "episode_idx": 20, "frame_idx": 217, "global_frame_idx": 5862, "task_index": 4}, {"db_idx": 5863, "episode_idx": 20, "frame_idx": 218, "global_frame_idx": 5863, "task_index": 4}, {"db_idx": 5864, "episode_idx": 20, "frame_idx": 219, "global_frame_idx": 5864, "task_index": 4}, {"db_idx": 5865, "episode_idx": 20, "frame_idx": 220, "global_frame_idx": 5865, "task_index": 4}, {"db_idx": 5866, "episode_idx": 20, "frame_idx": 221, "global_frame_idx": 5866, "task_index": 4}, {"db_idx": 5867, "episode_idx": 20, "frame_idx": 222, "global_frame_idx": 5867, "task_index": 4}, {"db_idx": 5868, "episode_idx": 20, "frame_idx": 223, "global_frame_idx": 5868, "task_index": 4}, {"db_idx": 5869, "episode_idx": 20, "frame_idx": 224, "global_frame_idx": 5869, "task_index": 4}, {"db_idx": 5870, "episode_idx": 20, "frame_idx": 225, "global_frame_idx": 5870, "task_index": 4}, {"db_idx": 5871, "episode_idx": 20, "frame_idx": 226, "global_frame_idx": 5871, "task_index": 4}, {"db_idx": 5872, "episode_idx": 20, "frame_idx": 227, "global_frame_idx": 5872, "task_index": 4}, {"db_idx": 5873, "episode_idx": 20, "frame_idx": 228, "global_frame_idx": 5873, "task_index": 4}, {"db_idx": 5874, "episode_idx": 20, "frame_idx": 229, "global_frame_idx": 5874, "task_index": 4}, {"db_idx": 5875, "episode_idx": 20, "frame_idx": 230, "global_frame_idx": 5875, "task_index": 4}, {"db_idx": 5876, "episode_idx": 20, "frame_idx": 231, "global_frame_idx": 5876, "task_index": 4}, {"db_idx": 5877, "episode_idx": 20, "frame_idx": 232, "global_frame_idx": 5877, "task_index": 4}, {"db_idx": 5878, "episode_idx": 20, "frame_idx": 233, "global_frame_idx": 5878, 
"task_index": 4}, {"db_idx": 5879, "episode_idx": 20, "frame_idx": 234, "global_frame_idx": 5879, "task_index": 4}, {"db_idx": 5880, "episode_idx": 20, "frame_idx": 235, "global_frame_idx": 5880, "task_index": 4}, {"db_idx": 5881, "episode_idx": 20, "frame_idx": 236, "global_frame_idx": 5881, "task_index": 4}, {"db_idx": 5882, "episode_idx": 20, "frame_idx": 237, "global_frame_idx": 5882, "task_index": 4}, {"db_idx": 5883, "episode_idx": 20, "frame_idx": 238, "global_frame_idx": 5883, "task_index": 4}, {"db_idx": 5884, "episode_idx": 20, "frame_idx": 239, "global_frame_idx": 5884, "task_index": 4}, {"db_idx": 5885, "episode_idx": 20, "frame_idx": 240, "global_frame_idx": 5885, "task_index": 4}, {"db_idx": 5886, "episode_idx": 20, "frame_idx": 241, "global_frame_idx": 5886, "task_index": 4}, {"db_idx": 5887, "episode_idx": 20, "frame_idx": 242, "global_frame_idx": 5887, "task_index": 4}, {"db_idx": 5888, "episode_idx": 20, "frame_idx": 243, "global_frame_idx": 5888, "task_index": 4}, {"db_idx": 5889, "episode_idx": 20, "frame_idx": 244, "global_frame_idx": 5889, "task_index": 4}, {"db_idx": 5890, "episode_idx": 20, "frame_idx": 245, "global_frame_idx": 5890, "task_index": 4}, {"db_idx": 5891, "episode_idx": 20, "frame_idx": 246, "global_frame_idx": 5891, "task_index": 4}, {"db_idx": 5892, "episode_idx": 20, "frame_idx": 247, "global_frame_idx": 5892, "task_index": 4}, {"db_idx": 5893, "episode_idx": 20, "frame_idx": 248, "global_frame_idx": 5893, "task_index": 4}, {"db_idx": 5894, "episode_idx": 20, "frame_idx": 249, "global_frame_idx": 5894, "task_index": 4}, {"db_idx": 5895, "episode_idx": 20, "frame_idx": 250, "global_frame_idx": 5895, "task_index": 4}, {"db_idx": 5896, "episode_idx": 20, "frame_idx": 251, "global_frame_idx": 5896, "task_index": 4}, {"db_idx": 5897, "episode_idx": 20, "frame_idx": 252, "global_frame_idx": 5897, "task_index": 4}, {"db_idx": 5898, "episode_idx": 20, "frame_idx": 253, "global_frame_idx": 5898, "task_index": 4}, {"db_idx": 5899, 
"episode_idx": 20, "frame_idx": 254, "global_frame_idx": 5899, "task_index": 4}, {"db_idx": 5900, "episode_idx": 20, "frame_idx": 255, "global_frame_idx": 5900, "task_index": 4}, {"db_idx": 5901, "episode_idx": 20, "frame_idx": 256, "global_frame_idx": 5901, "task_index": 4}, {"db_idx": 5902, "episode_idx": 20, "frame_idx": 257, "global_frame_idx": 5902, "task_index": 4}, {"db_idx": 5903, "episode_idx": 20, "frame_idx": 258, "global_frame_idx": 5903, "task_index": 4}, {"db_idx": 5904, "episode_idx": 20, "frame_idx": 259, "global_frame_idx": 5904, "task_index": 4}, {"db_idx": 5905, "episode_idx": 20, "frame_idx": 260, "global_frame_idx": 5905, "task_index": 4}, {"db_idx": 5906, "episode_idx": 20, "frame_idx": 261, "global_frame_idx": 5906, "task_index": 4}, {"db_idx": 5907, "episode_idx": 20, "frame_idx": 262, "global_frame_idx": 5907, "task_index": 4}, {"db_idx": 5908, "episode_idx": 20, "frame_idx": 263, "global_frame_idx": 5908, "task_index": 4}, {"db_idx": 5909, "episode_idx": 21, "frame_idx": 0, "global_frame_idx": 5909, "task_index": 4}, {"db_idx": 5910, "episode_idx": 21, "frame_idx": 1, "global_frame_idx": 5910, "task_index": 4}, {"db_idx": 5911, "episode_idx": 21, "frame_idx": 2, "global_frame_idx": 5911, "task_index": 4}, {"db_idx": 5912, "episode_idx": 21, "frame_idx": 3, "global_frame_idx": 5912, "task_index": 4}, {"db_idx": 5913, "episode_idx": 21, "frame_idx": 4, "global_frame_idx": 5913, "task_index": 4}, {"db_idx": 5914, "episode_idx": 21, "frame_idx": 5, "global_frame_idx": 5914, "task_index": 4}, {"db_idx": 5915, "episode_idx": 21, "frame_idx": 6, "global_frame_idx": 5915, "task_index": 4}, {"db_idx": 5916, "episode_idx": 21, "frame_idx": 7, "global_frame_idx": 5916, "task_index": 4}, {"db_idx": 5917, "episode_idx": 21, "frame_idx": 8, "global_frame_idx": 5917, "task_index": 4}, {"db_idx": 5918, "episode_idx": 21, "frame_idx": 9, "global_frame_idx": 5918, "task_index": 4}, {"db_idx": 5919, "episode_idx": 21, "frame_idx": 10, "global_frame_idx": 
5919, "task_index": 4}, {"db_idx": 5920, "episode_idx": 21, "frame_idx": 11, "global_frame_idx": 5920, "task_index": 4}, {"db_idx": 5921, "episode_idx": 21, "frame_idx": 12, "global_frame_idx": 5921, "task_index": 4}, {"db_idx": 5922, "episode_idx": 21, "frame_idx": 13, "global_frame_idx": 5922, "task_index": 4}, {"db_idx": 5923, "episode_idx": 21, "frame_idx": 14, "global_frame_idx": 5923, "task_index": 4}, {"db_idx": 5924, "episode_idx": 21, "frame_idx": 15, "global_frame_idx": 5924, "task_index": 4}, {"db_idx": 5925, "episode_idx": 21, "frame_idx": 16, "global_frame_idx": 5925, "task_index": 4}, {"db_idx": 5926, "episode_idx": 21, "frame_idx": 17, "global_frame_idx": 5926, "task_index": 4}, {"db_idx": 5927, "episode_idx": 21, "frame_idx": 18, "global_frame_idx": 5927, "task_index": 4}, {"db_idx": 5928, "episode_idx": 21, "frame_idx": 19, "global_frame_idx": 5928, "task_index": 4}, {"db_idx": 5929, "episode_idx": 21, "frame_idx": 20, "global_frame_idx": 5929, "task_index": 4}, {"db_idx": 5930, "episode_idx": 21, "frame_idx": 21, "global_frame_idx": 5930, "task_index": 4}, {"db_idx": 5931, "episode_idx": 21, "frame_idx": 22, "global_frame_idx": 5931, "task_index": 4}, {"db_idx": 5932, "episode_idx": 21, "frame_idx": 23, "global_frame_idx": 5932, "task_index": 4}, {"db_idx": 5933, "episode_idx": 21, "frame_idx": 24, "global_frame_idx": 5933, "task_index": 4}, {"db_idx": 5934, "episode_idx": 21, "frame_idx": 25, "global_frame_idx": 5934, "task_index": 4}, {"db_idx": 5935, "episode_idx": 21, "frame_idx": 26, "global_frame_idx": 5935, "task_index": 4}, {"db_idx": 5936, "episode_idx": 21, "frame_idx": 27, "global_frame_idx": 5936, "task_index": 4}, {"db_idx": 5937, "episode_idx": 21, "frame_idx": 28, "global_frame_idx": 5937, "task_index": 4}, {"db_idx": 5938, "episode_idx": 21, "frame_idx": 29, "global_frame_idx": 5938, "task_index": 4}, {"db_idx": 5939, "episode_idx": 21, "frame_idx": 30, "global_frame_idx": 5939, "task_index": 4}, {"db_idx": 5940, "episode_idx": 21, 
"frame_idx": 31, "global_frame_idx": 5940, "task_index": 4}, {"db_idx": 5941, "episode_idx": 21, "frame_idx": 32, "global_frame_idx": 5941, "task_index": 4}, {"db_idx": 5942, "episode_idx": 21, "frame_idx": 33, "global_frame_idx": 5942, "task_index": 4}, {"db_idx": 5943, "episode_idx": 21, "frame_idx": 34, "global_frame_idx": 5943, "task_index": 4}, {"db_idx": 5944, "episode_idx": 21, "frame_idx": 35, "global_frame_idx": 5944, "task_index": 4}, {"db_idx": 5945, "episode_idx": 21, "frame_idx": 36, "global_frame_idx": 5945, "task_index": 4}, {"db_idx": 5946, "episode_idx": 21, "frame_idx": 37, "global_frame_idx": 5946, "task_index": 4}, {"db_idx": 5947, "episode_idx": 21, "frame_idx": 38, "global_frame_idx": 5947, "task_index": 4}, {"db_idx": 5948, "episode_idx": 21, "frame_idx": 39, "global_frame_idx": 5948, "task_index": 4}, {"db_idx": 5949, "episode_idx": 21, "frame_idx": 40, "global_frame_idx": 5949, "task_index": 4}, {"db_idx": 5950, "episode_idx": 21, "frame_idx": 41, "global_frame_idx": 5950, "task_index": 4}, {"db_idx": 5951, "episode_idx": 21, "frame_idx": 42, "global_frame_idx": 5951, "task_index": 4}, {"db_idx": 5952, "episode_idx": 21, "frame_idx": 43, "global_frame_idx": 5952, "task_index": 4}, {"db_idx": 5953, "episode_idx": 21, "frame_idx": 44, "global_frame_idx": 5953, "task_index": 4}, {"db_idx": 5954, "episode_idx": 21, "frame_idx": 45, "global_frame_idx": 5954, "task_index": 4}, {"db_idx": 5955, "episode_idx": 21, "frame_idx": 46, "global_frame_idx": 5955, "task_index": 4}, {"db_idx": 5956, "episode_idx": 21, "frame_idx": 47, "global_frame_idx": 5956, "task_index": 4}, {"db_idx": 5957, "episode_idx": 21, "frame_idx": 48, "global_frame_idx": 5957, "task_index": 4}, {"db_idx": 5958, "episode_idx": 21, "frame_idx": 49, "global_frame_idx": 5958, "task_index": 4}, {"db_idx": 5959, "episode_idx": 21, "frame_idx": 50, "global_frame_idx": 5959, "task_index": 4}, {"db_idx": 5960, "episode_idx": 21, "frame_idx": 51, "global_frame_idx": 5960, "task_index": 
4}, {"db_idx": 5961, "episode_idx": 21, "frame_idx": 52, "global_frame_idx": 5961, "task_index": 4}, {"db_idx": 5962, "episode_idx": 21, "frame_idx": 53, "global_frame_idx": 5962, "task_index": 4}, {"db_idx": 5963, "episode_idx": 21, "frame_idx": 54, "global_frame_idx": 5963, "task_index": 4}, {"db_idx": 5964, "episode_idx": 21, "frame_idx": 55, "global_frame_idx": 5964, "task_index": 4}, {"db_idx": 5965, "episode_idx": 21, "frame_idx": 56, "global_frame_idx": 5965, "task_index": 4}, {"db_idx": 5966, "episode_idx": 21, "frame_idx": 57, "global_frame_idx": 5966, "task_index": 4}, {"db_idx": 5967, "episode_idx": 21, "frame_idx": 58, "global_frame_idx": 5967, "task_index": 4}, {"db_idx": 5968, "episode_idx": 21, "frame_idx": 59, "global_frame_idx": 5968, "task_index": 4}, {"db_idx": 5969, "episode_idx": 21, "frame_idx": 60, "global_frame_idx": 5969, "task_index": 4}, {"db_idx": 5970, "episode_idx": 21, "frame_idx": 61, "global_frame_idx": 5970, "task_index": 4}, {"db_idx": 5971, "episode_idx": 21, "frame_idx": 62, "global_frame_idx": 5971, "task_index": 4}, {"db_idx": 5972, "episode_idx": 21, "frame_idx": 63, "global_frame_idx": 5972, "task_index": 4}, {"db_idx": 5973, "episode_idx": 21, "frame_idx": 64, "global_frame_idx": 5973, "task_index": 4}, {"db_idx": 5974, "episode_idx": 21, "frame_idx": 65, "global_frame_idx": 5974, "task_index": 4}, {"db_idx": 5975, "episode_idx": 21, "frame_idx": 66, "global_frame_idx": 5975, "task_index": 4}, {"db_idx": 5976, "episode_idx": 21, "frame_idx": 67, "global_frame_idx": 5976, "task_index": 4}, {"db_idx": 5977, "episode_idx": 21, "frame_idx": 68, "global_frame_idx": 5977, "task_index": 4}, {"db_idx": 5978, "episode_idx": 21, "frame_idx": 69, "global_frame_idx": 5978, "task_index": 4}, {"db_idx": 5979, "episode_idx": 21, "frame_idx": 70, "global_frame_idx": 5979, "task_index": 4}, {"db_idx": 5980, "episode_idx": 21, "frame_idx": 71, "global_frame_idx": 5980, "task_index": 4}, {"db_idx": 5981, "episode_idx": 21, "frame_idx": 72, 
"global_frame_idx": 5981, "task_index": 4}, {"db_idx": 5982, "episode_idx": 21, "frame_idx": 73, "global_frame_idx": 5982, "task_index": 4}, {"db_idx": 5983, "episode_idx": 21, "frame_idx": 74, "global_frame_idx": 5983, "task_index": 4}, {"db_idx": 5984, "episode_idx": 21, "frame_idx": 75, "global_frame_idx": 5984, "task_index": 4}, {"db_idx": 5985, "episode_idx": 21, "frame_idx": 76, "global_frame_idx": 5985, "task_index": 4}, {"db_idx": 5986, "episode_idx": 21, "frame_idx": 77, "global_frame_idx": 5986, "task_index": 4}, {"db_idx": 5987, "episode_idx": 21, "frame_idx": 78, "global_frame_idx": 5987, "task_index": 4}, {"db_idx": 5988, "episode_idx": 21, "frame_idx": 79, "global_frame_idx": 5988, "task_index": 4}, {"db_idx": 5989, "episode_idx": 21, "frame_idx": 80, "global_frame_idx": 5989, "task_index": 4}, {"db_idx": 5990, "episode_idx": 21, "frame_idx": 81, "global_frame_idx": 5990, "task_index": 4}, {"db_idx": 5991, "episode_idx": 21, "frame_idx": 82, "global_frame_idx": 5991, "task_index": 4}, {"db_idx": 5992, "episode_idx": 21, "frame_idx": 83, "global_frame_idx": 5992, "task_index": 4}, {"db_idx": 5993, "episode_idx": 21, "frame_idx": 84, "global_frame_idx": 5993, "task_index": 4}, {"db_idx": 5994, "episode_idx": 21, "frame_idx": 85, "global_frame_idx": 5994, "task_index": 4}, {"db_idx": 5995, "episode_idx": 21, "frame_idx": 86, "global_frame_idx": 5995, "task_index": 4}, {"db_idx": 5996, "episode_idx": 21, "frame_idx": 87, "global_frame_idx": 5996, "task_index": 4}, {"db_idx": 5997, "episode_idx": 21, "frame_idx": 88, "global_frame_idx": 5997, "task_index": 4}, {"db_idx": 5998, "episode_idx": 21, "frame_idx": 89, "global_frame_idx": 5998, "task_index": 4}, {"db_idx": 5999, "episode_idx": 21, "frame_idx": 90, "global_frame_idx": 5999, "task_index": 4}, {"db_idx": 6000, "episode_idx": 21, "frame_idx": 91, "global_frame_idx": 6000, "task_index": 4}, {"db_idx": 6001, "episode_idx": 21, "frame_idx": 92, "global_frame_idx": 6001, "task_index": 4}, {"db_idx": 
6002, "episode_idx": 21, "frame_idx": 93, "global_frame_idx": 6002, "task_index": 4}, {"db_idx": 6003, "episode_idx": 21, "frame_idx": 94, "global_frame_idx": 6003, "task_index": 4}, {"db_idx": 6004, "episode_idx": 21, "frame_idx": 95, "global_frame_idx": 6004, "task_index": 4}, {"db_idx": 6005, "episode_idx": 21, "frame_idx": 96, "global_frame_idx": 6005, "task_index": 4}, {"db_idx": 6006, "episode_idx": 21, "frame_idx": 97, "global_frame_idx": 6006, "task_index": 4}, {"db_idx": 6007, "episode_idx": 21, "frame_idx": 98, "global_frame_idx": 6007, "task_index": 4}, {"db_idx": 6008, "episode_idx": 21, "frame_idx": 99, "global_frame_idx": 6008, "task_index": 4}, {"db_idx": 6009, "episode_idx": 21, "frame_idx": 100, "global_frame_idx": 6009, "task_index": 4}, {"db_idx": 6010, "episode_idx": 21, "frame_idx": 101, "global_frame_idx": 6010, "task_index": 4}, {"db_idx": 6011, "episode_idx": 21, "frame_idx": 102, "global_frame_idx": 6011, "task_index": 4}, {"db_idx": 6012, "episode_idx": 21, "frame_idx": 103, "global_frame_idx": 6012, "task_index": 4}, {"db_idx": 6013, "episode_idx": 21, "frame_idx": 104, "global_frame_idx": 6013, "task_index": 4}, {"db_idx": 6014, "episode_idx": 21, "frame_idx": 105, "global_frame_idx": 6014, "task_index": 4}, {"db_idx": 6015, "episode_idx": 21, "frame_idx": 106, "global_frame_idx": 6015, "task_index": 4}, {"db_idx": 6016, "episode_idx": 21, "frame_idx": 107, "global_frame_idx": 6016, "task_index": 4}, {"db_idx": 6017, "episode_idx": 21, "frame_idx": 108, "global_frame_idx": 6017, "task_index": 4}, {"db_idx": 6018, "episode_idx": 21, "frame_idx": 109, "global_frame_idx": 6018, "task_index": 4}, {"db_idx": 6019, "episode_idx": 21, "frame_idx": 110, "global_frame_idx": 6019, "task_index": 4}, {"db_idx": 6020, "episode_idx": 21, "frame_idx": 111, "global_frame_idx": 6020, "task_index": 4}, {"db_idx": 6021, "episode_idx": 21, "frame_idx": 112, "global_frame_idx": 6021, "task_index": 4}, {"db_idx": 6022, "episode_idx": 21, "frame_idx": 113, 
"global_frame_idx": 6022, "task_index": 4}, {"db_idx": 6023, "episode_idx": 21, "frame_idx": 114, "global_frame_idx": 6023, "task_index": 4}, {"db_idx": 6024, "episode_idx": 21, "frame_idx": 115, "global_frame_idx": 6024, "task_index": 4}, {"db_idx": 6025, "episode_idx": 21, "frame_idx": 116, "global_frame_idx": 6025, "task_index": 4}, {"db_idx": 6026, "episode_idx": 21, "frame_idx": 117, "global_frame_idx": 6026, "task_index": 4}, {"db_idx": 6027, "episode_idx": 21, "frame_idx": 118, "global_frame_idx": 6027, "task_index": 4}, {"db_idx": 6028, "episode_idx": 21, "frame_idx": 119, "global_frame_idx": 6028, "task_index": 4}, {"db_idx": 6029, "episode_idx": 21, "frame_idx": 120, "global_frame_idx": 6029, "task_index": 4}, {"db_idx": 6030, "episode_idx": 21, "frame_idx": 121, "global_frame_idx": 6030, "task_index": 4}, {"db_idx": 6031, "episode_idx": 21, "frame_idx": 122, "global_frame_idx": 6031, "task_index": 4}, {"db_idx": 6032, "episode_idx": 21, "frame_idx": 123, "global_frame_idx": 6032, "task_index": 4}, {"db_idx": 6033, "episode_idx": 21, "frame_idx": 124, "global_frame_idx": 6033, "task_index": 4}, {"db_idx": 6034, "episode_idx": 21, "frame_idx": 125, "global_frame_idx": 6034, "task_index": 4}, {"db_idx": 6035, "episode_idx": 21, "frame_idx": 126, "global_frame_idx": 6035, "task_index": 4}, {"db_idx": 6036, "episode_idx": 21, "frame_idx": 127, "global_frame_idx": 6036, "task_index": 4}, {"db_idx": 6037, "episode_idx": 21, "frame_idx": 128, "global_frame_idx": 6037, "task_index": 4}, {"db_idx": 6038, "episode_idx": 21, "frame_idx": 129, "global_frame_idx": 6038, "task_index": 4}, {"db_idx": 6039, "episode_idx": 21, "frame_idx": 130, "global_frame_idx": 6039, "task_index": 4}, {"db_idx": 6040, "episode_idx": 21, "frame_idx": 131, "global_frame_idx": 6040, "task_index": 4}, {"db_idx": 6041, "episode_idx": 21, "frame_idx": 132, "global_frame_idx": 6041, "task_index": 4}, {"db_idx": 6042, "episode_idx": 21, "frame_idx": 133, "global_frame_idx": 6042, "task_index": 
4}, {"db_idx": 6043, "episode_idx": 21, "frame_idx": 134, "global_frame_idx": 6043, "task_index": 4}, {"db_idx": 6044, "episode_idx": 21, "frame_idx": 135, "global_frame_idx": 6044, "task_index": 4}, {"db_idx": 6045, "episode_idx": 21, "frame_idx": 136, "global_frame_idx": 6045, "task_index": 4}, {"db_idx": 6046, "episode_idx": 21, "frame_idx": 137, "global_frame_idx": 6046, "task_index": 4}, {"db_idx": 6047, "episode_idx": 21, "frame_idx": 138, "global_frame_idx": 6047, "task_index": 4}, {"db_idx": 6048, "episode_idx": 21, "frame_idx": 139, "global_frame_idx": 6048, "task_index": 4}, {"db_idx": 6049, "episode_idx": 21, "frame_idx": 140, "global_frame_idx": 6049, "task_index": 4}, {"db_idx": 6050, "episode_idx": 21, "frame_idx": 141, "global_frame_idx": 6050, "task_index": 4}, {"db_idx": 6051, "episode_idx": 21, "frame_idx": 142, "global_frame_idx": 6051, "task_index": 4}, {"db_idx": 6052, "episode_idx": 21, "frame_idx": 143, "global_frame_idx": 6052, "task_index": 4}, {"db_idx": 6053, "episode_idx": 21, "frame_idx": 144, "global_frame_idx": 6053, "task_index": 4}, {"db_idx": 6054, "episode_idx": 21, "frame_idx": 145, "global_frame_idx": 6054, "task_index": 4}, {"db_idx": 6055, "episode_idx": 21, "frame_idx": 146, "global_frame_idx": 6055, "task_index": 4}, {"db_idx": 6056, "episode_idx": 21, "frame_idx": 147, "global_frame_idx": 6056, "task_index": 4}, {"db_idx": 6057, "episode_idx": 21, "frame_idx": 148, "global_frame_idx": 6057, "task_index": 4}, {"db_idx": 6058, "episode_idx": 21, "frame_idx": 149, "global_frame_idx": 6058, "task_index": 4}, {"db_idx": 6059, "episode_idx": 21, "frame_idx": 150, "global_frame_idx": 6059, "task_index": 4}, {"db_idx": 6060, "episode_idx": 21, "frame_idx": 151, "global_frame_idx": 6060, "task_index": 4}, {"db_idx": 6061, "episode_idx": 21, "frame_idx": 152, "global_frame_idx": 6061, "task_index": 4}, {"db_idx": 6062, "episode_idx": 21, "frame_idx": 153, "global_frame_idx": 6062, "task_index": 4}, {"db_idx": 6063, "episode_idx": 21, 
"frame_idx": 154, "global_frame_idx": 6063, "task_index": 4}, {"db_idx": 6064, "episode_idx": 21, "frame_idx": 155, "global_frame_idx": 6064, "task_index": 4}, {"db_idx": 6065, "episode_idx": 21, "frame_idx": 156, "global_frame_idx": 6065, "task_index": 4}, {"db_idx": 6066, "episode_idx": 21, "frame_idx": 157, "global_frame_idx": 6066, "task_index": 4}, {"db_idx": 6067, "episode_idx": 21, "frame_idx": 158, "global_frame_idx": 6067, "task_index": 4}, {"db_idx": 6068, "episode_idx": 21, "frame_idx": 159, "global_frame_idx": 6068, "task_index": 4}, {"db_idx": 6069, "episode_idx": 21, "frame_idx": 160, "global_frame_idx": 6069, "task_index": 4}, {"db_idx": 6070, "episode_idx": 21, "frame_idx": 161, "global_frame_idx": 6070, "task_index": 4}, {"db_idx": 6071, "episode_idx": 21, "frame_idx": 162, "global_frame_idx": 6071, "task_index": 4}, {"db_idx": 6072, "episode_idx": 21, "frame_idx": 163, "global_frame_idx": 6072, "task_index": 4}, {"db_idx": 6073, "episode_idx": 21, "frame_idx": 164, "global_frame_idx": 6073, "task_index": 4}, {"db_idx": 6074, "episode_idx": 21, "frame_idx": 165, "global_frame_idx": 6074, "task_index": 4}, {"db_idx": 6075, "episode_idx": 21, "frame_idx": 166, "global_frame_idx": 6075, "task_index": 4}, {"db_idx": 6076, "episode_idx": 21, "frame_idx": 167, "global_frame_idx": 6076, "task_index": 4}, {"db_idx": 6077, "episode_idx": 21, "frame_idx": 168, "global_frame_idx": 6077, "task_index": 4}, {"db_idx": 6078, "episode_idx": 21, "frame_idx": 169, "global_frame_idx": 6078, "task_index": 4}, {"db_idx": 6079, "episode_idx": 21, "frame_idx": 170, "global_frame_idx": 6079, "task_index": 4}, {"db_idx": 6080, "episode_idx": 21, "frame_idx": 171, "global_frame_idx": 6080, "task_index": 4}, {"db_idx": 6081, "episode_idx": 21, "frame_idx": 172, "global_frame_idx": 6081, "task_index": 4}, {"db_idx": 6082, "episode_idx": 21, "frame_idx": 173, "global_frame_idx": 6082, "task_index": 4}, {"db_idx": 6083, "episode_idx": 21, "frame_idx": 174, "global_frame_idx": 
6083, "task_index": 4}, {"db_idx": 6084, "episode_idx": 21, "frame_idx": 175, "global_frame_idx": 6084, "task_index": 4}, {"db_idx": 6085, "episode_idx": 21, "frame_idx": 176, "global_frame_idx": 6085, "task_index": 4}, {"db_idx": 6086, "episode_idx": 21, "frame_idx": 177, "global_frame_idx": 6086, "task_index": 4}, {"db_idx": 6087, "episode_idx": 21, "frame_idx": 178, "global_frame_idx": 6087, "task_index": 4}, {"db_idx": 6088, "episode_idx": 21, "frame_idx": 179, "global_frame_idx": 6088, "task_index": 4}, {"db_idx": 6089, "episode_idx": 21, "frame_idx": 180, "global_frame_idx": 6089, "task_index": 4}, {"db_idx": 6090, "episode_idx": 21, "frame_idx": 181, "global_frame_idx": 6090, "task_index": 4}, {"db_idx": 6091, "episode_idx": 21, "frame_idx": 182, "global_frame_idx": 6091, "task_index": 4}, {"db_idx": 6092, "episode_idx": 21, "frame_idx": 183, "global_frame_idx": 6092, "task_index": 4}, {"db_idx": 6093, "episode_idx": 21, "frame_idx": 184, "global_frame_idx": 6093, "task_index": 4}, {"db_idx": 6094, "episode_idx": 21, "frame_idx": 185, "global_frame_idx": 6094, "task_index": 4}, {"db_idx": 6095, "episode_idx": 21, "frame_idx": 186, "global_frame_idx": 6095, "task_index": 4}, {"db_idx": 6096, "episode_idx": 21, "frame_idx": 187, "global_frame_idx": 6096, "task_index": 4}, {"db_idx": 6097, "episode_idx": 21, "frame_idx": 188, "global_frame_idx": 6097, "task_index": 4}, {"db_idx": 6098, "episode_idx": 21, "frame_idx": 189, "global_frame_idx": 6098, "task_index": 4}, {"db_idx": 6099, "episode_idx": 21, "frame_idx": 190, "global_frame_idx": 6099, "task_index": 4}, {"db_idx": 6100, "episode_idx": 21, "frame_idx": 191, "global_frame_idx": 6100, "task_index": 4}, {"db_idx": 6101, "episode_idx": 21, "frame_idx": 192, "global_frame_idx": 6101, "task_index": 4}, {"db_idx": 6102, "episode_idx": 21, "frame_idx": 193, "global_frame_idx": 6102, "task_index": 4}, {"db_idx": 6103, "episode_idx": 21, "frame_idx": 194, "global_frame_idx": 6103, "task_index": 4}, {"db_idx": 
6104, "episode_idx": 21, "frame_idx": 195, "global_frame_idx": 6104, "task_index": 4}, {"db_idx": 6105, "episode_idx": 21, "frame_idx": 196, "global_frame_idx": 6105, "task_index": 4}, {"db_idx": 6106, "episode_idx": 21, "frame_idx": 197, "global_frame_idx": 6106, "task_index": 4}, {"db_idx": 6107, "episode_idx": 21, "frame_idx": 198, "global_frame_idx": 6107, "task_index": 4}, {"db_idx": 6108, "episode_idx": 21, "frame_idx": 199, "global_frame_idx": 6108, "task_index": 4}, {"db_idx": 6109, "episode_idx": 21, "frame_idx": 200, "global_frame_idx": 6109, "task_index": 4}, {"db_idx": 6110, "episode_idx": 21, "frame_idx": 201, "global_frame_idx": 6110, "task_index": 4}, {"db_idx": 6111, "episode_idx": 21, "frame_idx": 202, "global_frame_idx": 6111, "task_index": 4}, {"db_idx": 6112, "episode_idx": 21, "frame_idx": 203, "global_frame_idx": 6112, "task_index": 4}, {"db_idx": 6113, "episode_idx": 21, "frame_idx": 204, "global_frame_idx": 6113, "task_index": 4}, {"db_idx": 6114, "episode_idx": 21, "frame_idx": 205, "global_frame_idx": 6114, "task_index": 4}, {"db_idx": 6115, "episode_idx": 21, "frame_idx": 206, "global_frame_idx": 6115, "task_index": 4}, {"db_idx": 6116, "episode_idx": 21, "frame_idx": 207, "global_frame_idx": 6116, "task_index": 4}, {"db_idx": 6117, "episode_idx": 21, "frame_idx": 208, "global_frame_idx": 6117, "task_index": 4}, {"db_idx": 6118, "episode_idx": 21, "frame_idx": 209, "global_frame_idx": 6118, "task_index": 4}, {"db_idx": 6119, "episode_idx": 21, "frame_idx": 210, "global_frame_idx": 6119, "task_index": 4}, {"db_idx": 6120, "episode_idx": 21, "frame_idx": 211, "global_frame_idx": 6120, "task_index": 4}, {"db_idx": 6121, "episode_idx": 21, "frame_idx": 212, "global_frame_idx": 6121, "task_index": 4}, {"db_idx": 6122, "episode_idx": 21, "frame_idx": 213, "global_frame_idx": 6122, "task_index": 4}, {"db_idx": 6123, "episode_idx": 21, "frame_idx": 214, "global_frame_idx": 6123, "task_index": 4}, {"db_idx": 6124, "episode_idx": 21, "frame_idx": 
215, "global_frame_idx": 6124, "task_index": 4}, {"db_idx": 6125, "episode_idx": 21, "frame_idx": 216, "global_frame_idx": 6125, "task_index": 4}, {"db_idx": 6126, "episode_idx": 21, "frame_idx": 217, "global_frame_idx": 6126, "task_index": 4}, {"db_idx": 6127, "episode_idx": 21, "frame_idx": 218, "global_frame_idx": 6127, "task_index": 4}, {"db_idx": 6128, "episode_idx": 21, "frame_idx": 219, "global_frame_idx": 6128, "task_index": 4}, {"db_idx": 6129, "episode_idx": 21, "frame_idx": 220, "global_frame_idx": 6129, "task_index": 4}, {"db_idx": 6130, "episode_idx": 21, "frame_idx": 221, "global_frame_idx": 6130, "task_index": 4}, {"db_idx": 6131, "episode_idx": 21, "frame_idx": 222, "global_frame_idx": 6131, "task_index": 4}, {"db_idx": 6132, "episode_idx": 21, "frame_idx": 223, "global_frame_idx": 6132, "task_index": 4}, {"db_idx": 6133, "episode_idx": 21, "frame_idx": 224, "global_frame_idx": 6133, "task_index": 4}, {"db_idx": 6134, "episode_idx": 21, "frame_idx": 225, "global_frame_idx": 6134, "task_index": 4}, {"db_idx": 6135, "episode_idx": 21, "frame_idx": 226, "global_frame_idx": 6135, "task_index": 4}, {"db_idx": 6136, "episode_idx": 21, "frame_idx": 227, "global_frame_idx": 6136, "task_index": 4}, {"db_idx": 6137, "episode_idx": 21, "frame_idx": 228, "global_frame_idx": 6137, "task_index": 4}, {"db_idx": 6138, "episode_idx": 21, "frame_idx": 229, "global_frame_idx": 6138, "task_index": 4}, {"db_idx": 6139, "episode_idx": 21, "frame_idx": 230, "global_frame_idx": 6139, "task_index": 4}, {"db_idx": 6140, "episode_idx": 21, "frame_idx": 231, "global_frame_idx": 6140, "task_index": 4}, {"db_idx": 6141, "episode_idx": 22, "frame_idx": 0, "global_frame_idx": 6141, "task_index": 4}, {"db_idx": 6142, "episode_idx": 22, "frame_idx": 1, "global_frame_idx": 6142, "task_index": 4}, {"db_idx": 6143, "episode_idx": 22, "frame_idx": 2, "global_frame_idx": 6143, "task_index": 4}, {"db_idx": 6144, "episode_idx": 22, "frame_idx": 3, "global_frame_idx": 6144, "task_index": 
4}, {"db_idx": 6145, "episode_idx": 22, "frame_idx": 4, "global_frame_idx": 6145, "task_index": 4}, {"db_idx": 6146, "episode_idx": 22, "frame_idx": 5, "global_frame_idx": 6146, "task_index": 4}, {"db_idx": 6147, "episode_idx": 22, "frame_idx": 6, "global_frame_idx": 6147, "task_index": 4}, {"db_idx": 6148, "episode_idx": 22, "frame_idx": 7, "global_frame_idx": 6148, "task_index": 4}, {"db_idx": 6149, "episode_idx": 22, "frame_idx": 8, "global_frame_idx": 6149, "task_index": 4}, {"db_idx": 6150, "episode_idx": 22, "frame_idx": 9, "global_frame_idx": 6150, "task_index": 4}, {"db_idx": 6151, "episode_idx": 22, "frame_idx": 10, "global_frame_idx": 6151, "task_index": 4}, {"db_idx": 6152, "episode_idx": 22, "frame_idx": 11, "global_frame_idx": 6152, "task_index": 4}, {"db_idx": 6153, "episode_idx": 22, "frame_idx": 12, "global_frame_idx": 6153, "task_index": 4}, {"db_idx": 6154, "episode_idx": 22, "frame_idx": 13, "global_frame_idx": 6154, "task_index": 4}, {"db_idx": 6155, "episode_idx": 22, "frame_idx": 14, "global_frame_idx": 6155, "task_index": 4}, {"db_idx": 6156, "episode_idx": 22, "frame_idx": 15, "global_frame_idx": 6156, "task_index": 4}, {"db_idx": 6157, "episode_idx": 22, "frame_idx": 16, "global_frame_idx": 6157, "task_index": 4}, {"db_idx": 6158, "episode_idx": 22, "frame_idx": 17, "global_frame_idx": 6158, "task_index": 4}, {"db_idx": 6159, "episode_idx": 22, "frame_idx": 18, "global_frame_idx": 6159, "task_index": 4}, {"db_idx": 6160, "episode_idx": 22, "frame_idx": 19, "global_frame_idx": 6160, "task_index": 4}, {"db_idx": 6161, "episode_idx": 22, "frame_idx": 20, "global_frame_idx": 6161, "task_index": 4}, {"db_idx": 6162, "episode_idx": 22, "frame_idx": 21, "global_frame_idx": 6162, "task_index": 4}, {"db_idx": 6163, "episode_idx": 22, "frame_idx": 22, "global_frame_idx": 6163, "task_index": 4}, {"db_idx": 6164, "episode_idx": 22, "frame_idx": 23, "global_frame_idx": 6164, "task_index": 4}, {"db_idx": 6165, "episode_idx": 22, "frame_idx": 24, 
"global_frame_idx": 6165, "task_index": 4}, {"db_idx": 6166, "episode_idx": 22, "frame_idx": 25, "global_frame_idx": 6166, "task_index": 4}, {"db_idx": 6167, "episode_idx": 22, "frame_idx": 26, "global_frame_idx": 6167, "task_index": 4}, {"db_idx": 6168, "episode_idx": 22, "frame_idx": 27, "global_frame_idx": 6168, "task_index": 4}, {"db_idx": 6169, "episode_idx": 22, "frame_idx": 28, "global_frame_idx": 6169, "task_index": 4}, {"db_idx": 6170, "episode_idx": 22, "frame_idx": 29, "global_frame_idx": 6170, "task_index": 4}, {"db_idx": 6171, "episode_idx": 22, "frame_idx": 30, "global_frame_idx": 6171, "task_index": 4}, {"db_idx": 6172, "episode_idx": 22, "frame_idx": 31, "global_frame_idx": 6172, "task_index": 4}, {"db_idx": 6173, "episode_idx": 22, "frame_idx": 32, "global_frame_idx": 6173, "task_index": 4}, {"db_idx": 6174, "episode_idx": 22, "frame_idx": 33, "global_frame_idx": 6174, "task_index": 4}, {"db_idx": 6175, "episode_idx": 22, "frame_idx": 34, "global_frame_idx": 6175, "task_index": 4}, {"db_idx": 6176, "episode_idx": 22, "frame_idx": 35, "global_frame_idx": 6176, "task_index": 4}, {"db_idx": 6177, "episode_idx": 22, "frame_idx": 36, "global_frame_idx": 6177, "task_index": 4}, {"db_idx": 6178, "episode_idx": 22, "frame_idx": 37, "global_frame_idx": 6178, "task_index": 4}, {"db_idx": 6179, "episode_idx": 22, "frame_idx": 38, "global_frame_idx": 6179, "task_index": 4}, {"db_idx": 6180, "episode_idx": 22, "frame_idx": 39, "global_frame_idx": 6180, "task_index": 4}, {"db_idx": 6181, "episode_idx": 22, "frame_idx": 40, "global_frame_idx": 6181, "task_index": 4}, {"db_idx": 6182, "episode_idx": 22, "frame_idx": 41, "global_frame_idx": 6182, "task_index": 4}, {"db_idx": 6183, "episode_idx": 22, "frame_idx": 42, "global_frame_idx": 6183, "task_index": 4}, {"db_idx": 6184, "episode_idx": 22, "frame_idx": 43, "global_frame_idx": 6184, "task_index": 4}, {"db_idx": 6185, "episode_idx": 22, "frame_idx": 44, "global_frame_idx": 6185, "task_index": 4}, {"db_idx": 
6186, "episode_idx": 22, "frame_idx": 45, "global_frame_idx": 6186, "task_index": 4}, {"db_idx": 6187, "episode_idx": 22, "frame_idx": 46, "global_frame_idx": 6187, "task_index": 4}, {"db_idx": 6188, "episode_idx": 22, "frame_idx": 47, "global_frame_idx": 6188, "task_index": 4}, {"db_idx": 6189, "episode_idx": 22, "frame_idx": 48, "global_frame_idx": 6189, "task_index": 4}, {"db_idx": 6190, "episode_idx": 22, "frame_idx": 49, "global_frame_idx": 6190, "task_index": 4}, {"db_idx": 6191, "episode_idx": 22, "frame_idx": 50, "global_frame_idx": 6191, "task_index": 4}, {"db_idx": 6192, "episode_idx": 22, "frame_idx": 51, "global_frame_idx": 6192, "task_index": 4}, {"db_idx": 6193, "episode_idx": 22, "frame_idx": 52, "global_frame_idx": 6193, "task_index": 4}, {"db_idx": 6194, "episode_idx": 22, "frame_idx": 53, "global_frame_idx": 6194, "task_index": 4}, {"db_idx": 6195, "episode_idx": 22, "frame_idx": 54, "global_frame_idx": 6195, "task_index": 4}, {"db_idx": 6196, "episode_idx": 22, "frame_idx": 55, "global_frame_idx": 6196, "task_index": 4}, {"db_idx": 6197, "episode_idx": 22, "frame_idx": 56, "global_frame_idx": 6197, "task_index": 4}, {"db_idx": 6198, "episode_idx": 22, "frame_idx": 57, "global_frame_idx": 6198, "task_index": 4}, {"db_idx": 6199, "episode_idx": 22, "frame_idx": 58, "global_frame_idx": 6199, "task_index": 4}, {"db_idx": 6200, "episode_idx": 22, "frame_idx": 59, "global_frame_idx": 6200, "task_index": 4}, {"db_idx": 6201, "episode_idx": 22, "frame_idx": 60, "global_frame_idx": 6201, "task_index": 4}, {"db_idx": 6202, "episode_idx": 22, "frame_idx": 61, "global_frame_idx": 6202, "task_index": 4}, {"db_idx": 6203, "episode_idx": 22, "frame_idx": 62, "global_frame_idx": 6203, "task_index": 4}, {"db_idx": 6204, "episode_idx": 22, "frame_idx": 63, "global_frame_idx": 6204, "task_index": 4}, {"db_idx": 6205, "episode_idx": 22, "frame_idx": 64, "global_frame_idx": 6205, "task_index": 4}, {"db_idx": 6206, "episode_idx": 22, "frame_idx": 65, 
"global_frame_idx": 6206, "task_index": 4}, {"db_idx": 6207, "episode_idx": 22, "frame_idx": 66, "global_frame_idx": 6207, "task_index": 4}, {"db_idx": 6208, "episode_idx": 22, "frame_idx": 67, "global_frame_idx": 6208, "task_index": 4}, {"db_idx": 6209, "episode_idx": 22, "frame_idx": 68, "global_frame_idx": 6209, "task_index": 4}, {"db_idx": 6210, "episode_idx": 22, "frame_idx": 69, "global_frame_idx": 6210, "task_index": 4}, {"db_idx": 6211, "episode_idx": 22, "frame_idx": 70, "global_frame_idx": 6211, "task_index": 4}, {"db_idx": 6212, "episode_idx": 22, "frame_idx": 71, "global_frame_idx": 6212, "task_index": 4}, {"db_idx": 6213, "episode_idx": 22, "frame_idx": 72, "global_frame_idx": 6213, "task_index": 4}, {"db_idx": 6214, "episode_idx": 22, "frame_idx": 73, "global_frame_idx": 6214, "task_index": 4}, {"db_idx": 6215, "episode_idx": 22, "frame_idx": 74, "global_frame_idx": 6215, "task_index": 4}, {"db_idx": 6216, "episode_idx": 22, "frame_idx": 75, "global_frame_idx": 6216, "task_index": 4}, {"db_idx": 6217, "episode_idx": 22, "frame_idx": 76, "global_frame_idx": 6217, "task_index": 4}, {"db_idx": 6218, "episode_idx": 22, "frame_idx": 77, "global_frame_idx": 6218, "task_index": 4}, {"db_idx": 6219, "episode_idx": 22, "frame_idx": 78, "global_frame_idx": 6219, "task_index": 4}, {"db_idx": 6220, "episode_idx": 22, "frame_idx": 79, "global_frame_idx": 6220, "task_index": 4}, {"db_idx": 6221, "episode_idx": 22, "frame_idx": 80, "global_frame_idx": 6221, "task_index": 4}, {"db_idx": 6222, "episode_idx": 22, "frame_idx": 81, "global_frame_idx": 6222, "task_index": 4}, {"db_idx": 6223, "episode_idx": 22, "frame_idx": 82, "global_frame_idx": 6223, "task_index": 4}, {"db_idx": 6224, "episode_idx": 22, "frame_idx": 83, "global_frame_idx": 6224, "task_index": 4}, {"db_idx": 6225, "episode_idx": 22, "frame_idx": 84, "global_frame_idx": 6225, "task_index": 4}, {"db_idx": 6226, "episode_idx": 22, "frame_idx": 85, "global_frame_idx": 6226, "task_index": 4}, {"db_idx": 
6227, "episode_idx": 22, "frame_idx": 86, "global_frame_idx": 6227, "task_index": 4}, {"db_idx": 6228, "episode_idx": 22, "frame_idx": 87, "global_frame_idx": 6228, "task_index": 4}, {"db_idx": 6229, "episode_idx": 22, "frame_idx": 88, "global_frame_idx": 6229, "task_index": 4}, {"db_idx": 6230, "episode_idx": 22, "frame_idx": 89, "global_frame_idx": 6230, "task_index": 4}, {"db_idx": 6231, "episode_idx": 22, "frame_idx": 90, "global_frame_idx": 6231, "task_index": 4}, {"db_idx": 6232, "episode_idx": 22, "frame_idx": 91, "global_frame_idx": 6232, "task_index": 4}, {"db_idx": 6233, "episode_idx": 22, "frame_idx": 92, "global_frame_idx": 6233, "task_index": 4}, {"db_idx": 6234, "episode_idx": 22, "frame_idx": 93, "global_frame_idx": 6234, "task_index": 4}, {"db_idx": 6235, "episode_idx": 22, "frame_idx": 94, "global_frame_idx": 6235, "task_index": 4}, {"db_idx": 6236, "episode_idx": 22, "frame_idx": 95, "global_frame_idx": 6236, "task_index": 4}, {"db_idx": 6237, "episode_idx": 22, "frame_idx": 96, "global_frame_idx": 6237, "task_index": 4}, {"db_idx": 6238, "episode_idx": 22, "frame_idx": 97, "global_frame_idx": 6238, "task_index": 4}, {"db_idx": 6239, "episode_idx": 22, "frame_idx": 98, "global_frame_idx": 6239, "task_index": 4}, {"db_idx": 6240, "episode_idx": 22, "frame_idx": 99, "global_frame_idx": 6240, "task_index": 4}, {"db_idx": 6241, "episode_idx": 22, "frame_idx": 100, "global_frame_idx": 6241, "task_index": 4}, {"db_idx": 6242, "episode_idx": 22, "frame_idx": 101, "global_frame_idx": 6242, "task_index": 4}, {"db_idx": 6243, "episode_idx": 22, "frame_idx": 102, "global_frame_idx": 6243, "task_index": 4}, {"db_idx": 6244, "episode_idx": 22, "frame_idx": 103, "global_frame_idx": 6244, "task_index": 4}, {"db_idx": 6245, "episode_idx": 22, "frame_idx": 104, "global_frame_idx": 6245, "task_index": 4}, {"db_idx": 6246, "episode_idx": 22, "frame_idx": 105, "global_frame_idx": 6246, "task_index": 4}, {"db_idx": 6247, "episode_idx": 22, "frame_idx": 106, 
"global_frame_idx": 6247, "task_index": 4}, {"db_idx": 6248, "episode_idx": 22, "frame_idx": 107, "global_frame_idx": 6248, "task_index": 4}, {"db_idx": 6249, "episode_idx": 22, "frame_idx": 108, "global_frame_idx": 6249, "task_index": 4}, {"db_idx": 6250, "episode_idx": 22, "frame_idx": 109, "global_frame_idx": 6250, "task_index": 4}, {"db_idx": 6251, "episode_idx": 22, "frame_idx": 110, "global_frame_idx": 6251, "task_index": 4}, {"db_idx": 6252, "episode_idx": 22, "frame_idx": 111, "global_frame_idx": 6252, "task_index": 4}, {"db_idx": 6253, "episode_idx": 22, "frame_idx": 112, "global_frame_idx": 6253, "task_index": 4}, {"db_idx": 6254, "episode_idx": 22, "frame_idx": 113, "global_frame_idx": 6254, "task_index": 4}, {"db_idx": 6255, "episode_idx": 22, "frame_idx": 114, "global_frame_idx": 6255, "task_index": 4}, {"db_idx": 6256, "episode_idx": 22, "frame_idx": 115, "global_frame_idx": 6256, "task_index": 4}, {"db_idx": 6257, "episode_idx": 22, "frame_idx": 116, "global_frame_idx": 6257, "task_index": 4}, {"db_idx": 6258, "episode_idx": 22, "frame_idx": 117, "global_frame_idx": 6258, "task_index": 4}, {"db_idx": 6259, "episode_idx": 22, "frame_idx": 118, "global_frame_idx": 6259, "task_index": 4}, {"db_idx": 6260, "episode_idx": 22, "frame_idx": 119, "global_frame_idx": 6260, "task_index": 4}, {"db_idx": 6261, "episode_idx": 22, "frame_idx": 120, "global_frame_idx": 6261, "task_index": 4}, {"db_idx": 6262, "episode_idx": 22, "frame_idx": 121, "global_frame_idx": 6262, "task_index": 4}, {"db_idx": 6263, "episode_idx": 22, "frame_idx": 122, "global_frame_idx": 6263, "task_index": 4}, {"db_idx": 6264, "episode_idx": 22, "frame_idx": 123, "global_frame_idx": 6264, "task_index": 4}, {"db_idx": 6265, "episode_idx": 22, "frame_idx": 124, "global_frame_idx": 6265, "task_index": 4}, {"db_idx": 6266, "episode_idx": 22, "frame_idx": 125, "global_frame_idx": 6266, "task_index": 4}, {"db_idx": 6267, "episode_idx": 22, "frame_idx": 126, "global_frame_idx": 6267, "task_index": 
4}, {"db_idx": 6268, "episode_idx": 22, "frame_idx": 127, "global_frame_idx": 6268, "task_index": 4}, {"db_idx": 6269, "episode_idx": 22, "frame_idx": 128, "global_frame_idx": 6269, "task_index": 4}, {"db_idx": 6270, "episode_idx": 22, "frame_idx": 129, "global_frame_idx": 6270, "task_index": 4}, {"db_idx": 6271, "episode_idx": 22, "frame_idx": 130, "global_frame_idx": 6271, "task_index": 4}, {"db_idx": 6272, "episode_idx": 22, "frame_idx": 131, "global_frame_idx": 6272, "task_index": 4}, {"db_idx": 6273, "episode_idx": 22, "frame_idx": 132, "global_frame_idx": 6273, "task_index": 4}, {"db_idx": 6274, "episode_idx": 22, "frame_idx": 133, "global_frame_idx": 6274, "task_index": 4}, {"db_idx": 6275, "episode_idx": 22, "frame_idx": 134, "global_frame_idx": 6275, "task_index": 4}, {"db_idx": 6276, "episode_idx": 22, "frame_idx": 135, "global_frame_idx": 6276, "task_index": 4}, {"db_idx": 6277, "episode_idx": 22, "frame_idx": 136, "global_frame_idx": 6277, "task_index": 4}, {"db_idx": 6278, "episode_idx": 22, "frame_idx": 137, "global_frame_idx": 6278, "task_index": 4}, {"db_idx": 6279, "episode_idx": 22, "frame_idx": 138, "global_frame_idx": 6279, "task_index": 4}, {"db_idx": 6280, "episode_idx": 22, "frame_idx": 139, "global_frame_idx": 6280, "task_index": 4}, {"db_idx": 6281, "episode_idx": 22, "frame_idx": 140, "global_frame_idx": 6281, "task_index": 4}, {"db_idx": 6282, "episode_idx": 22, "frame_idx": 141, "global_frame_idx": 6282, "task_index": 4}, {"db_idx": 6283, "episode_idx": 22, "frame_idx": 142, "global_frame_idx": 6283, "task_index": 4}, {"db_idx": 6284, "episode_idx": 22, "frame_idx": 143, "global_frame_idx": 6284, "task_index": 4}, {"db_idx": 6285, "episode_idx": 22, "frame_idx": 144, "global_frame_idx": 6285, "task_index": 4}, {"db_idx": 6286, "episode_idx": 22, "frame_idx": 145, "global_frame_idx": 6286, "task_index": 4}, {"db_idx": 6287, "episode_idx": 22, "frame_idx": 146, "global_frame_idx": 6287, "task_index": 4}, {"db_idx": 6288, "episode_idx": 22, 
"frame_idx": 147, "global_frame_idx": 6288, "task_index": 4}, {"db_idx": 6289, "episode_idx": 22, "frame_idx": 148, "global_frame_idx": 6289, "task_index": 4}, {"db_idx": 6290, "episode_idx": 22, "frame_idx": 149, "global_frame_idx": 6290, "task_index": 4}, {"db_idx": 6291, "episode_idx": 22, "frame_idx": 150, "global_frame_idx": 6291, "task_index": 4}, {"db_idx": 6292, "episode_idx": 22, "frame_idx": 151, "global_frame_idx": 6292, "task_index": 4}, {"db_idx": 6293, "episode_idx": 22, "frame_idx": 152, "global_frame_idx": 6293, "task_index": 4}, {"db_idx": 6294, "episode_idx": 22, "frame_idx": 153, "global_frame_idx": 6294, "task_index": 4}, {"db_idx": 6295, "episode_idx": 22, "frame_idx": 154, "global_frame_idx": 6295, "task_index": 4}, {"db_idx": 6296, "episode_idx": 22, "frame_idx": 155, "global_frame_idx": 6296, "task_index": 4}, {"db_idx": 6297, "episode_idx": 22, "frame_idx": 156, "global_frame_idx": 6297, "task_index": 4}, {"db_idx": 6298, "episode_idx": 22, "frame_idx": 157, "global_frame_idx": 6298, "task_index": 4}, {"db_idx": 6299, "episode_idx": 22, "frame_idx": 158, "global_frame_idx": 6299, "task_index": 4}, {"db_idx": 6300, "episode_idx": 22, "frame_idx": 159, "global_frame_idx": 6300, "task_index": 4}, {"db_idx": 6301, "episode_idx": 22, "frame_idx": 160, "global_frame_idx": 6301, "task_index": 4}, {"db_idx": 6302, "episode_idx": 22, "frame_idx": 161, "global_frame_idx": 6302, "task_index": 4}, {"db_idx": 6303, "episode_idx": 22, "frame_idx": 162, "global_frame_idx": 6303, "task_index": 4}, {"db_idx": 6304, "episode_idx": 22, "frame_idx": 163, "global_frame_idx": 6304, "task_index": 4}, {"db_idx": 6305, "episode_idx": 22, "frame_idx": 164, "global_frame_idx": 6305, "task_index": 4}, {"db_idx": 6306, "episode_idx": 22, "frame_idx": 165, "global_frame_idx": 6306, "task_index": 4}, {"db_idx": 6307, "episode_idx": 22, "frame_idx": 166, "global_frame_idx": 6307, "task_index": 4}, {"db_idx": 6308, "episode_idx": 22, "frame_idx": 167, "global_frame_idx": 
6308, "task_index": 4}, {"db_idx": 6309, "episode_idx": 22, "frame_idx": 168, "global_frame_idx": 6309, "task_index": 4}, {"db_idx": 6310, "episode_idx": 22, "frame_idx": 169, "global_frame_idx": 6310, "task_index": 4}, {"db_idx": 6311, "episode_idx": 22, "frame_idx": 170, "global_frame_idx": 6311, "task_index": 4}, {"db_idx": 6312, "episode_idx": 22, "frame_idx": 171, "global_frame_idx": 6312, "task_index": 4}, {"db_idx": 6313, "episode_idx": 22, "frame_idx": 172, "global_frame_idx": 6313, "task_index": 4}, {"db_idx": 6314, "episode_idx": 22, "frame_idx": 173, "global_frame_idx": 6314, "task_index": 4}, {"db_idx": 6315, "episode_idx": 22, "frame_idx": 174, "global_frame_idx": 6315, "task_index": 4}, {"db_idx": 6316, "episode_idx": 22, "frame_idx": 175, "global_frame_idx": 6316, "task_index": 4}, {"db_idx": 6317, "episode_idx": 22, "frame_idx": 176, "global_frame_idx": 6317, "task_index": 4}, {"db_idx": 6318, "episode_idx": 22, "frame_idx": 177, "global_frame_idx": 6318, "task_index": 4}, {"db_idx": 6319, "episode_idx": 22, "frame_idx": 178, "global_frame_idx": 6319, "task_index": 4}, {"db_idx": 6320, "episode_idx": 22, "frame_idx": 179, "global_frame_idx": 6320, "task_index": 4}, {"db_idx": 6321, "episode_idx": 22, "frame_idx": 180, "global_frame_idx": 6321, "task_index": 4}, {"db_idx": 6322, "episode_idx": 22, "frame_idx": 181, "global_frame_idx": 6322, "task_index": 4}, {"db_idx": 6323, "episode_idx": 22, "frame_idx": 182, "global_frame_idx": 6323, "task_index": 4}, {"db_idx": 6324, "episode_idx": 22, "frame_idx": 183, "global_frame_idx": 6324, "task_index": 4}, {"db_idx": 6325, "episode_idx": 22, "frame_idx": 184, "global_frame_idx": 6325, "task_index": 4}, {"db_idx": 6326, "episode_idx": 22, "frame_idx": 185, "global_frame_idx": 6326, "task_index": 4}, {"db_idx": 6327, "episode_idx": 22, "frame_idx": 186, "global_frame_idx": 6327, "task_index": 4}, {"db_idx": 6328, "episode_idx": 22, "frame_idx": 187, "global_frame_idx": 6328, "task_index": 4}, {"db_idx": 
6329, "episode_idx": 22, "frame_idx": 188, "global_frame_idx": 6329, "task_index": 4}, {"db_idx": 6330, "episode_idx": 22, "frame_idx": 189, "global_frame_idx": 6330, "task_index": 4}, {"db_idx": 6331, "episode_idx": 22, "frame_idx": 190, "global_frame_idx": 6331, "task_index": 4}, {"db_idx": 6332, "episode_idx": 22, "frame_idx": 191, "global_frame_idx": 6332, "task_index": 4}, {"db_idx": 6333, "episode_idx": 22, "frame_idx": 192, "global_frame_idx": 6333, "task_index": 4}, {"db_idx": 6334, "episode_idx": 22, "frame_idx": 193, "global_frame_idx": 6334, "task_index": 4}, {"db_idx": 6335, "episode_idx": 22, "frame_idx": 194, "global_frame_idx": 6335, "task_index": 4}, {"db_idx": 6336, "episode_idx": 22, "frame_idx": 195, "global_frame_idx": 6336, "task_index": 4}, {"db_idx": 6337, "episode_idx": 22, "frame_idx": 196, "global_frame_idx": 6337, "task_index": 4}, {"db_idx": 6338, "episode_idx": 22, "frame_idx": 197, "global_frame_idx": 6338, "task_index": 4}, {"db_idx": 6339, "episode_idx": 22, "frame_idx": 198, "global_frame_idx": 6339, "task_index": 4}, {"db_idx": 6340, "episode_idx": 22, "frame_idx": 199, "global_frame_idx": 6340, "task_index": 4}, {"db_idx": 6341, "episode_idx": 22, "frame_idx": 200, "global_frame_idx": 6341, "task_index": 4}, {"db_idx": 6342, "episode_idx": 22, "frame_idx": 201, "global_frame_idx": 6342, "task_index": 4}, {"db_idx": 6343, "episode_idx": 22, "frame_idx": 202, "global_frame_idx": 6343, "task_index": 4}, {"db_idx": 6344, "episode_idx": 22, "frame_idx": 203, "global_frame_idx": 6344, "task_index": 4}, {"db_idx": 6345, "episode_idx": 22, "frame_idx": 204, "global_frame_idx": 6345, "task_index": 4}, {"db_idx": 6346, "episode_idx": 22, "frame_idx": 205, "global_frame_idx": 6346, "task_index": 4}, {"db_idx": 6347, "episode_idx": 22, "frame_idx": 206, "global_frame_idx": 6347, "task_index": 4}, {"db_idx": 6348, "episode_idx": 22, "frame_idx": 207, "global_frame_idx": 6348, "task_index": 4}, {"db_idx": 6349, "episode_idx": 22, "frame_idx": 
208, "global_frame_idx": 6349, "task_index": 4}, {"db_idx": 6350, "episode_idx": 22, "frame_idx": 209, "global_frame_idx": 6350, "task_index": 4}, {"db_idx": 6351, "episode_idx": 22, "frame_idx": 210, "global_frame_idx": 6351, "task_index": 4}, {"db_idx": 6352, "episode_idx": 22, "frame_idx": 211, "global_frame_idx": 6352, "task_index": 4}, {"db_idx": 6353, "episode_idx": 22, "frame_idx": 212, "global_frame_idx": 6353, "task_index": 4}, {"db_idx": 6354, "episode_idx": 22, "frame_idx": 213, "global_frame_idx": 6354, "task_index": 4}, {"db_idx": 6355, "episode_idx": 22, "frame_idx": 214, "global_frame_idx": 6355, "task_index": 4}, {"db_idx": 6356, "episode_idx": 22, "frame_idx": 215, "global_frame_idx": 6356, "task_index": 4}, {"db_idx": 6357, "episode_idx": 22, "frame_idx": 216, "global_frame_idx": 6357, "task_index": 4}, {"db_idx": 6358, "episode_idx": 22, "frame_idx": 217, "global_frame_idx": 6358, "task_index": 4}, {"db_idx": 6359, "episode_idx": 22, "frame_idx": 218, "global_frame_idx": 6359, "task_index": 4}, {"db_idx": 6360, "episode_idx": 22, "frame_idx": 219, "global_frame_idx": 6360, "task_index": 4}, {"db_idx": 6361, "episode_idx": 22, "frame_idx": 220, "global_frame_idx": 6361, "task_index": 4}, {"db_idx": 6362, "episode_idx": 22, "frame_idx": 221, "global_frame_idx": 6362, "task_index": 4}, {"db_idx": 6363, "episode_idx": 22, "frame_idx": 222, "global_frame_idx": 6363, "task_index": 4}, {"db_idx": 6364, "episode_idx": 22, "frame_idx": 223, "global_frame_idx": 6364, "task_index": 4}, {"db_idx": 6365, "episode_idx": 22, "frame_idx": 224, "global_frame_idx": 6365, "task_index": 4}, {"db_idx": 6366, "episode_idx": 22, "frame_idx": 225, "global_frame_idx": 6366, "task_index": 4}, {"db_idx": 6367, "episode_idx": 22, "frame_idx": 226, "global_frame_idx": 6367, "task_index": 4}, {"db_idx": 6368, "episode_idx": 22, "frame_idx": 227, "global_frame_idx": 6368, "task_index": 4}, {"db_idx": 6369, "episode_idx": 22, "frame_idx": 228, "global_frame_idx": 6369, 
"task_index": 4}, {"db_idx": 6370, "episode_idx": 22, "frame_idx": 229, "global_frame_idx": 6370, "task_index": 4}, {"db_idx": 6371, "episode_idx": 22, "frame_idx": 230, "global_frame_idx": 6371, "task_index": 4}, {"db_idx": 6372, "episode_idx": 22, "frame_idx": 231, "global_frame_idx": 6372, "task_index": 4}, {"db_idx": 6373, "episode_idx": 22, "frame_idx": 232, "global_frame_idx": 6373, "task_index": 4}, {"db_idx": 6374, "episode_idx": 22, "frame_idx": 233, "global_frame_idx": 6374, "task_index": 4}, {"db_idx": 6375, "episode_idx": 22, "frame_idx": 234, "global_frame_idx": 6375, "task_index": 4}, {"db_idx": 6376, "episode_idx": 22, "frame_idx": 235, "global_frame_idx": 6376, "task_index": 4}, {"db_idx": 6377, "episode_idx": 22, "frame_idx": 236, "global_frame_idx": 6377, "task_index": 4}, {"db_idx": 6378, "episode_idx": 22, "frame_idx": 237, "global_frame_idx": 6378, "task_index": 4}, {"db_idx": 6379, "episode_idx": 22, "frame_idx": 238, "global_frame_idx": 6379, "task_index": 4}, {"db_idx": 6380, "episode_idx": 22, "frame_idx": 239, "global_frame_idx": 6380, "task_index": 4}, {"db_idx": 6381, "episode_idx": 22, "frame_idx": 240, "global_frame_idx": 6381, "task_index": 4}, {"db_idx": 6382, "episode_idx": 22, "frame_idx": 241, "global_frame_idx": 6382, "task_index": 4}, {"db_idx": 6383, "episode_idx": 22, "frame_idx": 242, "global_frame_idx": 6383, "task_index": 4}, {"db_idx": 6384, "episode_idx": 22, "frame_idx": 243, "global_frame_idx": 6384, "task_index": 4}, {"db_idx": 6385, "episode_idx": 22, "frame_idx": 244, "global_frame_idx": 6385, "task_index": 4}, {"db_idx": 6386, "episode_idx": 22, "frame_idx": 245, "global_frame_idx": 6386, "task_index": 4}, {"db_idx": 6387, "episode_idx": 22, "frame_idx": 246, "global_frame_idx": 6387, "task_index": 4}, {"db_idx": 6388, "episode_idx": 23, "frame_idx": 0, "global_frame_idx": 6388, "task_index": 4}, {"db_idx": 6389, "episode_idx": 23, "frame_idx": 1, "global_frame_idx": 6389, "task_index": 4}, {"db_idx": 6390, 
"episode_idx": 23, "frame_idx": 2, "global_frame_idx": 6390, "task_index": 4}, {"db_idx": 6391, "episode_idx": 23, "frame_idx": 3, "global_frame_idx": 6391, "task_index": 4}, {"db_idx": 6392, "episode_idx": 23, "frame_idx": 4, "global_frame_idx": 6392, "task_index": 4}, {"db_idx": 6393, "episode_idx": 23, "frame_idx": 5, "global_frame_idx": 6393, "task_index": 4}, {"db_idx": 6394, "episode_idx": 23, "frame_idx": 6, "global_frame_idx": 6394, "task_index": 4}, {"db_idx": 6395, "episode_idx": 23, "frame_idx": 7, "global_frame_idx": 6395, "task_index": 4}, {"db_idx": 6396, "episode_idx": 23, "frame_idx": 8, "global_frame_idx": 6396, "task_index": 4}, {"db_idx": 6397, "episode_idx": 23, "frame_idx": 9, "global_frame_idx": 6397, "task_index": 4}, {"db_idx": 6398, "episode_idx": 23, "frame_idx": 10, "global_frame_idx": 6398, "task_index": 4}, {"db_idx": 6399, "episode_idx": 23, "frame_idx": 11, "global_frame_idx": 6399, "task_index": 4}, {"db_idx": 6400, "episode_idx": 23, "frame_idx": 12, "global_frame_idx": 6400, "task_index": 4}, {"db_idx": 6401, "episode_idx": 23, "frame_idx": 13, "global_frame_idx": 6401, "task_index": 4}, {"db_idx": 6402, "episode_idx": 23, "frame_idx": 14, "global_frame_idx": 6402, "task_index": 4}, {"db_idx": 6403, "episode_idx": 23, "frame_idx": 15, "global_frame_idx": 6403, "task_index": 4}, {"db_idx": 6404, "episode_idx": 23, "frame_idx": 16, "global_frame_idx": 6404, "task_index": 4}, {"db_idx": 6405, "episode_idx": 23, "frame_idx": 17, "global_frame_idx": 6405, "task_index": 4}, {"db_idx": 6406, "episode_idx": 23, "frame_idx": 18, "global_frame_idx": 6406, "task_index": 4}, {"db_idx": 6407, "episode_idx": 23, "frame_idx": 19, "global_frame_idx": 6407, "task_index": 4}, {"db_idx": 6408, "episode_idx": 23, "frame_idx": 20, "global_frame_idx": 6408, "task_index": 4}, {"db_idx": 6409, "episode_idx": 23, "frame_idx": 21, "global_frame_idx": 6409, "task_index": 4}, {"db_idx": 6410, "episode_idx": 23, "frame_idx": 22, "global_frame_idx": 6410, 
"task_index": 4}, {"db_idx": 6411, "episode_idx": 23, "frame_idx": 23, "global_frame_idx": 6411, "task_index": 4}, {"db_idx": 6412, "episode_idx": 23, "frame_idx": 24, "global_frame_idx": 6412, "task_index": 4}, {"db_idx": 6413, "episode_idx": 23, "frame_idx": 25, "global_frame_idx": 6413, "task_index": 4}, {"db_idx": 6414, "episode_idx": 23, "frame_idx": 26, "global_frame_idx": 6414, "task_index": 4}, {"db_idx": 6415, "episode_idx": 23, "frame_idx": 27, "global_frame_idx": 6415, "task_index": 4}, {"db_idx": 6416, "episode_idx": 23, "frame_idx": 28, "global_frame_idx": 6416, "task_index": 4}, {"db_idx": 6417, "episode_idx": 23, "frame_idx": 29, "global_frame_idx": 6417, "task_index": 4}, {"db_idx": 6418, "episode_idx": 23, "frame_idx": 30, "global_frame_idx": 6418, "task_index": 4}, {"db_idx": 6419, "episode_idx": 23, "frame_idx": 31, "global_frame_idx": 6419, "task_index": 4}, {"db_idx": 6420, "episode_idx": 23, "frame_idx": 32, "global_frame_idx": 6420, "task_index": 4}, {"db_idx": 6421, "episode_idx": 23, "frame_idx": 33, "global_frame_idx": 6421, "task_index": 4}, {"db_idx": 6422, "episode_idx": 23, "frame_idx": 34, "global_frame_idx": 6422, "task_index": 4}, {"db_idx": 6423, "episode_idx": 23, "frame_idx": 35, "global_frame_idx": 6423, "task_index": 4}, {"db_idx": 6424, "episode_idx": 23, "frame_idx": 36, "global_frame_idx": 6424, "task_index": 4}, {"db_idx": 6425, "episode_idx": 23, "frame_idx": 37, "global_frame_idx": 6425, "task_index": 4}, {"db_idx": 6426, "episode_idx": 23, "frame_idx": 38, "global_frame_idx": 6426, "task_index": 4}, {"db_idx": 6427, "episode_idx": 23, "frame_idx": 39, "global_frame_idx": 6427, "task_index": 4}, {"db_idx": 6428, "episode_idx": 23, "frame_idx": 40, "global_frame_idx": 6428, "task_index": 4}, {"db_idx": 6429, "episode_idx": 23, "frame_idx": 41, "global_frame_idx": 6429, "task_index": 4}, {"db_idx": 6430, "episode_idx": 23, "frame_idx": 42, "global_frame_idx": 6430, "task_index": 4}, {"db_idx": 6431, "episode_idx": 23, 
"frame_idx": 43, "global_frame_idx": 6431, "task_index": 4}, {"db_idx": 6432, "episode_idx": 23, "frame_idx": 44, "global_frame_idx": 6432, "task_index": 4}, {"db_idx": 6433, "episode_idx": 23, "frame_idx": 45, "global_frame_idx": 6433, "task_index": 4}, {"db_idx": 6434, "episode_idx": 23, "frame_idx": 46, "global_frame_idx": 6434, "task_index": 4}, {"db_idx": 6435, "episode_idx": 23, "frame_idx": 47, "global_frame_idx": 6435, "task_index": 4}, {"db_idx": 6436, "episode_idx": 23, "frame_idx": 48, "global_frame_idx": 6436, "task_index": 4}, {"db_idx": 6437, "episode_idx": 23, "frame_idx": 49, "global_frame_idx": 6437, "task_index": 4}, {"db_idx": 6438, "episode_idx": 23, "frame_idx": 50, "global_frame_idx": 6438, "task_index": 4}, {"db_idx": 6439, "episode_idx": 23, "frame_idx": 51, "global_frame_idx": 6439, "task_index": 4}, {"db_idx": 6440, "episode_idx": 23, "frame_idx": 52, "global_frame_idx": 6440, "task_index": 4}, {"db_idx": 6441, "episode_idx": 23, "frame_idx": 53, "global_frame_idx": 6441, "task_index": 4}, {"db_idx": 6442, "episode_idx": 23, "frame_idx": 54, "global_frame_idx": 6442, "task_index": 4}, {"db_idx": 6443, "episode_idx": 23, "frame_idx": 55, "global_frame_idx": 6443, "task_index": 4}, {"db_idx": 6444, "episode_idx": 23, "frame_idx": 56, "global_frame_idx": 6444, "task_index": 4}, {"db_idx": 6445, "episode_idx": 23, "frame_idx": 57, "global_frame_idx": 6445, "task_index": 4}, {"db_idx": 6446, "episode_idx": 23, "frame_idx": 58, "global_frame_idx": 6446, "task_index": 4}, {"db_idx": 6447, "episode_idx": 23, "frame_idx": 59, "global_frame_idx": 6447, "task_index": 4}, {"db_idx": 6448, "episode_idx": 23, "frame_idx": 60, "global_frame_idx": 6448, "task_index": 4}, {"db_idx": 6449, "episode_idx": 23, "frame_idx": 61, "global_frame_idx": 6449, "task_index": 4}, {"db_idx": 6450, "episode_idx": 23, "frame_idx": 62, "global_frame_idx": 6450, "task_index": 4}, {"db_idx": 6451, "episode_idx": 23, "frame_idx": 63, "global_frame_idx": 6451, "task_index": 
4}, {"db_idx": 6452, "episode_idx": 23, "frame_idx": 64, "global_frame_idx": 6452, "task_index": 4}, {"db_idx": 6453, "episode_idx": 23, "frame_idx": 65, "global_frame_idx": 6453, "task_index": 4}, {"db_idx": 6454, "episode_idx": 23, "frame_idx": 66, "global_frame_idx": 6454, "task_index": 4}, {"db_idx": 6455, "episode_idx": 23, "frame_idx": 67, "global_frame_idx": 6455, "task_index": 4}, {"db_idx": 6456, "episode_idx": 23, "frame_idx": 68, "global_frame_idx": 6456, "task_index": 4}, {"db_idx": 6457, "episode_idx": 23, "frame_idx": 69, "global_frame_idx": 6457, "task_index": 4}, {"db_idx": 6458, "episode_idx": 23, "frame_idx": 70, "global_frame_idx": 6458, "task_index": 4}, {"db_idx": 6459, "episode_idx": 23, "frame_idx": 71, "global_frame_idx": 6459, "task_index": 4}, {"db_idx": 6460, "episode_idx": 23, "frame_idx": 72, "global_frame_idx": 6460, "task_index": 4}, {"db_idx": 6461, "episode_idx": 23, "frame_idx": 73, "global_frame_idx": 6461, "task_index": 4}, {"db_idx": 6462, "episode_idx": 23, "frame_idx": 74, "global_frame_idx": 6462, "task_index": 4}, {"db_idx": 6463, "episode_idx": 23, "frame_idx": 75, "global_frame_idx": 6463, "task_index": 4}, {"db_idx": 6464, "episode_idx": 23, "frame_idx": 76, "global_frame_idx": 6464, "task_index": 4}, {"db_idx": 6465, "episode_idx": 23, "frame_idx": 77, "global_frame_idx": 6465, "task_index": 4}, {"db_idx": 6466, "episode_idx": 23, "frame_idx": 78, "global_frame_idx": 6466, "task_index": 4}, {"db_idx": 6467, "episode_idx": 23, "frame_idx": 79, "global_frame_idx": 6467, "task_index": 4}, {"db_idx": 6468, "episode_idx": 23, "frame_idx": 80, "global_frame_idx": 6468, "task_index": 4}, {"db_idx": 6469, "episode_idx": 23, "frame_idx": 81, "global_frame_idx": 6469, "task_index": 4}, {"db_idx": 6470, "episode_idx": 23, "frame_idx": 82, "global_frame_idx": 6470, "task_index": 4}, {"db_idx": 6471, "episode_idx": 23, "frame_idx": 83, "global_frame_idx": 6471, "task_index": 4}, {"db_idx": 6472, "episode_idx": 23, "frame_idx": 84, 
"global_frame_idx": 6472, "task_index": 4}, {"db_idx": 6473, "episode_idx": 23, "frame_idx": 85, "global_frame_idx": 6473, "task_index": 4}, {"db_idx": 6474, "episode_idx": 23, "frame_idx": 86, "global_frame_idx": 6474, "task_index": 4}, {"db_idx": 6475, "episode_idx": 23, "frame_idx": 87, "global_frame_idx": 6475, "task_index": 4}, {"db_idx": 6476, "episode_idx": 23, "frame_idx": 88, "global_frame_idx": 6476, "task_index": 4}, {"db_idx": 6477, "episode_idx": 23, "frame_idx": 89, "global_frame_idx": 6477, "task_index": 4}, {"db_idx": 6478, "episode_idx": 23, "frame_idx": 90, "global_frame_idx": 6478, "task_index": 4}, {"db_idx": 6479, "episode_idx": 23, "frame_idx": 91, "global_frame_idx": 6479, "task_index": 4}, {"db_idx": 6480, "episode_idx": 23, "frame_idx": 92, "global_frame_idx": 6480, "task_index": 4}, {"db_idx": 6481, "episode_idx": 23, "frame_idx": 93, "global_frame_idx": 6481, "task_index": 4}, {"db_idx": 6482, "episode_idx": 23, "frame_idx": 94, "global_frame_idx": 6482, "task_index": 4}, {"db_idx": 6483, "episode_idx": 23, "frame_idx": 95, "global_frame_idx": 6483, "task_index": 4}, {"db_idx": 6484, "episode_idx": 23, "frame_idx": 96, "global_frame_idx": 6484, "task_index": 4}, {"db_idx": 6485, "episode_idx": 23, "frame_idx": 97, "global_frame_idx": 6485, "task_index": 4}, {"db_idx": 6486, "episode_idx": 23, "frame_idx": 98, "global_frame_idx": 6486, "task_index": 4}, {"db_idx": 6487, "episode_idx": 23, "frame_idx": 99, "global_frame_idx": 6487, "task_index": 4}, {"db_idx": 6488, "episode_idx": 23, "frame_idx": 100, "global_frame_idx": 6488, "task_index": 4}, {"db_idx": 6489, "episode_idx": 23, "frame_idx": 101, "global_frame_idx": 6489, "task_index": 4}, {"db_idx": 6490, "episode_idx": 23, "frame_idx": 102, "global_frame_idx": 6490, "task_index": 4}, {"db_idx": 6491, "episode_idx": 23, "frame_idx": 103, "global_frame_idx": 6491, "task_index": 4}, {"db_idx": 6492, "episode_idx": 23, "frame_idx": 104, "global_frame_idx": 6492, "task_index": 4}, {"db_idx": 
6493, "episode_idx": 23, "frame_idx": 105, "global_frame_idx": 6493, "task_index": 4}, {"db_idx": 6494, "episode_idx": 23, "frame_idx": 106, "global_frame_idx": 6494, "task_index": 4}, {"db_idx": 6495, "episode_idx": 23, "frame_idx": 107, "global_frame_idx": 6495, "task_index": 4}, {"db_idx": 6496, "episode_idx": 23, "frame_idx": 108, "global_frame_idx": 6496, "task_index": 4}, {"db_idx": 6497, "episode_idx": 23, "frame_idx": 109, "global_frame_idx": 6497, "task_index": 4}, {"db_idx": 6498, "episode_idx": 23, "frame_idx": 110, "global_frame_idx": 6498, "task_index": 4}, {"db_idx": 6499, "episode_idx": 23, "frame_idx": 111, "global_frame_idx": 6499, "task_index": 4}, {"db_idx": 6500, "episode_idx": 23, "frame_idx": 112, "global_frame_idx": 6500, "task_index": 4}, {"db_idx": 6501, "episode_idx": 23, "frame_idx": 113, "global_frame_idx": 6501, "task_index": 4}, {"db_idx": 6502, "episode_idx": 23, "frame_idx": 114, "global_frame_idx": 6502, "task_index": 4}, {"db_idx": 6503, "episode_idx": 23, "frame_idx": 115, "global_frame_idx": 6503, "task_index": 4}, {"db_idx": 6504, "episode_idx": 23, "frame_idx": 116, "global_frame_idx": 6504, "task_index": 4}, {"db_idx": 6505, "episode_idx": 23, "frame_idx": 117, "global_frame_idx": 6505, "task_index": 4}, {"db_idx": 6506, "episode_idx": 23, "frame_idx": 118, "global_frame_idx": 6506, "task_index": 4}, {"db_idx": 6507, "episode_idx": 23, "frame_idx": 119, "global_frame_idx": 6507, "task_index": 4}, {"db_idx": 6508, "episode_idx": 23, "frame_idx": 120, "global_frame_idx": 6508, "task_index": 4}, {"db_idx": 6509, "episode_idx": 23, "frame_idx": 121, "global_frame_idx": 6509, "task_index": 4}, {"db_idx": 6510, "episode_idx": 23, "frame_idx": 122, "global_frame_idx": 6510, "task_index": 4}, {"db_idx": 6511, "episode_idx": 23, "frame_idx": 123, "global_frame_idx": 6511, "task_index": 4}, {"db_idx": 6512, "episode_idx": 23, "frame_idx": 124, "global_frame_idx": 6512, "task_index": 4}, {"db_idx": 6513, "episode_idx": 23, "frame_idx": 
125, "global_frame_idx": 6513, "task_index": 4}, {"db_idx": 6514, "episode_idx": 23, "frame_idx": 126, "global_frame_idx": 6514, "task_index": 4}, {"db_idx": 6515, "episode_idx": 23, "frame_idx": 127, "global_frame_idx": 6515, "task_index": 4}, {"db_idx": 6516, "episode_idx": 23, "frame_idx": 128, "global_frame_idx": 6516, "task_index": 4}, {"db_idx": 6517, "episode_idx": 23, "frame_idx": 129, "global_frame_idx": 6517, "task_index": 4}, {"db_idx": 6518, "episode_idx": 23, "frame_idx": 130, "global_frame_idx": 6518, "task_index": 4}, {"db_idx": 6519, "episode_idx": 23, "frame_idx": 131, "global_frame_idx": 6519, "task_index": 4}, {"db_idx": 6520, "episode_idx": 23, "frame_idx": 132, "global_frame_idx": 6520, "task_index": 4}, {"db_idx": 6521, "episode_idx": 23, "frame_idx": 133, "global_frame_idx": 6521, "task_index": 4}, {"db_idx": 6522, "episode_idx": 23, "frame_idx": 134, "global_frame_idx": 6522, "task_index": 4}, {"db_idx": 6523, "episode_idx": 23, "frame_idx": 135, "global_frame_idx": 6523, "task_index": 4}, {"db_idx": 6524, "episode_idx": 23, "frame_idx": 136, "global_frame_idx": 6524, "task_index": 4}, {"db_idx": 6525, "episode_idx": 23, "frame_idx": 137, "global_frame_idx": 6525, "task_index": 4}, {"db_idx": 6526, "episode_idx": 23, "frame_idx": 138, "global_frame_idx": 6526, "task_index": 4}, {"db_idx": 6527, "episode_idx": 23, "frame_idx": 139, "global_frame_idx": 6527, "task_index": 4}, {"db_idx": 6528, "episode_idx": 23, "frame_idx": 140, "global_frame_idx": 6528, "task_index": 4}, {"db_idx": 6529, "episode_idx": 23, "frame_idx": 141, "global_frame_idx": 6529, "task_index": 4}, {"db_idx": 6530, "episode_idx": 23, "frame_idx": 142, "global_frame_idx": 6530, "task_index": 4}, {"db_idx": 6531, "episode_idx": 23, "frame_idx": 143, "global_frame_idx": 6531, "task_index": 4}, {"db_idx": 6532, "episode_idx": 23, "frame_idx": 144, "global_frame_idx": 6532, "task_index": 4}, {"db_idx": 6533, "episode_idx": 23, "frame_idx": 145, "global_frame_idx": 6533, 
"task_index": 4}, {"db_idx": 6534, "episode_idx": 23, "frame_idx": 146, "global_frame_idx": 6534, "task_index": 4}, {"db_idx": 6535, "episode_idx": 23, "frame_idx": 147, "global_frame_idx": 6535, "task_index": 4}, {"db_idx": 6536, "episode_idx": 23, "frame_idx": 148, "global_frame_idx": 6536, "task_index": 4}, {"db_idx": 6537, "episode_idx": 23, "frame_idx": 149, "global_frame_idx": 6537, "task_index": 4}, {"db_idx": 6538, "episode_idx": 23, "frame_idx": 150, "global_frame_idx": 6538, "task_index": 4}, {"db_idx": 6539, "episode_idx": 23, "frame_idx": 151, "global_frame_idx": 6539, "task_index": 4}, {"db_idx": 6540, "episode_idx": 23, "frame_idx": 152, "global_frame_idx": 6540, "task_index": 4}, {"db_idx": 6541, "episode_idx": 23, "frame_idx": 153, "global_frame_idx": 6541, "task_index": 4}, {"db_idx": 6542, "episode_idx": 23, "frame_idx": 154, "global_frame_idx": 6542, "task_index": 4}, {"db_idx": 6543, "episode_idx": 23, "frame_idx": 155, "global_frame_idx": 6543, "task_index": 4}, {"db_idx": 6544, "episode_idx": 23, "frame_idx": 156, "global_frame_idx": 6544, "task_index": 4}, {"db_idx": 6545, "episode_idx": 23, "frame_idx": 157, "global_frame_idx": 6545, "task_index": 4}, {"db_idx": 6546, "episode_idx": 23, "frame_idx": 158, "global_frame_idx": 6546, "task_index": 4}, {"db_idx": 6547, "episode_idx": 23, "frame_idx": 159, "global_frame_idx": 6547, "task_index": 4}, {"db_idx": 6548, "episode_idx": 23, "frame_idx": 160, "global_frame_idx": 6548, "task_index": 4}, {"db_idx": 6549, "episode_idx": 23, "frame_idx": 161, "global_frame_idx": 6549, "task_index": 4}, {"db_idx": 6550, "episode_idx": 23, "frame_idx": 162, "global_frame_idx": 6550, "task_index": 4}, {"db_idx": 6551, "episode_idx": 23, "frame_idx": 163, "global_frame_idx": 6551, "task_index": 4}, {"db_idx": 6552, "episode_idx": 23, "frame_idx": 164, "global_frame_idx": 6552, "task_index": 4}, {"db_idx": 6553, "episode_idx": 23, "frame_idx": 165, "global_frame_idx": 6553, "task_index": 4}, {"db_idx": 6554, 
"episode_idx": 23, "frame_idx": 166, "global_frame_idx": 6554, "task_index": 4}, {"db_idx": 6555, "episode_idx": 23, "frame_idx": 167, "global_frame_idx": 6555, "task_index": 4}, {"db_idx": 6556, "episode_idx": 23, "frame_idx": 168, "global_frame_idx": 6556, "task_index": 4}, {"db_idx": 6557, "episode_idx": 23, "frame_idx": 169, "global_frame_idx": 6557, "task_index": 4}, {"db_idx": 6558, "episode_idx": 23, "frame_idx": 170, "global_frame_idx": 6558, "task_index": 4}, {"db_idx": 6559, "episode_idx": 23, "frame_idx": 171, "global_frame_idx": 6559, "task_index": 4}, {"db_idx": 6560, "episode_idx": 23, "frame_idx": 172, "global_frame_idx": 6560, "task_index": 4}, {"db_idx": 6561, "episode_idx": 23, "frame_idx": 173, "global_frame_idx": 6561, "task_index": 4}, {"db_idx": 6562, "episode_idx": 23, "frame_idx": 174, "global_frame_idx": 6562, "task_index": 4}, {"db_idx": 6563, "episode_idx": 23, "frame_idx": 175, "global_frame_idx": 6563, "task_index": 4}, {"db_idx": 6564, "episode_idx": 23, "frame_idx": 176, "global_frame_idx": 6564, "task_index": 4}, {"db_idx": 6565, "episode_idx": 23, "frame_idx": 177, "global_frame_idx": 6565, "task_index": 4}, {"db_idx": 6566, "episode_idx": 23, "frame_idx": 178, "global_frame_idx": 6566, "task_index": 4}, {"db_idx": 6567, "episode_idx": 23, "frame_idx": 179, "global_frame_idx": 6567, "task_index": 4}, {"db_idx": 6568, "episode_idx": 23, "frame_idx": 180, "global_frame_idx": 6568, "task_index": 4}, {"db_idx": 6569, "episode_idx": 23, "frame_idx": 181, "global_frame_idx": 6569, "task_index": 4}, {"db_idx": 6570, "episode_idx": 23, "frame_idx": 182, "global_frame_idx": 6570, "task_index": 4}, {"db_idx": 6571, "episode_idx": 23, "frame_idx": 183, "global_frame_idx": 6571, "task_index": 4}, {"db_idx": 6572, "episode_idx": 23, "frame_idx": 184, "global_frame_idx": 6572, "task_index": 4}, {"db_idx": 6573, "episode_idx": 23, "frame_idx": 185, "global_frame_idx": 6573, "task_index": 4}, {"db_idx": 6574, "episode_idx": 23, "frame_idx": 186, 
"global_frame_idx": 6574, "task_index": 4}, {"db_idx": 6575, "episode_idx": 23, "frame_idx": 187, "global_frame_idx": 6575, "task_index": 4}, {"db_idx": 6576, "episode_idx": 23, "frame_idx": 188, "global_frame_idx": 6576, "task_index": 4}, {"db_idx": 6577, "episode_idx": 23, "frame_idx": 189, "global_frame_idx": 6577, "task_index": 4}, {"db_idx": 6578, "episode_idx": 23, "frame_idx": 190, "global_frame_idx": 6578, "task_index": 4}, {"db_idx": 6579, "episode_idx": 23, "frame_idx": 191, "global_frame_idx": 6579, "task_index": 4}, {"db_idx": 6580, "episode_idx": 23, "frame_idx": 192, "global_frame_idx": 6580, "task_index": 4}, {"db_idx": 6581, "episode_idx": 23, "frame_idx": 193, "global_frame_idx": 6581, "task_index": 4}, {"db_idx": 6582, "episode_idx": 23, "frame_idx": 194, "global_frame_idx": 6582, "task_index": 4}, {"db_idx": 6583, "episode_idx": 23, "frame_idx": 195, "global_frame_idx": 6583, "task_index": 4}, {"db_idx": 6584, "episode_idx": 23, "frame_idx": 196, "global_frame_idx": 6584, "task_index": 4}, {"db_idx": 6585, "episode_idx": 23, "frame_idx": 197, "global_frame_idx": 6585, "task_index": 4}, {"db_idx": 6586, "episode_idx": 23, "frame_idx": 198, "global_frame_idx": 6586, "task_index": 4}, {"db_idx": 6587, "episode_idx": 23, "frame_idx": 199, "global_frame_idx": 6587, "task_index": 4}, {"db_idx": 6588, "episode_idx": 23, "frame_idx": 200, "global_frame_idx": 6588, "task_index": 4}, {"db_idx": 6589, "episode_idx": 23, "frame_idx": 201, "global_frame_idx": 6589, "task_index": 4}, {"db_idx": 6590, "episode_idx": 23, "frame_idx": 202, "global_frame_idx": 6590, "task_index": 4}, {"db_idx": 6591, "episode_idx": 23, "frame_idx": 203, "global_frame_idx": 6591, "task_index": 4}, {"db_idx": 6592, "episode_idx": 23, "frame_idx": 204, "global_frame_idx": 6592, "task_index": 4}, {"db_idx": 6593, "episode_idx": 23, "frame_idx": 205, "global_frame_idx": 6593, "task_index": 4}, {"db_idx": 6594, "episode_idx": 23, "frame_idx": 206, "global_frame_idx": 6594, "task_index": 
4}, {"db_idx": 6595, "episode_idx": 23, "frame_idx": 207, "global_frame_idx": 6595, "task_index": 4}, {"db_idx": 6596, "episode_idx": 23, "frame_idx": 208, "global_frame_idx": 6596, "task_index": 4}, {"db_idx": 6597, "episode_idx": 23, "frame_idx": 209, "global_frame_idx": 6597, "task_index": 4}, {"db_idx": 6598, "episode_idx": 23, "frame_idx": 210, "global_frame_idx": 6598, "task_index": 4}, {"db_idx": 6599, "episode_idx": 23, "frame_idx": 211, "global_frame_idx": 6599, "task_index": 4}, {"db_idx": 6600, "episode_idx": 23, "frame_idx": 212, "global_frame_idx": 6600, "task_index": 4}, {"db_idx": 6601, "episode_idx": 23, "frame_idx": 213, "global_frame_idx": 6601, "task_index": 4}, {"db_idx": 6602, "episode_idx": 23, "frame_idx": 214, "global_frame_idx": 6602, "task_index": 4}, {"db_idx": 6603, "episode_idx": 23, "frame_idx": 215, "global_frame_idx": 6603, "task_index": 4}, {"db_idx": 6604, "episode_idx": 23, "frame_idx": 216, "global_frame_idx": 6604, "task_index": 4}, {"db_idx": 6605, "episode_idx": 23, "frame_idx": 217, "global_frame_idx": 6605, "task_index": 4}, {"db_idx": 6606, "episode_idx": 23, "frame_idx": 218, "global_frame_idx": 6606, "task_index": 4}, {"db_idx": 6607, "episode_idx": 23, "frame_idx": 219, "global_frame_idx": 6607, "task_index": 4}, {"db_idx": 6608, "episode_idx": 23, "frame_idx": 220, "global_frame_idx": 6608, "task_index": 4}, {"db_idx": 6609, "episode_idx": 23, "frame_idx": 221, "global_frame_idx": 6609, "task_index": 4}, {"db_idx": 6610, "episode_idx": 23, "frame_idx": 222, "global_frame_idx": 6610, "task_index": 4}, {"db_idx": 6611, "episode_idx": 23, "frame_idx": 223, "global_frame_idx": 6611, "task_index": 4}, {"db_idx": 6612, "episode_idx": 23, "frame_idx": 224, "global_frame_idx": 6612, "task_index": 4}, {"db_idx": 6613, "episode_idx": 23, "frame_idx": 225, "global_frame_idx": 6613, "task_index": 4}, {"db_idx": 6614, "episode_idx": 23, "frame_idx": 226, "global_frame_idx": 6614, "task_index": 4}, {"db_idx": 6615, "episode_idx": 23, 
"frame_idx": 227, "global_frame_idx": 6615, "task_index": 4}, {"db_idx": 6616, "episode_idx": 23, "frame_idx": 228, "global_frame_idx": 6616, "task_index": 4}, {"db_idx": 6617, "episode_idx": 23, "frame_idx": 229, "global_frame_idx": 6617, "task_index": 4}, {"db_idx": 6618, "episode_idx": 23, "frame_idx": 230, "global_frame_idx": 6618, "task_index": 4}, {"db_idx": 6619, "episode_idx": 23, "frame_idx": 231, "global_frame_idx": 6619, "task_index": 4}, {"db_idx": 6620, "episode_idx": 23, "frame_idx": 232, "global_frame_idx": 6620, "task_index": 4}, {"db_idx": 6621, "episode_idx": 23, "frame_idx": 233, "global_frame_idx": 6621, "task_index": 4}, {"db_idx": 6622, "episode_idx": 23, "frame_idx": 234, "global_frame_idx": 6622, "task_index": 4}, {"db_idx": 6623, "episode_idx": 23, "frame_idx": 235, "global_frame_idx": 6623, "task_index": 4}, {"db_idx": 6624, "episode_idx": 23, "frame_idx": 236, "global_frame_idx": 6624, "task_index": 4}, {"db_idx": 6625, "episode_idx": 24, "frame_idx": 0, "global_frame_idx": 6625, "task_index": 4}, {"db_idx": 6626, "episode_idx": 24, "frame_idx": 1, "global_frame_idx": 6626, "task_index": 4}, {"db_idx": 6627, "episode_idx": 24, "frame_idx": 2, "global_frame_idx": 6627, "task_index": 4}, {"db_idx": 6628, "episode_idx": 24, "frame_idx": 3, "global_frame_idx": 6628, "task_index": 4}, {"db_idx": 6629, "episode_idx": 24, "frame_idx": 4, "global_frame_idx": 6629, "task_index": 4}, {"db_idx": 6630, "episode_idx": 24, "frame_idx": 5, "global_frame_idx": 6630, "task_index": 4}, {"db_idx": 6631, "episode_idx": 24, "frame_idx": 6, "global_frame_idx": 6631, "task_index": 4}, {"db_idx": 6632, "episode_idx": 24, "frame_idx": 7, "global_frame_idx": 6632, "task_index": 4}, {"db_idx": 6633, "episode_idx": 24, "frame_idx": 8, "global_frame_idx": 6633, "task_index": 4}, {"db_idx": 6634, "episode_idx": 24, "frame_idx": 9, "global_frame_idx": 6634, "task_index": 4}, {"db_idx": 6635, "episode_idx": 24, "frame_idx": 10, "global_frame_idx": 6635, "task_index": 
4}, {"db_idx": 6636, "episode_idx": 24, "frame_idx": 11, "global_frame_idx": 6636, "task_index": 4}, {"db_idx": 6637, "episode_idx": 24, "frame_idx": 12, "global_frame_idx": 6637, "task_index": 4}, {"db_idx": 6638, "episode_idx": 24, "frame_idx": 13, "global_frame_idx": 6638, "task_index": 4}, {"db_idx": 6639, "episode_idx": 24, "frame_idx": 14, "global_frame_idx": 6639, "task_index": 4}, {"db_idx": 6640, "episode_idx": 24, "frame_idx": 15, "global_frame_idx": 6640, "task_index": 4}, {"db_idx": 6641, "episode_idx": 24, "frame_idx": 16, "global_frame_idx": 6641, "task_index": 4}, {"db_idx": 6642, "episode_idx": 24, "frame_idx": 17, "global_frame_idx": 6642, "task_index": 4}, {"db_idx": 6643, "episode_idx": 24, "frame_idx": 18, "global_frame_idx": 6643, "task_index": 4}, {"db_idx": 6644, "episode_idx": 24, "frame_idx": 19, "global_frame_idx": 6644, "task_index": 4}, {"db_idx": 6645, "episode_idx": 24, "frame_idx": 20, "global_frame_idx": 6645, "task_index": 4}, {"db_idx": 6646, "episode_idx": 24, "frame_idx": 21, "global_frame_idx": 6646, "task_index": 4}, {"db_idx": 6647, "episode_idx": 24, "frame_idx": 22, "global_frame_idx": 6647, "task_index": 4}, {"db_idx": 6648, "episode_idx": 24, "frame_idx": 23, "global_frame_idx": 6648, "task_index": 4}, {"db_idx": 6649, "episode_idx": 24, "frame_idx": 24, "global_frame_idx": 6649, "task_index": 4}, {"db_idx": 6650, "episode_idx": 24, "frame_idx": 25, "global_frame_idx": 6650, "task_index": 4}, {"db_idx": 6651, "episode_idx": 24, "frame_idx": 26, "global_frame_idx": 6651, "task_index": 4}, {"db_idx": 6652, "episode_idx": 24, "frame_idx": 27, "global_frame_idx": 6652, "task_index": 4}, {"db_idx": 6653, "episode_idx": 24, "frame_idx": 28, "global_frame_idx": 6653, "task_index": 4}, {"db_idx": 6654, "episode_idx": 24, "frame_idx": 29, "global_frame_idx": 6654, "task_index": 4}, {"db_idx": 6655, "episode_idx": 24, "frame_idx": 30, "global_frame_idx": 6655, "task_index": 4}, {"db_idx": 6656, "episode_idx": 24, "frame_idx": 31, 
"global_frame_idx": 6656, "task_index": 4}, {"db_idx": 6657, "episode_idx": 24, "frame_idx": 32, "global_frame_idx": 6657, "task_index": 4}, {"db_idx": 6658, "episode_idx": 24, "frame_idx": 33, "global_frame_idx": 6658, "task_index": 4}, {"db_idx": 6659, "episode_idx": 24, "frame_idx": 34, "global_frame_idx": 6659, "task_index": 4}, {"db_idx": 6660, "episode_idx": 24, "frame_idx": 35, "global_frame_idx": 6660, "task_index": 4}, {"db_idx": 6661, "episode_idx": 24, "frame_idx": 36, "global_frame_idx": 6661, "task_index": 4}, {"db_idx": 6662, "episode_idx": 24, "frame_idx": 37, "global_frame_idx": 6662, "task_index": 4}, {"db_idx": 6663, "episode_idx": 24, "frame_idx": 38, "global_frame_idx": 6663, "task_index": 4}, {"db_idx": 6664, "episode_idx": 24, "frame_idx": 39, "global_frame_idx": 6664, "task_index": 4}, {"db_idx": 6665, "episode_idx": 24, "frame_idx": 40, "global_frame_idx": 6665, "task_index": 4}, {"db_idx": 6666, "episode_idx": 24, "frame_idx": 41, "global_frame_idx": 6666, "task_index": 4}, {"db_idx": 6667, "episode_idx": 24, "frame_idx": 42, "global_frame_idx": 6667, "task_index": 4}, {"db_idx": 6668, "episode_idx": 24, "frame_idx": 43, "global_frame_idx": 6668, "task_index": 4}, {"db_idx": 6669, "episode_idx": 24, "frame_idx": 44, "global_frame_idx": 6669, "task_index": 4}, {"db_idx": 6670, "episode_idx": 24, "frame_idx": 45, "global_frame_idx": 6670, "task_index": 4}, {"db_idx": 6671, "episode_idx": 24, "frame_idx": 46, "global_frame_idx": 6671, "task_index": 4}, {"db_idx": 6672, "episode_idx": 24, "frame_idx": 47, "global_frame_idx": 6672, "task_index": 4}, {"db_idx": 6673, "episode_idx": 24, "frame_idx": 48, "global_frame_idx": 6673, "task_index": 4}, {"db_idx": 6674, "episode_idx": 24, "frame_idx": 49, "global_frame_idx": 6674, "task_index": 4}, {"db_idx": 6675, "episode_idx": 24, "frame_idx": 50, "global_frame_idx": 6675, "task_index": 4}, {"db_idx": 6676, "episode_idx": 24, "frame_idx": 51, "global_frame_idx": 6676, "task_index": 4}, {"db_idx": 
6677, "episode_idx": 24, "frame_idx": 52, "global_frame_idx": 6677, "task_index": 4}, {"db_idx": 6678, "episode_idx": 24, "frame_idx": 53, "global_frame_idx": 6678, "task_index": 4}, {"db_idx": 6679, "episode_idx": 24, "frame_idx": 54, "global_frame_idx": 6679, "task_index": 4}, {"db_idx": 6680, "episode_idx": 24, "frame_idx": 55, "global_frame_idx": 6680, "task_index": 4}, {"db_idx": 6681, "episode_idx": 24, "frame_idx": 56, "global_frame_idx": 6681, "task_index": 4}, {"db_idx": 6682, "episode_idx": 24, "frame_idx": 57, "global_frame_idx": 6682, "task_index": 4}, {"db_idx": 6683, "episode_idx": 24, "frame_idx": 58, "global_frame_idx": 6683, "task_index": 4}, {"db_idx": 6684, "episode_idx": 24, "frame_idx": 59, "global_frame_idx": 6684, "task_index": 4}, {"db_idx": 6685, "episode_idx": 24, "frame_idx": 60, "global_frame_idx": 6685, "task_index": 4}, {"db_idx": 6686, "episode_idx": 24, "frame_idx": 61, "global_frame_idx": 6686, "task_index": 4}, {"db_idx": 6687, "episode_idx": 24, "frame_idx": 62, "global_frame_idx": 6687, "task_index": 4}, {"db_idx": 6688, "episode_idx": 24, "frame_idx": 63, "global_frame_idx": 6688, "task_index": 4}, {"db_idx": 6689, "episode_idx": 24, "frame_idx": 64, "global_frame_idx": 6689, "task_index": 4}, {"db_idx": 6690, "episode_idx": 24, "frame_idx": 65, "global_frame_idx": 6690, "task_index": 4}, {"db_idx": 6691, "episode_idx": 24, "frame_idx": 66, "global_frame_idx": 6691, "task_index": 4}, {"db_idx": 6692, "episode_idx": 24, "frame_idx": 67, "global_frame_idx": 6692, "task_index": 4}, {"db_idx": 6693, "episode_idx": 24, "frame_idx": 68, "global_frame_idx": 6693, "task_index": 4}, {"db_idx": 6694, "episode_idx": 24, "frame_idx": 69, "global_frame_idx": 6694, "task_index": 4}, {"db_idx": 6695, "episode_idx": 24, "frame_idx": 70, "global_frame_idx": 6695, "task_index": 4}, {"db_idx": 6696, "episode_idx": 24, "frame_idx": 71, "global_frame_idx": 6696, "task_index": 4}, {"db_idx": 6697, "episode_idx": 24, "frame_idx": 72, 
"global_frame_idx": 6697, "task_index": 4}, {"db_idx": 6698, "episode_idx": 24, "frame_idx": 73, "global_frame_idx": 6698, "task_index": 4}, {"db_idx": 6699, "episode_idx": 24, "frame_idx": 74, "global_frame_idx": 6699, "task_index": 4}, {"db_idx": 6700, "episode_idx": 24, "frame_idx": 75, "global_frame_idx": 6700, "task_index": 4}, {"db_idx": 6701, "episode_idx": 24, "frame_idx": 76, "global_frame_idx": 6701, "task_index": 4}, {"db_idx": 6702, "episode_idx": 24, "frame_idx": 77, "global_frame_idx": 6702, "task_index": 4}, {"db_idx": 6703, "episode_idx": 24, "frame_idx": 78, "global_frame_idx": 6703, "task_index": 4}, {"db_idx": 6704, "episode_idx": 24, "frame_idx": 79, "global_frame_idx": 6704, "task_index": 4}, {"db_idx": 6705, "episode_idx": 24, "frame_idx": 80, "global_frame_idx": 6705, "task_index": 4}, {"db_idx": 6706, "episode_idx": 24, "frame_idx": 81, "global_frame_idx": 6706, "task_index": 4}, {"db_idx": 6707, "episode_idx": 24, "frame_idx": 82, "global_frame_idx": 6707, "task_index": 4}, {"db_idx": 6708, "episode_idx": 24, "frame_idx": 83, "global_frame_idx": 6708, "task_index": 4}, {"db_idx": 6709, "episode_idx": 24, "frame_idx": 84, "global_frame_idx": 6709, "task_index": 4}, {"db_idx": 6710, "episode_idx": 24, "frame_idx": 85, "global_frame_idx": 6710, "task_index": 4}, {"db_idx": 6711, "episode_idx": 24, "frame_idx": 86, "global_frame_idx": 6711, "task_index": 4}, {"db_idx": 6712, "episode_idx": 24, "frame_idx": 87, "global_frame_idx": 6712, "task_index": 4}, {"db_idx": 6713, "episode_idx": 24, "frame_idx": 88, "global_frame_idx": 6713, "task_index": 4}, {"db_idx": 6714, "episode_idx": 24, "frame_idx": 89, "global_frame_idx": 6714, "task_index": 4}, {"db_idx": 6715, "episode_idx": 24, "frame_idx": 90, "global_frame_idx": 6715, "task_index": 4}, {"db_idx": 6716, "episode_idx": 24, "frame_idx": 91, "global_frame_idx": 6716, "task_index": 4}, {"db_idx": 6717, "episode_idx": 24, "frame_idx": 92, "global_frame_idx": 6717, "task_index": 4}, {"db_idx": 
6718, "episode_idx": 24, "frame_idx": 93, "global_frame_idx": 6718, "task_index": 4}, {"db_idx": 6719, "episode_idx": 24, "frame_idx": 94, "global_frame_idx": 6719, "task_index": 4}, {"db_idx": 6720, "episode_idx": 24, "frame_idx": 95, "global_frame_idx": 6720, "task_index": 4}, {"db_idx": 6721, "episode_idx": 24, "frame_idx": 96, "global_frame_idx": 6721, "task_index": 4}, {"db_idx": 6722, "episode_idx": 24, "frame_idx": 97, "global_frame_idx": 6722, "task_index": 4}, {"db_idx": 6723, "episode_idx": 24, "frame_idx": 98, "global_frame_idx": 6723, "task_index": 4}, {"db_idx": 6724, "episode_idx": 24, "frame_idx": 99, "global_frame_idx": 6724, "task_index": 4}, {"db_idx": 6725, "episode_idx": 24, "frame_idx": 100, "global_frame_idx": 6725, "task_index": 4}, {"db_idx": 6726, "episode_idx": 24, "frame_idx": 101, "global_frame_idx": 6726, "task_index": 4}, {"db_idx": 6727, "episode_idx": 24, "frame_idx": 102, "global_frame_idx": 6727, "task_index": 4}, {"db_idx": 6728, "episode_idx": 24, "frame_idx": 103, "global_frame_idx": 6728, "task_index": 4}, {"db_idx": 6729, "episode_idx": 24, "frame_idx": 104, "global_frame_idx": 6729, "task_index": 4}, {"db_idx": 6730, "episode_idx": 24, "frame_idx": 105, "global_frame_idx": 6730, "task_index": 4}, {"db_idx": 6731, "episode_idx": 24, "frame_idx": 106, "global_frame_idx": 6731, "task_index": 4}, {"db_idx": 6732, "episode_idx": 24, "frame_idx": 107, "global_frame_idx": 6732, "task_index": 4}, {"db_idx": 6733, "episode_idx": 24, "frame_idx": 108, "global_frame_idx": 6733, "task_index": 4}, {"db_idx": 6734, "episode_idx": 24, "frame_idx": 109, "global_frame_idx": 6734, "task_index": 4}, {"db_idx": 6735, "episode_idx": 24, "frame_idx": 110, "global_frame_idx": 6735, "task_index": 4}, {"db_idx": 6736, "episode_idx": 24, "frame_idx": 111, "global_frame_idx": 6736, "task_index": 4}, {"db_idx": 6737, "episode_idx": 24, "frame_idx": 112, "global_frame_idx": 6737, "task_index": 4}, {"db_idx": 6738, "episode_idx": 24, "frame_idx": 113, 
"global_frame_idx": 6738, "task_index": 4}, {"db_idx": 6739, "episode_idx": 24, "frame_idx": 114, "global_frame_idx": 6739, "task_index": 4}, {"db_idx": 6740, "episode_idx": 24, "frame_idx": 115, "global_frame_idx": 6740, "task_index": 4}, {"db_idx": 6741, "episode_idx": 24, "frame_idx": 116, "global_frame_idx": 6741, "task_index": 4}, {"db_idx": 6742, "episode_idx": 24, "frame_idx": 117, "global_frame_idx": 6742, "task_index": 4}, {"db_idx": 6743, "episode_idx": 24, "frame_idx": 118, "global_frame_idx": 6743, "task_index": 4}, {"db_idx": 6744, "episode_idx": 24, "frame_idx": 119, "global_frame_idx": 6744, "task_index": 4}, {"db_idx": 6745, "episode_idx": 24, "frame_idx": 120, "global_frame_idx": 6745, "task_index": 4}, {"db_idx": 6746, "episode_idx": 24, "frame_idx": 121, "global_frame_idx": 6746, "task_index": 4}, {"db_idx": 6747, "episode_idx": 24, "frame_idx": 122, "global_frame_idx": 6747, "task_index": 4}, {"db_idx": 6748, "episode_idx": 24, "frame_idx": 123, "global_frame_idx": 6748, "task_index": 4}, {"db_idx": 6749, "episode_idx": 24, "frame_idx": 124, "global_frame_idx": 6749, "task_index": 4}, {"db_idx": 6750, "episode_idx": 24, "frame_idx": 125, "global_frame_idx": 6750, "task_index": 4}, {"db_idx": 6751, "episode_idx": 24, "frame_idx": 126, "global_frame_idx": 6751, "task_index": 4}, {"db_idx": 6752, "episode_idx": 24, "frame_idx": 127, "global_frame_idx": 6752, "task_index": 4}, {"db_idx": 6753, "episode_idx": 24, "frame_idx": 128, "global_frame_idx": 6753, "task_index": 4}, {"db_idx": 6754, "episode_idx": 24, "frame_idx": 129, "global_frame_idx": 6754, "task_index": 4}, {"db_idx": 6755, "episode_idx": 24, "frame_idx": 130, "global_frame_idx": 6755, "task_index": 4}, {"db_idx": 6756, "episode_idx": 24, "frame_idx": 131, "global_frame_idx": 6756, "task_index": 4}, {"db_idx": 6757, "episode_idx": 24, "frame_idx": 132, "global_frame_idx": 6757, "task_index": 4}, {"db_idx": 6758, "episode_idx": 24, "frame_idx": 133, "global_frame_idx": 6758, "task_index": 
4}, {"db_idx": 6759, "episode_idx": 24, "frame_idx": 134, "global_frame_idx": 6759, "task_index": 4}, {"db_idx": 6760, "episode_idx": 24, "frame_idx": 135, "global_frame_idx": 6760, "task_index": 4}, {"db_idx": 6761, "episode_idx": 24, "frame_idx": 136, "global_frame_idx": 6761, "task_index": 4}, {"db_idx": 6762, "episode_idx": 24, "frame_idx": 137, "global_frame_idx": 6762, "task_index": 4}, {"db_idx": 6763, "episode_idx": 24, "frame_idx": 138, "global_frame_idx": 6763, "task_index": 4}, {"db_idx": 6764, "episode_idx": 24, "frame_idx": 139, "global_frame_idx": 6764, "task_index": 4}, {"db_idx": 6765, "episode_idx": 24, "frame_idx": 140, "global_frame_idx": 6765, "task_index": 4}, {"db_idx": 6766, "episode_idx": 24, "frame_idx": 141, "global_frame_idx": 6766, "task_index": 4}, {"db_idx": 6767, "episode_idx": 24, "frame_idx": 142, "global_frame_idx": 6767, "task_index": 4}, {"db_idx": 6768, "episode_idx": 24, "frame_idx": 143, "global_frame_idx": 6768, "task_index": 4}, {"db_idx": 6769, "episode_idx": 24, "frame_idx": 144, "global_frame_idx": 6769, "task_index": 4}, {"db_idx": 6770, "episode_idx": 24, "frame_idx": 145, "global_frame_idx": 6770, "task_index": 4}, {"db_idx": 6771, "episode_idx": 24, "frame_idx": 146, "global_frame_idx": 6771, "task_index": 4}, {"db_idx": 6772, "episode_idx": 24, "frame_idx": 147, "global_frame_idx": 6772, "task_index": 4}, {"db_idx": 6773, "episode_idx": 24, "frame_idx": 148, "global_frame_idx": 6773, "task_index": 4}, {"db_idx": 6774, "episode_idx": 24, "frame_idx": 149, "global_frame_idx": 6774, "task_index": 4}, {"db_idx": 6775, "episode_idx": 24, "frame_idx": 150, "global_frame_idx": 6775, "task_index": 4}, {"db_idx": 6776, "episode_idx": 24, "frame_idx": 151, "global_frame_idx": 6776, "task_index": 4}, {"db_idx": 6777, "episode_idx": 24, "frame_idx": 152, "global_frame_idx": 6777, "task_index": 4}, {"db_idx": 6778, "episode_idx": 24, "frame_idx": 153, "global_frame_idx": 6778, "task_index": 4}, {"db_idx": 6779, "episode_idx": 24, 
"frame_idx": 154, "global_frame_idx": 6779, "task_index": 4}, {"db_idx": 6780, "episode_idx": 24, "frame_idx": 155, "global_frame_idx": 6780, "task_index": 4}, {"db_idx": 6781, "episode_idx": 24, "frame_idx": 156, "global_frame_idx": 6781, "task_index": 4}, {"db_idx": 6782, "episode_idx": 24, "frame_idx": 157, "global_frame_idx": 6782, "task_index": 4}, {"db_idx": 6783, "episode_idx": 24, "frame_idx": 158, "global_frame_idx": 6783, "task_index": 4}, {"db_idx": 6784, "episode_idx": 24, "frame_idx": 159, "global_frame_idx": 6784, "task_index": 4}, {"db_idx": 6785, "episode_idx": 24, "frame_idx": 160, "global_frame_idx": 6785, "task_index": 4}, {"db_idx": 6786, "episode_idx": 24, "frame_idx": 161, "global_frame_idx": 6786, "task_index": 4}, {"db_idx": 6787, "episode_idx": 24, "frame_idx": 162, "global_frame_idx": 6787, "task_index": 4}, {"db_idx": 6788, "episode_idx": 24, "frame_idx": 163, "global_frame_idx": 6788, "task_index": 4}, {"db_idx": 6789, "episode_idx": 24, "frame_idx": 164, "global_frame_idx": 6789, "task_index": 4}, {"db_idx": 6790, "episode_idx": 24, "frame_idx": 165, "global_frame_idx": 6790, "task_index": 4}, {"db_idx": 6791, "episode_idx": 24, "frame_idx": 166, "global_frame_idx": 6791, "task_index": 4}, {"db_idx": 6792, "episode_idx": 24, "frame_idx": 167, "global_frame_idx": 6792, "task_index": 4}, {"db_idx": 6793, "episode_idx": 24, "frame_idx": 168, "global_frame_idx": 6793, "task_index": 4}, {"db_idx": 6794, "episode_idx": 24, "frame_idx": 169, "global_frame_idx": 6794, "task_index": 4}, {"db_idx": 6795, "episode_idx": 24, "frame_idx": 170, "global_frame_idx": 6795, "task_index": 4}, {"db_idx": 6796, "episode_idx": 24, "frame_idx": 171, "global_frame_idx": 6796, "task_index": 4}, {"db_idx": 6797, "episode_idx": 24, "frame_idx": 172, "global_frame_idx": 6797, "task_index": 4}, {"db_idx": 6798, "episode_idx": 24, "frame_idx": 173, "global_frame_idx": 6798, "task_index": 4}, {"db_idx": 6799, "episode_idx": 24, "frame_idx": 174, "global_frame_idx": 
6799, "task_index": 4}, {"db_idx": 6800, "episode_idx": 24, "frame_idx": 175, "global_frame_idx": 6800, "task_index": 4}, {"db_idx": 6801, "episode_idx": 24, "frame_idx": 176, "global_frame_idx": 6801, "task_index": 4}, {"db_idx": 6802, "episode_idx": 24, "frame_idx": 177, "global_frame_idx": 6802, "task_index": 4}, {"db_idx": 6803, "episode_idx": 24, "frame_idx": 178, "global_frame_idx": 6803, "task_index": 4}, {"db_idx": 6804, "episode_idx": 24, "frame_idx": 179, "global_frame_idx": 6804, "task_index": 4}, {"db_idx": 6805, "episode_idx": 24, "frame_idx": 180, "global_frame_idx": 6805, "task_index": 4}, {"db_idx": 6806, "episode_idx": 24, "frame_idx": 181, "global_frame_idx": 6806, "task_index": 4}, {"db_idx": 6807, "episode_idx": 24, "frame_idx": 182, "global_frame_idx": 6807, "task_index": 4}, {"db_idx": 6808, "episode_idx": 24, "frame_idx": 183, "global_frame_idx": 6808, "task_index": 4}, {"db_idx": 6809, "episode_idx": 24, "frame_idx": 184, "global_frame_idx": 6809, "task_index": 4}, {"db_idx": 6810, "episode_idx": 24, "frame_idx": 185, "global_frame_idx": 6810, "task_index": 4}, {"db_idx": 6811, "episode_idx": 24, "frame_idx": 186, "global_frame_idx": 6811, "task_index": 4}, {"db_idx": 6812, "episode_idx": 24, "frame_idx": 187, "global_frame_idx": 6812, "task_index": 4}, {"db_idx": 6813, "episode_idx": 24, "frame_idx": 188, "global_frame_idx": 6813, "task_index": 4}, {"db_idx": 6814, "episode_idx": 24, "frame_idx": 189, "global_frame_idx": 6814, "task_index": 4}, {"db_idx": 6815, "episode_idx": 24, "frame_idx": 190, "global_frame_idx": 6815, "task_index": 4}, {"db_idx": 6816, "episode_idx": 24, "frame_idx": 191, "global_frame_idx": 6816, "task_index": 4}, {"db_idx": 6817, "episode_idx": 24, "frame_idx": 192, "global_frame_idx": 6817, "task_index": 4}, {"db_idx": 6818, "episode_idx": 24, "frame_idx": 193, "global_frame_idx": 6818, "task_index": 4}, {"db_idx": 6819, "episode_idx": 24, "frame_idx": 194, "global_frame_idx": 6819, "task_index": 4}, {"db_idx": 
6820, "episode_idx": 24, "frame_idx": 195, "global_frame_idx": 6820, "task_index": 4}, {"db_idx": 6821, "episode_idx": 24, "frame_idx": 196, "global_frame_idx": 6821, "task_index": 4}, {"db_idx": 6822, "episode_idx": 24, "frame_idx": 197, "global_frame_idx": 6822, "task_index": 4}, {"db_idx": 6823, "episode_idx": 24, "frame_idx": 198, "global_frame_idx": 6823, "task_index": 4}, {"db_idx": 6824, "episode_idx": 24, "frame_idx": 199, "global_frame_idx": 6824, "task_index": 4}, {"db_idx": 6825, "episode_idx": 24, "frame_idx": 200, "global_frame_idx": 6825, "task_index": 4}, {"db_idx": 6826, "episode_idx": 24, "frame_idx": 201, "global_frame_idx": 6826, "task_index": 4}, {"db_idx": 6827, "episode_idx": 24, "frame_idx": 202, "global_frame_idx": 6827, "task_index": 4}, {"db_idx": 6828, "episode_idx": 24, "frame_idx": 203, "global_frame_idx": 6828, "task_index": 4}, {"db_idx": 6829, "episode_idx": 24, "frame_idx": 204, "global_frame_idx": 6829, "task_index": 4}, {"db_idx": 6830, "episode_idx": 24, "frame_idx": 205, "global_frame_idx": 6830, "task_index": 4}, {"db_idx": 6831, "episode_idx": 24, "frame_idx": 206, "global_frame_idx": 6831, "task_index": 4}, {"db_idx": 6832, "episode_idx": 24, "frame_idx": 207, "global_frame_idx": 6832, "task_index": 4}, {"db_idx": 6833, "episode_idx": 24, "frame_idx": 208, "global_frame_idx": 6833, "task_index": 4}, {"db_idx": 6834, "episode_idx": 24, "frame_idx": 209, "global_frame_idx": 6834, "task_index": 4}, {"db_idx": 6835, "episode_idx": 24, "frame_idx": 210, "global_frame_idx": 6835, "task_index": 4}, {"db_idx": 6836, "episode_idx": 24, "frame_idx": 211, "global_frame_idx": 6836, "task_index": 4}, {"db_idx": 6837, "episode_idx": 24, "frame_idx": 212, "global_frame_idx": 6837, "task_index": 4}, {"db_idx": 6838, "episode_idx": 24, "frame_idx": 213, "global_frame_idx": 6838, "task_index": 4}, {"db_idx": 6839, "episode_idx": 24, "frame_idx": 214, "global_frame_idx": 6839, "task_index": 4}, {"db_idx": 6840, "episode_idx": 24, "frame_idx": 
215, "global_frame_idx": 6840, "task_index": 4}, {"db_idx": 6841, "episode_idx": 24, "frame_idx": 216, "global_frame_idx": 6841, "task_index": 4}, {"db_idx": 6842, "episode_idx": 24, "frame_idx": 217, "global_frame_idx": 6842, "task_index": 4}, {"db_idx": 6843, "episode_idx": 24, "frame_idx": 218, "global_frame_idx": 6843, "task_index": 4}, {"db_idx": 6844, "episode_idx": 24, "frame_idx": 219, "global_frame_idx": 6844, "task_index": 4}, {"db_idx": 6845, "episode_idx": 24, "frame_idx": 220, "global_frame_idx": 6845, "task_index": 4}, {"db_idx": 6846, "episode_idx": 24, "frame_idx": 221, "global_frame_idx": 6846, "task_index": 4}, {"db_idx": 6847, "episode_idx": 24, "frame_idx": 222, "global_frame_idx": 6847, "task_index": 4}, {"db_idx": 6848, "episode_idx": 24, "frame_idx": 223, "global_frame_idx": 6848, "task_index": 4}, {"db_idx": 6849, "episode_idx": 24, "frame_idx": 224, "global_frame_idx": 6849, "task_index": 4}, {"db_idx": 6850, "episode_idx": 24, "frame_idx": 225, "global_frame_idx": 6850, "task_index": 4}, {"db_idx": 6851, "episode_idx": 24, "frame_idx": 226, "global_frame_idx": 6851, "task_index": 4}, {"db_idx": 6852, "episode_idx": 24, "frame_idx": 227, "global_frame_idx": 6852, "task_index": 4}, {"db_idx": 6853, "episode_idx": 24, "frame_idx": 228, "global_frame_idx": 6853, "task_index": 4}, {"db_idx": 6854, "episode_idx": 24, "frame_idx": 229, "global_frame_idx": 6854, "task_index": 4}, {"db_idx": 6855, "episode_idx": 24, "frame_idx": 230, "global_frame_idx": 6855, "task_index": 4}, {"db_idx": 6856, "episode_idx": 24, "frame_idx": 231, "global_frame_idx": 6856, "task_index": 4}, {"db_idx": 6857, "episode_idx": 24, "frame_idx": 232, "global_frame_idx": 6857, "task_index": 4}, {"db_idx": 6858, "episode_idx": 24, "frame_idx": 233, "global_frame_idx": 6858, "task_index": 4}, {"db_idx": 6859, "episode_idx": 24, "frame_idx": 234, "global_frame_idx": 6859, "task_index": 4}, {"db_idx": 6860, "episode_idx": 24, "frame_idx": 235, "global_frame_idx": 6860, 
"task_index": 4}, {"db_idx": 6861, "episode_idx": 24, "frame_idx": 236, "global_frame_idx": 6861, "task_index": 4}, {"db_idx": 6862, "episode_idx": 25, "frame_idx": 0, "global_frame_idx": 6862, "task_index": 5}, {"db_idx": 6863, "episode_idx": 25, "frame_idx": 1, "global_frame_idx": 6863, "task_index": 5}, {"db_idx": 6864, "episode_idx": 25, "frame_idx": 2, "global_frame_idx": 6864, "task_index": 5}, {"db_idx": 6865, "episode_idx": 25, "frame_idx": 3, "global_frame_idx": 6865, "task_index": 5}, {"db_idx": 6866, "episode_idx": 25, "frame_idx": 4, "global_frame_idx": 6866, "task_index": 5}, {"db_idx": 6867, "episode_idx": 25, "frame_idx": 5, "global_frame_idx": 6867, "task_index": 5}, {"db_idx": 6868, "episode_idx": 25, "frame_idx": 6, "global_frame_idx": 6868, "task_index": 5}, {"db_idx": 6869, "episode_idx": 25, "frame_idx": 7, "global_frame_idx": 6869, "task_index": 5}, {"db_idx": 6870, "episode_idx": 25, "frame_idx": 8, "global_frame_idx": 6870, "task_index": 5}, {"db_idx": 6871, "episode_idx": 25, "frame_idx": 9, "global_frame_idx": 6871, "task_index": 5}, {"db_idx": 6872, "episode_idx": 25, "frame_idx": 10, "global_frame_idx": 6872, "task_index": 5}, {"db_idx": 6873, "episode_idx": 25, "frame_idx": 11, "global_frame_idx": 6873, "task_index": 5}, {"db_idx": 6874, "episode_idx": 25, "frame_idx": 12, "global_frame_idx": 6874, "task_index": 5}, {"db_idx": 6875, "episode_idx": 25, "frame_idx": 13, "global_frame_idx": 6875, "task_index": 5}, {"db_idx": 6876, "episode_idx": 25, "frame_idx": 14, "global_frame_idx": 6876, "task_index": 5}, {"db_idx": 6877, "episode_idx": 25, "frame_idx": 15, "global_frame_idx": 6877, "task_index": 5}, {"db_idx": 6878, "episode_idx": 25, "frame_idx": 16, "global_frame_idx": 6878, "task_index": 5}, {"db_idx": 6879, "episode_idx": 25, "frame_idx": 17, "global_frame_idx": 6879, "task_index": 5}, {"db_idx": 6880, "episode_idx": 25, "frame_idx": 18, "global_frame_idx": 6880, "task_index": 5}, {"db_idx": 6881, "episode_idx": 25, "frame_idx": 
19, "global_frame_idx": 6881, "task_index": 5}, {"db_idx": 6882, "episode_idx": 25, "frame_idx": 20, "global_frame_idx": 6882, "task_index": 5}, {"db_idx": 6883, "episode_idx": 25, "frame_idx": 21, "global_frame_idx": 6883, "task_index": 5}, {"db_idx": 6884, "episode_idx": 25, "frame_idx": 22, "global_frame_idx": 6884, "task_index": 5}, {"db_idx": 6885, "episode_idx": 25, "frame_idx": 23, "global_frame_idx": 6885, "task_index": 5}, {"db_idx": 6886, "episode_idx": 25, "frame_idx": 24, "global_frame_idx": 6886, "task_index": 5}, {"db_idx": 6887, "episode_idx": 25, "frame_idx": 25, "global_frame_idx": 6887, "task_index": 5}, {"db_idx": 6888, "episode_idx": 25, "frame_idx": 26, "global_frame_idx": 6888, "task_index": 5}, {"db_idx": 6889, "episode_idx": 25, "frame_idx": 27, "global_frame_idx": 6889, "task_index": 5}, {"db_idx": 6890, "episode_idx": 25, "frame_idx": 28, "global_frame_idx": 6890, "task_index": 5}, {"db_idx": 6891, "episode_idx": 25, "frame_idx": 29, "global_frame_idx": 6891, "task_index": 5}, {"db_idx": 6892, "episode_idx": 25, "frame_idx": 30, "global_frame_idx": 6892, "task_index": 5}, {"db_idx": 6893, "episode_idx": 25, "frame_idx": 31, "global_frame_idx": 6893, "task_index": 5}, {"db_idx": 6894, "episode_idx": 25, "frame_idx": 32, "global_frame_idx": 6894, "task_index": 5}, {"db_idx": 6895, "episode_idx": 25, "frame_idx": 33, "global_frame_idx": 6895, "task_index": 5}, {"db_idx": 6896, "episode_idx": 25, "frame_idx": 34, "global_frame_idx": 6896, "task_index": 5}, {"db_idx": 6897, "episode_idx": 25, "frame_idx": 35, "global_frame_idx": 6897, "task_index": 5}, {"db_idx": 6898, "episode_idx": 25, "frame_idx": 36, "global_frame_idx": 6898, "task_index": 5}, {"db_idx": 6899, "episode_idx": 25, "frame_idx": 37, "global_frame_idx": 6899, "task_index": 5}, {"db_idx": 6900, "episode_idx": 25, "frame_idx": 38, "global_frame_idx": 6900, "task_index": 5}, {"db_idx": 6901, "episode_idx": 25, "frame_idx": 39, "global_frame_idx": 6901, "task_index": 5}, {"db_idx": 
6902, "episode_idx": 25, "frame_idx": 40, "global_frame_idx": 6902, "task_index": 5}, {"db_idx": 6903, "episode_idx": 25, "frame_idx": 41, "global_frame_idx": 6903, "task_index": 5}, {"db_idx": 6904, "episode_idx": 25, "frame_idx": 42, "global_frame_idx": 6904, "task_index": 5}, {"db_idx": 6905, "episode_idx": 25, "frame_idx": 43, "global_frame_idx": 6905, "task_index": 5}, {"db_idx": 6906, "episode_idx": 25, "frame_idx": 44, "global_frame_idx": 6906, "task_index": 5}, {"db_idx": 6907, "episode_idx": 25, "frame_idx": 45, "global_frame_idx": 6907, "task_index": 5}, {"db_idx": 6908, "episode_idx": 25, "frame_idx": 46, "global_frame_idx": 6908, "task_index": 5}, {"db_idx": 6909, "episode_idx": 25, "frame_idx": 47, "global_frame_idx": 6909, "task_index": 5}, {"db_idx": 6910, "episode_idx": 25, "frame_idx": 48, "global_frame_idx": 6910, "task_index": 5}, {"db_idx": 6911, "episode_idx": 25, "frame_idx": 49, "global_frame_idx": 6911, "task_index": 5}, {"db_idx": 6912, "episode_idx": 25, "frame_idx": 50, "global_frame_idx": 6912, "task_index": 5}, {"db_idx": 6913, "episode_idx": 25, "frame_idx": 51, "global_frame_idx": 6913, "task_index": 5}, {"db_idx": 6914, "episode_idx": 25, "frame_idx": 52, "global_frame_idx": 6914, "task_index": 5}, {"db_idx": 6915, "episode_idx": 25, "frame_idx": 53, "global_frame_idx": 6915, "task_index": 5}, {"db_idx": 6916, "episode_idx": 25, "frame_idx": 54, "global_frame_idx": 6916, "task_index": 5}, {"db_idx": 6917, "episode_idx": 25, "frame_idx": 55, "global_frame_idx": 6917, "task_index": 5}, {"db_idx": 6918, "episode_idx": 25, "frame_idx": 56, "global_frame_idx": 6918, "task_index": 5}, {"db_idx": 6919, "episode_idx": 25, "frame_idx": 57, "global_frame_idx": 6919, "task_index": 5}, {"db_idx": 6920, "episode_idx": 25, "frame_idx": 58, "global_frame_idx": 6920, "task_index": 5}, {"db_idx": 6921, "episode_idx": 25, "frame_idx": 59, "global_frame_idx": 6921, "task_index": 5}, {"db_idx": 6922, "episode_idx": 25, "frame_idx": 60, 
"global_frame_idx": 6922, "task_index": 5}, {"db_idx": 6923, "episode_idx": 25, "frame_idx": 61, "global_frame_idx": 6923, "task_index": 5}, {"db_idx": 6924, "episode_idx": 25, "frame_idx": 62, "global_frame_idx": 6924, "task_index": 5}, {"db_idx": 6925, "episode_idx": 25, "frame_idx": 63, "global_frame_idx": 6925, "task_index": 5}, {"db_idx": 6926, "episode_idx": 25, "frame_idx": 64, "global_frame_idx": 6926, "task_index": 5}, {"db_idx": 6927, "episode_idx": 25, "frame_idx": 65, "global_frame_idx": 6927, "task_index": 5}, {"db_idx": 6928, "episode_idx": 25, "frame_idx": 66, "global_frame_idx": 6928, "task_index": 5}, {"db_idx": 6929, "episode_idx": 25, "frame_idx": 67, "global_frame_idx": 6929, "task_index": 5}, {"db_idx": 6930, "episode_idx": 25, "frame_idx": 68, "global_frame_idx": 6930, "task_index": 5}, {"db_idx": 6931, "episode_idx": 25, "frame_idx": 69, "global_frame_idx": 6931, "task_index": 5}, {"db_idx": 6932, "episode_idx": 25, "frame_idx": 70, "global_frame_idx": 6932, "task_index": 5}, {"db_idx": 6933, "episode_idx": 25, "frame_idx": 71, "global_frame_idx": 6933, "task_index": 5}, {"db_idx": 6934, "episode_idx": 25, "frame_idx": 72, "global_frame_idx": 6934, "task_index": 5}, {"db_idx": 6935, "episode_idx": 25, "frame_idx": 73, "global_frame_idx": 6935, "task_index": 5}, {"db_idx": 6936, "episode_idx": 25, "frame_idx": 74, "global_frame_idx": 6936, "task_index": 5}, {"db_idx": 6937, "episode_idx": 25, "frame_idx": 75, "global_frame_idx": 6937, "task_index": 5}, {"db_idx": 6938, "episode_idx": 25, "frame_idx": 76, "global_frame_idx": 6938, "task_index": 5}, {"db_idx": 6939, "episode_idx": 25, "frame_idx": 77, "global_frame_idx": 6939, "task_index": 5}, {"db_idx": 6940, "episode_idx": 25, "frame_idx": 78, "global_frame_idx": 6940, "task_index": 5}, {"db_idx": 6941, "episode_idx": 25, "frame_idx": 79, "global_frame_idx": 6941, "task_index": 5}, {"db_idx": 6942, "episode_idx": 25, "frame_idx": 80, "global_frame_idx": 6942, "task_index": 5}, {"db_idx": 
6943, "episode_idx": 25, "frame_idx": 81, "global_frame_idx": 6943, "task_index": 5}, {"db_idx": 6944, "episode_idx": 25, "frame_idx": 82, "global_frame_idx": 6944, "task_index": 5}, {"db_idx": 6945, "episode_idx": 25, "frame_idx": 83, "global_frame_idx": 6945, "task_index": 5}, {"db_idx": 6946, "episode_idx": 25, "frame_idx": 84, "global_frame_idx": 6946, "task_index": 5}, {"db_idx": 6947, "episode_idx": 25, "frame_idx": 85, "global_frame_idx": 6947, "task_index": 5}, {"db_idx": 6948, "episode_idx": 25, "frame_idx": 86, "global_frame_idx": 6948, "task_index": 5}, {"db_idx": 6949, "episode_idx": 25, "frame_idx": 87, "global_frame_idx": 6949, "task_index": 5}, {"db_idx": 6950, "episode_idx": 25, "frame_idx": 88, "global_frame_idx": 6950, "task_index": 5}, {"db_idx": 6951, "episode_idx": 25, "frame_idx": 89, "global_frame_idx": 6951, "task_index": 5}, {"db_idx": 6952, "episode_idx": 25, "frame_idx": 90, "global_frame_idx": 6952, "task_index": 5}, {"db_idx": 6953, "episode_idx": 25, "frame_idx": 91, "global_frame_idx": 6953, "task_index": 5}, {"db_idx": 6954, "episode_idx": 25, "frame_idx": 92, "global_frame_idx": 6954, "task_index": 5}, {"db_idx": 6955, "episode_idx": 25, "frame_idx": 93, "global_frame_idx": 6955, "task_index": 5}, {"db_idx": 6956, "episode_idx": 25, "frame_idx": 94, "global_frame_idx": 6956, "task_index": 5}, {"db_idx": 6957, "episode_idx": 25, "frame_idx": 95, "global_frame_idx": 6957, "task_index": 5}, {"db_idx": 6958, "episode_idx": 25, "frame_idx": 96, "global_frame_idx": 6958, "task_index": 5}, {"db_idx": 6959, "episode_idx": 25, "frame_idx": 97, "global_frame_idx": 6959, "task_index": 5}, {"db_idx": 6960, "episode_idx": 25, "frame_idx": 98, "global_frame_idx": 6960, "task_index": 5}, {"db_idx": 6961, "episode_idx": 25, "frame_idx": 99, "global_frame_idx": 6961, "task_index": 5}, {"db_idx": 6962, "episode_idx": 25, "frame_idx": 100, "global_frame_idx": 6962, "task_index": 5}, {"db_idx": 6963, "episode_idx": 25, "frame_idx": 101, 
"global_frame_idx": 6963, "task_index": 5}, {"db_idx": 6964, "episode_idx": 25, "frame_idx": 102, "global_frame_idx": 6964, "task_index": 5}, {"db_idx": 6965, "episode_idx": 25, "frame_idx": 103, "global_frame_idx": 6965, "task_index": 5}, {"db_idx": 6966, "episode_idx": 25, "frame_idx": 104, "global_frame_idx": 6966, "task_index": 5}, {"db_idx": 6967, "episode_idx": 25, "frame_idx": 105, "global_frame_idx": 6967, "task_index": 5}, {"db_idx": 6968, "episode_idx": 25, "frame_idx": 106, "global_frame_idx": 6968, "task_index": 5}, {"db_idx": 6969, "episode_idx": 25, "frame_idx": 107, "global_frame_idx": 6969, "task_index": 5}, {"db_idx": 6970, "episode_idx": 25, "frame_idx": 108, "global_frame_idx": 6970, "task_index": 5}, {"db_idx": 6971, "episode_idx": 25, "frame_idx": 109, "global_frame_idx": 6971, "task_index": 5}, {"db_idx": 6972, "episode_idx": 25, "frame_idx": 110, "global_frame_idx": 6972, "task_index": 5}, {"db_idx": 6973, "episode_idx": 25, "frame_idx": 111, "global_frame_idx": 6973, "task_index": 5}, {"db_idx": 6974, "episode_idx": 25, "frame_idx": 112, "global_frame_idx": 6974, "task_index": 5}, {"db_idx": 6975, "episode_idx": 25, "frame_idx": 113, "global_frame_idx": 6975, "task_index": 5}, {"db_idx": 6976, "episode_idx": 25, "frame_idx": 114, "global_frame_idx": 6976, "task_index": 5}, {"db_idx": 6977, "episode_idx": 25, "frame_idx": 115, "global_frame_idx": 6977, "task_index": 5}, {"db_idx": 6978, "episode_idx": 25, "frame_idx": 116, "global_frame_idx": 6978, "task_index": 5}, {"db_idx": 6979, "episode_idx": 25, "frame_idx": 117, "global_frame_idx": 6979, "task_index": 5}, {"db_idx": 6980, "episode_idx": 25, "frame_idx": 118, "global_frame_idx": 6980, "task_index": 5}, {"db_idx": 6981, "episode_idx": 25, "frame_idx": 119, "global_frame_idx": 6981, "task_index": 5}, {"db_idx": 6982, "episode_idx": 25, "frame_idx": 120, "global_frame_idx": 6982, "task_index": 5}, {"db_idx": 6983, "episode_idx": 25, "frame_idx": 121, "global_frame_idx": 6983, "task_index": 
5}, {"db_idx": 6984, "episode_idx": 25, "frame_idx": 122, "global_frame_idx": 6984, "task_index": 5}, {"db_idx": 6985, "episode_idx": 25, "frame_idx": 123, "global_frame_idx": 6985, "task_index": 5}, {"db_idx": 6986, "episode_idx": 25, "frame_idx": 124, "global_frame_idx": 6986, "task_index": 5}, {"db_idx": 6987, "episode_idx": 25, "frame_idx": 125, "global_frame_idx": 6987, "task_index": 5}, {"db_idx": 6988, "episode_idx": 25, "frame_idx": 126, "global_frame_idx": 6988, "task_index": 5}, {"db_idx": 6989, "episode_idx": 25, "frame_idx": 127, "global_frame_idx": 6989, "task_index": 5}, {"db_idx": 6990, "episode_idx": 25, "frame_idx": 128, "global_frame_idx": 6990, "task_index": 5}, {"db_idx": 6991, "episode_idx": 25, "frame_idx": 129, "global_frame_idx": 6991, "task_index": 5}, {"db_idx": 6992, "episode_idx": 25, "frame_idx": 130, "global_frame_idx": 6992, "task_index": 5}, {"db_idx": 6993, "episode_idx": 25, "frame_idx": 131, "global_frame_idx": 6993, "task_index": 5}, {"db_idx": 6994, "episode_idx": 25, "frame_idx": 132, "global_frame_idx": 6994, "task_index": 5}, {"db_idx": 6995, "episode_idx": 25, "frame_idx": 133, "global_frame_idx": 6995, "task_index": 5}, {"db_idx": 6996, "episode_idx": 25, "frame_idx": 134, "global_frame_idx": 6996, "task_index": 5}, {"db_idx": 6997, "episode_idx": 25, "frame_idx": 135, "global_frame_idx": 6997, "task_index": 5}, {"db_idx": 6998, "episode_idx": 25, "frame_idx": 136, "global_frame_idx": 6998, "task_index": 5}, {"db_idx": 6999, "episode_idx": 25, "frame_idx": 137, "global_frame_idx": 6999, "task_index": 5}, {"db_idx": 7000, "episode_idx": 25, "frame_idx": 138, "global_frame_idx": 7000, "task_index": 5}, {"db_idx": 7001, "episode_idx": 25, "frame_idx": 139, "global_frame_idx": 7001, "task_index": 5}, {"db_idx": 7002, "episode_idx": 25, "frame_idx": 140, "global_frame_idx": 7002, "task_index": 5}, {"db_idx": 7003, "episode_idx": 25, "frame_idx": 141, "global_frame_idx": 7003, "task_index": 5}, {"db_idx": 7004, "episode_idx": 25, 
"frame_idx": 142, "global_frame_idx": 7004, "task_index": 5}, {"db_idx": 7005, "episode_idx": 25, "frame_idx": 143, "global_frame_idx": 7005, "task_index": 5}, {"db_idx": 7006, "episode_idx": 25, "frame_idx": 144, "global_frame_idx": 7006, "task_index": 5}, {"db_idx": 7007, "episode_idx": 25, "frame_idx": 145, "global_frame_idx": 7007, "task_index": 5}, {"db_idx": 7008, "episode_idx": 25, "frame_idx": 146, "global_frame_idx": 7008, "task_index": 5}, {"db_idx": 7009, "episode_idx": 25, "frame_idx": 147, "global_frame_idx": 7009, "task_index": 5}, {"db_idx": 7010, "episode_idx": 25, "frame_idx": 148, "global_frame_idx": 7010, "task_index": 5}, {"db_idx": 7011, "episode_idx": 25, "frame_idx": 149, "global_frame_idx": 7011, "task_index": 5}, {"db_idx": 7012, "episode_idx": 25, "frame_idx": 150, "global_frame_idx": 7012, "task_index": 5}, {"db_idx": 7013, "episode_idx": 25, "frame_idx": 151, "global_frame_idx": 7013, "task_index": 5}, {"db_idx": 7014, "episode_idx": 25, "frame_idx": 152, "global_frame_idx": 7014, "task_index": 5}, {"db_idx": 7015, "episode_idx": 25, "frame_idx": 153, "global_frame_idx": 7015, "task_index": 5}, {"db_idx": 7016, "episode_idx": 25, "frame_idx": 154, "global_frame_idx": 7016, "task_index": 5}, {"db_idx": 7017, "episode_idx": 25, "frame_idx": 155, "global_frame_idx": 7017, "task_index": 5}, {"db_idx": 7018, "episode_idx": 25, "frame_idx": 156, "global_frame_idx": 7018, "task_index": 5}, {"db_idx": 7019, "episode_idx": 25, "frame_idx": 157, "global_frame_idx": 7019, "task_index": 5}, {"db_idx": 7020, "episode_idx": 25, "frame_idx": 158, "global_frame_idx": 7020, "task_index": 5}, {"db_idx": 7021, "episode_idx": 25, "frame_idx": 159, "global_frame_idx": 7021, "task_index": 5}, {"db_idx": 7022, "episode_idx": 25, "frame_idx": 160, "global_frame_idx": 7022, "task_index": 5}, {"db_idx": 7023, "episode_idx": 25, "frame_idx": 161, "global_frame_idx": 7023, "task_index": 5}, {"db_idx": 7024, "episode_idx": 25, "frame_idx": 162, "global_frame_idx": 
7024, "task_index": 5}, {"db_idx": 7025, "episode_idx": 25, "frame_idx": 163, "global_frame_idx": 7025, "task_index": 5}, {"db_idx": 7026, "episode_idx": 25, "frame_idx": 164, "global_frame_idx": 7026, "task_index": 5}, {"db_idx": 7027, "episode_idx": 25, "frame_idx": 165, "global_frame_idx": 7027, "task_index": 5}, {"db_idx": 7028, "episode_idx": 25, "frame_idx": 166, "global_frame_idx": 7028, "task_index": 5}, {"db_idx": 7029, "episode_idx": 25, "frame_idx": 167, "global_frame_idx": 7029, "task_index": 5}, {"db_idx": 7030, "episode_idx": 25, "frame_idx": 168, "global_frame_idx": 7030, "task_index": 5}, {"db_idx": 7031, "episode_idx": 25, "frame_idx": 169, "global_frame_idx": 7031, "task_index": 5}, {"db_idx": 7032, "episode_idx": 25, "frame_idx": 170, "global_frame_idx": 7032, "task_index": 5}, {"db_idx": 7033, "episode_idx": 25, "frame_idx": 171, "global_frame_idx": 7033, "task_index": 5}, {"db_idx": 7034, "episode_idx": 25, "frame_idx": 172, "global_frame_idx": 7034, "task_index": 5}, {"db_idx": 7035, "episode_idx": 25, "frame_idx": 173, "global_frame_idx": 7035, "task_index": 5}, {"db_idx": 7036, "episode_idx": 25, "frame_idx": 174, "global_frame_idx": 7036, "task_index": 5}, {"db_idx": 7037, "episode_idx": 25, "frame_idx": 175, "global_frame_idx": 7037, "task_index": 5}, {"db_idx": 7038, "episode_idx": 25, "frame_idx": 176, "global_frame_idx": 7038, "task_index": 5}, {"db_idx": 7039, "episode_idx": 25, "frame_idx": 177, "global_frame_idx": 7039, "task_index": 5}, {"db_idx": 7040, "episode_idx": 25, "frame_idx": 178, "global_frame_idx": 7040, "task_index": 5}, {"db_idx": 7041, "episode_idx": 25, "frame_idx": 179, "global_frame_idx": 7041, "task_index": 5}, {"db_idx": 7042, "episode_idx": 25, "frame_idx": 180, "global_frame_idx": 7042, "task_index": 5}, {"db_idx": 7043, "episode_idx": 25, "frame_idx": 181, "global_frame_idx": 7043, "task_index": 5}, {"db_idx": 7044, "episode_idx": 25, "frame_idx": 182, "global_frame_idx": 7044, "task_index": 5}, {"db_idx": 
7045, "episode_idx": 25, "frame_idx": 183, "global_frame_idx": 7045, "task_index": 5}, {"db_idx": 7046, "episode_idx": 25, "frame_idx": 184, "global_frame_idx": 7046, "task_index": 5}, {"db_idx": 7047, "episode_idx": 25, "frame_idx": 185, "global_frame_idx": 7047, "task_index": 5}, {"db_idx": 7048, "episode_idx": 25, "frame_idx": 186, "global_frame_idx": 7048, "task_index": 5}, {"db_idx": 7049, "episode_idx": 25, "frame_idx": 187, "global_frame_idx": 7049, "task_index": 5}, {"db_idx": 7050, "episode_idx": 25, "frame_idx": 188, "global_frame_idx": 7050, "task_index": 5}, {"db_idx": 7051, "episode_idx": 25, "frame_idx": 189, "global_frame_idx": 7051, "task_index": 5}, {"db_idx": 7052, "episode_idx": 25, "frame_idx": 190, "global_frame_idx": 7052, "task_index": 5}, {"db_idx": 7053, "episode_idx": 25, "frame_idx": 191, "global_frame_idx": 7053, "task_index": 5}, {"db_idx": 7054, "episode_idx": 25, "frame_idx": 192, "global_frame_idx": 7054, "task_index": 5}, {"db_idx": 7055, "episode_idx": 25, "frame_idx": 193, "global_frame_idx": 7055, "task_index": 5}, {"db_idx": 7056, "episode_idx": 25, "frame_idx": 194, "global_frame_idx": 7056, "task_index": 5}, {"db_idx": 7057, "episode_idx": 25, "frame_idx": 195, "global_frame_idx": 7057, "task_index": 5}, {"db_idx": 7058, "episode_idx": 25, "frame_idx": 196, "global_frame_idx": 7058, "task_index": 5}, {"db_idx": 7059, "episode_idx": 25, "frame_idx": 197, "global_frame_idx": 7059, "task_index": 5}, {"db_idx": 7060, "episode_idx": 25, "frame_idx": 198, "global_frame_idx": 7060, "task_index": 5}, {"db_idx": 7061, "episode_idx": 25, "frame_idx": 199, "global_frame_idx": 7061, "task_index": 5}, {"db_idx": 7062, "episode_idx": 25, "frame_idx": 200, "global_frame_idx": 7062, "task_index": 5}, {"db_idx": 7063, "episode_idx": 25, "frame_idx": 201, "global_frame_idx": 7063, "task_index": 5}, {"db_idx": 7064, "episode_idx": 25, "frame_idx": 202, "global_frame_idx": 7064, "task_index": 5}, {"db_idx": 7065, "episode_idx": 25, "frame_idx": 
203, "global_frame_idx": 7065, "task_index": 5}, {"db_idx": 7066, "episode_idx": 25, "frame_idx": 204, "global_frame_idx": 7066, "task_index": 5}, {"db_idx": 7067, "episode_idx": 25, "frame_idx": 205, "global_frame_idx": 7067, "task_index": 5}, {"db_idx": 7068, "episode_idx": 25, "frame_idx": 206, "global_frame_idx": 7068, "task_index": 5}, {"db_idx": 7069, "episode_idx": 25, "frame_idx": 207, "global_frame_idx": 7069, "task_index": 5}, {"db_idx": 7070, "episode_idx": 25, "frame_idx": 208, "global_frame_idx": 7070, "task_index": 5}, {"db_idx": 7071, "episode_idx": 25, "frame_idx": 209, "global_frame_idx": 7071, "task_index": 5}, {"db_idx": 7072, "episode_idx": 25, "frame_idx": 210, "global_frame_idx": 7072, "task_index": 5}, {"db_idx": 7073, "episode_idx": 25, "frame_idx": 211, "global_frame_idx": 7073, "task_index": 5}, {"db_idx": 7074, "episode_idx": 25, "frame_idx": 212, "global_frame_idx": 7074, "task_index": 5}, {"db_idx": 7075, "episode_idx": 25, "frame_idx": 213, "global_frame_idx": 7075, "task_index": 5}, {"db_idx": 7076, "episode_idx": 25, "frame_idx": 214, "global_frame_idx": 7076, "task_index": 5}, {"db_idx": 7077, "episode_idx": 25, "frame_idx": 215, "global_frame_idx": 7077, "task_index": 5}, {"db_idx": 7078, "episode_idx": 25, "frame_idx": 216, "global_frame_idx": 7078, "task_index": 5}, {"db_idx": 7079, "episode_idx": 25, "frame_idx": 217, "global_frame_idx": 7079, "task_index": 5}, {"db_idx": 7080, "episode_idx": 25, "frame_idx": 218, "global_frame_idx": 7080, "task_index": 5}, {"db_idx": 7081, "episode_idx": 25, "frame_idx": 219, "global_frame_idx": 7081, "task_index": 5}, {"db_idx": 7082, "episode_idx": 25, "frame_idx": 220, "global_frame_idx": 7082, "task_index": 5}, {"db_idx": 7083, "episode_idx": 25, "frame_idx": 221, "global_frame_idx": 7083, "task_index": 5}, {"db_idx": 7084, "episode_idx": 25, "frame_idx": 222, "global_frame_idx": 7084, "task_index": 5}, {"db_idx": 7085, "episode_idx": 25, "frame_idx": 223, "global_frame_idx": 7085, 
"task_index": 5}, {"db_idx": 7086, "episode_idx": 25, "frame_idx": 224, "global_frame_idx": 7086, "task_index": 5}, {"db_idx": 7087, "episode_idx": 25, "frame_idx": 225, "global_frame_idx": 7087, "task_index": 5}, {"db_idx": 7088, "episode_idx": 25, "frame_idx": 226, "global_frame_idx": 7088, "task_index": 5}, {"db_idx": 7089, "episode_idx": 25, "frame_idx": 227, "global_frame_idx": 7089, "task_index": 5}, {"db_idx": 7090, "episode_idx": 25, "frame_idx": 228, "global_frame_idx": 7090, "task_index": 5}, {"db_idx": 7091, "episode_idx": 25, "frame_idx": 229, "global_frame_idx": 7091, "task_index": 5}, {"db_idx": 7092, "episode_idx": 25, "frame_idx": 230, "global_frame_idx": 7092, "task_index": 5}, {"db_idx": 7093, "episode_idx": 25, "frame_idx": 231, "global_frame_idx": 7093, "task_index": 5}, {"db_idx": 7094, "episode_idx": 25, "frame_idx": 232, "global_frame_idx": 7094, "task_index": 5}, {"db_idx": 7095, "episode_idx": 25, "frame_idx": 233, "global_frame_idx": 7095, "task_index": 5}, {"db_idx": 7096, "episode_idx": 25, "frame_idx": 234, "global_frame_idx": 7096, "task_index": 5}, {"db_idx": 7097, "episode_idx": 25, "frame_idx": 235, "global_frame_idx": 7097, "task_index": 5}, {"db_idx": 7098, "episode_idx": 25, "frame_idx": 236, "global_frame_idx": 7098, "task_index": 5}, {"db_idx": 7099, "episode_idx": 25, "frame_idx": 237, "global_frame_idx": 7099, "task_index": 5}, {"db_idx": 7100, "episode_idx": 25, "frame_idx": 238, "global_frame_idx": 7100, "task_index": 5}, {"db_idx": 7101, "episode_idx": 25, "frame_idx": 239, "global_frame_idx": 7101, "task_index": 5}, {"db_idx": 7102, "episode_idx": 25, "frame_idx": 240, "global_frame_idx": 7102, "task_index": 5}, {"db_idx": 7103, "episode_idx": 25, "frame_idx": 241, "global_frame_idx": 7103, "task_index": 5}, {"db_idx": 7104, "episode_idx": 25, "frame_idx": 242, "global_frame_idx": 7104, "task_index": 5}, {"db_idx": 7105, "episode_idx": 25, "frame_idx": 243, "global_frame_idx": 7105, "task_index": 5}, {"db_idx": 7106, 
"episode_idx": 25, "frame_idx": 244, "global_frame_idx": 7106, "task_index": 5}, {"db_idx": 7107, "episode_idx": 25, "frame_idx": 245, "global_frame_idx": 7107, "task_index": 5}, {"db_idx": 7108, "episode_idx": 25, "frame_idx": 246, "global_frame_idx": 7108, "task_index": 5}, {"db_idx": 7109, "episode_idx": 25, "frame_idx": 247, "global_frame_idx": 7109, "task_index": 5}, {"db_idx": 7110, "episode_idx": 25, "frame_idx": 248, "global_frame_idx": 7110, "task_index": 5}, {"db_idx": 7111, "episode_idx": 25, "frame_idx": 249, "global_frame_idx": 7111, "task_index": 5}, {"db_idx": 7112, "episode_idx": 25, "frame_idx": 250, "global_frame_idx": 7112, "task_index": 5}, {"db_idx": 7113, "episode_idx": 25, "frame_idx": 251, "global_frame_idx": 7113, "task_index": 5}, {"db_idx": 7114, "episode_idx": 25, "frame_idx": 252, "global_frame_idx": 7114, "task_index": 5}, {"db_idx": 7115, "episode_idx": 25, "frame_idx": 253, "global_frame_idx": 7115, "task_index": 5}, {"db_idx": 7116, "episode_idx": 25, "frame_idx": 254, "global_frame_idx": 7116, "task_index": 5}, {"db_idx": 7117, "episode_idx": 25, "frame_idx": 255, "global_frame_idx": 7117, "task_index": 5}, {"db_idx": 7118, "episode_idx": 25, "frame_idx": 256, "global_frame_idx": 7118, "task_index": 5}, {"db_idx": 7119, "episode_idx": 25, "frame_idx": 257, "global_frame_idx": 7119, "task_index": 5}, {"db_idx": 7120, "episode_idx": 25, "frame_idx": 258, "global_frame_idx": 7120, "task_index": 5}, {"db_idx": 7121, "episode_idx": 25, "frame_idx": 259, "global_frame_idx": 7121, "task_index": 5}, {"db_idx": 7122, "episode_idx": 25, "frame_idx": 260, "global_frame_idx": 7122, "task_index": 5}, {"db_idx": 7123, "episode_idx": 25, "frame_idx": 261, "global_frame_idx": 7123, "task_index": 5}, {"db_idx": 7124, "episode_idx": 25, "frame_idx": 262, "global_frame_idx": 7124, "task_index": 5}, {"db_idx": 7125, "episode_idx": 25, "frame_idx": 263, "global_frame_idx": 7125, "task_index": 5}, {"db_idx": 7126, "episode_idx": 25, "frame_idx": 264, 
"global_frame_idx": 7126, "task_index": 5}, {"db_idx": 7127, "episode_idx": 25, "frame_idx": 265, "global_frame_idx": 7127, "task_index": 5}, {"db_idx": 7128, "episode_idx": 25, "frame_idx": 266, "global_frame_idx": 7128, "task_index": 5}, {"db_idx": 7129, "episode_idx": 25, "frame_idx": 267, "global_frame_idx": 7129, "task_index": 5}, {"db_idx": 7130, "episode_idx": 25, "frame_idx": 268, "global_frame_idx": 7130, "task_index": 5}, {"db_idx": 7131, "episode_idx": 25, "frame_idx": 269, "global_frame_idx": 7131, "task_index": 5}, {"db_idx": 7132, "episode_idx": 25, "frame_idx": 270, "global_frame_idx": 7132, "task_index": 5}, {"db_idx": 7133, "episode_idx": 25, "frame_idx": 271, "global_frame_idx": 7133, "task_index": 5}, {"db_idx": 7134, "episode_idx": 25, "frame_idx": 272, "global_frame_idx": 7134, "task_index": 5}, {"db_idx": 7135, "episode_idx": 25, "frame_idx": 273, "global_frame_idx": 7135, "task_index": 5}, {"db_idx": 7136, "episode_idx": 25, "frame_idx": 274, "global_frame_idx": 7136, "task_index": 5}, {"db_idx": 7137, "episode_idx": 25, "frame_idx": 275, "global_frame_idx": 7137, "task_index": 5}, {"db_idx": 7138, "episode_idx": 25, "frame_idx": 276, "global_frame_idx": 7138, "task_index": 5}, {"db_idx": 7139, "episode_idx": 25, "frame_idx": 277, "global_frame_idx": 7139, "task_index": 5}, {"db_idx": 7140, "episode_idx": 25, "frame_idx": 278, "global_frame_idx": 7140, "task_index": 5}, {"db_idx": 7141, "episode_idx": 25, "frame_idx": 279, "global_frame_idx": 7141, "task_index": 5}, {"db_idx": 7142, "episode_idx": 25, "frame_idx": 280, "global_frame_idx": 7142, "task_index": 5}, {"db_idx": 7143, "episode_idx": 25, "frame_idx": 281, "global_frame_idx": 7143, "task_index": 5}, {"db_idx": 7144, "episode_idx": 25, "frame_idx": 282, "global_frame_idx": 7144, "task_index": 5}, {"db_idx": 7145, "episode_idx": 25, "frame_idx": 283, "global_frame_idx": 7145, "task_index": 5}, {"db_idx": 7146, "episode_idx": 25, "frame_idx": 284, "global_frame_idx": 7146, "task_index": 
5}, {"db_idx": 7147, "episode_idx": 25, "frame_idx": 285, "global_frame_idx": 7147, "task_index": 5}, {"db_idx": 7148, "episode_idx": 25, "frame_idx": 286, "global_frame_idx": 7148, "task_index": 5}, {"db_idx": 7149, "episode_idx": 25, "frame_idx": 287, "global_frame_idx": 7149, "task_index": 5}, {"db_idx": 7150, "episode_idx": 25, "frame_idx": 288, "global_frame_idx": 7150, "task_index": 5}, {"db_idx": 7151, "episode_idx": 25, "frame_idx": 289, "global_frame_idx": 7151, "task_index": 5}, {"db_idx": 7152, "episode_idx": 25, "frame_idx": 290, "global_frame_idx": 7152, "task_index": 5}, {"db_idx": 7153, "episode_idx": 25, "frame_idx": 291, "global_frame_idx": 7153, "task_index": 5}, {"db_idx": 7154, "episode_idx": 25, "frame_idx": 292, "global_frame_idx": 7154, "task_index": 5}, {"db_idx": 7155, "episode_idx": 25, "frame_idx": 293, "global_frame_idx": 7155, "task_index": 5}, {"db_idx": 7156, "episode_idx": 25, "frame_idx": 294, "global_frame_idx": 7156, "task_index": 5}, {"db_idx": 7157, "episode_idx": 25, "frame_idx": 295, "global_frame_idx": 7157, "task_index": 5}, {"db_idx": 7158, "episode_idx": 25, "frame_idx": 296, "global_frame_idx": 7158, "task_index": 5}, {"db_idx": 7159, "episode_idx": 25, "frame_idx": 297, "global_frame_idx": 7159, "task_index": 5}, {"db_idx": 7160, "episode_idx": 25, "frame_idx": 298, "global_frame_idx": 7160, "task_index": 5}, {"db_idx": 7161, "episode_idx": 25, "frame_idx": 299, "global_frame_idx": 7161, "task_index": 5}, {"db_idx": 7162, "episode_idx": 25, "frame_idx": 300, "global_frame_idx": 7162, "task_index": 5}, {"db_idx": 7163, "episode_idx": 25, "frame_idx": 301, "global_frame_idx": 7163, "task_index": 5}, {"db_idx": 7164, "episode_idx": 25, "frame_idx": 302, "global_frame_idx": 7164, "task_index": 5}, {"db_idx": 7165, "episode_idx": 25, "frame_idx": 303, "global_frame_idx": 7165, "task_index": 5}, {"db_idx": 7166, "episode_idx": 25, "frame_idx": 304, "global_frame_idx": 7166, "task_index": 5}, {"db_idx": 7167, "episode_idx": 25, 
"frame_idx": 305, "global_frame_idx": 7167, "task_index": 5}, {"db_idx": 7168, "episode_idx": 25, "frame_idx": 306, "global_frame_idx": 7168, "task_index": 5}, {"db_idx": 7169, "episode_idx": 25, "frame_idx": 307, "global_frame_idx": 7169, "task_index": 5}, {"db_idx": 7170, "episode_idx": 25, "frame_idx": 308, "global_frame_idx": 7170, "task_index": 5}, {"db_idx": 7171, "episode_idx": 25, "frame_idx": 309, "global_frame_idx": 7171, "task_index": 5}, {"db_idx": 7172, "episode_idx": 25, "frame_idx": 310, "global_frame_idx": 7172, "task_index": 5}, {"db_idx": 7173, "episode_idx": 25, "frame_idx": 311, "global_frame_idx": 7173, "task_index": 5}, {"db_idx": 7174, "episode_idx": 25, "frame_idx": 312, "global_frame_idx": 7174, "task_index": 5}, {"db_idx": 7175, "episode_idx": 25, "frame_idx": 313, "global_frame_idx": 7175, "task_index": 5}, {"db_idx": 7176, "episode_idx": 25, "frame_idx": 314, "global_frame_idx": 7176, "task_index": 5}, {"db_idx": 7177, "episode_idx": 25, "frame_idx": 315, "global_frame_idx": 7177, "task_index": 5}, {"db_idx": 7178, "episode_idx": 25, "frame_idx": 316, "global_frame_idx": 7178, "task_index": 5}, {"db_idx": 7179, "episode_idx": 25, "frame_idx": 317, "global_frame_idx": 7179, "task_index": 5}, {"db_idx": 7180, "episode_idx": 25, "frame_idx": 318, "global_frame_idx": 7180, "task_index": 5}, {"db_idx": 7181, "episode_idx": 25, "frame_idx": 319, "global_frame_idx": 7181, "task_index": 5}, {"db_idx": 7182, "episode_idx": 25, "frame_idx": 320, "global_frame_idx": 7182, "task_index": 5}, {"db_idx": 7183, "episode_idx": 25, "frame_idx": 321, "global_frame_idx": 7183, "task_index": 5}, {"db_idx": 7184, "episode_idx": 25, "frame_idx": 322, "global_frame_idx": 7184, "task_index": 5}, {"db_idx": 7185, "episode_idx": 25, "frame_idx": 323, "global_frame_idx": 7185, "task_index": 5}, {"db_idx": 7186, "episode_idx": 25, "frame_idx": 324, "global_frame_idx": 7186, "task_index": 5}, {"db_idx": 7187, "episode_idx": 25, "frame_idx": 325, "global_frame_idx": 
7187, "task_index": 5}, {"db_idx": 7188, "episode_idx": 25, "frame_idx": 326, "global_frame_idx": 7188, "task_index": 5}, {"db_idx": 7189, "episode_idx": 25, "frame_idx": 327, "global_frame_idx": 7189, "task_index": 5}, {"db_idx": 7190, "episode_idx": 25, "frame_idx": 328, "global_frame_idx": 7190, "task_index": 5}, {"db_idx": 7191, "episode_idx": 25, "frame_idx": 329, "global_frame_idx": 7191, "task_index": 5}, {"db_idx": 7192, "episode_idx": 25, "frame_idx": 330, "global_frame_idx": 7192, "task_index": 5}, {"db_idx": 7193, "episode_idx": 25, "frame_idx": 331, "global_frame_idx": 7193, "task_index": 5}, {"db_idx": 7194, "episode_idx": 25, "frame_idx": 332, "global_frame_idx": 7194, "task_index": 5}, {"db_idx": 7195, "episode_idx": 25, "frame_idx": 333, "global_frame_idx": 7195, "task_index": 5}, {"db_idx": 7196, "episode_idx": 25, "frame_idx": 334, "global_frame_idx": 7196, "task_index": 5}, {"db_idx": 7197, "episode_idx": 25, "frame_idx": 335, "global_frame_idx": 7197, "task_index": 5}, {"db_idx": 7198, "episode_idx": 25, "frame_idx": 336, "global_frame_idx": 7198, "task_index": 5}, {"db_idx": 7199, "episode_idx": 25, "frame_idx": 337, "global_frame_idx": 7199, "task_index": 5}, {"db_idx": 7200, "episode_idx": 25, "frame_idx": 338, "global_frame_idx": 7200, "task_index": 5}, {"db_idx": 7201, "episode_idx": 25, "frame_idx": 339, "global_frame_idx": 7201, "task_index": 5}, {"db_idx": 7202, "episode_idx": 25, "frame_idx": 340, "global_frame_idx": 7202, "task_index": 5}, {"db_idx": 7203, "episode_idx": 25, "frame_idx": 341, "global_frame_idx": 7203, "task_index": 5}, {"db_idx": 7204, "episode_idx": 25, "frame_idx": 342, "global_frame_idx": 7204, "task_index": 5}, {"db_idx": 7205, "episode_idx": 25, "frame_idx": 343, "global_frame_idx": 7205, "task_index": 5}, {"db_idx": 7206, "episode_idx": 25, "frame_idx": 344, "global_frame_idx": 7206, "task_index": 5}, {"db_idx": 7207, "episode_idx": 25, "frame_idx": 345, "global_frame_idx": 7207, "task_index": 5}, {"db_idx": 
7208, "episode_idx": 25, "frame_idx": 346, "global_frame_idx": 7208, "task_index": 5}, {"db_idx": 7209, "episode_idx": 25, "frame_idx": 347, "global_frame_idx": 7209, "task_index": 5}, {"db_idx": 7210, "episode_idx": 25, "frame_idx": 348, "global_frame_idx": 7210, "task_index": 5}, {"db_idx": 7211, "episode_idx": 25, "frame_idx": 349, "global_frame_idx": 7211, "task_index": 5}, {"db_idx": 7212, "episode_idx": 25, "frame_idx": 350, "global_frame_idx": 7212, "task_index": 5}, {"db_idx": 7213, "episode_idx": 25, "frame_idx": 351, "global_frame_idx": 7213, "task_index": 5}, {"db_idx": 7214, "episode_idx": 25, "frame_idx": 352, "global_frame_idx": 7214, "task_index": 5}, {"db_idx": 7215, "episode_idx": 25, "frame_idx": 353, "global_frame_idx": 7215, "task_index": 5}, {"db_idx": 7216, "episode_idx": 25, "frame_idx": 354, "global_frame_idx": 7216, "task_index": 5}, {"db_idx": 7217, "episode_idx": 25, "frame_idx": 355, "global_frame_idx": 7217, "task_index": 5}, {"db_idx": 7218, "episode_idx": 25, "frame_idx": 356, "global_frame_idx": 7218, "task_index": 5}, {"db_idx": 7219, "episode_idx": 25, "frame_idx": 357, "global_frame_idx": 7219, "task_index": 5}, {"db_idx": 7220, "episode_idx": 25, "frame_idx": 358, "global_frame_idx": 7220, "task_index": 5}, {"db_idx": 7221, "episode_idx": 25, "frame_idx": 359, "global_frame_idx": 7221, "task_index": 5}, {"db_idx": 7222, "episode_idx": 25, "frame_idx": 360, "global_frame_idx": 7222, "task_index": 5}, {"db_idx": 7223, "episode_idx": 25, "frame_idx": 361, "global_frame_idx": 7223, "task_index": 5}, {"db_idx": 7224, "episode_idx": 25, "frame_idx": 362, "global_frame_idx": 7224, "task_index": 5}, {"db_idx": 7225, "episode_idx": 25, "frame_idx": 363, "global_frame_idx": 7225, "task_index": 5}, {"db_idx": 7226, "episode_idx": 25, "frame_idx": 364, "global_frame_idx": 7226, "task_index": 5}, {"db_idx": 7227, "episode_idx": 25, "frame_idx": 365, "global_frame_idx": 7227, "task_index": 5}, {"db_idx": 7228, "episode_idx": 25, "frame_idx": 
366, "global_frame_idx": 7228, "task_index": 5}, {"db_idx": 7229, "episode_idx": 25, "frame_idx": 367, "global_frame_idx": 7229, "task_index": 5}, {"db_idx": 7230, "episode_idx": 25, "frame_idx": 368, "global_frame_idx": 7230, "task_index": 5}, {"db_idx": 7231, "episode_idx": 25, "frame_idx": 369, "global_frame_idx": 7231, "task_index": 5}, {"db_idx": 7232, "episode_idx": 25, "frame_idx": 370, "global_frame_idx": 7232, "task_index": 5}, {"db_idx": 7233, "episode_idx": 25, "frame_idx": 371, "global_frame_idx": 7233, "task_index": 5}, {"db_idx": 7234, "episode_idx": 25, "frame_idx": 372, "global_frame_idx": 7234, "task_index": 5}, {"db_idx": 7235, "episode_idx": 25, "frame_idx": 373, "global_frame_idx": 7235, "task_index": 5}, {"db_idx": 7236, "episode_idx": 25, "frame_idx": 374, "global_frame_idx": 7236, "task_index": 5}, {"db_idx": 7237, "episode_idx": 25, "frame_idx": 375, "global_frame_idx": 7237, "task_index": 5}, {"db_idx": 7238, "episode_idx": 25, "frame_idx": 376, "global_frame_idx": 7238, "task_index": 5}, {"db_idx": 7239, "episode_idx": 25, "frame_idx": 377, "global_frame_idx": 7239, "task_index": 5}, {"db_idx": 7240, "episode_idx": 25, "frame_idx": 378, "global_frame_idx": 7240, "task_index": 5}, {"db_idx": 7241, "episode_idx": 25, "frame_idx": 379, "global_frame_idx": 7241, "task_index": 5}, {"db_idx": 7242, "episode_idx": 25, "frame_idx": 380, "global_frame_idx": 7242, "task_index": 5}, {"db_idx": 7243, "episode_idx": 25, "frame_idx": 381, "global_frame_idx": 7243, "task_index": 5}, {"db_idx": 7244, "episode_idx": 25, "frame_idx": 382, "global_frame_idx": 7244, "task_index": 5}, {"db_idx": 7245, "episode_idx": 25, "frame_idx": 383, "global_frame_idx": 7245, "task_index": 5}, {"db_idx": 7246, "episode_idx": 25, "frame_idx": 384, "global_frame_idx": 7246, "task_index": 5}, {"db_idx": 7247, "episode_idx": 25, "frame_idx": 385, "global_frame_idx": 7247, "task_index": 5}, {"db_idx": 7248, "episode_idx": 25, "frame_idx": 386, "global_frame_idx": 7248, 
"task_index": 5}, {"db_idx": 7249, "episode_idx": 25, "frame_idx": 387, "global_frame_idx": 7249, "task_index": 5}, {"db_idx": 7250, "episode_idx": 25, "frame_idx": 388, "global_frame_idx": 7250, "task_index": 5}, {"db_idx": 7251, "episode_idx": 25, "frame_idx": 389, "global_frame_idx": 7251, "task_index": 5}, {"db_idx": 7252, "episode_idx": 25, "frame_idx": 390, "global_frame_idx": 7252, "task_index": 5}, {"db_idx": 7253, "episode_idx": 25, "frame_idx": 391, "global_frame_idx": 7253, "task_index": 5}, {"db_idx": 7254, "episode_idx": 25, "frame_idx": 392, "global_frame_idx": 7254, "task_index": 5}, {"db_idx": 7255, "episode_idx": 25, "frame_idx": 393, "global_frame_idx": 7255, "task_index": 5}, {"db_idx": 7256, "episode_idx": 25, "frame_idx": 394, "global_frame_idx": 7256, "task_index": 5}, {"db_idx": 7257, "episode_idx": 25, "frame_idx": 395, "global_frame_idx": 7257, "task_index": 5}, {"db_idx": 7258, "episode_idx": 25, "frame_idx": 396, "global_frame_idx": 7258, "task_index": 5}, {"db_idx": 7259, "episode_idx": 25, "frame_idx": 397, "global_frame_idx": 7259, "task_index": 5}, {"db_idx": 7260, "episode_idx": 25, "frame_idx": 398, "global_frame_idx": 7260, "task_index": 5}, {"db_idx": 7261, "episode_idx": 25, "frame_idx": 399, "global_frame_idx": 7261, "task_index": 5}, {"db_idx": 7262, "episode_idx": 25, "frame_idx": 400, "global_frame_idx": 7262, "task_index": 5}, {"db_idx": 7263, "episode_idx": 25, "frame_idx": 401, "global_frame_idx": 7263, "task_index": 5}, {"db_idx": 7264, "episode_idx": 25, "frame_idx": 402, "global_frame_idx": 7264, "task_index": 5}, {"db_idx": 7265, "episode_idx": 25, "frame_idx": 403, "global_frame_idx": 7265, "task_index": 5}, {"db_idx": 7266, "episode_idx": 25, "frame_idx": 404, "global_frame_idx": 7266, "task_index": 5}, {"db_idx": 7267, "episode_idx": 25, "frame_idx": 405, "global_frame_idx": 7267, "task_index": 5}, {"db_idx": 7268, "episode_idx": 25, "frame_idx": 406, "global_frame_idx": 7268, "task_index": 5}, {"db_idx": 7269, 
"episode_idx": 25, "frame_idx": 407, "global_frame_idx": 7269, "task_index": 5}, {"db_idx": 7270, "episode_idx": 25, "frame_idx": 408, "global_frame_idx": 7270, "task_index": 5}, {"db_idx": 7271, "episode_idx": 25, "frame_idx": 409, "global_frame_idx": 7271, "task_index": 5}, {"db_idx": 7272, "episode_idx": 25, "frame_idx": 410, "global_frame_idx": 7272, "task_index": 5}, {"db_idx": 7273, "episode_idx": 25, "frame_idx": 411, "global_frame_idx": 7273, "task_index": 5}, {"db_idx": 7274, "episode_idx": 25, "frame_idx": 412, "global_frame_idx": 7274, "task_index": 5}, {"db_idx": 7275, "episode_idx": 25, "frame_idx": 413, "global_frame_idx": 7275, "task_index": 5}, {"db_idx": 7276, "episode_idx": 25, "frame_idx": 414, "global_frame_idx": 7276, "task_index": 5}, {"db_idx": 7277, "episode_idx": 25, "frame_idx": 415, "global_frame_idx": 7277, "task_index": 5}, {"db_idx": 7278, "episode_idx": 25, "frame_idx": 416, "global_frame_idx": 7278, "task_index": 5}, {"db_idx": 7279, "episode_idx": 25, "frame_idx": 417, "global_frame_idx": 7279, "task_index": 5}, {"db_idx": 7280, "episode_idx": 25, "frame_idx": 418, "global_frame_idx": 7280, "task_index": 5}, {"db_idx": 7281, "episode_idx": 25, "frame_idx": 419, "global_frame_idx": 7281, "task_index": 5}, {"db_idx": 7282, "episode_idx": 25, "frame_idx": 420, "global_frame_idx": 7282, "task_index": 5}, {"db_idx": 7283, "episode_idx": 25, "frame_idx": 421, "global_frame_idx": 7283, "task_index": 5}, {"db_idx": 7284, "episode_idx": 25, "frame_idx": 422, "global_frame_idx": 7284, "task_index": 5}, {"db_idx": 7285, "episode_idx": 25, "frame_idx": 423, "global_frame_idx": 7285, "task_index": 5}, {"db_idx": 7286, "episode_idx": 25, "frame_idx": 424, "global_frame_idx": 7286, "task_index": 5}, {"db_idx": 7287, "episode_idx": 25, "frame_idx": 425, "global_frame_idx": 7287, "task_index": 5}, {"db_idx": 7288, "episode_idx": 25, "frame_idx": 426, "global_frame_idx": 7288, "task_index": 5}, {"db_idx": 7289, "episode_idx": 25, "frame_idx": 427, 
"global_frame_idx": 7289, "task_index": 5}, {"db_idx": 7290, "episode_idx": 25, "frame_idx": 428, "global_frame_idx": 7290, "task_index": 5}, {"db_idx": 7291, "episode_idx": 25, "frame_idx": 429, "global_frame_idx": 7291, "task_index": 5}, {"db_idx": 7292, "episode_idx": 25, "frame_idx": 430, "global_frame_idx": 7292, "task_index": 5}, {"db_idx": 7293, "episode_idx": 25, "frame_idx": 431, "global_frame_idx": 7293, "task_index": 5}, {"db_idx": 7294, "episode_idx": 25, "frame_idx": 432, "global_frame_idx": 7294, "task_index": 5}, {"db_idx": 7295, "episode_idx": 25, "frame_idx": 433, "global_frame_idx": 7295, "task_index": 5}, {"db_idx": 7296, "episode_idx": 25, "frame_idx": 434, "global_frame_idx": 7296, "task_index": 5}, {"db_idx": 7297, "episode_idx": 25, "frame_idx": 435, "global_frame_idx": 7297, "task_index": 5}, {"db_idx": 7298, "episode_idx": 25, "frame_idx": 436, "global_frame_idx": 7298, "task_index": 5}, {"db_idx": 7299, "episode_idx": 25, "frame_idx": 437, "global_frame_idx": 7299, "task_index": 5}, {"db_idx": 7300, "episode_idx": 25, "frame_idx": 438, "global_frame_idx": 7300, "task_index": 5}, {"db_idx": 7301, "episode_idx": 25, "frame_idx": 439, "global_frame_idx": 7301, "task_index": 5}, {"db_idx": 7302, "episode_idx": 25, "frame_idx": 440, "global_frame_idx": 7302, "task_index": 5}, {"db_idx": 7303, "episode_idx": 25, "frame_idx": 441, "global_frame_idx": 7303, "task_index": 5}, {"db_idx": 7304, "episode_idx": 25, "frame_idx": 442, "global_frame_idx": 7304, "task_index": 5}, {"db_idx": 7305, "episode_idx": 25, "frame_idx": 443, "global_frame_idx": 7305, "task_index": 5}, {"db_idx": 7306, "episode_idx": 25, "frame_idx": 444, "global_frame_idx": 7306, "task_index": 5}, {"db_idx": 7307, "episode_idx": 25, "frame_idx": 445, "global_frame_idx": 7307, "task_index": 5}, {"db_idx": 7308, "episode_idx": 25, "frame_idx": 446, "global_frame_idx": 7308, "task_index": 5}, {"db_idx": 7309, "episode_idx": 25, "frame_idx": 447, "global_frame_idx": 7309, "task_index": 
5}, {"db_idx": 7310, "episode_idx": 25, "frame_idx": 448, "global_frame_idx": 7310, "task_index": 5}, {"db_idx": 7311, "episode_idx": 26, "frame_idx": 0, "global_frame_idx": 7311, "task_index": 5}, {"db_idx": 7312, "episode_idx": 26, "frame_idx": 1, "global_frame_idx": 7312, "task_index": 5}, {"db_idx": 7313, "episode_idx": 26, "frame_idx": 2, "global_frame_idx": 7313, "task_index": 5}, {"db_idx": 7314, "episode_idx": 26, "frame_idx": 3, "global_frame_idx": 7314, "task_index": 5}, {"db_idx": 7315, "episode_idx": 26, "frame_idx": 4, "global_frame_idx": 7315, "task_index": 5}, {"db_idx": 7316, "episode_idx": 26, "frame_idx": 5, "global_frame_idx": 7316, "task_index": 5}, {"db_idx": 7317, "episode_idx": 26, "frame_idx": 6, "global_frame_idx": 7317, "task_index": 5}, {"db_idx": 7318, "episode_idx": 26, "frame_idx": 7, "global_frame_idx": 7318, "task_index": 5}, {"db_idx": 7319, "episode_idx": 26, "frame_idx": 8, "global_frame_idx": 7319, "task_index": 5}, {"db_idx": 7320, "episode_idx": 26, "frame_idx": 9, "global_frame_idx": 7320, "task_index": 5}, {"db_idx": 7321, "episode_idx": 26, "frame_idx": 10, "global_frame_idx": 7321, "task_index": 5}, {"db_idx": 7322, "episode_idx": 26, "frame_idx": 11, "global_frame_idx": 7322, "task_index": 5}, {"db_idx": 7323, "episode_idx": 26, "frame_idx": 12, "global_frame_idx": 7323, "task_index": 5}, {"db_idx": 7324, "episode_idx": 26, "frame_idx": 13, "global_frame_idx": 7324, "task_index": 5}, {"db_idx": 7325, "episode_idx": 26, "frame_idx": 14, "global_frame_idx": 7325, "task_index": 5}, {"db_idx": 7326, "episode_idx": 26, "frame_idx": 15, "global_frame_idx": 7326, "task_index": 5}, {"db_idx": 7327, "episode_idx": 26, "frame_idx": 16, "global_frame_idx": 7327, "task_index": 5}, {"db_idx": 7328, "episode_idx": 26, "frame_idx": 17, "global_frame_idx": 7328, "task_index": 5}, {"db_idx": 7329, "episode_idx": 26, "frame_idx": 18, "global_frame_idx": 7329, "task_index": 5}, {"db_idx": 7330, "episode_idx": 26, "frame_idx": 19, 
"global_frame_idx": 7330, "task_index": 5}, {"db_idx": 7331, "episode_idx": 26, "frame_idx": 20, "global_frame_idx": 7331, "task_index": 5}, {"db_idx": 7332, "episode_idx": 26, "frame_idx": 21, "global_frame_idx": 7332, "task_index": 5}, {"db_idx": 7333, "episode_idx": 26, "frame_idx": 22, "global_frame_idx": 7333, "task_index": 5}, {"db_idx": 7334, "episode_idx": 26, "frame_idx": 23, "global_frame_idx": 7334, "task_index": 5}, {"db_idx": 7335, "episode_idx": 26, "frame_idx": 24, "global_frame_idx": 7335, "task_index": 5}, {"db_idx": 7336, "episode_idx": 26, "frame_idx": 25, "global_frame_idx": 7336, "task_index": 5}, {"db_idx": 7337, "episode_idx": 26, "frame_idx": 26, "global_frame_idx": 7337, "task_index": 5}, {"db_idx": 7338, "episode_idx": 26, "frame_idx": 27, "global_frame_idx": 7338, "task_index": 5}, {"db_idx": 7339, "episode_idx": 26, "frame_idx": 28, "global_frame_idx": 7339, "task_index": 5}, {"db_idx": 7340, "episode_idx": 26, "frame_idx": 29, "global_frame_idx": 7340, "task_index": 5}, {"db_idx": 7341, "episode_idx": 26, "frame_idx": 30, "global_frame_idx": 7341, "task_index": 5}, {"db_idx": 7342, "episode_idx": 26, "frame_idx": 31, "global_frame_idx": 7342, "task_index": 5}, {"db_idx": 7343, "episode_idx": 26, "frame_idx": 32, "global_frame_idx": 7343, "task_index": 5}, {"db_idx": 7344, "episode_idx": 26, "frame_idx": 33, "global_frame_idx": 7344, "task_index": 5}, {"db_idx": 7345, "episode_idx": 26, "frame_idx": 34, "global_frame_idx": 7345, "task_index": 5}, {"db_idx": 7346, "episode_idx": 26, "frame_idx": 35, "global_frame_idx": 7346, "task_index": 5}, {"db_idx": 7347, "episode_idx": 26, "frame_idx": 36, "global_frame_idx": 7347, "task_index": 5}, {"db_idx": 7348, "episode_idx": 26, "frame_idx": 37, "global_frame_idx": 7348, "task_index": 5}, {"db_idx": 7349, "episode_idx": 26, "frame_idx": 38, "global_frame_idx": 7349, "task_index": 5}, {"db_idx": 7350, "episode_idx": 26, "frame_idx": 39, "global_frame_idx": 7350, "task_index": 5}, {"db_idx": 
7351, "episode_idx": 26, "frame_idx": 40, "global_frame_idx": 7351, "task_index": 5}, {"db_idx": 7352, "episode_idx": 26, "frame_idx": 41, "global_frame_idx": 7352, "task_index": 5}, {"db_idx": 7353, "episode_idx": 26, "frame_idx": 42, "global_frame_idx": 7353, "task_index": 5}, {"db_idx": 7354, "episode_idx": 26, "frame_idx": 43, "global_frame_idx": 7354, "task_index": 5}, {"db_idx": 7355, "episode_idx": 26, "frame_idx": 44, "global_frame_idx": 7355, "task_index": 5}, {"db_idx": 7356, "episode_idx": 26, "frame_idx": 45, "global_frame_idx": 7356, "task_index": 5}, {"db_idx": 7357, "episode_idx": 26, "frame_idx": 46, "global_frame_idx": 7357, "task_index": 5}, {"db_idx": 7358, "episode_idx": 26, "frame_idx": 47, "global_frame_idx": 7358, "task_index": 5}, {"db_idx": 7359, "episode_idx": 26, "frame_idx": 48, "global_frame_idx": 7359, "task_index": 5}, {"db_idx": 7360, "episode_idx": 26, "frame_idx": 49, "global_frame_idx": 7360, "task_index": 5}, {"db_idx": 7361, "episode_idx": 26, "frame_idx": 50, "global_frame_idx": 7361, "task_index": 5}, {"db_idx": 7362, "episode_idx": 26, "frame_idx": 51, "global_frame_idx": 7362, "task_index": 5}, {"db_idx": 7363, "episode_idx": 26, "frame_idx": 52, "global_frame_idx": 7363, "task_index": 5}, {"db_idx": 7364, "episode_idx": 26, "frame_idx": 53, "global_frame_idx": 7364, "task_index": 5}, {"db_idx": 7365, "episode_idx": 26, "frame_idx": 54, "global_frame_idx": 7365, "task_index": 5}, {"db_idx": 7366, "episode_idx": 26, "frame_idx": 55, "global_frame_idx": 7366, "task_index": 5}, {"db_idx": 7367, "episode_idx": 26, "frame_idx": 56, "global_frame_idx": 7367, "task_index": 5}, {"db_idx": 7368, "episode_idx": 26, "frame_idx": 57, "global_frame_idx": 7368, "task_index": 5}, {"db_idx": 7369, "episode_idx": 26, "frame_idx": 58, "global_frame_idx": 7369, "task_index": 5}, {"db_idx": 7370, "episode_idx": 26, "frame_idx": 59, "global_frame_idx": 7370, "task_index": 5}, {"db_idx": 7371, "episode_idx": 26, "frame_idx": 60, 
"global_frame_idx": 7371, "task_index": 5}, {"db_idx": 7372, "episode_idx": 26, "frame_idx": 61, "global_frame_idx": 7372, "task_index": 5}, {"db_idx": 7373, "episode_idx": 26, "frame_idx": 62, "global_frame_idx": 7373, "task_index": 5}, {"db_idx": 7374, "episode_idx": 26, "frame_idx": 63, "global_frame_idx": 7374, "task_index": 5}, {"db_idx": 7375, "episode_idx": 26, "frame_idx": 64, "global_frame_idx": 7375, "task_index": 5}, {"db_idx": 7376, "episode_idx": 26, "frame_idx": 65, "global_frame_idx": 7376, "task_index": 5}, {"db_idx": 7377, "episode_idx": 26, "frame_idx": 66, "global_frame_idx": 7377, "task_index": 5}, {"db_idx": 7378, "episode_idx": 26, "frame_idx": 67, "global_frame_idx": 7378, "task_index": 5}, {"db_idx": 7379, "episode_idx": 26, "frame_idx": 68, "global_frame_idx": 7379, "task_index": 5}, {"db_idx": 7380, "episode_idx": 26, "frame_idx": 69, "global_frame_idx": 7380, "task_index": 5}, {"db_idx": 7381, "episode_idx": 26, "frame_idx": 70, "global_frame_idx": 7381, "task_index": 5}, {"db_idx": 7382, "episode_idx": 26, "frame_idx": 71, "global_frame_idx": 7382, "task_index": 5}, {"db_idx": 7383, "episode_idx": 26, "frame_idx": 72, "global_frame_idx": 7383, "task_index": 5}, {"db_idx": 7384, "episode_idx": 26, "frame_idx": 73, "global_frame_idx": 7384, "task_index": 5}, {"db_idx": 7385, "episode_idx": 26, "frame_idx": 74, "global_frame_idx": 7385, "task_index": 5}, {"db_idx": 7386, "episode_idx": 26, "frame_idx": 75, "global_frame_idx": 7386, "task_index": 5}, {"db_idx": 7387, "episode_idx": 26, "frame_idx": 76, "global_frame_idx": 7387, "task_index": 5}, {"db_idx": 7388, "episode_idx": 26, "frame_idx": 77, "global_frame_idx": 7388, "task_index": 5}, {"db_idx": 7389, "episode_idx": 26, "frame_idx": 78, "global_frame_idx": 7389, "task_index": 5}, {"db_idx": 7390, "episode_idx": 26, "frame_idx": 79, "global_frame_idx": 7390, "task_index": 5}, {"db_idx": 7391, "episode_idx": 26, "frame_idx": 80, "global_frame_idx": 7391, "task_index": 5}, {"db_idx": 
7392, "episode_idx": 26, "frame_idx": 81, "global_frame_idx": 7392, "task_index": 5}, {"db_idx": 7393, "episode_idx": 26, "frame_idx": 82, "global_frame_idx": 7393, "task_index": 5}, {"db_idx": 7394, "episode_idx": 26, "frame_idx": 83, "global_frame_idx": 7394, "task_index": 5}, {"db_idx": 7395, "episode_idx": 26, "frame_idx": 84, "global_frame_idx": 7395, "task_index": 5}, {"db_idx": 7396, "episode_idx": 26, "frame_idx": 85, "global_frame_idx": 7396, "task_index": 5}, {"db_idx": 7397, "episode_idx": 26, "frame_idx": 86, "global_frame_idx": 7397, "task_index": 5}, {"db_idx": 7398, "episode_idx": 26, "frame_idx": 87, "global_frame_idx": 7398, "task_index": 5}, {"db_idx": 7399, "episode_idx": 26, "frame_idx": 88, "global_frame_idx": 7399, "task_index": 5}, {"db_idx": 7400, "episode_idx": 26, "frame_idx": 89, "global_frame_idx": 7400, "task_index": 5}, {"db_idx": 7401, "episode_idx": 26, "frame_idx": 90, "global_frame_idx": 7401, "task_index": 5}, {"db_idx": 7402, "episode_idx": 26, "frame_idx": 91, "global_frame_idx": 7402, "task_index": 5}, {"db_idx": 7403, "episode_idx": 26, "frame_idx": 92, "global_frame_idx": 7403, "task_index": 5}, {"db_idx": 7404, "episode_idx": 26, "frame_idx": 93, "global_frame_idx": 7404, "task_index": 5}, {"db_idx": 7405, "episode_idx": 26, "frame_idx": 94, "global_frame_idx": 7405, "task_index": 5}, {"db_idx": 7406, "episode_idx": 26, "frame_idx": 95, "global_frame_idx": 7406, "task_index": 5}, {"db_idx": 7407, "episode_idx": 26, "frame_idx": 96, "global_frame_idx": 7407, "task_index": 5}, {"db_idx": 7408, "episode_idx": 26, "frame_idx": 97, "global_frame_idx": 7408, "task_index": 5}, {"db_idx": 7409, "episode_idx": 26, "frame_idx": 98, "global_frame_idx": 7409, "task_index": 5}, {"db_idx": 7410, "episode_idx": 26, "frame_idx": 99, "global_frame_idx": 7410, "task_index": 5}, {"db_idx": 7411, "episode_idx": 26, "frame_idx": 100, "global_frame_idx": 7411, "task_index": 5}, {"db_idx": 7412, "episode_idx": 26, "frame_idx": 101, 
"global_frame_idx": 7412, "task_index": 5}, {"db_idx": 7413, "episode_idx": 26, "frame_idx": 102, "global_frame_idx": 7413, "task_index": 5}, {"db_idx": 7414, "episode_idx": 26, "frame_idx": 103, "global_frame_idx": 7414, "task_index": 5}, {"db_idx": 7415, "episode_idx": 26, "frame_idx": 104, "global_frame_idx": 7415, "task_index": 5}, {"db_idx": 7416, "episode_idx": 26, "frame_idx": 105, "global_frame_idx": 7416, "task_index": 5}, {"db_idx": 7417, "episode_idx": 26, "frame_idx": 106, "global_frame_idx": 7417, "task_index": 5}, {"db_idx": 7418, "episode_idx": 26, "frame_idx": 107, "global_frame_idx": 7418, "task_index": 5}, {"db_idx": 7419, "episode_idx": 26, "frame_idx": 108, "global_frame_idx": 7419, "task_index": 5}, {"db_idx": 7420, "episode_idx": 26, "frame_idx": 109, "global_frame_idx": 7420, "task_index": 5}, {"db_idx": 7421, "episode_idx": 26, "frame_idx": 110, "global_frame_idx": 7421, "task_index": 5}, {"db_idx": 7422, "episode_idx": 26, "frame_idx": 111, "global_frame_idx": 7422, "task_index": 5}, {"db_idx": 7423, "episode_idx": 26, "frame_idx": 112, "global_frame_idx": 7423, "task_index": 5}, {"db_idx": 7424, "episode_idx": 26, "frame_idx": 113, "global_frame_idx": 7424, "task_index": 5}, {"db_idx": 7425, "episode_idx": 26, "frame_idx": 114, "global_frame_idx": 7425, "task_index": 5}, {"db_idx": 7426, "episode_idx": 26, "frame_idx": 115, "global_frame_idx": 7426, "task_index": 5}, {"db_idx": 7427, "episode_idx": 26, "frame_idx": 116, "global_frame_idx": 7427, "task_index": 5}, {"db_idx": 7428, "episode_idx": 26, "frame_idx": 117, "global_frame_idx": 7428, "task_index": 5}, {"db_idx": 7429, "episode_idx": 26, "frame_idx": 118, "global_frame_idx": 7429, "task_index": 5}, {"db_idx": 7430, "episode_idx": 26, "frame_idx": 119, "global_frame_idx": 7430, "task_index": 5}, {"db_idx": 7431, "episode_idx": 26, "frame_idx": 120, "global_frame_idx": 7431, "task_index": 5}, {"db_idx": 7432, "episode_idx": 26, "frame_idx": 121, "global_frame_idx": 7432, "task_index": 
5}, {"db_idx": 7433, "episode_idx": 26, "frame_idx": 122, "global_frame_idx": 7433, "task_index": 5}, {"db_idx": 7434, "episode_idx": 26, "frame_idx": 123, "global_frame_idx": 7434, "task_index": 5}, {"db_idx": 7435, "episode_idx": 26, "frame_idx": 124, "global_frame_idx": 7435, "task_index": 5}, {"db_idx": 7436, "episode_idx": 26, "frame_idx": 125, "global_frame_idx": 7436, "task_index": 5}, {"db_idx": 7437, "episode_idx": 26, "frame_idx": 126, "global_frame_idx": 7437, "task_index": 5}, {"db_idx": 7438, "episode_idx": 26, "frame_idx": 127, "global_frame_idx": 7438, "task_index": 5}, {"db_idx": 7439, "episode_idx": 26, "frame_idx": 128, "global_frame_idx": 7439, "task_index": 5}, {"db_idx": 7440, "episode_idx": 26, "frame_idx": 129, "global_frame_idx": 7440, "task_index": 5}, {"db_idx": 7441, "episode_idx": 26, "frame_idx": 130, "global_frame_idx": 7441, "task_index": 5}, {"db_idx": 7442, "episode_idx": 26, "frame_idx": 131, "global_frame_idx": 7442, "task_index": 5}, {"db_idx": 7443, "episode_idx": 26, "frame_idx": 132, "global_frame_idx": 7443, "task_index": 5}, {"db_idx": 7444, "episode_idx": 26, "frame_idx": 133, "global_frame_idx": 7444, "task_index": 5}, {"db_idx": 7445, "episode_idx": 26, "frame_idx": 134, "global_frame_idx": 7445, "task_index": 5}, {"db_idx": 7446, "episode_idx": 26, "frame_idx": 135, "global_frame_idx": 7446, "task_index": 5}, {"db_idx": 7447, "episode_idx": 26, "frame_idx": 136, "global_frame_idx": 7447, "task_index": 5}, {"db_idx": 7448, "episode_idx": 26, "frame_idx": 137, "global_frame_idx": 7448, "task_index": 5}, {"db_idx": 7449, "episode_idx": 26, "frame_idx": 138, "global_frame_idx": 7449, "task_index": 5}, {"db_idx": 7450, "episode_idx": 26, "frame_idx": 139, "global_frame_idx": 7450, "task_index": 5}, {"db_idx": 7451, "episode_idx": 26, "frame_idx": 140, "global_frame_idx": 7451, "task_index": 5}, {"db_idx": 7452, "episode_idx": 26, "frame_idx": 141, "global_frame_idx": 7452, "task_index": 5}, {"db_idx": 7453, "episode_idx": 26, 
"frame_idx": 142, "global_frame_idx": 7453, "task_index": 5}, {"db_idx": 7454, "episode_idx": 26, "frame_idx": 143, "global_frame_idx": 7454, "task_index": 5}, {"db_idx": 7455, "episode_idx": 26, "frame_idx": 144, "global_frame_idx": 7455, "task_index": 5}, {"db_idx": 7456, "episode_idx": 26, "frame_idx": 145, "global_frame_idx": 7456, "task_index": 5}, {"db_idx": 7457, "episode_idx": 26, "frame_idx": 146, "global_frame_idx": 7457, "task_index": 5}, {"db_idx": 7458, "episode_idx": 26, "frame_idx": 147, "global_frame_idx": 7458, "task_index": 5}, {"db_idx": 7459, "episode_idx": 26, "frame_idx": 148, "global_frame_idx": 7459, "task_index": 5}, {"db_idx": 7460, "episode_idx": 26, "frame_idx": 149, "global_frame_idx": 7460, "task_index": 5}, {"db_idx": 7461, "episode_idx": 26, "frame_idx": 150, "global_frame_idx": 7461, "task_index": 5}, {"db_idx": 7462, "episode_idx": 26, "frame_idx": 151, "global_frame_idx": 7462, "task_index": 5}, {"db_idx": 7463, "episode_idx": 26, "frame_idx": 152, "global_frame_idx": 7463, "task_index": 5}, {"db_idx": 7464, "episode_idx": 26, "frame_idx": 153, "global_frame_idx": 7464, "task_index": 5}, {"db_idx": 7465, "episode_idx": 26, "frame_idx": 154, "global_frame_idx": 7465, "task_index": 5}, {"db_idx": 7466, "episode_idx": 26, "frame_idx": 155, "global_frame_idx": 7466, "task_index": 5}, {"db_idx": 7467, "episode_idx": 26, "frame_idx": 156, "global_frame_idx": 7467, "task_index": 5}, {"db_idx": 7468, "episode_idx": 26, "frame_idx": 157, "global_frame_idx": 7468, "task_index": 5}, {"db_idx": 7469, "episode_idx": 26, "frame_idx": 158, "global_frame_idx": 7469, "task_index": 5}, {"db_idx": 7470, "episode_idx": 26, "frame_idx": 159, "global_frame_idx": 7470, "task_index": 5}, {"db_idx": 7471, "episode_idx": 26, "frame_idx": 160, "global_frame_idx": 7471, "task_index": 5}, {"db_idx": 7472, "episode_idx": 26, "frame_idx": 161, "global_frame_idx": 7472, "task_index": 5}, {"db_idx": 7473, "episode_idx": 26, "frame_idx": 162, "global_frame_idx": 
7473, "task_index": 5}, {"db_idx": 7474, "episode_idx": 26, "frame_idx": 163, "global_frame_idx": 7474, "task_index": 5}, {"db_idx": 7475, "episode_idx": 26, "frame_idx": 164, "global_frame_idx": 7475, "task_index": 5}, {"db_idx": 7476, "episode_idx": 26, "frame_idx": 165, "global_frame_idx": 7476, "task_index": 5}, {"db_idx": 7477, "episode_idx": 26, "frame_idx": 166, "global_frame_idx": 7477, "task_index": 5}, {"db_idx": 7478, "episode_idx": 26, "frame_idx": 167, "global_frame_idx": 7478, "task_index": 5}, {"db_idx": 7479, "episode_idx": 26, "frame_idx": 168, "global_frame_idx": 7479, "task_index": 5}, {"db_idx": 7480, "episode_idx": 26, "frame_idx": 169, "global_frame_idx": 7480, "task_index": 5}, {"db_idx": 7481, "episode_idx": 26, "frame_idx": 170, "global_frame_idx": 7481, "task_index": 5}, {"db_idx": 7482, "episode_idx": 26, "frame_idx": 171, "global_frame_idx": 7482, "task_index": 5}, {"db_idx": 7483, "episode_idx": 26, "frame_idx": 172, "global_frame_idx": 7483, "task_index": 5}, {"db_idx": 7484, "episode_idx": 26, "frame_idx": 173, "global_frame_idx": 7484, "task_index": 5}, {"db_idx": 7485, "episode_idx": 26, "frame_idx": 174, "global_frame_idx": 7485, "task_index": 5}, {"db_idx": 7486, "episode_idx": 26, "frame_idx": 175, "global_frame_idx": 7486, "task_index": 5}, {"db_idx": 7487, "episode_idx": 26, "frame_idx": 176, "global_frame_idx": 7487, "task_index": 5}, {"db_idx": 7488, "episode_idx": 26, "frame_idx": 177, "global_frame_idx": 7488, "task_index": 5}, {"db_idx": 7489, "episode_idx": 26, "frame_idx": 178, "global_frame_idx": 7489, "task_index": 5}, {"db_idx": 7490, "episode_idx": 26, "frame_idx": 179, "global_frame_idx": 7490, "task_index": 5}, {"db_idx": 7491, "episode_idx": 26, "frame_idx": 180, "global_frame_idx": 7491, "task_index": 5}, {"db_idx": 7492, "episode_idx": 26, "frame_idx": 181, "global_frame_idx": 7492, "task_index": 5}, {"db_idx": 7493, "episode_idx": 26, "frame_idx": 182, "global_frame_idx": 7493, "task_index": 5}, {"db_idx": 
7494, "episode_idx": 26, "frame_idx": 183, "global_frame_idx": 7494, "task_index": 5}, {"db_idx": 7495, "episode_idx": 26, "frame_idx": 184, "global_frame_idx": 7495, "task_index": 5}, {"db_idx": 7496, "episode_idx": 26, "frame_idx": 185, "global_frame_idx": 7496, "task_index": 5}, {"db_idx": 7497, "episode_idx": 26, "frame_idx": 186, "global_frame_idx": 7497, "task_index": 5}, {"db_idx": 7498, "episode_idx": 26, "frame_idx": 187, "global_frame_idx": 7498, "task_index": 5}, {"db_idx": 7499, "episode_idx": 26, "frame_idx": 188, "global_frame_idx": 7499, "task_index": 5}, {"db_idx": 7500, "episode_idx": 26, "frame_idx": 189, "global_frame_idx": 7500, "task_index": 5}, {"db_idx": 7501, "episode_idx": 26, "frame_idx": 190, "global_frame_idx": 7501, "task_index": 5}, {"db_idx": 7502, "episode_idx": 26, "frame_idx": 191, "global_frame_idx": 7502, "task_index": 5}, {"db_idx": 7503, "episode_idx": 26, "frame_idx": 192, "global_frame_idx": 7503, "task_index": 5}, {"db_idx": 7504, "episode_idx": 26, "frame_idx": 193, "global_frame_idx": 7504, "task_index": 5}, {"db_idx": 7505, "episode_idx": 26, "frame_idx": 194, "global_frame_idx": 7505, "task_index": 5}, {"db_idx": 7506, "episode_idx": 26, "frame_idx": 195, "global_frame_idx": 7506, "task_index": 5}, {"db_idx": 7507, "episode_idx": 26, "frame_idx": 196, "global_frame_idx": 7507, "task_index": 5}, {"db_idx": 7508, "episode_idx": 26, "frame_idx": 197, "global_frame_idx": 7508, "task_index": 5}, {"db_idx": 7509, "episode_idx": 26, "frame_idx": 198, "global_frame_idx": 7509, "task_index": 5}, {"db_idx": 7510, "episode_idx": 26, "frame_idx": 199, "global_frame_idx": 7510, "task_index": 5}, {"db_idx": 7511, "episode_idx": 26, "frame_idx": 200, "global_frame_idx": 7511, "task_index": 5}, {"db_idx": 7512, "episode_idx": 26, "frame_idx": 201, "global_frame_idx": 7512, "task_index": 5}, {"db_idx": 7513, "episode_idx": 26, "frame_idx": 202, "global_frame_idx": 7513, "task_index": 5}, {"db_idx": 7514, "episode_idx": 26, "frame_idx": 
203, "global_frame_idx": 7514, "task_index": 5}, {"db_idx": 7515, "episode_idx": 26, "frame_idx": 204, "global_frame_idx": 7515, "task_index": 5}, {"db_idx": 7516, "episode_idx": 26, "frame_idx": 205, "global_frame_idx": 7516, "task_index": 5}, {"db_idx": 7517, "episode_idx": 26, "frame_idx": 206, "global_frame_idx": 7517, "task_index": 5}, {"db_idx": 7518, "episode_idx": 26, "frame_idx": 207, "global_frame_idx": 7518, "task_index": 5}, {"db_idx": 7519, "episode_idx": 26, "frame_idx": 208, "global_frame_idx": 7519, "task_index": 5}, {"db_idx": 7520, "episode_idx": 26, "frame_idx": 209, "global_frame_idx": 7520, "task_index": 5}, {"db_idx": 7521, "episode_idx": 26, "frame_idx": 210, "global_frame_idx": 7521, "task_index": 5}, {"db_idx": 7522, "episode_idx": 26, "frame_idx": 211, "global_frame_idx": 7522, "task_index": 5}, {"db_idx": 7523, "episode_idx": 26, "frame_idx": 212, "global_frame_idx": 7523, "task_index": 5}, {"db_idx": 7524, "episode_idx": 26, "frame_idx": 213, "global_frame_idx": 7524, "task_index": 5}, {"db_idx": 7525, "episode_idx": 26, "frame_idx": 214, "global_frame_idx": 7525, "task_index": 5}, {"db_idx": 7526, "episode_idx": 26, "frame_idx": 215, "global_frame_idx": 7526, "task_index": 5}, {"db_idx": 7527, "episode_idx": 26, "frame_idx": 216, "global_frame_idx": 7527, "task_index": 5}, {"db_idx": 7528, "episode_idx": 26, "frame_idx": 217, "global_frame_idx": 7528, "task_index": 5}, {"db_idx": 7529, "episode_idx": 26, "frame_idx": 218, "global_frame_idx": 7529, "task_index": 5}, {"db_idx": 7530, "episode_idx": 26, "frame_idx": 219, "global_frame_idx": 7530, "task_index": 5}, {"db_idx": 7531, "episode_idx": 26, "frame_idx": 220, "global_frame_idx": 7531, "task_index": 5}, {"db_idx": 7532, "episode_idx": 26, "frame_idx": 221, "global_frame_idx": 7532, "task_index": 5}, {"db_idx": 7533, "episode_idx": 26, "frame_idx": 222, "global_frame_idx": 7533, "task_index": 5}, {"db_idx": 7534, "episode_idx": 26, "frame_idx": 223, "global_frame_idx": 7534, 
"task_index": 5}, {"db_idx": 7535, "episode_idx": 26, "frame_idx": 224, "global_frame_idx": 7535, "task_index": 5}, {"db_idx": 7536, "episode_idx": 26, "frame_idx": 225, "global_frame_idx": 7536, "task_index": 5}, {"db_idx": 7537, "episode_idx": 26, "frame_idx": 226, "global_frame_idx": 7537, "task_index": 5}, {"db_idx": 7538, "episode_idx": 26, "frame_idx": 227, "global_frame_idx": 7538, "task_index": 5}, {"db_idx": 7539, "episode_idx": 26, "frame_idx": 228, "global_frame_idx": 7539, "task_index": 5}, {"db_idx": 7540, "episode_idx": 26, "frame_idx": 229, "global_frame_idx": 7540, "task_index": 5}, {"db_idx": 7541, "episode_idx": 26, "frame_idx": 230, "global_frame_idx": 7541, "task_index": 5}, {"db_idx": 7542, "episode_idx": 26, "frame_idx": 231, "global_frame_idx": 7542, "task_index": 5}, {"db_idx": 7543, "episode_idx": 26, "frame_idx": 232, "global_frame_idx": 7543, "task_index": 5}, {"db_idx": 7544, "episode_idx": 26, "frame_idx": 233, "global_frame_idx": 7544, "task_index": 5}, {"db_idx": 7545, "episode_idx": 26, "frame_idx": 234, "global_frame_idx": 7545, "task_index": 5}, {"db_idx": 7546, "episode_idx": 26, "frame_idx": 235, "global_frame_idx": 7546, "task_index": 5}, {"db_idx": 7547, "episode_idx": 26, "frame_idx": 236, "global_frame_idx": 7547, "task_index": 5}, {"db_idx": 7548, "episode_idx": 26, "frame_idx": 237, "global_frame_idx": 7548, "task_index": 5}, {"db_idx": 7549, "episode_idx": 26, "frame_idx": 238, "global_frame_idx": 7549, "task_index": 5}, {"db_idx": 7550, "episode_idx": 26, "frame_idx": 239, "global_frame_idx": 7550, "task_index": 5}, {"db_idx": 7551, "episode_idx": 26, "frame_idx": 240, "global_frame_idx": 7551, "task_index": 5}, {"db_idx": 7552, "episode_idx": 26, "frame_idx": 241, "global_frame_idx": 7552, "task_index": 5}, {"db_idx": 7553, "episode_idx": 26, "frame_idx": 242, "global_frame_idx": 7553, "task_index": 5}, {"db_idx": 7554, "episode_idx": 26, "frame_idx": 243, "global_frame_idx": 7554, "task_index": 5}, {"db_idx": 7555, 
"episode_idx": 26, "frame_idx": 244, "global_frame_idx": 7555, "task_index": 5}, {"db_idx": 7556, "episode_idx": 26, "frame_idx": 245, "global_frame_idx": 7556, "task_index": 5}, {"db_idx": 7557, "episode_idx": 26, "frame_idx": 246, "global_frame_idx": 7557, "task_index": 5}, {"db_idx": 7558, "episode_idx": 26, "frame_idx": 247, "global_frame_idx": 7558, "task_index": 5}, {"db_idx": 7559, "episode_idx": 26, "frame_idx": 248, "global_frame_idx": 7559, "task_index": 5}, {"db_idx": 7560, "episode_idx": 26, "frame_idx": 249, "global_frame_idx": 7560, "task_index": 5}, {"db_idx": 7561, "episode_idx": 26, "frame_idx": 250, "global_frame_idx": 7561, "task_index": 5}, {"db_idx": 7562, "episode_idx": 26, "frame_idx": 251, "global_frame_idx": 7562, "task_index": 5}, {"db_idx": 7563, "episode_idx": 26, "frame_idx": 252, "global_frame_idx": 7563, "task_index": 5}, {"db_idx": 7564, "episode_idx": 26, "frame_idx": 253, "global_frame_idx": 7564, "task_index": 5}, {"db_idx": 7565, "episode_idx": 26, "frame_idx": 254, "global_frame_idx": 7565, "task_index": 5}, {"db_idx": 7566, "episode_idx": 26, "frame_idx": 255, "global_frame_idx": 7566, "task_index": 5}, {"db_idx": 7567, "episode_idx": 26, "frame_idx": 256, "global_frame_idx": 7567, "task_index": 5}, {"db_idx": 7568, "episode_idx": 26, "frame_idx": 257, "global_frame_idx": 7568, "task_index": 5}, {"db_idx": 7569, "episode_idx": 26, "frame_idx": 258, "global_frame_idx": 7569, "task_index": 5}, {"db_idx": 7570, "episode_idx": 26, "frame_idx": 259, "global_frame_idx": 7570, "task_index": 5}, {"db_idx": 7571, "episode_idx": 26, "frame_idx": 260, "global_frame_idx": 7571, "task_index": 5}, {"db_idx": 7572, "episode_idx": 26, "frame_idx": 261, "global_frame_idx": 7572, "task_index": 5}, {"db_idx": 7573, "episode_idx": 26, "frame_idx": 262, "global_frame_idx": 7573, "task_index": 5}, {"db_idx": 7574, "episode_idx": 26, "frame_idx": 263, "global_frame_idx": 7574, "task_index": 5}, {"db_idx": 7575, "episode_idx": 26, "frame_idx": 264, 
"global_frame_idx": 7575, "task_index": 5}, {"db_idx": 7576, "episode_idx": 27, "frame_idx": 0, "global_frame_idx": 7576, "task_index": 5}, {"db_idx": 7577, "episode_idx": 27, "frame_idx": 1, "global_frame_idx": 7577, "task_index": 5}, {"db_idx": 7578, "episode_idx": 27, "frame_idx": 2, "global_frame_idx": 7578, "task_index": 5}, {"db_idx": 7579, "episode_idx": 27, "frame_idx": 3, "global_frame_idx": 7579, "task_index": 5}, {"db_idx": 7580, "episode_idx": 27, "frame_idx": 4, "global_frame_idx": 7580, "task_index": 5}, {"db_idx": 7581, "episode_idx": 27, "frame_idx": 5, "global_frame_idx": 7581, "task_index": 5}, {"db_idx": 7582, "episode_idx": 27, "frame_idx": 6, "global_frame_idx": 7582, "task_index": 5}, {"db_idx": 7583, "episode_idx": 27, "frame_idx": 7, "global_frame_idx": 7583, "task_index": 5}, {"db_idx": 7584, "episode_idx": 27, "frame_idx": 8, "global_frame_idx": 7584, "task_index": 5}, {"db_idx": 7585, "episode_idx": 27, "frame_idx": 9, "global_frame_idx": 7585, "task_index": 5}, {"db_idx": 7586, "episode_idx": 27, "frame_idx": 10, "global_frame_idx": 7586, "task_index": 5}, {"db_idx": 7587, "episode_idx": 27, "frame_idx": 11, "global_frame_idx": 7587, "task_index": 5}, {"db_idx": 7588, "episode_idx": 27, "frame_idx": 12, "global_frame_idx": 7588, "task_index": 5}, {"db_idx": 7589, "episode_idx": 27, "frame_idx": 13, "global_frame_idx": 7589, "task_index": 5}, {"db_idx": 7590, "episode_idx": 27, "frame_idx": 14, "global_frame_idx": 7590, "task_index": 5}, {"db_idx": 7591, "episode_idx": 27, "frame_idx": 15, "global_frame_idx": 7591, "task_index": 5}, {"db_idx": 7592, "episode_idx": 27, "frame_idx": 16, "global_frame_idx": 7592, "task_index": 5}, {"db_idx": 7593, "episode_idx": 27, "frame_idx": 17, "global_frame_idx": 7593, "task_index": 5}, {"db_idx": 7594, "episode_idx": 27, "frame_idx": 18, "global_frame_idx": 7594, "task_index": 5}, {"db_idx": 7595, "episode_idx": 27, "frame_idx": 19, "global_frame_idx": 7595, "task_index": 5}, {"db_idx": 7596, 
"episode_idx": 27, "frame_idx": 20, "global_frame_idx": 7596, "task_index": 5}, {"db_idx": 7597, "episode_idx": 27, "frame_idx": 21, "global_frame_idx": 7597, "task_index": 5}, {"db_idx": 7598, "episode_idx": 27, "frame_idx": 22, "global_frame_idx": 7598, "task_index": 5}, {"db_idx": 7599, "episode_idx": 27, "frame_idx": 23, "global_frame_idx": 7599, "task_index": 5}, {"db_idx": 7600, "episode_idx": 27, "frame_idx": 24, "global_frame_idx": 7600, "task_index": 5}, {"db_idx": 7601, "episode_idx": 27, "frame_idx": 25, "global_frame_idx": 7601, "task_index": 5}, {"db_idx": 7602, "episode_idx": 27, "frame_idx": 26, "global_frame_idx": 7602, "task_index": 5}, {"db_idx": 7603, "episode_idx": 27, "frame_idx": 27, "global_frame_idx": 7603, "task_index": 5}, {"db_idx": 7604, "episode_idx": 27, "frame_idx": 28, "global_frame_idx": 7604, "task_index": 5}, {"db_idx": 7605, "episode_idx": 27, "frame_idx": 29, "global_frame_idx": 7605, "task_index": 5}, {"db_idx": 7606, "episode_idx": 27, "frame_idx": 30, "global_frame_idx": 7606, "task_index": 5}, {"db_idx": 7607, "episode_idx": 27, "frame_idx": 31, "global_frame_idx": 7607, "task_index": 5}, {"db_idx": 7608, "episode_idx": 27, "frame_idx": 32, "global_frame_idx": 7608, "task_index": 5}, {"db_idx": 7609, "episode_idx": 27, "frame_idx": 33, "global_frame_idx": 7609, "task_index": 5}, {"db_idx": 7610, "episode_idx": 27, "frame_idx": 34, "global_frame_idx": 7610, "task_index": 5}, {"db_idx": 7611, "episode_idx": 27, "frame_idx": 35, "global_frame_idx": 7611, "task_index": 5}, {"db_idx": 7612, "episode_idx": 27, "frame_idx": 36, "global_frame_idx": 7612, "task_index": 5}, {"db_idx": 7613, "episode_idx": 27, "frame_idx": 37, "global_frame_idx": 7613, "task_index": 5}, {"db_idx": 7614, "episode_idx": 27, "frame_idx": 38, "global_frame_idx": 7614, "task_index": 5}, {"db_idx": 7615, "episode_idx": 27, "frame_idx": 39, "global_frame_idx": 7615, "task_index": 5}, {"db_idx": 7616, "episode_idx": 27, "frame_idx": 40, "global_frame_idx": 
7616, "task_index": 5}, {"db_idx": 7617, "episode_idx": 27, "frame_idx": 41, "global_frame_idx": 7617, "task_index": 5}, {"db_idx": 7618, "episode_idx": 27, "frame_idx": 42, "global_frame_idx": 7618, "task_index": 5}, {"db_idx": 7619, "episode_idx": 27, "frame_idx": 43, "global_frame_idx": 7619, "task_index": 5}, {"db_idx": 7620, "episode_idx": 27, "frame_idx": 44, "global_frame_idx": 7620, "task_index": 5}, {"db_idx": 7621, "episode_idx": 27, "frame_idx": 45, "global_frame_idx": 7621, "task_index": 5}, {"db_idx": 7622, "episode_idx": 27, "frame_idx": 46, "global_frame_idx": 7622, "task_index": 5}, {"db_idx": 7623, "episode_idx": 27, "frame_idx": 47, "global_frame_idx": 7623, "task_index": 5}, {"db_idx": 7624, "episode_idx": 27, "frame_idx": 48, "global_frame_idx": 7624, "task_index": 5}, {"db_idx": 7625, "episode_idx": 27, "frame_idx": 49, "global_frame_idx": 7625, "task_index": 5}, {"db_idx": 7626, "episode_idx": 27, "frame_idx": 50, "global_frame_idx": 7626, "task_index": 5}, {"db_idx": 7627, "episode_idx": 27, "frame_idx": 51, "global_frame_idx": 7627, "task_index": 5}, {"db_idx": 7628, "episode_idx": 27, "frame_idx": 52, "global_frame_idx": 7628, "task_index": 5}, {"db_idx": 7629, "episode_idx": 27, "frame_idx": 53, "global_frame_idx": 7629, "task_index": 5}, {"db_idx": 7630, "episode_idx": 27, "frame_idx": 54, "global_frame_idx": 7630, "task_index": 5}, {"db_idx": 7631, "episode_idx": 27, "frame_idx": 55, "global_frame_idx": 7631, "task_index": 5}, {"db_idx": 7632, "episode_idx": 27, "frame_idx": 56, "global_frame_idx": 7632, "task_index": 5}, {"db_idx": 7633, "episode_idx": 27, "frame_idx": 57, "global_frame_idx": 7633, "task_index": 5}, {"db_idx": 7634, "episode_idx": 27, "frame_idx": 58, "global_frame_idx": 7634, "task_index": 5}, {"db_idx": 7635, "episode_idx": 27, "frame_idx": 59, "global_frame_idx": 7635, "task_index": 5}, {"db_idx": 7636, "episode_idx": 27, "frame_idx": 60, "global_frame_idx": 7636, "task_index": 5}, {"db_idx": 7637, "episode_idx": 27, 
"frame_idx": 61, "global_frame_idx": 7637, "task_index": 5}, {"db_idx": 7638, "episode_idx": 27, "frame_idx": 62, "global_frame_idx": 7638, "task_index": 5}, {"db_idx": 7639, "episode_idx": 27, "frame_idx": 63, "global_frame_idx": 7639, "task_index": 5}, {"db_idx": 7640, "episode_idx": 27, "frame_idx": 64, "global_frame_idx": 7640, "task_index": 5}, {"db_idx": 7641, "episode_idx": 27, "frame_idx": 65, "global_frame_idx": 7641, "task_index": 5}, {"db_idx": 7642, "episode_idx": 27, "frame_idx": 66, "global_frame_idx": 7642, "task_index": 5}, {"db_idx": 7643, "episode_idx": 27, "frame_idx": 67, "global_frame_idx": 7643, "task_index": 5}, {"db_idx": 7644, "episode_idx": 27, "frame_idx": 68, "global_frame_idx": 7644, "task_index": 5}, {"db_idx": 7645, "episode_idx": 27, "frame_idx": 69, "global_frame_idx": 7645, "task_index": 5}, {"db_idx": 7646, "episode_idx": 27, "frame_idx": 70, "global_frame_idx": 7646, "task_index": 5}, {"db_idx": 7647, "episode_idx": 27, "frame_idx": 71, "global_frame_idx": 7647, "task_index": 5}, {"db_idx": 7648, "episode_idx": 27, "frame_idx": 72, "global_frame_idx": 7648, "task_index": 5}, {"db_idx": 7649, "episode_idx": 27, "frame_idx": 73, "global_frame_idx": 7649, "task_index": 5}, {"db_idx": 7650, "episode_idx": 27, "frame_idx": 74, "global_frame_idx": 7650, "task_index": 5}, {"db_idx": 7651, "episode_idx": 27, "frame_idx": 75, "global_frame_idx": 7651, "task_index": 5}, {"db_idx": 7652, "episode_idx": 27, "frame_idx": 76, "global_frame_idx": 7652, "task_index": 5}, {"db_idx": 7653, "episode_idx": 27, "frame_idx": 77, "global_frame_idx": 7653, "task_index": 5}, {"db_idx": 7654, "episode_idx": 27, "frame_idx": 78, "global_frame_idx": 7654, "task_index": 5}, {"db_idx": 7655, "episode_idx": 27, "frame_idx": 79, "global_frame_idx": 7655, "task_index": 5}, {"db_idx": 7656, "episode_idx": 27, "frame_idx": 80, "global_frame_idx": 7656, "task_index": 5}, {"db_idx": 7657, "episode_idx": 27, "frame_idx": 81, "global_frame_idx": 7657, "task_index": 
5}, {"db_idx": 7658, "episode_idx": 27, "frame_idx": 82, "global_frame_idx": 7658, "task_index": 5}, {"db_idx": 7659, "episode_idx": 27, "frame_idx": 83, "global_frame_idx": 7659, "task_index": 5}, {"db_idx": 7660, "episode_idx": 27, "frame_idx": 84, "global_frame_idx": 7660, "task_index": 5}, {"db_idx": 7661, "episode_idx": 27, "frame_idx": 85, "global_frame_idx": 7661, "task_index": 5}, {"db_idx": 7662, "episode_idx": 27, "frame_idx": 86, "global_frame_idx": 7662, "task_index": 5}, {"db_idx": 7663, "episode_idx": 27, "frame_idx": 87, "global_frame_idx": 7663, "task_index": 5}, {"db_idx": 7664, "episode_idx": 27, "frame_idx": 88, "global_frame_idx": 7664, "task_index": 5}, {"db_idx": 7665, "episode_idx": 27, "frame_idx": 89, "global_frame_idx": 7665, "task_index": 5}, {"db_idx": 7666, "episode_idx": 27, "frame_idx": 90, "global_frame_idx": 7666, "task_index": 5}, {"db_idx": 7667, "episode_idx": 27, "frame_idx": 91, "global_frame_idx": 7667, "task_index": 5}, {"db_idx": 7668, "episode_idx": 27, "frame_idx": 92, "global_frame_idx": 7668, "task_index": 5}, {"db_idx": 7669, "episode_idx": 27, "frame_idx": 93, "global_frame_idx": 7669, "task_index": 5}, {"db_idx": 7670, "episode_idx": 27, "frame_idx": 94, "global_frame_idx": 7670, "task_index": 5}, {"db_idx": 7671, "episode_idx": 27, "frame_idx": 95, "global_frame_idx": 7671, "task_index": 5}, {"db_idx": 7672, "episode_idx": 27, "frame_idx": 96, "global_frame_idx": 7672, "task_index": 5}, {"db_idx": 7673, "episode_idx": 27, "frame_idx": 97, "global_frame_idx": 7673, "task_index": 5}, {"db_idx": 7674, "episode_idx": 27, "frame_idx": 98, "global_frame_idx": 7674, "task_index": 5}, {"db_idx": 7675, "episode_idx": 27, "frame_idx": 99, "global_frame_idx": 7675, "task_index": 5}, {"db_idx": 7676, "episode_idx": 27, "frame_idx": 100, "global_frame_idx": 7676, "task_index": 5}, {"db_idx": 7677, "episode_idx": 27, "frame_idx": 101, "global_frame_idx": 7677, "task_index": 5}, {"db_idx": 7678, "episode_idx": 27, "frame_idx": 102, 
"global_frame_idx": 7678, "task_index": 5}, {"db_idx": 7679, "episode_idx": 27, "frame_idx": 103, "global_frame_idx": 7679, "task_index": 5}, {"db_idx": 7680, "episode_idx": 27, "frame_idx": 104, "global_frame_idx": 7680, "task_index": 5}, {"db_idx": 7681, "episode_idx": 27, "frame_idx": 105, "global_frame_idx": 7681, "task_index": 5}, {"db_idx": 7682, "episode_idx": 27, "frame_idx": 106, "global_frame_idx": 7682, "task_index": 5}, {"db_idx": 7683, "episode_idx": 27, "frame_idx": 107, "global_frame_idx": 7683, "task_index": 5}, {"db_idx": 7684, "episode_idx": 27, "frame_idx": 108, "global_frame_idx": 7684, "task_index": 5}, {"db_idx": 7685, "episode_idx": 27, "frame_idx": 109, "global_frame_idx": 7685, "task_index": 5}, {"db_idx": 7686, "episode_idx": 27, "frame_idx": 110, "global_frame_idx": 7686, "task_index": 5}, {"db_idx": 7687, "episode_idx": 27, "frame_idx": 111, "global_frame_idx": 7687, "task_index": 5}, {"db_idx": 7688, "episode_idx": 27, "frame_idx": 112, "global_frame_idx": 7688, "task_index": 5}, {"db_idx": 7689, "episode_idx": 27, "frame_idx": 113, "global_frame_idx": 7689, "task_index": 5}, {"db_idx": 7690, "episode_idx": 27, "frame_idx": 114, "global_frame_idx": 7690, "task_index": 5}, {"db_idx": 7691, "episode_idx": 27, "frame_idx": 115, "global_frame_idx": 7691, "task_index": 5}, {"db_idx": 7692, "episode_idx": 27, "frame_idx": 116, "global_frame_idx": 7692, "task_index": 5}, {"db_idx": 7693, "episode_idx": 27, "frame_idx": 117, "global_frame_idx": 7693, "task_index": 5}, {"db_idx": 7694, "episode_idx": 27, "frame_idx": 118, "global_frame_idx": 7694, "task_index": 5}, {"db_idx": 7695, "episode_idx": 27, "frame_idx": 119, "global_frame_idx": 7695, "task_index": 5}, {"db_idx": 7696, "episode_idx": 27, "frame_idx": 120, "global_frame_idx": 7696, "task_index": 5}, {"db_idx": 7697, "episode_idx": 27, "frame_idx": 121, "global_frame_idx": 7697, "task_index": 5}, {"db_idx": 7698, "episode_idx": 27, "frame_idx": 122, "global_frame_idx": 7698, "task_index": 
5}, {"db_idx": 7699, "episode_idx": 27, "frame_idx": 123, "global_frame_idx": 7699, "task_index": 5}, {"db_idx": 7700, "episode_idx": 27, "frame_idx": 124, "global_frame_idx": 7700, "task_index": 5}, {"db_idx": 7701, "episode_idx": 27, "frame_idx": 125, "global_frame_idx": 7701, "task_index": 5}, {"db_idx": 7702, "episode_idx": 27, "frame_idx": 126, "global_frame_idx": 7702, "task_index": 5}, {"db_idx": 7703, "episode_idx": 27, "frame_idx": 127, "global_frame_idx": 7703, "task_index": 5}, {"db_idx": 7704, "episode_idx": 27, "frame_idx": 128, "global_frame_idx": 7704, "task_index": 5}, {"db_idx": 7705, "episode_idx": 27, "frame_idx": 129, "global_frame_idx": 7705, "task_index": 5}, {"db_idx": 7706, "episode_idx": 27, "frame_idx": 130, "global_frame_idx": 7706, "task_index": 5}, {"db_idx": 7707, "episode_idx": 27, "frame_idx": 131, "global_frame_idx": 7707, "task_index": 5}, {"db_idx": 7708, "episode_idx": 27, "frame_idx": 132, "global_frame_idx": 7708, "task_index": 5}, {"db_idx": 7709, "episode_idx": 27, "frame_idx": 133, "global_frame_idx": 7709, "task_index": 5}, {"db_idx": 7710, "episode_idx": 27, "frame_idx": 134, "global_frame_idx": 7710, "task_index": 5}, {"db_idx": 7711, "episode_idx": 27, "frame_idx": 135, "global_frame_idx": 7711, "task_index": 5}, {"db_idx": 7712, "episode_idx": 27, "frame_idx": 136, "global_frame_idx": 7712, "task_index": 5}, {"db_idx": 7713, "episode_idx": 27, "frame_idx": 137, "global_frame_idx": 7713, "task_index": 5}, {"db_idx": 7714, "episode_idx": 27, "frame_idx": 138, "global_frame_idx": 7714, "task_index": 5}, {"db_idx": 7715, "episode_idx": 27, "frame_idx": 139, "global_frame_idx": 7715, "task_index": 5}, {"db_idx": 7716, "episode_idx": 27, "frame_idx": 140, "global_frame_idx": 7716, "task_index": 5}, {"db_idx": 7717, "episode_idx": 27, "frame_idx": 141, "global_frame_idx": 7717, "task_index": 5}, {"db_idx": 7718, "episode_idx": 27, "frame_idx": 142, "global_frame_idx": 7718, "task_index": 5}, {"db_idx": 7719, "episode_idx": 27, 
"frame_idx": 143, "global_frame_idx": 7719, "task_index": 5}, {"db_idx": 7720, "episode_idx": 27, "frame_idx": 144, "global_frame_idx": 7720, "task_index": 5}, {"db_idx": 7721, "episode_idx": 27, "frame_idx": 145, "global_frame_idx": 7721, "task_index": 5}, {"db_idx": 7722, "episode_idx": 27, "frame_idx": 146, "global_frame_idx": 7722, "task_index": 5}, {"db_idx": 7723, "episode_idx": 27, "frame_idx": 147, "global_frame_idx": 7723, "task_index": 5}, {"db_idx": 7724, "episode_idx": 27, "frame_idx": 148, "global_frame_idx": 7724, "task_index": 5}, {"db_idx": 7725, "episode_idx": 27, "frame_idx": 149, "global_frame_idx": 7725, "task_index": 5}, {"db_idx": 7726, "episode_idx": 27, "frame_idx": 150, "global_frame_idx": 7726, "task_index": 5}, {"db_idx": 7727, "episode_idx": 27, "frame_idx": 151, "global_frame_idx": 7727, "task_index": 5}, {"db_idx": 7728, "episode_idx": 27, "frame_idx": 152, "global_frame_idx": 7728, "task_index": 5}, {"db_idx": 7729, "episode_idx": 27, "frame_idx": 153, "global_frame_idx": 7729, "task_index": 5}, {"db_idx": 7730, "episode_idx": 27, "frame_idx": 154, "global_frame_idx": 7730, "task_index": 5}, {"db_idx": 7731, "episode_idx": 27, "frame_idx": 155, "global_frame_idx": 7731, "task_index": 5}, {"db_idx": 7732, "episode_idx": 27, "frame_idx": 156, "global_frame_idx": 7732, "task_index": 5}, {"db_idx": 7733, "episode_idx": 27, "frame_idx": 157, "global_frame_idx": 7733, "task_index": 5}, {"db_idx": 7734, "episode_idx": 27, "frame_idx": 158, "global_frame_idx": 7734, "task_index": 5}, {"db_idx": 7735, "episode_idx": 27, "frame_idx": 159, "global_frame_idx": 7735, "task_index": 5}, {"db_idx": 7736, "episode_idx": 27, "frame_idx": 160, "global_frame_idx": 7736, "task_index": 5}, {"db_idx": 7737, "episode_idx": 27, "frame_idx": 161, "global_frame_idx": 7737, "task_index": 5}, {"db_idx": 7738, "episode_idx": 27, "frame_idx": 162, "global_frame_idx": 7738, "task_index": 5}, {"db_idx": 7739, "episode_idx": 27, "frame_idx": 163, "global_frame_idx": 
7739, "task_index": 5}, {"db_idx": 7740, "episode_idx": 27, "frame_idx": 164, "global_frame_idx": 7740, "task_index": 5}, {"db_idx": 7741, "episode_idx": 27, "frame_idx": 165, "global_frame_idx": 7741, "task_index": 5}, {"db_idx": 7742, "episode_idx": 27, "frame_idx": 166, "global_frame_idx": 7742, "task_index": 5}, {"db_idx": 7743, "episode_idx": 27, "frame_idx": 167, "global_frame_idx": 7743, "task_index": 5}, {"db_idx": 7744, "episode_idx": 27, "frame_idx": 168, "global_frame_idx": 7744, "task_index": 5}, {"db_idx": 7745, "episode_idx": 27, "frame_idx": 169, "global_frame_idx": 7745, "task_index": 5}, {"db_idx": 7746, "episode_idx": 27, "frame_idx": 170, "global_frame_idx": 7746, "task_index": 5}, {"db_idx": 7747, "episode_idx": 27, "frame_idx": 171, "global_frame_idx": 7747, "task_index": 5}, {"db_idx": 7748, "episode_idx": 27, "frame_idx": 172, "global_frame_idx": 7748, "task_index": 5}, {"db_idx": 7749, "episode_idx": 27, "frame_idx": 173, "global_frame_idx": 7749, "task_index": 5}, {"db_idx": 7750, "episode_idx": 27, "frame_idx": 174, "global_frame_idx": 7750, "task_index": 5}, {"db_idx": 7751, "episode_idx": 27, "frame_idx": 175, "global_frame_idx": 7751, "task_index": 5}, {"db_idx": 7752, "episode_idx": 27, "frame_idx": 176, "global_frame_idx": 7752, "task_index": 5}, {"db_idx": 7753, "episode_idx": 27, "frame_idx": 177, "global_frame_idx": 7753, "task_index": 5}, {"db_idx": 7754, "episode_idx": 27, "frame_idx": 178, "global_frame_idx": 7754, "task_index": 5}, {"db_idx": 7755, "episode_idx": 27, "frame_idx": 179, "global_frame_idx": 7755, "task_index": 5}, {"db_idx": 7756, "episode_idx": 27, "frame_idx": 180, "global_frame_idx": 7756, "task_index": 5}, {"db_idx": 7757, "episode_idx": 27, "frame_idx": 181, "global_frame_idx": 7757, "task_index": 5}, {"db_idx": 7758, "episode_idx": 27, "frame_idx": 182, "global_frame_idx": 7758, "task_index": 5}, {"db_idx": 7759, "episode_idx": 27, "frame_idx": 183, "global_frame_idx": 7759, "task_index": 5}, {"db_idx": 
7760, "episode_idx": 27, "frame_idx": 184, "global_frame_idx": 7760, "task_index": 5}, {"db_idx": 7761, "episode_idx": 27, "frame_idx": 185, "global_frame_idx": 7761, "task_index": 5}, {"db_idx": 7762, "episode_idx": 27, "frame_idx": 186, "global_frame_idx": 7762, "task_index": 5}, {"db_idx": 7763, "episode_idx": 27, "frame_idx": 187, "global_frame_idx": 7763, "task_index": 5}, {"db_idx": 7764, "episode_idx": 27, "frame_idx": 188, "global_frame_idx": 7764, "task_index": 5}, {"db_idx": 7765, "episode_idx": 27, "frame_idx": 189, "global_frame_idx": 7765, "task_index": 5}, {"db_idx": 7766, "episode_idx": 27, "frame_idx": 190, "global_frame_idx": 7766, "task_index": 5}, {"db_idx": 7767, "episode_idx": 27, "frame_idx": 191, "global_frame_idx": 7767, "task_index": 5}, {"db_idx": 7768, "episode_idx": 27, "frame_idx": 192, "global_frame_idx": 7768, "task_index": 5}, {"db_idx": 7769, "episode_idx": 27, "frame_idx": 193, "global_frame_idx": 7769, "task_index": 5}, {"db_idx": 7770, "episode_idx": 27, "frame_idx": 194, "global_frame_idx": 7770, "task_index": 5}, {"db_idx": 7771, "episode_idx": 27, "frame_idx": 195, "global_frame_idx": 7771, "task_index": 5}, {"db_idx": 7772, "episode_idx": 27, "frame_idx": 196, "global_frame_idx": 7772, "task_index": 5}, {"db_idx": 7773, "episode_idx": 27, "frame_idx": 197, "global_frame_idx": 7773, "task_index": 5}, {"db_idx": 7774, "episode_idx": 27, "frame_idx": 198, "global_frame_idx": 7774, "task_index": 5}, {"db_idx": 7775, "episode_idx": 27, "frame_idx": 199, "global_frame_idx": 7775, "task_index": 5}, {"db_idx": 7776, "episode_idx": 27, "frame_idx": 200, "global_frame_idx": 7776, "task_index": 5}, {"db_idx": 7777, "episode_idx": 27, "frame_idx": 201, "global_frame_idx": 7777, "task_index": 5}, {"db_idx": 7778, "episode_idx": 27, "frame_idx": 202, "global_frame_idx": 7778, "task_index": 5}, {"db_idx": 7779, "episode_idx": 27, "frame_idx": 203, "global_frame_idx": 7779, "task_index": 5}, {"db_idx": 7780, "episode_idx": 27, "frame_idx": 
204, "global_frame_idx": 7780, "task_index": 5}, {"db_idx": 7781, "episode_idx": 27, "frame_idx": 205, "global_frame_idx": 7781, "task_index": 5}, {"db_idx": 7782, "episode_idx": 27, "frame_idx": 206, "global_frame_idx": 7782, "task_index": 5}, {"db_idx": 7783, "episode_idx": 27, "frame_idx": 207, "global_frame_idx": 7783, "task_index": 5}, {"db_idx": 7784, "episode_idx": 27, "frame_idx": 208, "global_frame_idx": 7784, "task_index": 5}, {"db_idx": 7785, "episode_idx": 27, "frame_idx": 209, "global_frame_idx": 7785, "task_index": 5}, {"db_idx": 7786, "episode_idx": 27, "frame_idx": 210, "global_frame_idx": 7786, "task_index": 5}, {"db_idx": 7787, "episode_idx": 27, "frame_idx": 211, "global_frame_idx": 7787, "task_index": 5}, {"db_idx": 7788, "episode_idx": 27, "frame_idx": 212, "global_frame_idx": 7788, "task_index": 5}, {"db_idx": 7789, "episode_idx": 27, "frame_idx": 213, "global_frame_idx": 7789, "task_index": 5}, {"db_idx": 7790, "episode_idx": 27, "frame_idx": 214, "global_frame_idx": 7790, "task_index": 5}, {"db_idx": 7791, "episode_idx": 27, "frame_idx": 215, "global_frame_idx": 7791, "task_index": 5}, {"db_idx": 7792, "episode_idx": 27, "frame_idx": 216, "global_frame_idx": 7792, "task_index": 5}, {"db_idx": 7793, "episode_idx": 27, "frame_idx": 217, "global_frame_idx": 7793, "task_index": 5}, {"db_idx": 7794, "episode_idx": 27, "frame_idx": 218, "global_frame_idx": 7794, "task_index": 5}, {"db_idx": 7795, "episode_idx": 27, "frame_idx": 219, "global_frame_idx": 7795, "task_index": 5}, {"db_idx": 7796, "episode_idx": 27, "frame_idx": 220, "global_frame_idx": 7796, "task_index": 5}, {"db_idx": 7797, "episode_idx": 27, "frame_idx": 221, "global_frame_idx": 7797, "task_index": 5}, {"db_idx": 7798, "episode_idx": 27, "frame_idx": 222, "global_frame_idx": 7798, "task_index": 5}, {"db_idx": 7799, "episode_idx": 27, "frame_idx": 223, "global_frame_idx": 7799, "task_index": 5}, {"db_idx": 7800, "episode_idx": 27, "frame_idx": 224, "global_frame_idx": 7800, 
"task_index": 5}, {"db_idx": 7801, "episode_idx": 27, "frame_idx": 225, "global_frame_idx": 7801, "task_index": 5}, {"db_idx": 7802, "episode_idx": 27, "frame_idx": 226, "global_frame_idx": 7802, "task_index": 5}, {"db_idx": 7803, "episode_idx": 27, "frame_idx": 227, "global_frame_idx": 7803, "task_index": 5}, {"db_idx": 7804, "episode_idx": 27, "frame_idx": 228, "global_frame_idx": 7804, "task_index": 5}, {"db_idx": 7805, "episode_idx": 27, "frame_idx": 229, "global_frame_idx": 7805, "task_index": 5}, {"db_idx": 7806, "episode_idx": 27, "frame_idx": 230, "global_frame_idx": 7806, "task_index": 5}, {"db_idx": 7807, "episode_idx": 27, "frame_idx": 231, "global_frame_idx": 7807, "task_index": 5}, {"db_idx": 7808, "episode_idx": 27, "frame_idx": 232, "global_frame_idx": 7808, "task_index": 5}, {"db_idx": 7809, "episode_idx": 27, "frame_idx": 233, "global_frame_idx": 7809, "task_index": 5}, {"db_idx": 7810, "episode_idx": 27, "frame_idx": 234, "global_frame_idx": 7810, "task_index": 5}, {"db_idx": 7811, "episode_idx": 27, "frame_idx": 235, "global_frame_idx": 7811, "task_index": 5}, {"db_idx": 7812, "episode_idx": 27, "frame_idx": 236, "global_frame_idx": 7812, "task_index": 5}, {"db_idx": 7813, "episode_idx": 27, "frame_idx": 237, "global_frame_idx": 7813, "task_index": 5}, {"db_idx": 7814, "episode_idx": 27, "frame_idx": 238, "global_frame_idx": 7814, "task_index": 5}, {"db_idx": 7815, "episode_idx": 27, "frame_idx": 239, "global_frame_idx": 7815, "task_index": 5}, {"db_idx": 7816, "episode_idx": 27, "frame_idx": 240, "global_frame_idx": 7816, "task_index": 5}, {"db_idx": 7817, "episode_idx": 27, "frame_idx": 241, "global_frame_idx": 7817, "task_index": 5}, {"db_idx": 7818, "episode_idx": 27, "frame_idx": 242, "global_frame_idx": 7818, "task_index": 5}, {"db_idx": 7819, "episode_idx": 27, "frame_idx": 243, "global_frame_idx": 7819, "task_index": 5}, {"db_idx": 7820, "episode_idx": 27, "frame_idx": 244, "global_frame_idx": 7820, "task_index": 5}, {"db_idx": 7821, 
"episode_idx": 27, "frame_idx": 245, "global_frame_idx": 7821, "task_index": 5}, {"db_idx": 7822, "episode_idx": 27, "frame_idx": 246, "global_frame_idx": 7822, "task_index": 5}, {"db_idx": 7823, "episode_idx": 27, "frame_idx": 247, "global_frame_idx": 7823, "task_index": 5}, {"db_idx": 7824, "episode_idx": 28, "frame_idx": 0, "global_frame_idx": 7824, "task_index": 5}, {"db_idx": 7825, "episode_idx": 28, "frame_idx": 1, "global_frame_idx": 7825, "task_index": 5}, {"db_idx": 7826, "episode_idx": 28, "frame_idx": 2, "global_frame_idx": 7826, "task_index": 5}, {"db_idx": 7827, "episode_idx": 28, "frame_idx": 3, "global_frame_idx": 7827, "task_index": 5}, {"db_idx": 7828, "episode_idx": 28, "frame_idx": 4, "global_frame_idx": 7828, "task_index": 5}, {"db_idx": 7829, "episode_idx": 28, "frame_idx": 5, "global_frame_idx": 7829, "task_index": 5}, {"db_idx": 7830, "episode_idx": 28, "frame_idx": 6, "global_frame_idx": 7830, "task_index": 5}, {"db_idx": 7831, "episode_idx": 28, "frame_idx": 7, "global_frame_idx": 7831, "task_index": 5}, {"db_idx": 7832, "episode_idx": 28, "frame_idx": 8, "global_frame_idx": 7832, "task_index": 5}, {"db_idx": 7833, "episode_idx": 28, "frame_idx": 9, "global_frame_idx": 7833, "task_index": 5}, {"db_idx": 7834, "episode_idx": 28, "frame_idx": 10, "global_frame_idx": 7834, "task_index": 5}, {"db_idx": 7835, "episode_idx": 28, "frame_idx": 11, "global_frame_idx": 7835, "task_index": 5}, {"db_idx": 7836, "episode_idx": 28, "frame_idx": 12, "global_frame_idx": 7836, "task_index": 5}, {"db_idx": 7837, "episode_idx": 28, "frame_idx": 13, "global_frame_idx": 7837, "task_index": 5}, {"db_idx": 7838, "episode_idx": 28, "frame_idx": 14, "global_frame_idx": 7838, "task_index": 5}, {"db_idx": 7839, "episode_idx": 28, "frame_idx": 15, "global_frame_idx": 7839, "task_index": 5}, {"db_idx": 7840, "episode_idx": 28, "frame_idx": 16, "global_frame_idx": 7840, "task_index": 5}, {"db_idx": 7841, "episode_idx": 28, "frame_idx": 17, "global_frame_idx": 7841, 
"task_index": 5}, {"db_idx": 7842, "episode_idx": 28, "frame_idx": 18, "global_frame_idx": 7842, "task_index": 5}, {"db_idx": 7843, "episode_idx": 28, "frame_idx": 19, "global_frame_idx": 7843, "task_index": 5}, {"db_idx": 7844, "episode_idx": 28, "frame_idx": 20, "global_frame_idx": 7844, "task_index": 5}, {"db_idx": 7845, "episode_idx": 28, "frame_idx": 21, "global_frame_idx": 7845, "task_index": 5}, {"db_idx": 7846, "episode_idx": 28, "frame_idx": 22, "global_frame_idx": 7846, "task_index": 5}, {"db_idx": 7847, "episode_idx": 28, "frame_idx": 23, "global_frame_idx": 7847, "task_index": 5}, {"db_idx": 7848, "episode_idx": 28, "frame_idx": 24, "global_frame_idx": 7848, "task_index": 5}, {"db_idx": 7849, "episode_idx": 28, "frame_idx": 25, "global_frame_idx": 7849, "task_index": 5}, {"db_idx": 7850, "episode_idx": 28, "frame_idx": 26, "global_frame_idx": 7850, "task_index": 5}, {"db_idx": 7851, "episode_idx": 28, "frame_idx": 27, "global_frame_idx": 7851, "task_index": 5}, {"db_idx": 7852, "episode_idx": 28, "frame_idx": 28, "global_frame_idx": 7852, "task_index": 5}, {"db_idx": 7853, "episode_idx": 28, "frame_idx": 29, "global_frame_idx": 7853, "task_index": 5}, {"db_idx": 7854, "episode_idx": 28, "frame_idx": 30, "global_frame_idx": 7854, "task_index": 5}, {"db_idx": 7855, "episode_idx": 28, "frame_idx": 31, "global_frame_idx": 7855, "task_index": 5}, {"db_idx": 7856, "episode_idx": 28, "frame_idx": 32, "global_frame_idx": 7856, "task_index": 5}, {"db_idx": 7857, "episode_idx": 28, "frame_idx": 33, "global_frame_idx": 7857, "task_index": 5}, {"db_idx": 7858, "episode_idx": 28, "frame_idx": 34, "global_frame_idx": 7858, "task_index": 5}, {"db_idx": 7859, "episode_idx": 28, "frame_idx": 35, "global_frame_idx": 7859, "task_index": 5}, {"db_idx": 7860, "episode_idx": 28, "frame_idx": 36, "global_frame_idx": 7860, "task_index": 5}, {"db_idx": 7861, "episode_idx": 28, "frame_idx": 37, "global_frame_idx": 7861, "task_index": 5}, {"db_idx": 7862, "episode_idx": 28, 
"frame_idx": 38, "global_frame_idx": 7862, "task_index": 5}, {"db_idx": 7863, "episode_idx": 28, "frame_idx": 39, "global_frame_idx": 7863, "task_index": 5}, {"db_idx": 7864, "episode_idx": 28, "frame_idx": 40, "global_frame_idx": 7864, "task_index": 5}, {"db_idx": 7865, "episode_idx": 28, "frame_idx": 41, "global_frame_idx": 7865, "task_index": 5}, {"db_idx": 7866, "episode_idx": 28, "frame_idx": 42, "global_frame_idx": 7866, "task_index": 5}, {"db_idx": 7867, "episode_idx": 28, "frame_idx": 43, "global_frame_idx": 7867, "task_index": 5}, {"db_idx": 7868, "episode_idx": 28, "frame_idx": 44, "global_frame_idx": 7868, "task_index": 5}, {"db_idx": 7869, "episode_idx": 28, "frame_idx": 45, "global_frame_idx": 7869, "task_index": 5}, {"db_idx": 7870, "episode_idx": 28, "frame_idx": 46, "global_frame_idx": 7870, "task_index": 5}, {"db_idx": 7871, "episode_idx": 28, "frame_idx": 47, "global_frame_idx": 7871, "task_index": 5}, {"db_idx": 7872, "episode_idx": 28, "frame_idx": 48, "global_frame_idx": 7872, "task_index": 5}, {"db_idx": 7873, "episode_idx": 28, "frame_idx": 49, "global_frame_idx": 7873, "task_index": 5}, {"db_idx": 7874, "episode_idx": 28, "frame_idx": 50, "global_frame_idx": 7874, "task_index": 5}, {"db_idx": 7875, "episode_idx": 28, "frame_idx": 51, "global_frame_idx": 7875, "task_index": 5}, {"db_idx": 7876, "episode_idx": 28, "frame_idx": 52, "global_frame_idx": 7876, "task_index": 5}, {"db_idx": 7877, "episode_idx": 28, "frame_idx": 53, "global_frame_idx": 7877, "task_index": 5}, {"db_idx": 7878, "episode_idx": 28, "frame_idx": 54, "global_frame_idx": 7878, "task_index": 5}, {"db_idx": 7879, "episode_idx": 28, "frame_idx": 55, "global_frame_idx": 7879, "task_index": 5}, {"db_idx": 7880, "episode_idx": 28, "frame_idx": 56, "global_frame_idx": 7880, "task_index": 5}, {"db_idx": 7881, "episode_idx": 28, "frame_idx": 57, "global_frame_idx": 7881, "task_index": 5}, {"db_idx": 7882, "episode_idx": 28, "frame_idx": 58, "global_frame_idx": 7882, "task_index": 
5}, {"db_idx": 7883, "episode_idx": 28, "frame_idx": 59, "global_frame_idx": 7883, "task_index": 5}, {"db_idx": 7884, "episode_idx": 28, "frame_idx": 60, "global_frame_idx": 7884, "task_index": 5}, {"db_idx": 7885, "episode_idx": 28, "frame_idx": 61, "global_frame_idx": 7885, "task_index": 5}, {"db_idx": 7886, "episode_idx": 28, "frame_idx": 62, "global_frame_idx": 7886, "task_index": 5}, {"db_idx": 7887, "episode_idx": 28, "frame_idx": 63, "global_frame_idx": 7887, "task_index": 5}, {"db_idx": 7888, "episode_idx": 28, "frame_idx": 64, "global_frame_idx": 7888, "task_index": 5}, {"db_idx": 7889, "episode_idx": 28, "frame_idx": 65, "global_frame_idx": 7889, "task_index": 5}, {"db_idx": 7890, "episode_idx": 28, "frame_idx": 66, "global_frame_idx": 7890, "task_index": 5}, {"db_idx": 7891, "episode_idx": 28, "frame_idx": 67, "global_frame_idx": 7891, "task_index": 5}, {"db_idx": 7892, "episode_idx": 28, "frame_idx": 68, "global_frame_idx": 7892, "task_index": 5}, {"db_idx": 7893, "episode_idx": 28, "frame_idx": 69, "global_frame_idx": 7893, "task_index": 5}, {"db_idx": 7894, "episode_idx": 28, "frame_idx": 70, "global_frame_idx": 7894, "task_index": 5}, {"db_idx": 7895, "episode_idx": 28, "frame_idx": 71, "global_frame_idx": 7895, "task_index": 5}, {"db_idx": 7896, "episode_idx": 28, "frame_idx": 72, "global_frame_idx": 7896, "task_index": 5}, {"db_idx": 7897, "episode_idx": 28, "frame_idx": 73, "global_frame_idx": 7897, "task_index": 5}, {"db_idx": 7898, "episode_idx": 28, "frame_idx": 74, "global_frame_idx": 7898, "task_index": 5}, {"db_idx": 7899, "episode_idx": 28, "frame_idx": 75, "global_frame_idx": 7899, "task_index": 5}, {"db_idx": 7900, "episode_idx": 28, "frame_idx": 76, "global_frame_idx": 7900, "task_index": 5}, {"db_idx": 7901, "episode_idx": 28, "frame_idx": 77, "global_frame_idx": 7901, "task_index": 5}, {"db_idx": 7902, "episode_idx": 28, "frame_idx": 78, "global_frame_idx": 7902, "task_index": 5}, {"db_idx": 7903, "episode_idx": 28, "frame_idx": 79, 
"global_frame_idx": 7903, "task_index": 5}, {"db_idx": 7904, "episode_idx": 28, "frame_idx": 80, "global_frame_idx": 7904, "task_index": 5}, {"db_idx": 7905, "episode_idx": 28, "frame_idx": 81, "global_frame_idx": 7905, "task_index": 5}, {"db_idx": 7906, "episode_idx": 28, "frame_idx": 82, "global_frame_idx": 7906, "task_index": 5}, {"db_idx": 7907, "episode_idx": 28, "frame_idx": 83, "global_frame_idx": 7907, "task_index": 5}, {"db_idx": 7908, "episode_idx": 28, "frame_idx": 84, "global_frame_idx": 7908, "task_index": 5}, {"db_idx": 7909, "episode_idx": 28, "frame_idx": 85, "global_frame_idx": 7909, "task_index": 5}, {"db_idx": 7910, "episode_idx": 28, "frame_idx": 86, "global_frame_idx": 7910, "task_index": 5}, {"db_idx": 7911, "episode_idx": 28, "frame_idx": 87, "global_frame_idx": 7911, "task_index": 5}, {"db_idx": 7912, "episode_idx": 28, "frame_idx": 88, "global_frame_idx": 7912, "task_index": 5}, {"db_idx": 7913, "episode_idx": 28, "frame_idx": 89, "global_frame_idx": 7913, "task_index": 5}, {"db_idx": 7914, "episode_idx": 28, "frame_idx": 90, "global_frame_idx": 7914, "task_index": 5}, {"db_idx": 7915, "episode_idx": 28, "frame_idx": 91, "global_frame_idx": 7915, "task_index": 5}, {"db_idx": 7916, "episode_idx": 28, "frame_idx": 92, "global_frame_idx": 7916, "task_index": 5}, {"db_idx": 7917, "episode_idx": 28, "frame_idx": 93, "global_frame_idx": 7917, "task_index": 5}, {"db_idx": 7918, "episode_idx": 28, "frame_idx": 94, "global_frame_idx": 7918, "task_index": 5}, {"db_idx": 7919, "episode_idx": 28, "frame_idx": 95, "global_frame_idx": 7919, "task_index": 5}, {"db_idx": 7920, "episode_idx": 28, "frame_idx": 96, "global_frame_idx": 7920, "task_index": 5}, {"db_idx": 7921, "episode_idx": 28, "frame_idx": 97, "global_frame_idx": 7921, "task_index": 5}, {"db_idx": 7922, "episode_idx": 28, "frame_idx": 98, "global_frame_idx": 7922, "task_index": 5}, {"db_idx": 7923, "episode_idx": 28, "frame_idx": 99, "global_frame_idx": 7923, "task_index": 5}, {"db_idx": 
7924, "episode_idx": 28, "frame_idx": 100, "global_frame_idx": 7924, "task_index": 5}, {"db_idx": 7925, "episode_idx": 28, "frame_idx": 101, "global_frame_idx": 7925, "task_index": 5}, {"db_idx": 7926, "episode_idx": 28, "frame_idx": 102, "global_frame_idx": 7926, "task_index": 5}, {"db_idx": 7927, "episode_idx": 28, "frame_idx": 103, "global_frame_idx": 7927, "task_index": 5}, {"db_idx": 7928, "episode_idx": 28, "frame_idx": 104, "global_frame_idx": 7928, "task_index": 5}, {"db_idx": 7929, "episode_idx": 28, "frame_idx": 105, "global_frame_idx": 7929, "task_index": 5}, {"db_idx": 7930, "episode_idx": 28, "frame_idx": 106, "global_frame_idx": 7930, "task_index": 5}, {"db_idx": 7931, "episode_idx": 28, "frame_idx": 107, "global_frame_idx": 7931, "task_index": 5}, {"db_idx": 7932, "episode_idx": 28, "frame_idx": 108, "global_frame_idx": 7932, "task_index": 5}, {"db_idx": 7933, "episode_idx": 28, "frame_idx": 109, "global_frame_idx": 7933, "task_index": 5}, {"db_idx": 7934, "episode_idx": 28, "frame_idx": 110, "global_frame_idx": 7934, "task_index": 5}, {"db_idx": 7935, "episode_idx": 28, "frame_idx": 111, "global_frame_idx": 7935, "task_index": 5}, {"db_idx": 7936, "episode_idx": 28, "frame_idx": 112, "global_frame_idx": 7936, "task_index": 5}, {"db_idx": 7937, "episode_idx": 28, "frame_idx": 113, "global_frame_idx": 7937, "task_index": 5}, {"db_idx": 7938, "episode_idx": 28, "frame_idx": 114, "global_frame_idx": 7938, "task_index": 5}, {"db_idx": 7939, "episode_idx": 28, "frame_idx": 115, "global_frame_idx": 7939, "task_index": 5}, {"db_idx": 7940, "episode_idx": 28, "frame_idx": 116, "global_frame_idx": 7940, "task_index": 5}, {"db_idx": 7941, "episode_idx": 28, "frame_idx": 117, "global_frame_idx": 7941, "task_index": 5}, {"db_idx": 7942, "episode_idx": 28, "frame_idx": 118, "global_frame_idx": 7942, "task_index": 5}, {"db_idx": 7943, "episode_idx": 28, "frame_idx": 119, "global_frame_idx": 7943, "task_index": 5}, {"db_idx": 7944, "episode_idx": 28, "frame_idx": 
120, "global_frame_idx": 7944, "task_index": 5}, {"db_idx": 7945, "episode_idx": 28, "frame_idx": 121, "global_frame_idx": 7945, "task_index": 5}, {"db_idx": 7946, "episode_idx": 28, "frame_idx": 122, "global_frame_idx": 7946, "task_index": 5}, {"db_idx": 7947, "episode_idx": 28, "frame_idx": 123, "global_frame_idx": 7947, "task_index": 5}, {"db_idx": 7948, "episode_idx": 28, "frame_idx": 124, "global_frame_idx": 7948, "task_index": 5}, {"db_idx": 7949, "episode_idx": 28, "frame_idx": 125, "global_frame_idx": 7949, "task_index": 5}, {"db_idx": 7950, "episode_idx": 28, "frame_idx": 126, "global_frame_idx": 7950, "task_index": 5}, {"db_idx": 7951, "episode_idx": 28, "frame_idx": 127, "global_frame_idx": 7951, "task_index": 5}, {"db_idx": 7952, "episode_idx": 28, "frame_idx": 128, "global_frame_idx": 7952, "task_index": 5}, {"db_idx": 7953, "episode_idx": 28, "frame_idx": 129, "global_frame_idx": 7953, "task_index": 5}, {"db_idx": 7954, "episode_idx": 28, "frame_idx": 130, "global_frame_idx": 7954, "task_index": 5}, {"db_idx": 7955, "episode_idx": 28, "frame_idx": 131, "global_frame_idx": 7955, "task_index": 5}, {"db_idx": 7956, "episode_idx": 28, "frame_idx": 132, "global_frame_idx": 7956, "task_index": 5}, {"db_idx": 7957, "episode_idx": 28, "frame_idx": 133, "global_frame_idx": 7957, "task_index": 5}, {"db_idx": 7958, "episode_idx": 28, "frame_idx": 134, "global_frame_idx": 7958, "task_index": 5}, {"db_idx": 7959, "episode_idx": 28, "frame_idx": 135, "global_frame_idx": 7959, "task_index": 5}, {"db_idx": 7960, "episode_idx": 28, "frame_idx": 136, "global_frame_idx": 7960, "task_index": 5}, {"db_idx": 7961, "episode_idx": 28, "frame_idx": 137, "global_frame_idx": 7961, "task_index": 5}, {"db_idx": 7962, "episode_idx": 28, "frame_idx": 138, "global_frame_idx": 7962, "task_index": 5}, {"db_idx": 7963, "episode_idx": 28, "frame_idx": 139, "global_frame_idx": 7963, "task_index": 5}, {"db_idx": 7964, "episode_idx": 28, "frame_idx": 140, "global_frame_idx": 7964, 
"task_index": 5}, {"db_idx": 7965, "episode_idx": 28, "frame_idx": 141, "global_frame_idx": 7965, "task_index": 5}, {"db_idx": 7966, "episode_idx": 28, "frame_idx": 142, "global_frame_idx": 7966, "task_index": 5}, {"db_idx": 7967, "episode_idx": 28, "frame_idx": 143, "global_frame_idx": 7967, "task_index": 5}, {"db_idx": 7968, "episode_idx": 28, "frame_idx": 144, "global_frame_idx": 7968, "task_index": 5}, {"db_idx": 7969, "episode_idx": 28, "frame_idx": 145, "global_frame_idx": 7969, "task_index": 5}, {"db_idx": 7970, "episode_idx": 28, "frame_idx": 146, "global_frame_idx": 7970, "task_index": 5}, {"db_idx": 7971, "episode_idx": 28, "frame_idx": 147, "global_frame_idx": 7971, "task_index": 5}, {"db_idx": 7972, "episode_idx": 28, "frame_idx": 148, "global_frame_idx": 7972, "task_index": 5}, {"db_idx": 7973, "episode_idx": 28, "frame_idx": 149, "global_frame_idx": 7973, "task_index": 5}, {"db_idx": 7974, "episode_idx": 28, "frame_idx": 150, "global_frame_idx": 7974, "task_index": 5}, {"db_idx": 7975, "episode_idx": 28, "frame_idx": 151, "global_frame_idx": 7975, "task_index": 5}, {"db_idx": 7976, "episode_idx": 28, "frame_idx": 152, "global_frame_idx": 7976, "task_index": 5}, {"db_idx": 7977, "episode_idx": 28, "frame_idx": 153, "global_frame_idx": 7977, "task_index": 5}, {"db_idx": 7978, "episode_idx": 28, "frame_idx": 154, "global_frame_idx": 7978, "task_index": 5}, {"db_idx": 7979, "episode_idx": 28, "frame_idx": 155, "global_frame_idx": 7979, "task_index": 5}, {"db_idx": 7980, "episode_idx": 28, "frame_idx": 156, "global_frame_idx": 7980, "task_index": 5}, {"db_idx": 7981, "episode_idx": 28, "frame_idx": 157, "global_frame_idx": 7981, "task_index": 5}, {"db_idx": 7982, "episode_idx": 28, "frame_idx": 158, "global_frame_idx": 7982, "task_index": 5}, {"db_idx": 7983, "episode_idx": 28, "frame_idx": 159, "global_frame_idx": 7983, "task_index": 5}, {"db_idx": 7984, "episode_idx": 28, "frame_idx": 160, "global_frame_idx": 7984, "task_index": 5}, {"db_idx": 7985, 
"episode_idx": 28, "frame_idx": 161, "global_frame_idx": 7985, "task_index": 5}, {"db_idx": 7986, "episode_idx": 28, "frame_idx": 162, "global_frame_idx": 7986, "task_index": 5}, {"db_idx": 7987, "episode_idx": 28, "frame_idx": 163, "global_frame_idx": 7987, "task_index": 5}, {"db_idx": 7988, "episode_idx": 28, "frame_idx": 164, "global_frame_idx": 7988, "task_index": 5}, {"db_idx": 7989, "episode_idx": 28, "frame_idx": 165, "global_frame_idx": 7989, "task_index": 5}, {"db_idx": 7990, "episode_idx": 28, "frame_idx": 166, "global_frame_idx": 7990, "task_index": 5}, {"db_idx": 7991, "episode_idx": 28, "frame_idx": 167, "global_frame_idx": 7991, "task_index": 5}, {"db_idx": 7992, "episode_idx": 28, "frame_idx": 168, "global_frame_idx": 7992, "task_index": 5}, {"db_idx": 7993, "episode_idx": 28, "frame_idx": 169, "global_frame_idx": 7993, "task_index": 5}, {"db_idx": 7994, "episode_idx": 28, "frame_idx": 170, "global_frame_idx": 7994, "task_index": 5}, {"db_idx": 7995, "episode_idx": 28, "frame_idx": 171, "global_frame_idx": 7995, "task_index": 5}, {"db_idx": 7996, "episode_idx": 28, "frame_idx": 172, "global_frame_idx": 7996, "task_index": 5}, {"db_idx": 7997, "episode_idx": 28, "frame_idx": 173, "global_frame_idx": 7997, "task_index": 5}, {"db_idx": 7998, "episode_idx": 28, "frame_idx": 174, "global_frame_idx": 7998, "task_index": 5}, {"db_idx": 7999, "episode_idx": 28, "frame_idx": 175, "global_frame_idx": 7999, "task_index": 5}, {"db_idx": 8000, "episode_idx": 28, "frame_idx": 176, "global_frame_idx": 8000, "task_index": 5}, {"db_idx": 8001, "episode_idx": 28, "frame_idx": 177, "global_frame_idx": 8001, "task_index": 5}, {"db_idx": 8002, "episode_idx": 28, "frame_idx": 178, "global_frame_idx": 8002, "task_index": 5}, {"db_idx": 8003, "episode_idx": 28, "frame_idx": 179, "global_frame_idx": 8003, "task_index": 5}, {"db_idx": 8004, "episode_idx": 28, "frame_idx": 180, "global_frame_idx": 8004, "task_index": 5}, {"db_idx": 8005, "episode_idx": 28, "frame_idx": 181, 
"global_frame_idx": 8005, "task_index": 5}, {"db_idx": 8006, "episode_idx": 28, "frame_idx": 182, "global_frame_idx": 8006, "task_index": 5}, {"db_idx": 8007, "episode_idx": 28, "frame_idx": 183, "global_frame_idx": 8007, "task_index": 5}, {"db_idx": 8008, "episode_idx": 28, "frame_idx": 184, "global_frame_idx": 8008, "task_index": 5}, {"db_idx": 8009, "episode_idx": 28, "frame_idx": 185, "global_frame_idx": 8009, "task_index": 5}, {"db_idx": 8010, "episode_idx": 28, "frame_idx": 186, "global_frame_idx": 8010, "task_index": 5}, {"db_idx": 8011, "episode_idx": 28, "frame_idx": 187, "global_frame_idx": 8011, "task_index": 5}, {"db_idx": 8012, "episode_idx": 28, "frame_idx": 188, "global_frame_idx": 8012, "task_index": 5}, {"db_idx": 8013, "episode_idx": 28, "frame_idx": 189, "global_frame_idx": 8013, "task_index": 5}, {"db_idx": 8014, "episode_idx": 28, "frame_idx": 190, "global_frame_idx": 8014, "task_index": 5}, {"db_idx": 8015, "episode_idx": 28, "frame_idx": 191, "global_frame_idx": 8015, "task_index": 5}, {"db_idx": 8016, "episode_idx": 28, "frame_idx": 192, "global_frame_idx": 8016, "task_index": 5}, {"db_idx": 8017, "episode_idx": 28, "frame_idx": 193, "global_frame_idx": 8017, "task_index": 5}, {"db_idx": 8018, "episode_idx": 28, "frame_idx": 194, "global_frame_idx": 8018, "task_index": 5}, {"db_idx": 8019, "episode_idx": 28, "frame_idx": 195, "global_frame_idx": 8019, "task_index": 5}, {"db_idx": 8020, "episode_idx": 28, "frame_idx": 196, "global_frame_idx": 8020, "task_index": 5}, {"db_idx": 8021, "episode_idx": 28, "frame_idx": 197, "global_frame_idx": 8021, "task_index": 5}, {"db_idx": 8022, "episode_idx": 28, "frame_idx": 198, "global_frame_idx": 8022, "task_index": 5}, {"db_idx": 8023, "episode_idx": 28, "frame_idx": 199, "global_frame_idx": 8023, "task_index": 5}, {"db_idx": 8024, "episode_idx": 28, "frame_idx": 200, "global_frame_idx": 8024, "task_index": 5}, {"db_idx": 8025, "episode_idx": 28, "frame_idx": 201, "global_frame_idx": 8025, "task_index": 
5}, {"db_idx": 8026, "episode_idx": 28, "frame_idx": 202, "global_frame_idx": 8026, "task_index": 5}, {"db_idx": 8027, "episode_idx": 28, "frame_idx": 203, "global_frame_idx": 8027, "task_index": 5}, {"db_idx": 8028, "episode_idx": 28, "frame_idx": 204, "global_frame_idx": 8028, "task_index": 5}, {"db_idx": 8029, "episode_idx": 28, "frame_idx": 205, "global_frame_idx": 8029, "task_index": 5}, {"db_idx": 8030, "episode_idx": 28, "frame_idx": 206, "global_frame_idx": 8030, "task_index": 5}, {"db_idx": 8031, "episode_idx": 28, "frame_idx": 207, "global_frame_idx": 8031, "task_index": 5}, {"db_idx": 8032, "episode_idx": 28, "frame_idx": 208, "global_frame_idx": 8032, "task_index": 5}, {"db_idx": 8033, "episode_idx": 28, "frame_idx": 209, "global_frame_idx": 8033, "task_index": 5}, {"db_idx": 8034, "episode_idx": 28, "frame_idx": 210, "global_frame_idx": 8034, "task_index": 5}, {"db_idx": 8035, "episode_idx": 28, "frame_idx": 211, "global_frame_idx": 8035, "task_index": 5}, {"db_idx": 8036, "episode_idx": 28, "frame_idx": 212, "global_frame_idx": 8036, "task_index": 5}, {"db_idx": 8037, "episode_idx": 28, "frame_idx": 213, "global_frame_idx": 8037, "task_index": 5}, {"db_idx": 8038, "episode_idx": 28, "frame_idx": 214, "global_frame_idx": 8038, "task_index": 5}, {"db_idx": 8039, "episode_idx": 28, "frame_idx": 215, "global_frame_idx": 8039, "task_index": 5}, {"db_idx": 8040, "episode_idx": 28, "frame_idx": 216, "global_frame_idx": 8040, "task_index": 5}, {"db_idx": 8041, "episode_idx": 28, "frame_idx": 217, "global_frame_idx": 8041, "task_index": 5}, {"db_idx": 8042, "episode_idx": 28, "frame_idx": 218, "global_frame_idx": 8042, "task_index": 5}, {"db_idx": 8043, "episode_idx": 28, "frame_idx": 219, "global_frame_idx": 8043, "task_index": 5}, {"db_idx": 8044, "episode_idx": 28, "frame_idx": 220, "global_frame_idx": 8044, "task_index": 5}, {"db_idx": 8045, "episode_idx": 28, "frame_idx": 221, "global_frame_idx": 8045, "task_index": 5}, {"db_idx": 8046, "episode_idx": 28, 
"frame_idx": 222, "global_frame_idx": 8046, "task_index": 5}, {"db_idx": 8047, "episode_idx": 28, "frame_idx": 223, "global_frame_idx": 8047, "task_index": 5}, {"db_idx": 8048, "episode_idx": 28, "frame_idx": 224, "global_frame_idx": 8048, "task_index": 5}, {"db_idx": 8049, "episode_idx": 28, "frame_idx": 225, "global_frame_idx": 8049, "task_index": 5}, {"db_idx": 8050, "episode_idx": 28, "frame_idx": 226, "global_frame_idx": 8050, "task_index": 5}, {"db_idx": 8051, "episode_idx": 28, "frame_idx": 227, "global_frame_idx": 8051, "task_index": 5}, {"db_idx": 8052, "episode_idx": 28, "frame_idx": 228, "global_frame_idx": 8052, "task_index": 5}, {"db_idx": 8053, "episode_idx": 28, "frame_idx": 229, "global_frame_idx": 8053, "task_index": 5}, {"db_idx": 8054, "episode_idx": 28, "frame_idx": 230, "global_frame_idx": 8054, "task_index": 5}, {"db_idx": 8055, "episode_idx": 28, "frame_idx": 231, "global_frame_idx": 8055, "task_index": 5}, {"db_idx": 8056, "episode_idx": 28, "frame_idx": 232, "global_frame_idx": 8056, "task_index": 5}, {"db_idx": 8057, "episode_idx": 28, "frame_idx": 233, "global_frame_idx": 8057, "task_index": 5}, {"db_idx": 8058, "episode_idx": 28, "frame_idx": 234, "global_frame_idx": 8058, "task_index": 5}, {"db_idx": 8059, "episode_idx": 28, "frame_idx": 235, "global_frame_idx": 8059, "task_index": 5}, {"db_idx": 8060, "episode_idx": 28, "frame_idx": 236, "global_frame_idx": 8060, "task_index": 5}, {"db_idx": 8061, "episode_idx": 28, "frame_idx": 237, "global_frame_idx": 8061, "task_index": 5}, {"db_idx": 8062, "episode_idx": 28, "frame_idx": 238, "global_frame_idx": 8062, "task_index": 5}, {"db_idx": 8063, "episode_idx": 28, "frame_idx": 239, "global_frame_idx": 8063, "task_index": 5}, {"db_idx": 8064, "episode_idx": 28, "frame_idx": 240, "global_frame_idx": 8064, "task_index": 5}, {"db_idx": 8065, "episode_idx": 28, "frame_idx": 241, "global_frame_idx": 8065, "task_index": 5}, {"db_idx": 8066, "episode_idx": 28, "frame_idx": 242, "global_frame_idx": 
8066, "task_index": 5}, {"db_idx": 8067, "episode_idx": 28, "frame_idx": 243, "global_frame_idx": 8067, "task_index": 5}, {"db_idx": 8068, "episode_idx": 28, "frame_idx": 244, "global_frame_idx": 8068, "task_index": 5}, {"db_idx": 8069, "episode_idx": 28, "frame_idx": 245, "global_frame_idx": 8069, "task_index": 5}, {"db_idx": 8070, "episode_idx": 28, "frame_idx": 246, "global_frame_idx": 8070, "task_index": 5}, {"db_idx": 8071, "episode_idx": 28, "frame_idx": 247, "global_frame_idx": 8071, "task_index": 5}, {"db_idx": 8072, "episode_idx": 28, "frame_idx": 248, "global_frame_idx": 8072, "task_index": 5}, {"db_idx": 8073, "episode_idx": 28, "frame_idx": 249, "global_frame_idx": 8073, "task_index": 5}, {"db_idx": 8074, "episode_idx": 28, "frame_idx": 250, "global_frame_idx": 8074, "task_index": 5}, {"db_idx": 8075, "episode_idx": 28, "frame_idx": 251, "global_frame_idx": 8075, "task_index": 5}, {"db_idx": 8076, "episode_idx": 28, "frame_idx": 252, "global_frame_idx": 8076, "task_index": 5}, {"db_idx": 8077, "episode_idx": 28, "frame_idx": 253, "global_frame_idx": 8077, "task_index": 5}, {"db_idx": 8078, "episode_idx": 28, "frame_idx": 254, "global_frame_idx": 8078, "task_index": 5}, {"db_idx": 8079, "episode_idx": 28, "frame_idx": 255, "global_frame_idx": 8079, "task_index": 5}, {"db_idx": 8080, "episode_idx": 28, "frame_idx": 256, "global_frame_idx": 8080, "task_index": 5}, {"db_idx": 8081, "episode_idx": 28, "frame_idx": 257, "global_frame_idx": 8081, "task_index": 5}, {"db_idx": 8082, "episode_idx": 28, "frame_idx": 258, "global_frame_idx": 8082, "task_index": 5}, {"db_idx": 8083, "episode_idx": 28, "frame_idx": 259, "global_frame_idx": 8083, "task_index": 5}, {"db_idx": 8084, "episode_idx": 28, "frame_idx": 260, "global_frame_idx": 8084, "task_index": 5}, {"db_idx": 8085, "episode_idx": 28, "frame_idx": 261, "global_frame_idx": 8085, "task_index": 5}, {"db_idx": 8086, "episode_idx": 28, "frame_idx": 262, "global_frame_idx": 8086, "task_index": 5}, {"db_idx": 
8087, "episode_idx": 28, "frame_idx": 263, "global_frame_idx": 8087, "task_index": 5}, {"db_idx": 8088, "episode_idx": 28, "frame_idx": 264, "global_frame_idx": 8088, "task_index": 5}, {"db_idx": 8089, "episode_idx": 28, "frame_idx": 265, "global_frame_idx": 8089, "task_index": 5}, {"db_idx": 8090, "episode_idx": 28, "frame_idx": 266, "global_frame_idx": 8090, "task_index": 5}, {"db_idx": 8091, "episode_idx": 28, "frame_idx": 267, "global_frame_idx": 8091, "task_index": 5}, {"db_idx": 8092, "episode_idx": 28, "frame_idx": 268, "global_frame_idx": 8092, "task_index": 5}, {"db_idx": 8093, "episode_idx": 28, "frame_idx": 269, "global_frame_idx": 8093, "task_index": 5}, {"db_idx": 8094, "episode_idx": 28, "frame_idx": 270, "global_frame_idx": 8094, "task_index": 5}, {"db_idx": 8095, "episode_idx": 28, "frame_idx": 271, "global_frame_idx": 8095, "task_index": 5}, {"db_idx": 8096, "episode_idx": 28, "frame_idx": 272, "global_frame_idx": 8096, "task_index": 5}, {"db_idx": 8097, "episode_idx": 28, "frame_idx": 273, "global_frame_idx": 8097, "task_index": 5}, {"db_idx": 8098, "episode_idx": 28, "frame_idx": 274, "global_frame_idx": 8098, "task_index": 5}, {"db_idx": 8099, "episode_idx": 28, "frame_idx": 275, "global_frame_idx": 8099, "task_index": 5}, {"db_idx": 8100, "episode_idx": 28, "frame_idx": 276, "global_frame_idx": 8100, "task_index": 5}, {"db_idx": 8101, "episode_idx": 28, "frame_idx": 277, "global_frame_idx": 8101, "task_index": 5}, {"db_idx": 8102, "episode_idx": 28, "frame_idx": 278, "global_frame_idx": 8102, "task_index": 5}, {"db_idx": 8103, "episode_idx": 28, "frame_idx": 279, "global_frame_idx": 8103, "task_index": 5}, {"db_idx": 8104, "episode_idx": 28, "frame_idx": 280, "global_frame_idx": 8104, "task_index": 5}, {"db_idx": 8105, "episode_idx": 28, "frame_idx": 281, "global_frame_idx": 8105, "task_index": 5}, {"db_idx": 8106, "episode_idx": 28, "frame_idx": 282, "global_frame_idx": 8106, "task_index": 5}, {"db_idx": 8107, "episode_idx": 28, "frame_idx": 
283, "global_frame_idx": 8107, "task_index": 5}, {"db_idx": 8108, "episode_idx": 28, "frame_idx": 284, "global_frame_idx": 8108, "task_index": 5}, {"db_idx": 8109, "episode_idx": 28, "frame_idx": 285, "global_frame_idx": 8109, "task_index": 5}, {"db_idx": 8110, "episode_idx": 28, "frame_idx": 286, "global_frame_idx": 8110, "task_index": 5}, {"db_idx": 8111, "episode_idx": 28, "frame_idx": 287, "global_frame_idx": 8111, "task_index": 5}, {"db_idx": 8112, "episode_idx": 29, "frame_idx": 0, "global_frame_idx": 8112, "task_index": 5}, {"db_idx": 8113, "episode_idx": 29, "frame_idx": 1, "global_frame_idx": 8113, "task_index": 5}, {"db_idx": 8114, "episode_idx": 29, "frame_idx": 2, "global_frame_idx": 8114, "task_index": 5}, {"db_idx": 8115, "episode_idx": 29, "frame_idx": 3, "global_frame_idx": 8115, "task_index": 5}, {"db_idx": 8116, "episode_idx": 29, "frame_idx": 4, "global_frame_idx": 8116, "task_index": 5}, {"db_idx": 8117, "episode_idx": 29, "frame_idx": 5, "global_frame_idx": 8117, "task_index": 5}, {"db_idx": 8118, "episode_idx": 29, "frame_idx": 6, "global_frame_idx": 8118, "task_index": 5}, {"db_idx": 8119, "episode_idx": 29, "frame_idx": 7, "global_frame_idx": 8119, "task_index": 5}, {"db_idx": 8120, "episode_idx": 29, "frame_idx": 8, "global_frame_idx": 8120, "task_index": 5}, {"db_idx": 8121, "episode_idx": 29, "frame_idx": 9, "global_frame_idx": 8121, "task_index": 5}, {"db_idx": 8122, "episode_idx": 29, "frame_idx": 10, "global_frame_idx": 8122, "task_index": 5}, {"db_idx": 8123, "episode_idx": 29, "frame_idx": 11, "global_frame_idx": 8123, "task_index": 5}, {"db_idx": 8124, "episode_idx": 29, "frame_idx": 12, "global_frame_idx": 8124, "task_index": 5}, {"db_idx": 8125, "episode_idx": 29, "frame_idx": 13, "global_frame_idx": 8125, "task_index": 5}, {"db_idx": 8126, "episode_idx": 29, "frame_idx": 14, "global_frame_idx": 8126, "task_index": 5}, {"db_idx": 8127, "episode_idx": 29, "frame_idx": 15, "global_frame_idx": 8127, "task_index": 5}, {"db_idx": 8128, 
"episode_idx": 29, "frame_idx": 16, "global_frame_idx": 8128, "task_index": 5}, {"db_idx": 8129, "episode_idx": 29, "frame_idx": 17, "global_frame_idx": 8129, "task_index": 5}, {"db_idx": 8130, "episode_idx": 29, "frame_idx": 18, "global_frame_idx": 8130, "task_index": 5}, {"db_idx": 8131, "episode_idx": 29, "frame_idx": 19, "global_frame_idx": 8131, "task_index": 5}, {"db_idx": 8132, "episode_idx": 29, "frame_idx": 20, "global_frame_idx": 8132, "task_index": 5}, {"db_idx": 8133, "episode_idx": 29, "frame_idx": 21, "global_frame_idx": 8133, "task_index": 5}, {"db_idx": 8134, "episode_idx": 29, "frame_idx": 22, "global_frame_idx": 8134, "task_index": 5}, {"db_idx": 8135, "episode_idx": 29, "frame_idx": 23, "global_frame_idx": 8135, "task_index": 5}, {"db_idx": 8136, "episode_idx": 29, "frame_idx": 24, "global_frame_idx": 8136, "task_index": 5}, {"db_idx": 8137, "episode_idx": 29, "frame_idx": 25, "global_frame_idx": 8137, "task_index": 5}, {"db_idx": 8138, "episode_idx": 29, "frame_idx": 26, "global_frame_idx": 8138, "task_index": 5}, {"db_idx": 8139, "episode_idx": 29, "frame_idx": 27, "global_frame_idx": 8139, "task_index": 5}, {"db_idx": 8140, "episode_idx": 29, "frame_idx": 28, "global_frame_idx": 8140, "task_index": 5}, {"db_idx": 8141, "episode_idx": 29, "frame_idx": 29, "global_frame_idx": 8141, "task_index": 5}, {"db_idx": 8142, "episode_idx": 29, "frame_idx": 30, "global_frame_idx": 8142, "task_index": 5}, {"db_idx": 8143, "episode_idx": 29, "frame_idx": 31, "global_frame_idx": 8143, "task_index": 5}, {"db_idx": 8144, "episode_idx": 29, "frame_idx": 32, "global_frame_idx": 8144, "task_index": 5}, {"db_idx": 8145, "episode_idx": 29, "frame_idx": 33, "global_frame_idx": 8145, "task_index": 5}, {"db_idx": 8146, "episode_idx": 29, "frame_idx": 34, "global_frame_idx": 8146, "task_index": 5}, {"db_idx": 8147, "episode_idx": 29, "frame_idx": 35, "global_frame_idx": 8147, "task_index": 5}, {"db_idx": 8148, "episode_idx": 29, "frame_idx": 36, "global_frame_idx": 
8148, "task_index": 5}, {"db_idx": 8149, "episode_idx": 29, "frame_idx": 37, "global_frame_idx": 8149, "task_index": 5}, {"db_idx": 8150, "episode_idx": 29, "frame_idx": 38, "global_frame_idx": 8150, "task_index": 5}, {"db_idx": 8151, "episode_idx": 29, "frame_idx": 39, "global_frame_idx": 8151, "task_index": 5}, {"db_idx": 8152, "episode_idx": 29, "frame_idx": 40, "global_frame_idx": 8152, "task_index": 5}, {"db_idx": 8153, "episode_idx": 29, "frame_idx": 41, "global_frame_idx": 8153, "task_index": 5}, {"db_idx": 8154, "episode_idx": 29, "frame_idx": 42, "global_frame_idx": 8154, "task_index": 5}, {"db_idx": 8155, "episode_idx": 29, "frame_idx": 43, "global_frame_idx": 8155, "task_index": 5}, {"db_idx": 8156, "episode_idx": 29, "frame_idx": 44, "global_frame_idx": 8156, "task_index": 5}, {"db_idx": 8157, "episode_idx": 29, "frame_idx": 45, "global_frame_idx": 8157, "task_index": 5}, {"db_idx": 8158, "episode_idx": 29, "frame_idx": 46, "global_frame_idx": 8158, "task_index": 5}, {"db_idx": 8159, "episode_idx": 29, "frame_idx": 47, "global_frame_idx": 8159, "task_index": 5}, {"db_idx": 8160, "episode_idx": 29, "frame_idx": 48, "global_frame_idx": 8160, "task_index": 5}, {"db_idx": 8161, "episode_idx": 29, "frame_idx": 49, "global_frame_idx": 8161, "task_index": 5}, {"db_idx": 8162, "episode_idx": 29, "frame_idx": 50, "global_frame_idx": 8162, "task_index": 5}, {"db_idx": 8163, "episode_idx": 29, "frame_idx": 51, "global_frame_idx": 8163, "task_index": 5}, {"db_idx": 8164, "episode_idx": 29, "frame_idx": 52, "global_frame_idx": 8164, "task_index": 5}, {"db_idx": 8165, "episode_idx": 29, "frame_idx": 53, "global_frame_idx": 8165, "task_index": 5}, {"db_idx": 8166, "episode_idx": 29, "frame_idx": 54, "global_frame_idx": 8166, "task_index": 5}, {"db_idx": 8167, "episode_idx": 29, "frame_idx": 55, "global_frame_idx": 8167, "task_index": 5}, {"db_idx": 8168, "episode_idx": 29, "frame_idx": 56, "global_frame_idx": 8168, "task_index": 5}, {"db_idx": 8169, "episode_idx": 29, 
"frame_idx": 57, "global_frame_idx": 8169, "task_index": 5}, {"db_idx": 8170, "episode_idx": 29, "frame_idx": 58, "global_frame_idx": 8170, "task_index": 5}, {"db_idx": 8171, "episode_idx": 29, "frame_idx": 59, "global_frame_idx": 8171, "task_index": 5}, {"db_idx": 8172, "episode_idx": 29, "frame_idx": 60, "global_frame_idx": 8172, "task_index": 5}, {"db_idx": 8173, "episode_idx": 29, "frame_idx": 61, "global_frame_idx": 8173, "task_index": 5}, {"db_idx": 8174, "episode_idx": 29, "frame_idx": 62, "global_frame_idx": 8174, "task_index": 5}, {"db_idx": 8175, "episode_idx": 29, "frame_idx": 63, "global_frame_idx": 8175, "task_index": 5}, {"db_idx": 8176, "episode_idx": 29, "frame_idx": 64, "global_frame_idx": 8176, "task_index": 5}, {"db_idx": 8177, "episode_idx": 29, "frame_idx": 65, "global_frame_idx": 8177, "task_index": 5}, {"db_idx": 8178, "episode_idx": 29, "frame_idx": 66, "global_frame_idx": 8178, "task_index": 5}, {"db_idx": 8179, "episode_idx": 29, "frame_idx": 67, "global_frame_idx": 8179, "task_index": 5}, {"db_idx": 8180, "episode_idx": 29, "frame_idx": 68, "global_frame_idx": 8180, "task_index": 5}, {"db_idx": 8181, "episode_idx": 29, "frame_idx": 69, "global_frame_idx": 8181, "task_index": 5}, {"db_idx": 8182, "episode_idx": 29, "frame_idx": 70, "global_frame_idx": 8182, "task_index": 5}, {"db_idx": 8183, "episode_idx": 29, "frame_idx": 71, "global_frame_idx": 8183, "task_index": 5}, {"db_idx": 8184, "episode_idx": 29, "frame_idx": 72, "global_frame_idx": 8184, "task_index": 5}, {"db_idx": 8185, "episode_idx": 29, "frame_idx": 73, "global_frame_idx": 8185, "task_index": 5}, {"db_idx": 8186, "episode_idx": 29, "frame_idx": 74, "global_frame_idx": 8186, "task_index": 5}, {"db_idx": 8187, "episode_idx": 29, "frame_idx": 75, "global_frame_idx": 8187, "task_index": 5}, {"db_idx": 8188, "episode_idx": 29, "frame_idx": 76, "global_frame_idx": 8188, "task_index": 5}, {"db_idx": 8189, "episode_idx": 29, "frame_idx": 77, "global_frame_idx": 8189, "task_index": 
5}, {"db_idx": 8190, "episode_idx": 29, "frame_idx": 78, "global_frame_idx": 8190, "task_index": 5}, {"db_idx": 8191, "episode_idx": 29, "frame_idx": 79, "global_frame_idx": 8191, "task_index": 5}, {"db_idx": 8192, "episode_idx": 29, "frame_idx": 80, "global_frame_idx": 8192, "task_index": 5}, {"db_idx": 8193, "episode_idx": 29, "frame_idx": 81, "global_frame_idx": 8193, "task_index": 5}, {"db_idx": 8194, "episode_idx": 29, "frame_idx": 82, "global_frame_idx": 8194, "task_index": 5}, {"db_idx": 8195, "episode_idx": 29, "frame_idx": 83, "global_frame_idx": 8195, "task_index": 5}, {"db_idx": 8196, "episode_idx": 29, "frame_idx": 84, "global_frame_idx": 8196, "task_index": 5}, {"db_idx": 8197, "episode_idx": 29, "frame_idx": 85, "global_frame_idx": 8197, "task_index": 5}, {"db_idx": 8198, "episode_idx": 29, "frame_idx": 86, "global_frame_idx": 8198, "task_index": 5}, {"db_idx": 8199, "episode_idx": 29, "frame_idx": 87, "global_frame_idx": 8199, "task_index": 5}, {"db_idx": 8200, "episode_idx": 29, "frame_idx": 88, "global_frame_idx": 8200, "task_index": 5}, {"db_idx": 8201, "episode_idx": 29, "frame_idx": 89, "global_frame_idx": 8201, "task_index": 5}, {"db_idx": 8202, "episode_idx": 29, "frame_idx": 90, "global_frame_idx": 8202, "task_index": 5}, {"db_idx": 8203, "episode_idx": 29, "frame_idx": 91, "global_frame_idx": 8203, "task_index": 5}, {"db_idx": 8204, "episode_idx": 29, "frame_idx": 92, "global_frame_idx": 8204, "task_index": 5}, {"db_idx": 8205, "episode_idx": 29, "frame_idx": 93, "global_frame_idx": 8205, "task_index": 5}, {"db_idx": 8206, "episode_idx": 29, "frame_idx": 94, "global_frame_idx": 8206, "task_index": 5}, {"db_idx": 8207, "episode_idx": 29, "frame_idx": 95, "global_frame_idx": 8207, "task_index": 5}, {"db_idx": 8208, "episode_idx": 29, "frame_idx": 96, "global_frame_idx": 8208, "task_index": 5}, {"db_idx": 8209, "episode_idx": 29, "frame_idx": 97, "global_frame_idx": 8209, "task_index": 5}, {"db_idx": 8210, "episode_idx": 29, "frame_idx": 98, 
"global_frame_idx": 8210, "task_index": 5}, {"db_idx": 8211, "episode_idx": 29, "frame_idx": 99, "global_frame_idx": 8211, "task_index": 5}, {"db_idx": 8212, "episode_idx": 29, "frame_idx": 100, "global_frame_idx": 8212, "task_index": 5}, {"db_idx": 8213, "episode_idx": 29, "frame_idx": 101, "global_frame_idx": 8213, "task_index": 5}, {"db_idx": 8214, "episode_idx": 29, "frame_idx": 102, "global_frame_idx": 8214, "task_index": 5}, {"db_idx": 8215, "episode_idx": 29, "frame_idx": 103, "global_frame_idx": 8215, "task_index": 5}, {"db_idx": 8216, "episode_idx": 29, "frame_idx": 104, "global_frame_idx": 8216, "task_index": 5}, {"db_idx": 8217, "episode_idx": 29, "frame_idx": 105, "global_frame_idx": 8217, "task_index": 5}, {"db_idx": 8218, "episode_idx": 29, "frame_idx": 106, "global_frame_idx": 8218, "task_index": 5}, {"db_idx": 8219, "episode_idx": 29, "frame_idx": 107, "global_frame_idx": 8219, "task_index": 5}, {"db_idx": 8220, "episode_idx": 29, "frame_idx": 108, "global_frame_idx": 8220, "task_index": 5}, {"db_idx": 8221, "episode_idx": 29, "frame_idx": 109, "global_frame_idx": 8221, "task_index": 5}, {"db_idx": 8222, "episode_idx": 29, "frame_idx": 110, "global_frame_idx": 8222, "task_index": 5}, {"db_idx": 8223, "episode_idx": 29, "frame_idx": 111, "global_frame_idx": 8223, "task_index": 5}, {"db_idx": 8224, "episode_idx": 29, "frame_idx": 112, "global_frame_idx": 8224, "task_index": 5}, {"db_idx": 8225, "episode_idx": 29, "frame_idx": 113, "global_frame_idx": 8225, "task_index": 5}, {"db_idx": 8226, "episode_idx": 29, "frame_idx": 114, "global_frame_idx": 8226, "task_index": 5}, {"db_idx": 8227, "episode_idx": 29, "frame_idx": 115, "global_frame_idx": 8227, "task_index": 5}, {"db_idx": 8228, "episode_idx": 29, "frame_idx": 116, "global_frame_idx": 8228, "task_index": 5}, {"db_idx": 8229, "episode_idx": 29, "frame_idx": 117, "global_frame_idx": 8229, "task_index": 5}, {"db_idx": 8230, "episode_idx": 29, "frame_idx": 118, "global_frame_idx": 8230, "task_index": 
5}, {"db_idx": 8231, "episode_idx": 29, "frame_idx": 119, "global_frame_idx": 8231, "task_index": 5}, {"db_idx": 8232, "episode_idx": 29, "frame_idx": 120, "global_frame_idx": 8232, "task_index": 5}, {"db_idx": 8233, "episode_idx": 29, "frame_idx": 121, "global_frame_idx": 8233, "task_index": 5}, {"db_idx": 8234, "episode_idx": 29, "frame_idx": 122, "global_frame_idx": 8234, "task_index": 5}, {"db_idx": 8235, "episode_idx": 29, "frame_idx": 123, "global_frame_idx": 8235, "task_index": 5}, {"db_idx": 8236, "episode_idx": 29, "frame_idx": 124, "global_frame_idx": 8236, "task_index": 5}, {"db_idx": 8237, "episode_idx": 29, "frame_idx": 125, "global_frame_idx": 8237, "task_index": 5}, {"db_idx": 8238, "episode_idx": 29, "frame_idx": 126, "global_frame_idx": 8238, "task_index": 5}, {"db_idx": 8239, "episode_idx": 29, "frame_idx": 127, "global_frame_idx": 8239, "task_index": 5}, {"db_idx": 8240, "episode_idx": 29, "frame_idx": 128, "global_frame_idx": 8240, "task_index": 5}, {"db_idx": 8241, "episode_idx": 29, "frame_idx": 129, "global_frame_idx": 8241, "task_index": 5}, {"db_idx": 8242, "episode_idx": 29, "frame_idx": 130, "global_frame_idx": 8242, "task_index": 5}, {"db_idx": 8243, "episode_idx": 29, "frame_idx": 131, "global_frame_idx": 8243, "task_index": 5}, {"db_idx": 8244, "episode_idx": 29, "frame_idx": 132, "global_frame_idx": 8244, "task_index": 5}, {"db_idx": 8245, "episode_idx": 29, "frame_idx": 133, "global_frame_idx": 8245, "task_index": 5}, {"db_idx": 8246, "episode_idx": 29, "frame_idx": 134, "global_frame_idx": 8246, "task_index": 5}, {"db_idx": 8247, "episode_idx": 29, "frame_idx": 135, "global_frame_idx": 8247, "task_index": 5}, {"db_idx": 8248, "episode_idx": 29, "frame_idx": 136, "global_frame_idx": 8248, "task_index": 5}, {"db_idx": 8249, "episode_idx": 29, "frame_idx": 137, "global_frame_idx": 8249, "task_index": 5}, {"db_idx": 8250, "episode_idx": 29, "frame_idx": 138, "global_frame_idx": 8250, "task_index": 5}, {"db_idx": 8251, "episode_idx": 29, 
"frame_idx": 139, "global_frame_idx": 8251, "task_index": 5}, {"db_idx": 8252, "episode_idx": 29, "frame_idx": 140, "global_frame_idx": 8252, "task_index": 5}, {"db_idx": 8253, "episode_idx": 29, "frame_idx": 141, "global_frame_idx": 8253, "task_index": 5}, {"db_idx": 8254, "episode_idx": 29, "frame_idx": 142, "global_frame_idx": 8254, "task_index": 5}, {"db_idx": 8255, "episode_idx": 29, "frame_idx": 143, "global_frame_idx": 8255, "task_index": 5}, {"db_idx": 8256, "episode_idx": 29, "frame_idx": 144, "global_frame_idx": 8256, "task_index": 5}, {"db_idx": 8257, "episode_idx": 29, "frame_idx": 145, "global_frame_idx": 8257, "task_index": 5}, {"db_idx": 8258, "episode_idx": 29, "frame_idx": 146, "global_frame_idx": 8258, "task_index": 5}, {"db_idx": 8259, "episode_idx": 29, "frame_idx": 147, "global_frame_idx": 8259, "task_index": 5}, {"db_idx": 8260, "episode_idx": 29, "frame_idx": 148, "global_frame_idx": 8260, "task_index": 5}, {"db_idx": 8261, "episode_idx": 29, "frame_idx": 149, "global_frame_idx": 8261, "task_index": 5}, {"db_idx": 8262, "episode_idx": 29, "frame_idx": 150, "global_frame_idx": 8262, "task_index": 5}, {"db_idx": 8263, "episode_idx": 29, "frame_idx": 151, "global_frame_idx": 8263, "task_index": 5}, {"db_idx": 8264, "episode_idx": 29, "frame_idx": 152, "global_frame_idx": 8264, "task_index": 5}, {"db_idx": 8265, "episode_idx": 29, "frame_idx": 153, "global_frame_idx": 8265, "task_index": 5}, {"db_idx": 8266, "episode_idx": 29, "frame_idx": 154, "global_frame_idx": 8266, "task_index": 5}, {"db_idx": 8267, "episode_idx": 29, "frame_idx": 155, "global_frame_idx": 8267, "task_index": 5}, {"db_idx": 8268, "episode_idx": 29, "frame_idx": 156, "global_frame_idx": 8268, "task_index": 5}, {"db_idx": 8269, "episode_idx": 29, "frame_idx": 157, "global_frame_idx": 8269, "task_index": 5}, {"db_idx": 8270, "episode_idx": 29, "frame_idx": 158, "global_frame_idx": 8270, "task_index": 5}, {"db_idx": 8271, "episode_idx": 29, "frame_idx": 159, "global_frame_idx": 
8271, "task_index": 5}, {"db_idx": 8272, "episode_idx": 29, "frame_idx": 160, "global_frame_idx": 8272, "task_index": 5}, {"db_idx": 8273, "episode_idx": 29, "frame_idx": 161, "global_frame_idx": 8273, "task_index": 5}, {"db_idx": 8274, "episode_idx": 29, "frame_idx": 162, "global_frame_idx": 8274, "task_index": 5}, {"db_idx": 8275, "episode_idx": 29, "frame_idx": 163, "global_frame_idx": 8275, "task_index": 5}, {"db_idx": 8276, "episode_idx": 29, "frame_idx": 164, "global_frame_idx": 8276, "task_index": 5}, {"db_idx": 8277, "episode_idx": 29, "frame_idx": 165, "global_frame_idx": 8277, "task_index": 5}, {"db_idx": 8278, "episode_idx": 29, "frame_idx": 166, "global_frame_idx": 8278, "task_index": 5}, {"db_idx": 8279, "episode_idx": 29, "frame_idx": 167, "global_frame_idx": 8279, "task_index": 5}, {"db_idx": 8280, "episode_idx": 29, "frame_idx": 168, "global_frame_idx": 8280, "task_index": 5}, {"db_idx": 8281, "episode_idx": 29, "frame_idx": 169, "global_frame_idx": 8281, "task_index": 5}, {"db_idx": 8282, "episode_idx": 29, "frame_idx": 170, "global_frame_idx": 8282, "task_index": 5}, {"db_idx": 8283, "episode_idx": 29, "frame_idx": 171, "global_frame_idx": 8283, "task_index": 5}, {"db_idx": 8284, "episode_idx": 29, "frame_idx": 172, "global_frame_idx": 8284, "task_index": 5}, {"db_idx": 8285, "episode_idx": 29, "frame_idx": 173, "global_frame_idx": 8285, "task_index": 5}, {"db_idx": 8286, "episode_idx": 29, "frame_idx": 174, "global_frame_idx": 8286, "task_index": 5}, {"db_idx": 8287, "episode_idx": 29, "frame_idx": 175, "global_frame_idx": 8287, "task_index": 5}, {"db_idx": 8288, "episode_idx": 29, "frame_idx": 176, "global_frame_idx": 8288, "task_index": 5}, {"db_idx": 8289, "episode_idx": 29, "frame_idx": 177, "global_frame_idx": 8289, "task_index": 5}, {"db_idx": 8290, "episode_idx": 29, "frame_idx": 178, "global_frame_idx": 8290, "task_index": 5}, {"db_idx": 8291, "episode_idx": 29, "frame_idx": 179, "global_frame_idx": 8291, "task_index": 5}, {"db_idx": 
8292, "episode_idx": 29, "frame_idx": 180, "global_frame_idx": 8292, "task_index": 5}, {"db_idx": 8293, "episode_idx": 29, "frame_idx": 181, "global_frame_idx": 8293, "task_index": 5}, {"db_idx": 8294, "episode_idx": 29, "frame_idx": 182, "global_frame_idx": 8294, "task_index": 5}, {"db_idx": 8295, "episode_idx": 29, "frame_idx": 183, "global_frame_idx": 8295, "task_index": 5}, {"db_idx": 8296, "episode_idx": 29, "frame_idx": 184, "global_frame_idx": 8296, "task_index": 5}, {"db_idx": 8297, "episode_idx": 29, "frame_idx": 185, "global_frame_idx": 8297, "task_index": 5}, {"db_idx": 8298, "episode_idx": 29, "frame_idx": 186, "global_frame_idx": 8298, "task_index": 5}, {"db_idx": 8299, "episode_idx": 29, "frame_idx": 187, "global_frame_idx": 8299, "task_index": 5}, {"db_idx": 8300, "episode_idx": 29, "frame_idx": 188, "global_frame_idx": 8300, "task_index": 5}, {"db_idx": 8301, "episode_idx": 29, "frame_idx": 189, "global_frame_idx": 8301, "task_index": 5}, {"db_idx": 8302, "episode_idx": 29, "frame_idx": 190, "global_frame_idx": 8302, "task_index": 5}, {"db_idx": 8303, "episode_idx": 29, "frame_idx": 191, "global_frame_idx": 8303, "task_index": 5}, {"db_idx": 8304, "episode_idx": 29, "frame_idx": 192, "global_frame_idx": 8304, "task_index": 5}, {"db_idx": 8305, "episode_idx": 29, "frame_idx": 193, "global_frame_idx": 8305, "task_index": 5}, {"db_idx": 8306, "episode_idx": 29, "frame_idx": 194, "global_frame_idx": 8306, "task_index": 5}, {"db_idx": 8307, "episode_idx": 29, "frame_idx": 195, "global_frame_idx": 8307, "task_index": 5}, {"db_idx": 8308, "episode_idx": 29, "frame_idx": 196, "global_frame_idx": 8308, "task_index": 5}, {"db_idx": 8309, "episode_idx": 29, "frame_idx": 197, "global_frame_idx": 8309, "task_index": 5}, {"db_idx": 8310, "episode_idx": 29, "frame_idx": 198, "global_frame_idx": 8310, "task_index": 5}, {"db_idx": 8311, "episode_idx": 29, "frame_idx": 199, "global_frame_idx": 8311, "task_index": 5}, {"db_idx": 8312, "episode_idx": 29, "frame_idx": 
200, "global_frame_idx": 8312, "task_index": 5}, {"db_idx": 8313, "episode_idx": 29, "frame_idx": 201, "global_frame_idx": 8313, "task_index": 5}, {"db_idx": 8314, "episode_idx": 29, "frame_idx": 202, "global_frame_idx": 8314, "task_index": 5}, {"db_idx": 8315, "episode_idx": 29, "frame_idx": 203, "global_frame_idx": 8315, "task_index": 5}, {"db_idx": 8316, "episode_idx": 29, "frame_idx": 204, "global_frame_idx": 8316, "task_index": 5}, {"db_idx": 8317, "episode_idx": 29, "frame_idx": 205, "global_frame_idx": 8317, "task_index": 5}, {"db_idx": 8318, "episode_idx": 29, "frame_idx": 206, "global_frame_idx": 8318, "task_index": 5}, {"db_idx": 8319, "episode_idx": 29, "frame_idx": 207, "global_frame_idx": 8319, "task_index": 5}, {"db_idx": 8320, "episode_idx": 29, "frame_idx": 208, "global_frame_idx": 8320, "task_index": 5}, {"db_idx": 8321, "episode_idx": 29, "frame_idx": 209, "global_frame_idx": 8321, "task_index": 5}, {"db_idx": 8322, "episode_idx": 29, "frame_idx": 210, "global_frame_idx": 8322, "task_index": 5}, {"db_idx": 8323, "episode_idx": 29, "frame_idx": 211, "global_frame_idx": 8323, "task_index": 5}, {"db_idx": 8324, "episode_idx": 29, "frame_idx": 212, "global_frame_idx": 8324, "task_index": 5}, {"db_idx": 8325, "episode_idx": 29, "frame_idx": 213, "global_frame_idx": 8325, "task_index": 5}, {"db_idx": 8326, "episode_idx": 29, "frame_idx": 214, "global_frame_idx": 8326, "task_index": 5}, {"db_idx": 8327, "episode_idx": 29, "frame_idx": 215, "global_frame_idx": 8327, "task_index": 5}, {"db_idx": 8328, "episode_idx": 29, "frame_idx": 216, "global_frame_idx": 8328, "task_index": 5}, {"db_idx": 8329, "episode_idx": 29, "frame_idx": 217, "global_frame_idx": 8329, "task_index": 5}, {"db_idx": 8330, "episode_idx": 29, "frame_idx": 218, "global_frame_idx": 8330, "task_index": 5}, {"db_idx": 8331, "episode_idx": 29, "frame_idx": 219, "global_frame_idx": 8331, "task_index": 5}, {"db_idx": 8332, "episode_idx": 29, "frame_idx": 220, "global_frame_idx": 8332, 
"task_index": 5}, {"db_idx": 8333, "episode_idx": 29, "frame_idx": 221, "global_frame_idx": 8333, "task_index": 5}, {"db_idx": 8334, "episode_idx": 29, "frame_idx": 222, "global_frame_idx": 8334, "task_index": 5}, {"db_idx": 8335, "episode_idx": 29, "frame_idx": 223, "global_frame_idx": 8335, "task_index": 5}, {"db_idx": 8336, "episode_idx": 29, "frame_idx": 224, "global_frame_idx": 8336, "task_index": 5}, {"db_idx": 8337, "episode_idx": 29, "frame_idx": 225, "global_frame_idx": 8337, "task_index": 5}, {"db_idx": 8338, "episode_idx": 29, "frame_idx": 226, "global_frame_idx": 8338, "task_index": 5}, {"db_idx": 8339, "episode_idx": 29, "frame_idx": 227, "global_frame_idx": 8339, "task_index": 5}, {"db_idx": 8340, "episode_idx": 29, "frame_idx": 228, "global_frame_idx": 8340, "task_index": 5}, {"db_idx": 8341, "episode_idx": 29, "frame_idx": 229, "global_frame_idx": 8341, "task_index": 5}, {"db_idx": 8342, "episode_idx": 29, "frame_idx": 230, "global_frame_idx": 8342, "task_index": 5}, {"db_idx": 8343, "episode_idx": 29, "frame_idx": 231, "global_frame_idx": 8343, "task_index": 5}, {"db_idx": 8344, "episode_idx": 29, "frame_idx": 232, "global_frame_idx": 8344, "task_index": 5}, {"db_idx": 8345, "episode_idx": 29, "frame_idx": 233, "global_frame_idx": 8345, "task_index": 5}, {"db_idx": 8346, "episode_idx": 29, "frame_idx": 234, "global_frame_idx": 8346, "task_index": 5}, {"db_idx": 8347, "episode_idx": 29, "frame_idx": 235, "global_frame_idx": 8347, "task_index": 5}, {"db_idx": 8348, "episode_idx": 29, "frame_idx": 236, "global_frame_idx": 8348, "task_index": 5}, {"db_idx": 8349, "episode_idx": 29, "frame_idx": 237, "global_frame_idx": 8349, "task_index": 5}, {"db_idx": 8350, "episode_idx": 29, "frame_idx": 238, "global_frame_idx": 8350, "task_index": 5}, {"db_idx": 8351, "episode_idx": 29, "frame_idx": 239, "global_frame_idx": 8351, "task_index": 5}, {"db_idx": 8352, "episode_idx": 29, "frame_idx": 240, "global_frame_idx": 8352, "task_index": 5}, {"db_idx": 8353, 
"episode_idx": 29, "frame_idx": 241, "global_frame_idx": 8353, "task_index": 5}, {"db_idx": 8354, "episode_idx": 29, "frame_idx": 242, "global_frame_idx": 8354, "task_index": 5}, {"db_idx": 8355, "episode_idx": 29, "frame_idx": 243, "global_frame_idx": 8355, "task_index": 5}, {"db_idx": 8356, "episode_idx": 29, "frame_idx": 244, "global_frame_idx": 8356, "task_index": 5}, {"db_idx": 8357, "episode_idx": 29, "frame_idx": 245, "global_frame_idx": 8357, "task_index": 5}, {"db_idx": 8358, "episode_idx": 29, "frame_idx": 246, "global_frame_idx": 8358, "task_index": 5}, {"db_idx": 8359, "episode_idx": 29, "frame_idx": 247, "global_frame_idx": 8359, "task_index": 5}, {"db_idx": 8360, "episode_idx": 29, "frame_idx": 248, "global_frame_idx": 8360, "task_index": 5}, {"db_idx": 8361, "episode_idx": 29, "frame_idx": 249, "global_frame_idx": 8361, "task_index": 5}, {"db_idx": 8362, "episode_idx": 29, "frame_idx": 250, "global_frame_idx": 8362, "task_index": 5}, {"db_idx": 8363, "episode_idx": 29, "frame_idx": 251, "global_frame_idx": 8363, "task_index": 5}, {"db_idx": 8364, "episode_idx": 29, "frame_idx": 252, "global_frame_idx": 8364, "task_index": 5}, {"db_idx": 8365, "episode_idx": 29, "frame_idx": 253, "global_frame_idx": 8365, "task_index": 5}, {"db_idx": 8366, "episode_idx": 29, "frame_idx": 254, "global_frame_idx": 8366, "task_index": 5}, {"db_idx": 8367, "episode_idx": 29, "frame_idx": 255, "global_frame_idx": 8367, "task_index": 5}, {"db_idx": 8368, "episode_idx": 29, "frame_idx": 256, "global_frame_idx": 8368, "task_index": 5}, {"db_idx": 8369, "episode_idx": 29, "frame_idx": 257, "global_frame_idx": 8369, "task_index": 5}, {"db_idx": 8370, "episode_idx": 29, "frame_idx": 258, "global_frame_idx": 8370, "task_index": 5}, {"db_idx": 8371, "episode_idx": 29, "frame_idx": 259, "global_frame_idx": 8371, "task_index": 5}, {"db_idx": 8372, "episode_idx": 29, "frame_idx": 260, "global_frame_idx": 8372, "task_index": 5}, {"db_idx": 8373, "episode_idx": 29, "frame_idx": 261, 
"global_frame_idx": 8373, "task_index": 5}, {"db_idx": 8374, "episode_idx": 29, "frame_idx": 262, "global_frame_idx": 8374, "task_index": 5}, {"db_idx": 8375, "episode_idx": 29, "frame_idx": 263, "global_frame_idx": 8375, "task_index": 5}, {"db_idx": 8376, "episode_idx": 29, "frame_idx": 264, "global_frame_idx": 8376, "task_index": 5}, {"db_idx": 8377, "episode_idx": 29, "frame_idx": 265, "global_frame_idx": 8377, "task_index": 5}, {"db_idx": 8378, "episode_idx": 29, "frame_idx": 266, "global_frame_idx": 8378, "task_index": 5}, {"db_idx": 8379, "episode_idx": 29, "frame_idx": 267, "global_frame_idx": 8379, "task_index": 5}, {"db_idx": 8380, "episode_idx": 29, "frame_idx": 268, "global_frame_idx": 8380, "task_index": 5}, {"db_idx": 8381, "episode_idx": 29, "frame_idx": 269, "global_frame_idx": 8381, "task_index": 5}, {"db_idx": 8382, "episode_idx": 29, "frame_idx": 270, "global_frame_idx": 8382, "task_index": 5}, {"db_idx": 8383, "episode_idx": 29, "frame_idx": 271, "global_frame_idx": 8383, "task_index": 5}, {"db_idx": 8384, "episode_idx": 29, "frame_idx": 272, "global_frame_idx": 8384, "task_index": 5}, {"db_idx": 8385, "episode_idx": 29, "frame_idx": 273, "global_frame_idx": 8385, "task_index": 5}, {"db_idx": 8386, "episode_idx": 29, "frame_idx": 274, "global_frame_idx": 8386, "task_index": 5}, {"db_idx": 8387, "episode_idx": 29, "frame_idx": 275, "global_frame_idx": 8387, "task_index": 5}, {"db_idx": 8388, "episode_idx": 29, "frame_idx": 276, "global_frame_idx": 8388, "task_index": 5}, {"db_idx": 8389, "episode_idx": 29, "frame_idx": 277, "global_frame_idx": 8389, "task_index": 5}, {"db_idx": 8390, "episode_idx": 29, "frame_idx": 278, "global_frame_idx": 8390, "task_index": 5}, {"db_idx": 8391, "episode_idx": 29, "frame_idx": 279, "global_frame_idx": 8391, "task_index": 5}, {"db_idx": 8392, "episode_idx": 29, "frame_idx": 280, "global_frame_idx": 8392, "task_index": 5}, {"db_idx": 8393, "episode_idx": 29, "frame_idx": 281, "global_frame_idx": 8393, "task_index": 
5}, {"db_idx": 8394, "episode_idx": 29, "frame_idx": 282, "global_frame_idx": 8394, "task_index": 5}, {"db_idx": 8395, "episode_idx": 29, "frame_idx": 283, "global_frame_idx": 8395, "task_index": 5}, {"db_idx": 8396, "episode_idx": 29, "frame_idx": 284, "global_frame_idx": 8396, "task_index": 5}, {"db_idx": 8397, "episode_idx": 29, "frame_idx": 285, "global_frame_idx": 8397, "task_index": 5}, {"db_idx": 8398, "episode_idx": 29, "frame_idx": 286, "global_frame_idx": 8398, "task_index": 5}, {"db_idx": 8399, "episode_idx": 29, "frame_idx": 287, "global_frame_idx": 8399, "task_index": 5}, {"db_idx": 8400, "episode_idx": 29, "frame_idx": 288, "global_frame_idx": 8400, "task_index": 5}, {"db_idx": 8401, "episode_idx": 29, "frame_idx": 289, "global_frame_idx": 8401, "task_index": 5}, {"db_idx": 8402, "episode_idx": 29, "frame_idx": 290, "global_frame_idx": 8402, "task_index": 5}, {"db_idx": 8403, "episode_idx": 29, "frame_idx": 291, "global_frame_idx": 8403, "task_index": 5}, {"db_idx": 8404, "episode_idx": 29, "frame_idx": 292, "global_frame_idx": 8404, "task_index": 5}, {"db_idx": 8405, "episode_idx": 29, "frame_idx": 293, "global_frame_idx": 8405, "task_index": 5}, {"db_idx": 8406, "episode_idx": 29, "frame_idx": 294, "global_frame_idx": 8406, "task_index": 5}, {"db_idx": 8407, "episode_idx": 29, "frame_idx": 295, "global_frame_idx": 8407, "task_index": 5}, {"db_idx": 8408, "episode_idx": 29, "frame_idx": 296, "global_frame_idx": 8408, "task_index": 5}, {"db_idx": 8409, "episode_idx": 29, "frame_idx": 297, "global_frame_idx": 8409, "task_index": 5}, {"db_idx": 8410, "episode_idx": 29, "frame_idx": 298, "global_frame_idx": 8410, "task_index": 5}, {"db_idx": 8411, "episode_idx": 29, "frame_idx": 299, "global_frame_idx": 8411, "task_index": 5}, {"db_idx": 8412, "episode_idx": 29, "frame_idx": 300, "global_frame_idx": 8412, "task_index": 5}, {"db_idx": 8413, "episode_idx": 29, "frame_idx": 301, "global_frame_idx": 8413, "task_index": 5}, {"db_idx": 8414, "episode_idx": 29, 
"frame_idx": 302, "global_frame_idx": 8414, "task_index": 5}, {"db_idx": 8415, "episode_idx": 29, "frame_idx": 303, "global_frame_idx": 8415, "task_index": 5}, {"db_idx": 8416, "episode_idx": 29, "frame_idx": 304, "global_frame_idx": 8416, "task_index": 5}, {"db_idx": 8417, "episode_idx": 29, "frame_idx": 305, "global_frame_idx": 8417, "task_index": 5}, {"db_idx": 8418, "episode_idx": 29, "frame_idx": 306, "global_frame_idx": 8418, "task_index": 5}, {"db_idx": 8419, "episode_idx": 29, "frame_idx": 307, "global_frame_idx": 8419, "task_index": 5}, {"db_idx": 8420, "episode_idx": 29, "frame_idx": 308, "global_frame_idx": 8420, "task_index": 5}, {"db_idx": 8421, "episode_idx": 29, "frame_idx": 309, "global_frame_idx": 8421, "task_index": 5}, {"db_idx": 8422, "episode_idx": 29, "frame_idx": 310, "global_frame_idx": 8422, "task_index": 5}, {"db_idx": 8423, "episode_idx": 29, "frame_idx": 311, "global_frame_idx": 8423, "task_index": 5}, {"db_idx": 8424, "episode_idx": 29, "frame_idx": 312, "global_frame_idx": 8424, "task_index": 5}, {"db_idx": 8425, "episode_idx": 29, "frame_idx": 313, "global_frame_idx": 8425, "task_index": 5}, {"db_idx": 8426, "episode_idx": 29, "frame_idx": 314, "global_frame_idx": 8426, "task_index": 5}, {"db_idx": 8427, "episode_idx": 29, "frame_idx": 315, "global_frame_idx": 8427, "task_index": 5}, {"db_idx": 8428, "episode_idx": 29, "frame_idx": 316, "global_frame_idx": 8428, "task_index": 5}, {"db_idx": 8429, "episode_idx": 29, "frame_idx": 317, "global_frame_idx": 8429, "task_index": 5}, {"db_idx": 8430, "episode_idx": 29, "frame_idx": 318, "global_frame_idx": 8430, "task_index": 5}, {"db_idx": 8431, "episode_idx": 29, "frame_idx": 319, "global_frame_idx": 8431, "task_index": 5}, {"db_idx": 8432, "episode_idx": 29, "frame_idx": 320, "global_frame_idx": 8432, "task_index": 5}, {"db_idx": 8433, "episode_idx": 29, "frame_idx": 321, "global_frame_idx": 8433, "task_index": 5}, {"db_idx": 8434, "episode_idx": 29, "frame_idx": 322, "global_frame_idx": 
8434, "task_index": 5}, {"db_idx": 8435, "episode_idx": 29, "frame_idx": 323, "global_frame_idx": 8435, "task_index": 5}, {"db_idx": 8436, "episode_idx": 29, "frame_idx": 324, "global_frame_idx": 8436, "task_index": 5}, {"db_idx": 8437, "episode_idx": 29, "frame_idx": 325, "global_frame_idx": 8437, "task_index": 5}, {"db_idx": 8438, "episode_idx": 29, "frame_idx": 326, "global_frame_idx": 8438, "task_index": 5}, {"db_idx": 8439, "episode_idx": 29, "frame_idx": 327, "global_frame_idx": 8439, "task_index": 5}, {"db_idx": 8440, "episode_idx": 29, "frame_idx": 328, "global_frame_idx": 8440, "task_index": 5}, {"db_idx": 8441, "episode_idx": 29, "frame_idx": 329, "global_frame_idx": 8441, "task_index": 5}, {"db_idx": 8442, "episode_idx": 29, "frame_idx": 330, "global_frame_idx": 8442, "task_index": 5}, {"db_idx": 8443, "episode_idx": 29, "frame_idx": 331, "global_frame_idx": 8443, "task_index": 5}, {"db_idx": 8444, "episode_idx": 29, "frame_idx": 332, "global_frame_idx": 8444, "task_index": 5}, {"db_idx": 8445, "episode_idx": 29, "frame_idx": 333, "global_frame_idx": 8445, "task_index": 5}, {"db_idx": 8446, "episode_idx": 29, "frame_idx": 334, "global_frame_idx": 8446, "task_index": 5}, {"db_idx": 8447, "episode_idx": 29, "frame_idx": 335, "global_frame_idx": 8447, "task_index": 5}, {"db_idx": 8448, "episode_idx": 29, "frame_idx": 336, "global_frame_idx": 8448, "task_index": 5}, {"db_idx": 8449, "episode_idx": 29, "frame_idx": 337, "global_frame_idx": 8449, "task_index": 5}, {"db_idx": 8450, "episode_idx": 29, "frame_idx": 338, "global_frame_idx": 8450, "task_index": 5}, {"db_idx": 8451, "episode_idx": 29, "frame_idx": 339, "global_frame_idx": 8451, "task_index": 5}, {"db_idx": 8452, "episode_idx": 29, "frame_idx": 340, "global_frame_idx": 8452, "task_index": 5}, {"db_idx": 8453, "episode_idx": 29, "frame_idx": 341, "global_frame_idx": 8453, "task_index": 5}, {"db_idx": 8454, "episode_idx": 29, "frame_idx": 342, "global_frame_idx": 8454, "task_index": 5}, {"db_idx": 
8455, "episode_idx": 29, "frame_idx": 343, "global_frame_idx": 8455, "task_index": 5}, {"db_idx": 8456, "episode_idx": 29, "frame_idx": 344, "global_frame_idx": 8456, "task_index": 5}, {"db_idx": 8457, "episode_idx": 30, "frame_idx": 0, "global_frame_idx": 8457, "task_index": 6}, {"db_idx": 8458, "episode_idx": 30, "frame_idx": 1, "global_frame_idx": 8458, "task_index": 6}, {"db_idx": 8459, "episode_idx": 30, "frame_idx": 2, "global_frame_idx": 8459, "task_index": 6}, {"db_idx": 8460, "episode_idx": 30, "frame_idx": 3, "global_frame_idx": 8460, "task_index": 6}, {"db_idx": 8461, "episode_idx": 30, "frame_idx": 4, "global_frame_idx": 8461, "task_index": 6}, {"db_idx": 8462, "episode_idx": 30, "frame_idx": 5, "global_frame_idx": 8462, "task_index": 6}, {"db_idx": 8463, "episode_idx": 30, "frame_idx": 6, "global_frame_idx": 8463, "task_index": 6}, {"db_idx": 8464, "episode_idx": 30, "frame_idx": 7, "global_frame_idx": 8464, "task_index": 6}, {"db_idx": 8465, "episode_idx": 30, "frame_idx": 8, "global_frame_idx": 8465, "task_index": 6}, {"db_idx": 8466, "episode_idx": 30, "frame_idx": 9, "global_frame_idx": 8466, "task_index": 6}, {"db_idx": 8467, "episode_idx": 30, "frame_idx": 10, "global_frame_idx": 8467, "task_index": 6}, {"db_idx": 8468, "episode_idx": 30, "frame_idx": 11, "global_frame_idx": 8468, "task_index": 6}, {"db_idx": 8469, "episode_idx": 30, "frame_idx": 12, "global_frame_idx": 8469, "task_index": 6}, {"db_idx": 8470, "episode_idx": 30, "frame_idx": 13, "global_frame_idx": 8470, "task_index": 6}, {"db_idx": 8471, "episode_idx": 30, "frame_idx": 14, "global_frame_idx": 8471, "task_index": 6}, {"db_idx": 8472, "episode_idx": 30, "frame_idx": 15, "global_frame_idx": 8472, "task_index": 6}, {"db_idx": 8473, "episode_idx": 30, "frame_idx": 16, "global_frame_idx": 8473, "task_index": 6}, {"db_idx": 8474, "episode_idx": 30, "frame_idx": 17, "global_frame_idx": 8474, "task_index": 6}, {"db_idx": 8475, "episode_idx": 30, "frame_idx": 18, "global_frame_idx": 8475, 
"task_index": 6}, {"db_idx": 8476, "episode_idx": 30, "frame_idx": 19, "global_frame_idx": 8476, "task_index": 6}, {"db_idx": 8477, "episode_idx": 30, "frame_idx": 20, "global_frame_idx": 8477, "task_index": 6}, {"db_idx": 8478, "episode_idx": 30, "frame_idx": 21, "global_frame_idx": 8478, "task_index": 6}, {"db_idx": 8479, "episode_idx": 30, "frame_idx": 22, "global_frame_idx": 8479, "task_index": 6}, {"db_idx": 8480, "episode_idx": 30, "frame_idx": 23, "global_frame_idx": 8480, "task_index": 6}, {"db_idx": 8481, "episode_idx": 30, "frame_idx": 24, "global_frame_idx": 8481, "task_index": 6}, {"db_idx": 8482, "episode_idx": 30, "frame_idx": 25, "global_frame_idx": 8482, "task_index": 6}, {"db_idx": 8483, "episode_idx": 30, "frame_idx": 26, "global_frame_idx": 8483, "task_index": 6}, {"db_idx": 8484, "episode_idx": 30, "frame_idx": 27, "global_frame_idx": 8484, "task_index": 6}, {"db_idx": 8485, "episode_idx": 30, "frame_idx": 28, "global_frame_idx": 8485, "task_index": 6}, {"db_idx": 8486, "episode_idx": 30, "frame_idx": 29, "global_frame_idx": 8486, "task_index": 6}, {"db_idx": 8487, "episode_idx": 30, "frame_idx": 30, "global_frame_idx": 8487, "task_index": 6}, {"db_idx": 8488, "episode_idx": 30, "frame_idx": 31, "global_frame_idx": 8488, "task_index": 6}, {"db_idx": 8489, "episode_idx": 30, "frame_idx": 32, "global_frame_idx": 8489, "task_index": 6}, {"db_idx": 8490, "episode_idx": 30, "frame_idx": 33, "global_frame_idx": 8490, "task_index": 6}, {"db_idx": 8491, "episode_idx": 30, "frame_idx": 34, "global_frame_idx": 8491, "task_index": 6}, {"db_idx": 8492, "episode_idx": 30, "frame_idx": 35, "global_frame_idx": 8492, "task_index": 6}, {"db_idx": 8493, "episode_idx": 30, "frame_idx": 36, "global_frame_idx": 8493, "task_index": 6}, {"db_idx": 8494, "episode_idx": 30, "frame_idx": 37, "global_frame_idx": 8494, "task_index": 6}, {"db_idx": 8495, "episode_idx": 30, "frame_idx": 38, "global_frame_idx": 8495, "task_index": 6}, {"db_idx": 8496, "episode_idx": 30, 
"frame_idx": 39, "global_frame_idx": 8496, "task_index": 6}, {"db_idx": 8497, "episode_idx": 30, "frame_idx": 40, "global_frame_idx": 8497, "task_index": 6}, {"db_idx": 8498, "episode_idx": 30, "frame_idx": 41, "global_frame_idx": 8498, "task_index": 6}, {"db_idx": 8499, "episode_idx": 30, "frame_idx": 42, "global_frame_idx": 8499, "task_index": 6}, {"db_idx": 8500, "episode_idx": 30, "frame_idx": 43, "global_frame_idx": 8500, "task_index": 6}, {"db_idx": 8501, "episode_idx": 30, "frame_idx": 44, "global_frame_idx": 8501, "task_index": 6}, {"db_idx": 8502, "episode_idx": 30, "frame_idx": 45, "global_frame_idx": 8502, "task_index": 6}, {"db_idx": 8503, "episode_idx": 30, "frame_idx": 46, "global_frame_idx": 8503, "task_index": 6}, {"db_idx": 8504, "episode_idx": 30, "frame_idx": 47, "global_frame_idx": 8504, "task_index": 6}, {"db_idx": 8505, "episode_idx": 30, "frame_idx": 48, "global_frame_idx": 8505, "task_index": 6}, {"db_idx": 8506, "episode_idx": 30, "frame_idx": 49, "global_frame_idx": 8506, "task_index": 6}, {"db_idx": 8507, "episode_idx": 30, "frame_idx": 50, "global_frame_idx": 8507, "task_index": 6}, {"db_idx": 8508, "episode_idx": 30, "frame_idx": 51, "global_frame_idx": 8508, "task_index": 6}, {"db_idx": 8509, "episode_idx": 30, "frame_idx": 52, "global_frame_idx": 8509, "task_index": 6}, {"db_idx": 8510, "episode_idx": 30, "frame_idx": 53, "global_frame_idx": 8510, "task_index": 6}, {"db_idx": 8511, "episode_idx": 30, "frame_idx": 54, "global_frame_idx": 8511, "task_index": 6}, {"db_idx": 8512, "episode_idx": 30, "frame_idx": 55, "global_frame_idx": 8512, "task_index": 6}, {"db_idx": 8513, "episode_idx": 30, "frame_idx": 56, "global_frame_idx": 8513, "task_index": 6}, {"db_idx": 8514, "episode_idx": 30, "frame_idx": 57, "global_frame_idx": 8514, "task_index": 6}, {"db_idx": 8515, "episode_idx": 30, "frame_idx": 58, "global_frame_idx": 8515, "task_index": 6}, {"db_idx": 8516, "episode_idx": 30, "frame_idx": 59, "global_frame_idx": 8516, "task_index": 
6}, {"db_idx": 8517, "episode_idx": 30, "frame_idx": 60, "global_frame_idx": 8517, "task_index": 6}, {"db_idx": 8518, "episode_idx": 30, "frame_idx": 61, "global_frame_idx": 8518, "task_index": 6}, {"db_idx": 8519, "episode_idx": 30, "frame_idx": 62, "global_frame_idx": 8519, "task_index": 6}, {"db_idx": 8520, "episode_idx": 30, "frame_idx": 63, "global_frame_idx": 8520, "task_index": 6}, {"db_idx": 8521, "episode_idx": 30, "frame_idx": 64, "global_frame_idx": 8521, "task_index": 6}, {"db_idx": 8522, "episode_idx": 30, "frame_idx": 65, "global_frame_idx": 8522, "task_index": 6}, {"db_idx": 8523, "episode_idx": 30, "frame_idx": 66, "global_frame_idx": 8523, "task_index": 6}, {"db_idx": 8524, "episode_idx": 30, "frame_idx": 67, "global_frame_idx": 8524, "task_index": 6}, {"db_idx": 8525, "episode_idx": 30, "frame_idx": 68, "global_frame_idx": 8525, "task_index": 6}, {"db_idx": 8526, "episode_idx": 30, "frame_idx": 69, "global_frame_idx": 8526, "task_index": 6}, {"db_idx": 8527, "episode_idx": 30, "frame_idx": 70, "global_frame_idx": 8527, "task_index": 6}, {"db_idx": 8528, "episode_idx": 30, "frame_idx": 71, "global_frame_idx": 8528, "task_index": 6}, {"db_idx": 8529, "episode_idx": 30, "frame_idx": 72, "global_frame_idx": 8529, "task_index": 6}, {"db_idx": 8530, "episode_idx": 30, "frame_idx": 73, "global_frame_idx": 8530, "task_index": 6}, {"db_idx": 8531, "episode_idx": 30, "frame_idx": 74, "global_frame_idx": 8531, "task_index": 6}, {"db_idx": 8532, "episode_idx": 30, "frame_idx": 75, "global_frame_idx": 8532, "task_index": 6}, {"db_idx": 8533, "episode_idx": 30, "frame_idx": 76, "global_frame_idx": 8533, "task_index": 6}, {"db_idx": 8534, "episode_idx": 30, "frame_idx": 77, "global_frame_idx": 8534, "task_index": 6}, {"db_idx": 8535, "episode_idx": 30, "frame_idx": 78, "global_frame_idx": 8535, "task_index": 6}, {"db_idx": 8536, "episode_idx": 30, "frame_idx": 79, "global_frame_idx": 8536, "task_index": 6}, {"db_idx": 8537, "episode_idx": 30, "frame_idx": 80, 
"global_frame_idx": 8537, "task_index": 6}, {"db_idx": 8538, "episode_idx": 30, "frame_idx": 81, "global_frame_idx": 8538, "task_index": 6}, {"db_idx": 8539, "episode_idx": 30, "frame_idx": 82, "global_frame_idx": 8539, "task_index": 6}, {"db_idx": 8540, "episode_idx": 30, "frame_idx": 83, "global_frame_idx": 8540, "task_index": 6}, {"db_idx": 8541, "episode_idx": 30, "frame_idx": 84, "global_frame_idx": 8541, "task_index": 6}, {"db_idx": 8542, "episode_idx": 30, "frame_idx": 85, "global_frame_idx": 8542, "task_index": 6}, {"db_idx": 8543, "episode_idx": 30, "frame_idx": 86, "global_frame_idx": 8543, "task_index": 6}, {"db_idx": 8544, "episode_idx": 30, "frame_idx": 87, "global_frame_idx": 8544, "task_index": 6}, {"db_idx": 8545, "episode_idx": 30, "frame_idx": 88, "global_frame_idx": 8545, "task_index": 6}, {"db_idx": 8546, "episode_idx": 30, "frame_idx": 89, "global_frame_idx": 8546, "task_index": 6}, {"db_idx": 8547, "episode_idx": 30, "frame_idx": 90, "global_frame_idx": 8547, "task_index": 6}, {"db_idx": 8548, "episode_idx": 30, "frame_idx": 91, "global_frame_idx": 8548, "task_index": 6}, {"db_idx": 8549, "episode_idx": 30, "frame_idx": 92, "global_frame_idx": 8549, "task_index": 6}, {"db_idx": 8550, "episode_idx": 30, "frame_idx": 93, "global_frame_idx": 8550, "task_index": 6}, {"db_idx": 8551, "episode_idx": 30, "frame_idx": 94, "global_frame_idx": 8551, "task_index": 6}, {"db_idx": 8552, "episode_idx": 30, "frame_idx": 95, "global_frame_idx": 8552, "task_index": 6}, {"db_idx": 8553, "episode_idx": 30, "frame_idx": 96, "global_frame_idx": 8553, "task_index": 6}, {"db_idx": 8554, "episode_idx": 30, "frame_idx": 97, "global_frame_idx": 8554, "task_index": 6}, {"db_idx": 8555, "episode_idx": 30, "frame_idx": 98, "global_frame_idx": 8555, "task_index": 6}, {"db_idx": 8556, "episode_idx": 30, "frame_idx": 99, "global_frame_idx": 8556, "task_index": 6}, {"db_idx": 8557, "episode_idx": 30, "frame_idx": 100, "global_frame_idx": 8557, "task_index": 6}, {"db_idx": 
8558, "episode_idx": 30, "frame_idx": 101, "global_frame_idx": 8558, "task_index": 6}, {"db_idx": 8559, "episode_idx": 30, "frame_idx": 102, "global_frame_idx": 8559, "task_index": 6}, {"db_idx": 8560, "episode_idx": 30, "frame_idx": 103, "global_frame_idx": 8560, "task_index": 6}, {"db_idx": 8561, "episode_idx": 30, "frame_idx": 104, "global_frame_idx": 8561, "task_index": 6}, {"db_idx": 8562, "episode_idx": 30, "frame_idx": 105, "global_frame_idx": 8562, "task_index": 6}, {"db_idx": 8563, "episode_idx": 30, "frame_idx": 106, "global_frame_idx": 8563, "task_index": 6}, {"db_idx": 8564, "episode_idx": 30, "frame_idx": 107, "global_frame_idx": 8564, "task_index": 6}, {"db_idx": 8565, "episode_idx": 30, "frame_idx": 108, "global_frame_idx": 8565, "task_index": 6}, {"db_idx": 8566, "episode_idx": 30, "frame_idx": 109, "global_frame_idx": 8566, "task_index": 6}, {"db_idx": 8567, "episode_idx": 30, "frame_idx": 110, "global_frame_idx": 8567, "task_index": 6}, {"db_idx": 8568, "episode_idx": 30, "frame_idx": 111, "global_frame_idx": 8568, "task_index": 6}, {"db_idx": 8569, "episode_idx": 30, "frame_idx": 112, "global_frame_idx": 8569, "task_index": 6}, {"db_idx": 8570, "episode_idx": 30, "frame_idx": 113, "global_frame_idx": 8570, "task_index": 6}, {"db_idx": 8571, "episode_idx": 30, "frame_idx": 114, "global_frame_idx": 8571, "task_index": 6}, {"db_idx": 8572, "episode_idx": 30, "frame_idx": 115, "global_frame_idx": 8572, "task_index": 6}, {"db_idx": 8573, "episode_idx": 30, "frame_idx": 116, "global_frame_idx": 8573, "task_index": 6}, {"db_idx": 8574, "episode_idx": 30, "frame_idx": 117, "global_frame_idx": 8574, "task_index": 6}, {"db_idx": 8575, "episode_idx": 30, "frame_idx": 118, "global_frame_idx": 8575, "task_index": 6}, {"db_idx": 8576, "episode_idx": 30, "frame_idx": 119, "global_frame_idx": 8576, "task_index": 6}, {"db_idx": 8577, "episode_idx": 30, "frame_idx": 120, "global_frame_idx": 8577, "task_index": 6}, {"db_idx": 8578, "episode_idx": 30, "frame_idx": 
121, "global_frame_idx": 8578, "task_index": 6}, {"db_idx": 8579, "episode_idx": 30, "frame_idx": 122, "global_frame_idx": 8579, "task_index": 6}, {"db_idx": 8580, "episode_idx": 30, "frame_idx": 123, "global_frame_idx": 8580, "task_index": 6}, {"db_idx": 8581, "episode_idx": 30, "frame_idx": 124, "global_frame_idx": 8581, "task_index": 6}, {"db_idx": 8582, "episode_idx": 30, "frame_idx": 125, "global_frame_idx": 8582, "task_index": 6}, {"db_idx": 8583, "episode_idx": 30, "frame_idx": 126, "global_frame_idx": 8583, "task_index": 6}, {"db_idx": 8584, "episode_idx": 30, "frame_idx": 127, "global_frame_idx": 8584, "task_index": 6}, {"db_idx": 8585, "episode_idx": 30, "frame_idx": 128, "global_frame_idx": 8585, "task_index": 6}, {"db_idx": 8586, "episode_idx": 30, "frame_idx": 129, "global_frame_idx": 8586, "task_index": 6}, {"db_idx": 8587, "episode_idx": 30, "frame_idx": 130, "global_frame_idx": 8587, "task_index": 6}, {"db_idx": 8588, "episode_idx": 30, "frame_idx": 131, "global_frame_idx": 8588, "task_index": 6}, {"db_idx": 8589, "episode_idx": 30, "frame_idx": 132, "global_frame_idx": 8589, "task_index": 6}, {"db_idx": 8590, "episode_idx": 30, "frame_idx": 133, "global_frame_idx": 8590, "task_index": 6}, {"db_idx": 8591, "episode_idx": 30, "frame_idx": 134, "global_frame_idx": 8591, "task_index": 6}, {"db_idx": 8592, "episode_idx": 30, "frame_idx": 135, "global_frame_idx": 8592, "task_index": 6}, {"db_idx": 8593, "episode_idx": 30, "frame_idx": 136, "global_frame_idx": 8593, "task_index": 6}, {"db_idx": 8594, "episode_idx": 30, "frame_idx": 137, "global_frame_idx": 8594, "task_index": 6}, {"db_idx": 8595, "episode_idx": 30, "frame_idx": 138, "global_frame_idx": 8595, "task_index": 6}, {"db_idx": 8596, "episode_idx": 30, "frame_idx": 139, "global_frame_idx": 8596, "task_index": 6}, {"db_idx": 8597, "episode_idx": 30, "frame_idx": 140, "global_frame_idx": 8597, "task_index": 6}, {"db_idx": 8598, "episode_idx": 30, "frame_idx": 141, "global_frame_idx": 8598, 
"task_index": 6}, {"db_idx": 8599, "episode_idx": 30, "frame_idx": 142, "global_frame_idx": 8599, "task_index": 6}, {"db_idx": 8600, "episode_idx": 30, "frame_idx": 143, "global_frame_idx": 8600, "task_index": 6}, {"db_idx": 8601, "episode_idx": 30, "frame_idx": 144, "global_frame_idx": 8601, "task_index": 6}, {"db_idx": 8602, "episode_idx": 30, "frame_idx": 145, "global_frame_idx": 8602, "task_index": 6}, {"db_idx": 8603, "episode_idx": 30, "frame_idx": 146, "global_frame_idx": 8603, "task_index": 6}, {"db_idx": 8604, "episode_idx": 30, "frame_idx": 147, "global_frame_idx": 8604, "task_index": 6}, {"db_idx": 8605, "episode_idx": 30, "frame_idx": 148, "global_frame_idx": 8605, "task_index": 6}, {"db_idx": 8606, "episode_idx": 30, "frame_idx": 149, "global_frame_idx": 8606, "task_index": 6}, {"db_idx": 8607, "episode_idx": 30, "frame_idx": 150, "global_frame_idx": 8607, "task_index": 6}, {"db_idx": 8608, "episode_idx": 30, "frame_idx": 151, "global_frame_idx": 8608, "task_index": 6}, {"db_idx": 8609, "episode_idx": 30, "frame_idx": 152, "global_frame_idx": 8609, "task_index": 6}, {"db_idx": 8610, "episode_idx": 30, "frame_idx": 153, "global_frame_idx": 8610, "task_index": 6}, {"db_idx": 8611, "episode_idx": 30, "frame_idx": 154, "global_frame_idx": 8611, "task_index": 6}, {"db_idx": 8612, "episode_idx": 30, "frame_idx": 155, "global_frame_idx": 8612, "task_index": 6}, {"db_idx": 8613, "episode_idx": 30, "frame_idx": 156, "global_frame_idx": 8613, "task_index": 6}, {"db_idx": 8614, "episode_idx": 30, "frame_idx": 157, "global_frame_idx": 8614, "task_index": 6}, {"db_idx": 8615, "episode_idx": 30, "frame_idx": 158, "global_frame_idx": 8615, "task_index": 6}, {"db_idx": 8616, "episode_idx": 30, "frame_idx": 159, "global_frame_idx": 8616, "task_index": 6}, {"db_idx": 8617, "episode_idx": 30, "frame_idx": 160, "global_frame_idx": 8617, "task_index": 6}, {"db_idx": 8618, "episode_idx": 30, "frame_idx": 161, "global_frame_idx": 8618, "task_index": 6}, {"db_idx": 8619, 
"episode_idx": 30, "frame_idx": 162, "global_frame_idx": 8619, "task_index": 6}, {"db_idx": 8620, "episode_idx": 30, "frame_idx": 163, "global_frame_idx": 8620, "task_index": 6}, {"db_idx": 8621, "episode_idx": 30, "frame_idx": 164, "global_frame_idx": 8621, "task_index": 6}, {"db_idx": 8622, "episode_idx": 30, "frame_idx": 165, "global_frame_idx": 8622, "task_index": 6}, {"db_idx": 8623, "episode_idx": 30, "frame_idx": 166, "global_frame_idx": 8623, "task_index": 6}, {"db_idx": 8624, "episode_idx": 30, "frame_idx": 167, "global_frame_idx": 8624, "task_index": 6}, {"db_idx": 8625, "episode_idx": 30, "frame_idx": 168, "global_frame_idx": 8625, "task_index": 6}, {"db_idx": 8626, "episode_idx": 30, "frame_idx": 169, "global_frame_idx": 8626, "task_index": 6}, {"db_idx": 8627, "episode_idx": 30, "frame_idx": 170, "global_frame_idx": 8627, "task_index": 6}, {"db_idx": 8628, "episode_idx": 30, "frame_idx": 171, "global_frame_idx": 8628, "task_index": 6}, {"db_idx": 8629, "episode_idx": 30, "frame_idx": 172, "global_frame_idx": 8629, "task_index": 6}, {"db_idx": 8630, "episode_idx": 30, "frame_idx": 173, "global_frame_idx": 8630, "task_index": 6}, {"db_idx": 8631, "episode_idx": 30, "frame_idx": 174, "global_frame_idx": 8631, "task_index": 6}, {"db_idx": 8632, "episode_idx": 30, "frame_idx": 175, "global_frame_idx": 8632, "task_index": 6}, {"db_idx": 8633, "episode_idx": 30, "frame_idx": 176, "global_frame_idx": 8633, "task_index": 6}, {"db_idx": 8634, "episode_idx": 30, "frame_idx": 177, "global_frame_idx": 8634, "task_index": 6}, {"db_idx": 8635, "episode_idx": 30, "frame_idx": 178, "global_frame_idx": 8635, "task_index": 6}, {"db_idx": 8636, "episode_idx": 30, "frame_idx": 179, "global_frame_idx": 8636, "task_index": 6}, {"db_idx": 8637, "episode_idx": 30, "frame_idx": 180, "global_frame_idx": 8637, "task_index": 6}, {"db_idx": 8638, "episode_idx": 30, "frame_idx": 181, "global_frame_idx": 8638, "task_index": 6}, {"db_idx": 8639, "episode_idx": 30, "frame_idx": 182, 
"global_frame_idx": 8639, "task_index": 6}, {"db_idx": 8640, "episode_idx": 30, "frame_idx": 183, "global_frame_idx": 8640, "task_index": 6}, {"db_idx": 8641, "episode_idx": 30, "frame_idx": 184, "global_frame_idx": 8641, "task_index": 6}, {"db_idx": 8642, "episode_idx": 30, "frame_idx": 185, "global_frame_idx": 8642, "task_index": 6}, {"db_idx": 8643, "episode_idx": 30, "frame_idx": 186, "global_frame_idx": 8643, "task_index": 6}, {"db_idx": 8644, "episode_idx": 30, "frame_idx": 187, "global_frame_idx": 8644, "task_index": 6}, {"db_idx": 8645, "episode_idx": 30, "frame_idx": 188, "global_frame_idx": 8645, "task_index": 6}, {"db_idx": 8646, "episode_idx": 30, "frame_idx": 189, "global_frame_idx": 8646, "task_index": 6}, {"db_idx": 8647, "episode_idx": 30, "frame_idx": 190, "global_frame_idx": 8647, "task_index": 6}, {"db_idx": 8648, "episode_idx": 30, "frame_idx": 191, "global_frame_idx": 8648, "task_index": 6}, {"db_idx": 8649, "episode_idx": 30, "frame_idx": 192, "global_frame_idx": 8649, "task_index": 6}, {"db_idx": 8650, "episode_idx": 30, "frame_idx": 193, "global_frame_idx": 8650, "task_index": 6}, {"db_idx": 8651, "episode_idx": 30, "frame_idx": 194, "global_frame_idx": 8651, "task_index": 6}, {"db_idx": 8652, "episode_idx": 30, "frame_idx": 195, "global_frame_idx": 8652, "task_index": 6}, {"db_idx": 8653, "episode_idx": 30, "frame_idx": 196, "global_frame_idx": 8653, "task_index": 6}, {"db_idx": 8654, "episode_idx": 30, "frame_idx": 197, "global_frame_idx": 8654, "task_index": 6}, {"db_idx": 8655, "episode_idx": 30, "frame_idx": 198, "global_frame_idx": 8655, "task_index": 6}, {"db_idx": 8656, "episode_idx": 30, "frame_idx": 199, "global_frame_idx": 8656, "task_index": 6}, {"db_idx": 8657, "episode_idx": 30, "frame_idx": 200, "global_frame_idx": 8657, "task_index": 6}, {"db_idx": 8658, "episode_idx": 30, "frame_idx": 201, "global_frame_idx": 8658, "task_index": 6}, {"db_idx": 8659, "episode_idx": 30, "frame_idx": 202, "global_frame_idx": 8659, "task_index": 
6}, {"db_idx": 8660, "episode_idx": 30, "frame_idx": 203, "global_frame_idx": 8660, "task_index": 6}, {"db_idx": 8661, "episode_idx": 30, "frame_idx": 204, "global_frame_idx": 8661, "task_index": 6}, {"db_idx": 8662, "episode_idx": 30, "frame_idx": 205, "global_frame_idx": 8662, "task_index": 6}, {"db_idx": 8663, "episode_idx": 30, "frame_idx": 206, "global_frame_idx": 8663, "task_index": 6}, {"db_idx": 8664, "episode_idx": 30, "frame_idx": 207, "global_frame_idx": 8664, "task_index": 6}, {"db_idx": 8665, "episode_idx": 30, "frame_idx": 208, "global_frame_idx": 8665, "task_index": 6}, {"db_idx": 8666, "episode_idx": 30, "frame_idx": 209, "global_frame_idx": 8666, "task_index": 6}, {"db_idx": 8667, "episode_idx": 30, "frame_idx": 210, "global_frame_idx": 8667, "task_index": 6}, {"db_idx": 8668, "episode_idx": 30, "frame_idx": 211, "global_frame_idx": 8668, "task_index": 6}, {"db_idx": 8669, "episode_idx": 30, "frame_idx": 212, "global_frame_idx": 8669, "task_index": 6}, {"db_idx": 8670, "episode_idx": 30, "frame_idx": 213, "global_frame_idx": 8670, "task_index": 6}, {"db_idx": 8671, "episode_idx": 30, "frame_idx": 214, "global_frame_idx": 8671, "task_index": 6}, {"db_idx": 8672, "episode_idx": 30, "frame_idx": 215, "global_frame_idx": 8672, "task_index": 6}, {"db_idx": 8673, "episode_idx": 30, "frame_idx": 216, "global_frame_idx": 8673, "task_index": 6}, {"db_idx": 8674, "episode_idx": 30, "frame_idx": 217, "global_frame_idx": 8674, "task_index": 6}, {"db_idx": 8675, "episode_idx": 30, "frame_idx": 218, "global_frame_idx": 8675, "task_index": 6}, {"db_idx": 8676, "episode_idx": 30, "frame_idx": 219, "global_frame_idx": 8676, "task_index": 6}, {"db_idx": 8677, "episode_idx": 30, "frame_idx": 220, "global_frame_idx": 8677, "task_index": 6}, {"db_idx": 8678, "episode_idx": 30, "frame_idx": 221, "global_frame_idx": 8678, "task_index": 6}, {"db_idx": 8679, "episode_idx": 30, "frame_idx": 222, "global_frame_idx": 8679, "task_index": 6}, {"db_idx": 8680, "episode_idx": 30, 
"frame_idx": 223, "global_frame_idx": 8680, "task_index": 6}, {"db_idx": 8681, "episode_idx": 30, "frame_idx": 224, "global_frame_idx": 8681, "task_index": 6}, {"db_idx": 8682, "episode_idx": 30, "frame_idx": 225, "global_frame_idx": 8682, "task_index": 6}, {"db_idx": 8683, "episode_idx": 30, "frame_idx": 226, "global_frame_idx": 8683, "task_index": 6}, {"db_idx": 8684, "episode_idx": 30, "frame_idx": 227, "global_frame_idx": 8684, "task_index": 6}, {"db_idx": 8685, "episode_idx": 30, "frame_idx": 228, "global_frame_idx": 8685, "task_index": 6}, {"db_idx": 8686, "episode_idx": 30, "frame_idx": 229, "global_frame_idx": 8686, "task_index": 6}, {"db_idx": 8687, "episode_idx": 30, "frame_idx": 230, "global_frame_idx": 8687, "task_index": 6}, {"db_idx": 8688, "episode_idx": 30, "frame_idx": 231, "global_frame_idx": 8688, "task_index": 6}, {"db_idx": 8689, "episode_idx": 30, "frame_idx": 232, "global_frame_idx": 8689, "task_index": 6}, {"db_idx": 8690, "episode_idx": 30, "frame_idx": 233, "global_frame_idx": 8690, "task_index": 6}, {"db_idx": 8691, "episode_idx": 30, "frame_idx": 234, "global_frame_idx": 8691, "task_index": 6}, {"db_idx": 8692, "episode_idx": 30, "frame_idx": 235, "global_frame_idx": 8692, "task_index": 6}, {"db_idx": 8693, "episode_idx": 30, "frame_idx": 236, "global_frame_idx": 8693, "task_index": 6}, {"db_idx": 8694, "episode_idx": 30, "frame_idx": 237, "global_frame_idx": 8694, "task_index": 6}, {"db_idx": 8695, "episode_idx": 30, "frame_idx": 238, "global_frame_idx": 8695, "task_index": 6}, {"db_idx": 8696, "episode_idx": 30, "frame_idx": 239, "global_frame_idx": 8696, "task_index": 6}, {"db_idx": 8697, "episode_idx": 30, "frame_idx": 240, "global_frame_idx": 8697, "task_index": 6}, {"db_idx": 8698, "episode_idx": 30, "frame_idx": 241, "global_frame_idx": 8698, "task_index": 6}, {"db_idx": 8699, "episode_idx": 30, "frame_idx": 242, "global_frame_idx": 8699, "task_index": 6}, {"db_idx": 8700, "episode_idx": 30, "frame_idx": 243, "global_frame_idx": 
8700, "task_index": 6}, {"db_idx": 8701, "episode_idx": 30, "frame_idx": 244, "global_frame_idx": 8701, "task_index": 6}, {"db_idx": 8702, "episode_idx": 30, "frame_idx": 245, "global_frame_idx": 8702, "task_index": 6}, {"db_idx": 8703, "episode_idx": 30, "frame_idx": 246, "global_frame_idx": 8703, "task_index": 6}, {"db_idx": 8704, "episode_idx": 30, "frame_idx": 247, "global_frame_idx": 8704, "task_index": 6}, {"db_idx": 8705, "episode_idx": 30, "frame_idx": 248, "global_frame_idx": 8705, "task_index": 6}, {"db_idx": 8706, "episode_idx": 30, "frame_idx": 249, "global_frame_idx": 8706, "task_index": 6}, {"db_idx": 8707, "episode_idx": 30, "frame_idx": 250, "global_frame_idx": 8707, "task_index": 6}, {"db_idx": 8708, "episode_idx": 30, "frame_idx": 251, "global_frame_idx": 8708, "task_index": 6}, {"db_idx": 8709, "episode_idx": 30, "frame_idx": 252, "global_frame_idx": 8709, "task_index": 6}, {"db_idx": 8710, "episode_idx": 30, "frame_idx": 253, "global_frame_idx": 8710, "task_index": 6}, {"db_idx": 8711, "episode_idx": 30, "frame_idx": 254, "global_frame_idx": 8711, "task_index": 6}, {"db_idx": 8712, "episode_idx": 30, "frame_idx": 255, "global_frame_idx": 8712, "task_index": 6}, {"db_idx": 8713, "episode_idx": 30, "frame_idx": 256, "global_frame_idx": 8713, "task_index": 6}, {"db_idx": 8714, "episode_idx": 30, "frame_idx": 257, "global_frame_idx": 8714, "task_index": 6}, {"db_idx": 8715, "episode_idx": 30, "frame_idx": 258, "global_frame_idx": 8715, "task_index": 6}, {"db_idx": 8716, "episode_idx": 30, "frame_idx": 259, "global_frame_idx": 8716, "task_index": 6}, {"db_idx": 8717, "episode_idx": 30, "frame_idx": 260, "global_frame_idx": 8717, "task_index": 6}, {"db_idx": 8718, "episode_idx": 30, "frame_idx": 261, "global_frame_idx": 8718, "task_index": 6}, {"db_idx": 8719, "episode_idx": 30, "frame_idx": 262, "global_frame_idx": 8719, "task_index": 6}, {"db_idx": 8720, "episode_idx": 30, "frame_idx": 263, "global_frame_idx": 8720, "task_index": 6}, {"db_idx": 
8721, "episode_idx": 30, "frame_idx": 264, "global_frame_idx": 8721, "task_index": 6}, {"db_idx": 8722, "episode_idx": 30, "frame_idx": 265, "global_frame_idx": 8722, "task_index": 6}, {"db_idx": 8723, "episode_idx": 30, "frame_idx": 266, "global_frame_idx": 8723, "task_index": 6}, {"db_idx": 8724, "episode_idx": 30, "frame_idx": 267, "global_frame_idx": 8724, "task_index": 6}, {"db_idx": 8725, "episode_idx": 30, "frame_idx": 268, "global_frame_idx": 8725, "task_index": 6}, {"db_idx": 8726, "episode_idx": 30, "frame_idx": 269, "global_frame_idx": 8726, "task_index": 6}, {"db_idx": 8727, "episode_idx": 30, "frame_idx": 270, "global_frame_idx": 8727, "task_index": 6}, {"db_idx": 8728, "episode_idx": 30, "frame_idx": 271, "global_frame_idx": 8728, "task_index": 6}, {"db_idx": 8729, "episode_idx": 30, "frame_idx": 272, "global_frame_idx": 8729, "task_index": 6}, {"db_idx": 8730, "episode_idx": 30, "frame_idx": 273, "global_frame_idx": 8730, "task_index": 6}, {"db_idx": 8731, "episode_idx": 30, "frame_idx": 274, "global_frame_idx": 8731, "task_index": 6}, {"db_idx": 8732, "episode_idx": 31, "frame_idx": 0, "global_frame_idx": 8732, "task_index": 6}, {"db_idx": 8733, "episode_idx": 31, "frame_idx": 1, "global_frame_idx": 8733, "task_index": 6}, {"db_idx": 8734, "episode_idx": 31, "frame_idx": 2, "global_frame_idx": 8734, "task_index": 6}, {"db_idx": 8735, "episode_idx": 31, "frame_idx": 3, "global_frame_idx": 8735, "task_index": 6}, {"db_idx": 8736, "episode_idx": 31, "frame_idx": 4, "global_frame_idx": 8736, "task_index": 6}, {"db_idx": 8737, "episode_idx": 31, "frame_idx": 5, "global_frame_idx": 8737, "task_index": 6}, {"db_idx": 8738, "episode_idx": 31, "frame_idx": 6, "global_frame_idx": 8738, "task_index": 6}, {"db_idx": 8739, "episode_idx": 31, "frame_idx": 7, "global_frame_idx": 8739, "task_index": 6}, {"db_idx": 8740, "episode_idx": 31, "frame_idx": 8, "global_frame_idx": 8740, "task_index": 6}, {"db_idx": 8741, "episode_idx": 31, "frame_idx": 9, 
"global_frame_idx": 8741, "task_index": 6}, {"db_idx": 8742, "episode_idx": 31, "frame_idx": 10, "global_frame_idx": 8742, "task_index": 6}, {"db_idx": 8743, "episode_idx": 31, "frame_idx": 11, "global_frame_idx": 8743, "task_index": 6}, {"db_idx": 8744, "episode_idx": 31, "frame_idx": 12, "global_frame_idx": 8744, "task_index": 6}, {"db_idx": 8745, "episode_idx": 31, "frame_idx": 13, "global_frame_idx": 8745, "task_index": 6}, {"db_idx": 8746, "episode_idx": 31, "frame_idx": 14, "global_frame_idx": 8746, "task_index": 6}, {"db_idx": 8747, "episode_idx": 31, "frame_idx": 15, "global_frame_idx": 8747, "task_index": 6}, {"db_idx": 8748, "episode_idx": 31, "frame_idx": 16, "global_frame_idx": 8748, "task_index": 6}, {"db_idx": 8749, "episode_idx": 31, "frame_idx": 17, "global_frame_idx": 8749, "task_index": 6}, {"db_idx": 8750, "episode_idx": 31, "frame_idx": 18, "global_frame_idx": 8750, "task_index": 6}, {"db_idx": 8751, "episode_idx": 31, "frame_idx": 19, "global_frame_idx": 8751, "task_index": 6}, {"db_idx": 8752, "episode_idx": 31, "frame_idx": 20, "global_frame_idx": 8752, "task_index": 6}, {"db_idx": 8753, "episode_idx": 31, "frame_idx": 21, "global_frame_idx": 8753, "task_index": 6}, {"db_idx": 8754, "episode_idx": 31, "frame_idx": 22, "global_frame_idx": 8754, "task_index": 6}, {"db_idx": 8755, "episode_idx": 31, "frame_idx": 23, "global_frame_idx": 8755, "task_index": 6}, {"db_idx": 8756, "episode_idx": 31, "frame_idx": 24, "global_frame_idx": 8756, "task_index": 6}, {"db_idx": 8757, "episode_idx": 31, "frame_idx": 25, "global_frame_idx": 8757, "task_index": 6}, {"db_idx": 8758, "episode_idx": 31, "frame_idx": 26, "global_frame_idx": 8758, "task_index": 6}, {"db_idx": 8759, "episode_idx": 31, "frame_idx": 27, "global_frame_idx": 8759, "task_index": 6}, {"db_idx": 8760, "episode_idx": 31, "frame_idx": 28, "global_frame_idx": 8760, "task_index": 6}, {"db_idx": 8761, "episode_idx": 31, "frame_idx": 29, "global_frame_idx": 8761, "task_index": 6}, {"db_idx": 
8762, "episode_idx": 31, "frame_idx": 30, "global_frame_idx": 8762, "task_index": 6}, {"db_idx": 8763, "episode_idx": 31, "frame_idx": 31, "global_frame_idx": 8763, "task_index": 6}, {"db_idx": 8764, "episode_idx": 31, "frame_idx": 32, "global_frame_idx": 8764, "task_index": 6}, {"db_idx": 8765, "episode_idx": 31, "frame_idx": 33, "global_frame_idx": 8765, "task_index": 6}, {"db_idx": 8766, "episode_idx": 31, "frame_idx": 34, "global_frame_idx": 8766, "task_index": 6}, {"db_idx": 8767, "episode_idx": 31, "frame_idx": 35, "global_frame_idx": 8767, "task_index": 6}, {"db_idx": 8768, "episode_idx": 31, "frame_idx": 36, "global_frame_idx": 8768, "task_index": 6}, {"db_idx": 8769, "episode_idx": 31, "frame_idx": 37, "global_frame_idx": 8769, "task_index": 6}, {"db_idx": 8770, "episode_idx": 31, "frame_idx": 38, "global_frame_idx": 8770, "task_index": 6}, {"db_idx": 8771, "episode_idx": 31, "frame_idx": 39, "global_frame_idx": 8771, "task_index": 6}, {"db_idx": 8772, "episode_idx": 31, "frame_idx": 40, "global_frame_idx": 8772, "task_index": 6}, {"db_idx": 8773, "episode_idx": 31, "frame_idx": 41, "global_frame_idx": 8773, "task_index": 6}, {"db_idx": 8774, "episode_idx": 31, "frame_idx": 42, "global_frame_idx": 8774, "task_index": 6}, {"db_idx": 8775, "episode_idx": 31, "frame_idx": 43, "global_frame_idx": 8775, "task_index": 6}, {"db_idx": 8776, "episode_idx": 31, "frame_idx": 44, "global_frame_idx": 8776, "task_index": 6}, {"db_idx": 8777, "episode_idx": 31, "frame_idx": 45, "global_frame_idx": 8777, "task_index": 6}, {"db_idx": 8778, "episode_idx": 31, "frame_idx": 46, "global_frame_idx": 8778, "task_index": 6}, {"db_idx": 8779, "episode_idx": 31, "frame_idx": 47, "global_frame_idx": 8779, "task_index": 6}, {"db_idx": 8780, "episode_idx": 31, "frame_idx": 48, "global_frame_idx": 8780, "task_index": 6}, {"db_idx": 8781, "episode_idx": 31, "frame_idx": 49, "global_frame_idx": 8781, "task_index": 6}, {"db_idx": 8782, "episode_idx": 31, "frame_idx": 50, 
"global_frame_idx": 8782, "task_index": 6}, {"db_idx": 8783, "episode_idx": 31, "frame_idx": 51, "global_frame_idx": 8783, "task_index": 6}, {"db_idx": 8784, "episode_idx": 31, "frame_idx": 52, "global_frame_idx": 8784, "task_index": 6}, {"db_idx": 8785, "episode_idx": 31, "frame_idx": 53, "global_frame_idx": 8785, "task_index": 6}, {"db_idx": 8786, "episode_idx": 31, "frame_idx": 54, "global_frame_idx": 8786, "task_index": 6}, {"db_idx": 8787, "episode_idx": 31, "frame_idx": 55, "global_frame_idx": 8787, "task_index": 6}, {"db_idx": 8788, "episode_idx": 31, "frame_idx": 56, "global_frame_idx": 8788, "task_index": 6}, {"db_idx": 8789, "episode_idx": 31, "frame_idx": 57, "global_frame_idx": 8789, "task_index": 6}, {"db_idx": 8790, "episode_idx": 31, "frame_idx": 58, "global_frame_idx": 8790, "task_index": 6}, {"db_idx": 8791, "episode_idx": 31, "frame_idx": 59, "global_frame_idx": 8791, "task_index": 6}, {"db_idx": 8792, "episode_idx": 31, "frame_idx": 60, "global_frame_idx": 8792, "task_index": 6}, {"db_idx": 8793, "episode_idx": 31, "frame_idx": 61, "global_frame_idx": 8793, "task_index": 6}, {"db_idx": 8794, "episode_idx": 31, "frame_idx": 62, "global_frame_idx": 8794, "task_index": 6}, {"db_idx": 8795, "episode_idx": 31, "frame_idx": 63, "global_frame_idx": 8795, "task_index": 6}, {"db_idx": 8796, "episode_idx": 31, "frame_idx": 64, "global_frame_idx": 8796, "task_index": 6}, {"db_idx": 8797, "episode_idx": 31, "frame_idx": 65, "global_frame_idx": 8797, "task_index": 6}, {"db_idx": 8798, "episode_idx": 31, "frame_idx": 66, "global_frame_idx": 8798, "task_index": 6}, {"db_idx": 8799, "episode_idx": 31, "frame_idx": 67, "global_frame_idx": 8799, "task_index": 6}, {"db_idx": 8800, "episode_idx": 31, "frame_idx": 68, "global_frame_idx": 8800, "task_index": 6}, {"db_idx": 8801, "episode_idx": 31, "frame_idx": 69, "global_frame_idx": 8801, "task_index": 6}, {"db_idx": 8802, "episode_idx": 31, "frame_idx": 70, "global_frame_idx": 8802, "task_index": 6}, {"db_idx": 
8803, "episode_idx": 31, "frame_idx": 71, "global_frame_idx": 8803, "task_index": 6}, {"db_idx": 8804, "episode_idx": 31, "frame_idx": 72, "global_frame_idx": 8804, "task_index": 6}, {"db_idx": 8805, "episode_idx": 31, "frame_idx": 73, "global_frame_idx": 8805, "task_index": 6}, {"db_idx": 8806, "episode_idx": 31, "frame_idx": 74, "global_frame_idx": 8806, "task_index": 6}, {"db_idx": 8807, "episode_idx": 31, "frame_idx": 75, "global_frame_idx": 8807, "task_index": 6}, {"db_idx": 8808, "episode_idx": 31, "frame_idx": 76, "global_frame_idx": 8808, "task_index": 6}, {"db_idx": 8809, "episode_idx": 31, "frame_idx": 77, "global_frame_idx": 8809, "task_index": 6}, {"db_idx": 8810, "episode_idx": 31, "frame_idx": 78, "global_frame_idx": 8810, "task_index": 6}, {"db_idx": 8811, "episode_idx": 31, "frame_idx": 79, "global_frame_idx": 8811, "task_index": 6}, {"db_idx": 8812, "episode_idx": 31, "frame_idx": 80, "global_frame_idx": 8812, "task_index": 6}, {"db_idx": 8813, "episode_idx": 31, "frame_idx": 81, "global_frame_idx": 8813, "task_index": 6}, {"db_idx": 8814, "episode_idx": 31, "frame_idx": 82, "global_frame_idx": 8814, "task_index": 6}, {"db_idx": 8815, "episode_idx": 31, "frame_idx": 83, "global_frame_idx": 8815, "task_index": 6}, {"db_idx": 8816, "episode_idx": 31, "frame_idx": 84, "global_frame_idx": 8816, "task_index": 6}, {"db_idx": 8817, "episode_idx": 31, "frame_idx": 85, "global_frame_idx": 8817, "task_index": 6}, {"db_idx": 8818, "episode_idx": 31, "frame_idx": 86, "global_frame_idx": 8818, "task_index": 6}, {"db_idx": 8819, "episode_idx": 31, "frame_idx": 87, "global_frame_idx": 8819, "task_index": 6}, {"db_idx": 8820, "episode_idx": 31, "frame_idx": 88, "global_frame_idx": 8820, "task_index": 6}, {"db_idx": 8821, "episode_idx": 31, "frame_idx": 89, "global_frame_idx": 8821, "task_index": 6}, {"db_idx": 8822, "episode_idx": 31, "frame_idx": 90, "global_frame_idx": 8822, "task_index": 6}, {"db_idx": 8823, "episode_idx": 31, "frame_idx": 91, 
"global_frame_idx": 8823, "task_index": 6}, {"db_idx": 8824, "episode_idx": 31, "frame_idx": 92, "global_frame_idx": 8824, "task_index": 6}, {"db_idx": 8825, "episode_idx": 31, "frame_idx": 93, "global_frame_idx": 8825, "task_index": 6}, {"db_idx": 8826, "episode_idx": 31, "frame_idx": 94, "global_frame_idx": 8826, "task_index": 6}, {"db_idx": 8827, "episode_idx": 31, "frame_idx": 95, "global_frame_idx": 8827, "task_index": 6}, {"db_idx": 8828, "episode_idx": 31, "frame_idx": 96, "global_frame_idx": 8828, "task_index": 6}, {"db_idx": 8829, "episode_idx": 31, "frame_idx": 97, "global_frame_idx": 8829, "task_index": 6}, {"db_idx": 8830, "episode_idx": 31, "frame_idx": 98, "global_frame_idx": 8830, "task_index": 6}, {"db_idx": 8831, "episode_idx": 31, "frame_idx": 99, "global_frame_idx": 8831, "task_index": 6}, {"db_idx": 8832, "episode_idx": 31, "frame_idx": 100, "global_frame_idx": 8832, "task_index": 6}, {"db_idx": 8833, "episode_idx": 31, "frame_idx": 101, "global_frame_idx": 8833, "task_index": 6}, {"db_idx": 8834, "episode_idx": 31, "frame_idx": 102, "global_frame_idx": 8834, "task_index": 6}, {"db_idx": 8835, "episode_idx": 31, "frame_idx": 103, "global_frame_idx": 8835, "task_index": 6}, {"db_idx": 8836, "episode_idx": 31, "frame_idx": 104, "global_frame_idx": 8836, "task_index": 6}, {"db_idx": 8837, "episode_idx": 31, "frame_idx": 105, "global_frame_idx": 8837, "task_index": 6}, {"db_idx": 8838, "episode_idx": 31, "frame_idx": 106, "global_frame_idx": 8838, "task_index": 6}, {"db_idx": 8839, "episode_idx": 31, "frame_idx": 107, "global_frame_idx": 8839, "task_index": 6}, {"db_idx": 8840, "episode_idx": 31, "frame_idx": 108, "global_frame_idx": 8840, "task_index": 6}, {"db_idx": 8841, "episode_idx": 31, "frame_idx": 109, "global_frame_idx": 8841, "task_index": 6}, {"db_idx": 8842, "episode_idx": 31, "frame_idx": 110, "global_frame_idx": 8842, "task_index": 6}, {"db_idx": 8843, "episode_idx": 31, "frame_idx": 111, "global_frame_idx": 8843, "task_index": 6}, 
{"db_idx": 8844, "episode_idx": 31, "frame_idx": 112, "global_frame_idx": 8844, "task_index": 6}, {"db_idx": 8845, "episode_idx": 31, "frame_idx": 113, "global_frame_idx": 8845, "task_index": 6}, {"db_idx": 8846, "episode_idx": 31, "frame_idx": 114, "global_frame_idx": 8846, "task_index": 6}, {"db_idx": 8847, "episode_idx": 31, "frame_idx": 115, "global_frame_idx": 8847, "task_index": 6}, {"db_idx": 8848, "episode_idx": 31, "frame_idx": 116, "global_frame_idx": 8848, "task_index": 6}, {"db_idx": 8849, "episode_idx": 31, "frame_idx": 117, "global_frame_idx": 8849, "task_index": 6}, {"db_idx": 8850, "episode_idx": 31, "frame_idx": 118, "global_frame_idx": 8850, "task_index": 6}, {"db_idx": 8851, "episode_idx": 31, "frame_idx": 119, "global_frame_idx": 8851, "task_index": 6}, {"db_idx": 8852, "episode_idx": 31, "frame_idx": 120, "global_frame_idx": 8852, "task_index": 6}, {"db_idx": 8853, "episode_idx": 31, "frame_idx": 121, "global_frame_idx": 8853, "task_index": 6}, {"db_idx": 8854, "episode_idx": 31, "frame_idx": 122, "global_frame_idx": 8854, "task_index": 6}, {"db_idx": 8855, "episode_idx": 31, "frame_idx": 123, "global_frame_idx": 8855, "task_index": 6}, {"db_idx": 8856, "episode_idx": 31, "frame_idx": 124, "global_frame_idx": 8856, "task_index": 6}, {"db_idx": 8857, "episode_idx": 31, "frame_idx": 125, "global_frame_idx": 8857, "task_index": 6}, {"db_idx": 8858, "episode_idx": 31, "frame_idx": 126, "global_frame_idx": 8858, "task_index": 6}, {"db_idx": 8859, "episode_idx": 31, "frame_idx": 127, "global_frame_idx": 8859, "task_index": 6}, {"db_idx": 8860, "episode_idx": 31, "frame_idx": 128, "global_frame_idx": 8860, "task_index": 6}, {"db_idx": 8861, "episode_idx": 31, "frame_idx": 129, "global_frame_idx": 8861, "task_index": 6}, {"db_idx": 8862, "episode_idx": 31, "frame_idx": 130, "global_frame_idx": 8862, "task_index": 6}, {"db_idx": 8863, "episode_idx": 31, "frame_idx": 131, "global_frame_idx": 8863, "task_index": 6}, {"db_idx": 8864, "episode_idx": 31, 
"frame_idx": 132, "global_frame_idx": 8864, "task_index": 6}, {"db_idx": 8865, "episode_idx": 31, "frame_idx": 133, "global_frame_idx": 8865, "task_index": 6}, {"db_idx": 8866, "episode_idx": 31, "frame_idx": 134, "global_frame_idx": 8866, "task_index": 6}, {"db_idx": 8867, "episode_idx": 31, "frame_idx": 135, "global_frame_idx": 8867, "task_index": 6}, {"db_idx": 8868, "episode_idx": 31, "frame_idx": 136, "global_frame_idx": 8868, "task_index": 6}, {"db_idx": 8869, "episode_idx": 31, "frame_idx": 137, "global_frame_idx": 8869, "task_index": 6}, {"db_idx": 8870, "episode_idx": 31, "frame_idx": 138, "global_frame_idx": 8870, "task_index": 6}, {"db_idx": 8871, "episode_idx": 31, "frame_idx": 139, "global_frame_idx": 8871, "task_index": 6}, {"db_idx": 8872, "episode_idx": 31, "frame_idx": 140, "global_frame_idx": 8872, "task_index": 6}, {"db_idx": 8873, "episode_idx": 31, "frame_idx": 141, "global_frame_idx": 8873, "task_index": 6}, {"db_idx": 8874, "episode_idx": 31, "frame_idx": 142, "global_frame_idx": 8874, "task_index": 6}, {"db_idx": 8875, "episode_idx": 31, "frame_idx": 143, "global_frame_idx": 8875, "task_index": 6}, {"db_idx": 8876, "episode_idx": 31, "frame_idx": 144, "global_frame_idx": 8876, "task_index": 6}, {"db_idx": 8877, "episode_idx": 31, "frame_idx": 145, "global_frame_idx": 8877, "task_index": 6}, {"db_idx": 8878, "episode_idx": 31, "frame_idx": 146, "global_frame_idx": 8878, "task_index": 6}, {"db_idx": 8879, "episode_idx": 31, "frame_idx": 147, "global_frame_idx": 8879, "task_index": 6}, {"db_idx": 8880, "episode_idx": 31, "frame_idx": 148, "global_frame_idx": 8880, "task_index": 6}, {"db_idx": 8881, "episode_idx": 31, "frame_idx": 149, "global_frame_idx": 8881, "task_index": 6}, {"db_idx": 8882, "episode_idx": 31, "frame_idx": 150, "global_frame_idx": 8882, "task_index": 6}, {"db_idx": 8883, "episode_idx": 31, "frame_idx": 151, "global_frame_idx": 8883, "task_index": 6}, {"db_idx": 8884, "episode_idx": 31, "frame_idx": 152, "global_frame_idx": 
8884, "task_index": 6}, {"db_idx": 8885, "episode_idx": 31, "frame_idx": 153, "global_frame_idx": 8885, "task_index": 6}, {"db_idx": 8886, "episode_idx": 31, "frame_idx": 154, "global_frame_idx": 8886, "task_index": 6}, {"db_idx": 8887, "episode_idx": 31, "frame_idx": 155, "global_frame_idx": 8887, "task_index": 6}, {"db_idx": 8888, "episode_idx": 31, "frame_idx": 156, "global_frame_idx": 8888, "task_index": 6}, {"db_idx": 8889, "episode_idx": 31, "frame_idx": 157, "global_frame_idx": 8889, "task_index": 6}, {"db_idx": 8890, "episode_idx": 31, "frame_idx": 158, "global_frame_idx": 8890, "task_index": 6}, {"db_idx": 8891, "episode_idx": 31, "frame_idx": 159, "global_frame_idx": 8891, "task_index": 6}, {"db_idx": 8892, "episode_idx": 31, "frame_idx": 160, "global_frame_idx": 8892, "task_index": 6}, {"db_idx": 8893, "episode_idx": 31, "frame_idx": 161, "global_frame_idx": 8893, "task_index": 6}, {"db_idx": 8894, "episode_idx": 31, "frame_idx": 162, "global_frame_idx": 8894, "task_index": 6}, {"db_idx": 8895, "episode_idx": 31, "frame_idx": 163, "global_frame_idx": 8895, "task_index": 6}, {"db_idx": 8896, "episode_idx": 31, "frame_idx": 164, "global_frame_idx": 8896, "task_index": 6}, {"db_idx": 8897, "episode_idx": 31, "frame_idx": 165, "global_frame_idx": 8897, "task_index": 6}, {"db_idx": 8898, "episode_idx": 31, "frame_idx": 166, "global_frame_idx": 8898, "task_index": 6}, {"db_idx": 8899, "episode_idx": 31, "frame_idx": 167, "global_frame_idx": 8899, "task_index": 6}, {"db_idx": 8900, "episode_idx": 31, "frame_idx": 168, "global_frame_idx": 8900, "task_index": 6}, {"db_idx": 8901, "episode_idx": 31, "frame_idx": 169, "global_frame_idx": 8901, "task_index": 6}, {"db_idx": 8902, "episode_idx": 31, "frame_idx": 170, "global_frame_idx": 8902, "task_index": 6}, {"db_idx": 8903, "episode_idx": 31, "frame_idx": 171, "global_frame_idx": 8903, "task_index": 6}, {"db_idx": 8904, "episode_idx": 31, "frame_idx": 172, "global_frame_idx": 8904, "task_index": 6}, {"db_idx": 
8905, "episode_idx": 31, "frame_idx": 173, "global_frame_idx": 8905, "task_index": 6}, {"db_idx": 8906, "episode_idx": 31, "frame_idx": 174, "global_frame_idx": 8906, "task_index": 6}, {"db_idx": 8907, "episode_idx": 31, "frame_idx": 175, "global_frame_idx": 8907, "task_index": 6}, {"db_idx": 8908, "episode_idx": 31, "frame_idx": 176, "global_frame_idx": 8908, "task_index": 6}, {"db_idx": 8909, "episode_idx": 31, "frame_idx": 177, "global_frame_idx": 8909, "task_index": 6}, {"db_idx": 8910, "episode_idx": 31, "frame_idx": 178, "global_frame_idx": 8910, "task_index": 6}, {"db_idx": 8911, "episode_idx": 31, "frame_idx": 179, "global_frame_idx": 8911, "task_index": 6}, {"db_idx": 8912, "episode_idx": 31, "frame_idx": 180, "global_frame_idx": 8912, "task_index": 6}, {"db_idx": 8913, "episode_idx": 31, "frame_idx": 181, "global_frame_idx": 8913, "task_index": 6}, {"db_idx": 8914, "episode_idx": 31, "frame_idx": 182, "global_frame_idx": 8914, "task_index": 6}, {"db_idx": 8915, "episode_idx": 31, "frame_idx": 183, "global_frame_idx": 8915, "task_index": 6}, {"db_idx": 8916, "episode_idx": 31, "frame_idx": 184, "global_frame_idx": 8916, "task_index": 6}, {"db_idx": 8917, "episode_idx": 31, "frame_idx": 185, "global_frame_idx": 8917, "task_index": 6}, {"db_idx": 8918, "episode_idx": 31, "frame_idx": 186, "global_frame_idx": 8918, "task_index": 6}, {"db_idx": 8919, "episode_idx": 31, "frame_idx": 187, "global_frame_idx": 8919, "task_index": 6}, {"db_idx": 8920, "episode_idx": 31, "frame_idx": 188, "global_frame_idx": 8920, "task_index": 6}, {"db_idx": 8921, "episode_idx": 31, "frame_idx": 189, "global_frame_idx": 8921, "task_index": 6}, {"db_idx": 8922, "episode_idx": 31, "frame_idx": 190, "global_frame_idx": 8922, "task_index": 6}, {"db_idx": 8923, "episode_idx": 31, "frame_idx": 191, "global_frame_idx": 8923, "task_index": 6}, {"db_idx": 8924, "episode_idx": 31, "frame_idx": 192, "global_frame_idx": 8924, "task_index": 6}, {"db_idx": 8925, "episode_idx": 31, "frame_idx": 
193, "global_frame_idx": 8925, "task_index": 6}, {"db_idx": 8926, "episode_idx": 31, "frame_idx": 194, "global_frame_idx": 8926, "task_index": 6}, {"db_idx": 8927, "episode_idx": 31, "frame_idx": 195, "global_frame_idx": 8927, "task_index": 6}, {"db_idx": 8928, "episode_idx": 31, "frame_idx": 196, "global_frame_idx": 8928, "task_index": 6}, {"db_idx": 8929, "episode_idx": 31, "frame_idx": 197, "global_frame_idx": 8929, "task_index": 6}, {"db_idx": 8930, "episode_idx": 31, "frame_idx": 198, "global_frame_idx": 8930, "task_index": 6}, {"db_idx": 8931, "episode_idx": 31, "frame_idx": 199, "global_frame_idx": 8931, "task_index": 6}, {"db_idx": 8932, "episode_idx": 31, "frame_idx": 200, "global_frame_idx": 8932, "task_index": 6}, {"db_idx": 8933, "episode_idx": 31, "frame_idx": 201, "global_frame_idx": 8933, "task_index": 6}, {"db_idx": 8934, "episode_idx": 31, "frame_idx": 202, "global_frame_idx": 8934, "task_index": 6}, {"db_idx": 8935, "episode_idx": 31, "frame_idx": 203, "global_frame_idx": 8935, "task_index": 6}, {"db_idx": 8936, "episode_idx": 31, "frame_idx": 204, "global_frame_idx": 8936, "task_index": 6}, {"db_idx": 8937, "episode_idx": 31, "frame_idx": 205, "global_frame_idx": 8937, "task_index": 6}, {"db_idx": 8938, "episode_idx": 31, "frame_idx": 206, "global_frame_idx": 8938, "task_index": 6}, {"db_idx": 8939, "episode_idx": 31, "frame_idx": 207, "global_frame_idx": 8939, "task_index": 6}, {"db_idx": 8940, "episode_idx": 31, "frame_idx": 208, "global_frame_idx": 8940, "task_index": 6}, {"db_idx": 8941, "episode_idx": 31, "frame_idx": 209, "global_frame_idx": 8941, "task_index": 6}, {"db_idx": 8942, "episode_idx": 31, "frame_idx": 210, "global_frame_idx": 8942, "task_index": 6}, {"db_idx": 8943, "episode_idx": 31, "frame_idx": 211, "global_frame_idx": 8943, "task_index": 6}, {"db_idx": 8944, "episode_idx": 31, "frame_idx": 212, "global_frame_idx": 8944, "task_index": 6}, {"db_idx": 8945, "episode_idx": 31, "frame_idx": 213, "global_frame_idx": 8945, 
"task_index": 6}, {"db_idx": 8946, "episode_idx": 31, "frame_idx": 214, "global_frame_idx": 8946, "task_index": 6}, {"db_idx": 8947, "episode_idx": 31, "frame_idx": 215, "global_frame_idx": 8947, "task_index": 6}, {"db_idx": 8948, "episode_idx": 31, "frame_idx": 216, "global_frame_idx": 8948, "task_index": 6}, {"db_idx": 8949, "episode_idx": 31, "frame_idx": 217, "global_frame_idx": 8949, "task_index": 6}, {"db_idx": 8950, "episode_idx": 31, "frame_idx": 218, "global_frame_idx": 8950, "task_index": 6}, {"db_idx": 8951, "episode_idx": 31, "frame_idx": 219, "global_frame_idx": 8951, "task_index": 6}, {"db_idx": 8952, "episode_idx": 31, "frame_idx": 220, "global_frame_idx": 8952, "task_index": 6}, {"db_idx": 8953, "episode_idx": 31, "frame_idx": 221, "global_frame_idx": 8953, "task_index": 6}, {"db_idx": 8954, "episode_idx": 31, "frame_idx": 222, "global_frame_idx": 8954, "task_index": 6}, {"db_idx": 8955, "episode_idx": 31, "frame_idx": 223, "global_frame_idx": 8955, "task_index": 6}, {"db_idx": 8956, "episode_idx": 31, "frame_idx": 224, "global_frame_idx": 8956, "task_index": 6}, {"db_idx": 8957, "episode_idx": 31, "frame_idx": 225, "global_frame_idx": 8957, "task_index": 6}, {"db_idx": 8958, "episode_idx": 31, "frame_idx": 226, "global_frame_idx": 8958, "task_index": 6}, {"db_idx": 8959, "episode_idx": 31, "frame_idx": 227, "global_frame_idx": 8959, "task_index": 6}, {"db_idx": 8960, "episode_idx": 31, "frame_idx": 228, "global_frame_idx": 8960, "task_index": 6}, {"db_idx": 8961, "episode_idx": 31, "frame_idx": 229, "global_frame_idx": 8961, "task_index": 6}, {"db_idx": 8962, "episode_idx": 31, "frame_idx": 230, "global_frame_idx": 8962, "task_index": 6}, {"db_idx": 8963, "episode_idx": 31, "frame_idx": 231, "global_frame_idx": 8963, "task_index": 6}, {"db_idx": 8964, "episode_idx": 31, "frame_idx": 232, "global_frame_idx": 8964, "task_index": 6}, {"db_idx": 8965, "episode_idx": 31, "frame_idx": 233, "global_frame_idx": 8965, "task_index": 6}, {"db_idx": 8966, 
"episode_idx": 31, "frame_idx": 234, "global_frame_idx": 8966, "task_index": 6}, {"db_idx": 8967, "episode_idx": 31, "frame_idx": 235, "global_frame_idx": 8967, "task_index": 6}, {"db_idx": 8968, "episode_idx": 31, "frame_idx": 236, "global_frame_idx": 8968, "task_index": 6}, {"db_idx": 8969, "episode_idx": 31, "frame_idx": 237, "global_frame_idx": 8969, "task_index": 6}, {"db_idx": 8970, "episode_idx": 31, "frame_idx": 238, "global_frame_idx": 8970, "task_index": 6}, {"db_idx": 8971, "episode_idx": 31, "frame_idx": 239, "global_frame_idx": 8971, "task_index": 6}, {"db_idx": 8972, "episode_idx": 31, "frame_idx": 240, "global_frame_idx": 8972, "task_index": 6}, {"db_idx": 8973, "episode_idx": 31, "frame_idx": 241, "global_frame_idx": 8973, "task_index": 6}, {"db_idx": 8974, "episode_idx": 31, "frame_idx": 242, "global_frame_idx": 8974, "task_index": 6}, {"db_idx": 8975, "episode_idx": 31, "frame_idx": 243, "global_frame_idx": 8975, "task_index": 6}, {"db_idx": 8976, "episode_idx": 31, "frame_idx": 244, "global_frame_idx": 8976, "task_index": 6}, {"db_idx": 8977, "episode_idx": 31, "frame_idx": 245, "global_frame_idx": 8977, "task_index": 6}, {"db_idx": 8978, "episode_idx": 31, "frame_idx": 246, "global_frame_idx": 8978, "task_index": 6}, {"db_idx": 8979, "episode_idx": 31, "frame_idx": 247, "global_frame_idx": 8979, "task_index": 6}, {"db_idx": 8980, "episode_idx": 32, "frame_idx": 0, "global_frame_idx": 8980, "task_index": 6}, {"db_idx": 8981, "episode_idx": 32, "frame_idx": 1, "global_frame_idx": 8981, "task_index": 6}, {"db_idx": 8982, "episode_idx": 32, "frame_idx": 2, "global_frame_idx": 8982, "task_index": 6}, {"db_idx": 8983, "episode_idx": 32, "frame_idx": 3, "global_frame_idx": 8983, "task_index": 6}, {"db_idx": 8984, "episode_idx": 32, "frame_idx": 4, "global_frame_idx": 8984, "task_index": 6}, {"db_idx": 8985, "episode_idx": 32, "frame_idx": 5, "global_frame_idx": 8985, "task_index": 6}, {"db_idx": 8986, "episode_idx": 32, "frame_idx": 6, 
"global_frame_idx": 8986, "task_index": 6}, {"db_idx": 8987, "episode_idx": 32, "frame_idx": 7, "global_frame_idx": 8987, "task_index": 6}, {"db_idx": 8988, "episode_idx": 32, "frame_idx": 8, "global_frame_idx": 8988, "task_index": 6}, {"db_idx": 8989, "episode_idx": 32, "frame_idx": 9, "global_frame_idx": 8989, "task_index": 6}, {"db_idx": 8990, "episode_idx": 32, "frame_idx": 10, "global_frame_idx": 8990, "task_index": 6}, {"db_idx": 8991, "episode_idx": 32, "frame_idx": 11, "global_frame_idx": 8991, "task_index": 6}, {"db_idx": 8992, "episode_idx": 32, "frame_idx": 12, "global_frame_idx": 8992, "task_index": 6}, {"db_idx": 8993, "episode_idx": 32, "frame_idx": 13, "global_frame_idx": 8993, "task_index": 6}, {"db_idx": 8994, "episode_idx": 32, "frame_idx": 14, "global_frame_idx": 8994, "task_index": 6}, {"db_idx": 8995, "episode_idx": 32, "frame_idx": 15, "global_frame_idx": 8995, "task_index": 6}, {"db_idx": 8996, "episode_idx": 32, "frame_idx": 16, "global_frame_idx": 8996, "task_index": 6}, {"db_idx": 8997, "episode_idx": 32, "frame_idx": 17, "global_frame_idx": 8997, "task_index": 6}, {"db_idx": 8998, "episode_idx": 32, "frame_idx": 18, "global_frame_idx": 8998, "task_index": 6}, {"db_idx": 8999, "episode_idx": 32, "frame_idx": 19, "global_frame_idx": 8999, "task_index": 6}, {"db_idx": 9000, "episode_idx": 32, "frame_idx": 20, "global_frame_idx": 9000, "task_index": 6}, {"db_idx": 9001, "episode_idx": 32, "frame_idx": 21, "global_frame_idx": 9001, "task_index": 6}, {"db_idx": 9002, "episode_idx": 32, "frame_idx": 22, "global_frame_idx": 9002, "task_index": 6}, {"db_idx": 9003, "episode_idx": 32, "frame_idx": 23, "global_frame_idx": 9003, "task_index": 6}, {"db_idx": 9004, "episode_idx": 32, "frame_idx": 24, "global_frame_idx": 9004, "task_index": 6}, {"db_idx": 9005, "episode_idx": 32, "frame_idx": 25, "global_frame_idx": 9005, "task_index": 6}, {"db_idx": 9006, "episode_idx": 32, "frame_idx": 26, "global_frame_idx": 9006, "task_index": 6}, {"db_idx": 9007, 
"episode_idx": 32, "frame_idx": 27, "global_frame_idx": 9007, "task_index": 6}, {"db_idx": 9008, "episode_idx": 32, "frame_idx": 28, "global_frame_idx": 9008, "task_index": 6}, {"db_idx": 9009, "episode_idx": 32, "frame_idx": 29, "global_frame_idx": 9009, "task_index": 6}, {"db_idx": 9010, "episode_idx": 32, "frame_idx": 30, "global_frame_idx": 9010, "task_index": 6}, {"db_idx": 9011, "episode_idx": 32, "frame_idx": 31, "global_frame_idx": 9011, "task_index": 6}, {"db_idx": 9012, "episode_idx": 32, "frame_idx": 32, "global_frame_idx": 9012, "task_index": 6}, {"db_idx": 9013, "episode_idx": 32, "frame_idx": 33, "global_frame_idx": 9013, "task_index": 6}, {"db_idx": 9014, "episode_idx": 32, "frame_idx": 34, "global_frame_idx": 9014, "task_index": 6}, {"db_idx": 9015, "episode_idx": 32, "frame_idx": 35, "global_frame_idx": 9015, "task_index": 6}, {"db_idx": 9016, "episode_idx": 32, "frame_idx": 36, "global_frame_idx": 9016, "task_index": 6}, {"db_idx": 9017, "episode_idx": 32, "frame_idx": 37, "global_frame_idx": 9017, "task_index": 6}, {"db_idx": 9018, "episode_idx": 32, "frame_idx": 38, "global_frame_idx": 9018, "task_index": 6}, {"db_idx": 9019, "episode_idx": 32, "frame_idx": 39, "global_frame_idx": 9019, "task_index": 6}, {"db_idx": 9020, "episode_idx": 32, "frame_idx": 40, "global_frame_idx": 9020, "task_index": 6}, {"db_idx": 9021, "episode_idx": 32, "frame_idx": 41, "global_frame_idx": 9021, "task_index": 6}, {"db_idx": 9022, "episode_idx": 32, "frame_idx": 42, "global_frame_idx": 9022, "task_index": 6}, {"db_idx": 9023, "episode_idx": 32, "frame_idx": 43, "global_frame_idx": 9023, "task_index": 6}, {"db_idx": 9024, "episode_idx": 32, "frame_idx": 44, "global_frame_idx": 9024, "task_index": 6}, {"db_idx": 9025, "episode_idx": 32, "frame_idx": 45, "global_frame_idx": 9025, "task_index": 6}, {"db_idx": 9026, "episode_idx": 32, "frame_idx": 46, "global_frame_idx": 9026, "task_index": 6}, {"db_idx": 9027, "episode_idx": 32, "frame_idx": 47, "global_frame_idx": 
9027, "task_index": 6}, {"db_idx": 9028, "episode_idx": 32, "frame_idx": 48, "global_frame_idx": 9028, "task_index": 6}, {"db_idx": 9029, "episode_idx": 32, "frame_idx": 49, "global_frame_idx": 9029, "task_index": 6}, {"db_idx": 9030, "episode_idx": 32, "frame_idx": 50, "global_frame_idx": 9030, "task_index": 6}, {"db_idx": 9031, "episode_idx": 32, "frame_idx": 51, "global_frame_idx": 9031, "task_index": 6}, {"db_idx": 9032, "episode_idx": 32, "frame_idx": 52, "global_frame_idx": 9032, "task_index": 6}, {"db_idx": 9033, "episode_idx": 32, "frame_idx": 53, "global_frame_idx": 9033, "task_index": 6}, {"db_idx": 9034, "episode_idx": 32, "frame_idx": 54, "global_frame_idx": 9034, "task_index": 6}, {"db_idx": 9035, "episode_idx": 32, "frame_idx": 55, "global_frame_idx": 9035, "task_index": 6}, {"db_idx": 9036, "episode_idx": 32, "frame_idx": 56, "global_frame_idx": 9036, "task_index": 6}, {"db_idx": 9037, "episode_idx": 32, "frame_idx": 57, "global_frame_idx": 9037, "task_index": 6}, {"db_idx": 9038, "episode_idx": 32, "frame_idx": 58, "global_frame_idx": 9038, "task_index": 6}, {"db_idx": 9039, "episode_idx": 32, "frame_idx": 59, "global_frame_idx": 9039, "task_index": 6}, {"db_idx": 9040, "episode_idx": 32, "frame_idx": 60, "global_frame_idx": 9040, "task_index": 6}, {"db_idx": 9041, "episode_idx": 32, "frame_idx": 61, "global_frame_idx": 9041, "task_index": 6}, {"db_idx": 9042, "episode_idx": 32, "frame_idx": 62, "global_frame_idx": 9042, "task_index": 6}, {"db_idx": 9043, "episode_idx": 32, "frame_idx": 63, "global_frame_idx": 9043, "task_index": 6}, {"db_idx": 9044, "episode_idx": 32, "frame_idx": 64, "global_frame_idx": 9044, "task_index": 6}, {"db_idx": 9045, "episode_idx": 32, "frame_idx": 65, "global_frame_idx": 9045, "task_index": 6}, {"db_idx": 9046, "episode_idx": 32, "frame_idx": 66, "global_frame_idx": 9046, "task_index": 6}, {"db_idx": 9047, "episode_idx": 32, "frame_idx": 67, "global_frame_idx": 9047, "task_index": 6}, {"db_idx": 9048, "episode_idx": 32, 
"frame_idx": 68, "global_frame_idx": 9048, "task_index": 6}, {"db_idx": 9049, "episode_idx": 32, "frame_idx": 69, "global_frame_idx": 9049, "task_index": 6}, {"db_idx": 9050, "episode_idx": 32, "frame_idx": 70, "global_frame_idx": 9050, "task_index": 6}, {"db_idx": 9051, "episode_idx": 32, "frame_idx": 71, "global_frame_idx": 9051, "task_index": 6}, {"db_idx": 9052, "episode_idx": 32, "frame_idx": 72, "global_frame_idx": 9052, "task_index": 6}, {"db_idx": 9053, "episode_idx": 32, "frame_idx": 73, "global_frame_idx": 9053, "task_index": 6}, {"db_idx": 9054, "episode_idx": 32, "frame_idx": 74, "global_frame_idx": 9054, "task_index": 6}, {"db_idx": 9055, "episode_idx": 32, "frame_idx": 75, "global_frame_idx": 9055, "task_index": 6}, {"db_idx": 9056, "episode_idx": 32, "frame_idx": 76, "global_frame_idx": 9056, "task_index": 6}, {"db_idx": 9057, "episode_idx": 32, "frame_idx": 77, "global_frame_idx": 9057, "task_index": 6}, {"db_idx": 9058, "episode_idx": 32, "frame_idx": 78, "global_frame_idx": 9058, "task_index": 6}, {"db_idx": 9059, "episode_idx": 32, "frame_idx": 79, "global_frame_idx": 9059, "task_index": 6}, {"db_idx": 9060, "episode_idx": 32, "frame_idx": 80, "global_frame_idx": 9060, "task_index": 6}, {"db_idx": 9061, "episode_idx": 32, "frame_idx": 81, "global_frame_idx": 9061, "task_index": 6}, {"db_idx": 9062, "episode_idx": 32, "frame_idx": 82, "global_frame_idx": 9062, "task_index": 6}, {"db_idx": 9063, "episode_idx": 32, "frame_idx": 83, "global_frame_idx": 9063, "task_index": 6}, {"db_idx": 9064, "episode_idx": 32, "frame_idx": 84, "global_frame_idx": 9064, "task_index": 6}, {"db_idx": 9065, "episode_idx": 32, "frame_idx": 85, "global_frame_idx": 9065, "task_index": 6}, {"db_idx": 9066, "episode_idx": 32, "frame_idx": 86, "global_frame_idx": 9066, "task_index": 6}, {"db_idx": 9067, "episode_idx": 32, "frame_idx": 87, "global_frame_idx": 9067, "task_index": 6}, {"db_idx": 9068, "episode_idx": 32, "frame_idx": 88, "global_frame_idx": 9068, "task_index": 
6}, {"db_idx": 9069, "episode_idx": 32, "frame_idx": 89, "global_frame_idx": 9069, "task_index": 6}, {"db_idx": 9070, "episode_idx": 32, "frame_idx": 90, "global_frame_idx": 9070, "task_index": 6}, {"db_idx": 9071, "episode_idx": 32, "frame_idx": 91, "global_frame_idx": 9071, "task_index": 6}, {"db_idx": 9072, "episode_idx": 32, "frame_idx": 92, "global_frame_idx": 9072, "task_index": 6}, {"db_idx": 9073, "episode_idx": 32, "frame_idx": 93, "global_frame_idx": 9073, "task_index": 6}, {"db_idx": 9074, "episode_idx": 32, "frame_idx": 94, "global_frame_idx": 9074, "task_index": 6}, {"db_idx": 9075, "episode_idx": 32, "frame_idx": 95, "global_frame_idx": 9075, "task_index": 6}, {"db_idx": 9076, "episode_idx": 32, "frame_idx": 96, "global_frame_idx": 9076, "task_index": 6}, {"db_idx": 9077, "episode_idx": 32, "frame_idx": 97, "global_frame_idx": 9077, "task_index": 6}, {"db_idx": 9078, "episode_idx": 32, "frame_idx": 98, "global_frame_idx": 9078, "task_index": 6}, {"db_idx": 9079, "episode_idx": 32, "frame_idx": 99, "global_frame_idx": 9079, "task_index": 6}, {"db_idx": 9080, "episode_idx": 32, "frame_idx": 100, "global_frame_idx": 9080, "task_index": 6}, {"db_idx": 9081, "episode_idx": 32, "frame_idx": 101, "global_frame_idx": 9081, "task_index": 6}, {"db_idx": 9082, "episode_idx": 32, "frame_idx": 102, "global_frame_idx": 9082, "task_index": 6}, {"db_idx": 9083, "episode_idx": 32, "frame_idx": 103, "global_frame_idx": 9083, "task_index": 6}, {"db_idx": 9084, "episode_idx": 32, "frame_idx": 104, "global_frame_idx": 9084, "task_index": 6}, {"db_idx": 9085, "episode_idx": 32, "frame_idx": 105, "global_frame_idx": 9085, "task_index": 6}, {"db_idx": 9086, "episode_idx": 32, "frame_idx": 106, "global_frame_idx": 9086, "task_index": 6}, {"db_idx": 9087, "episode_idx": 32, "frame_idx": 107, "global_frame_idx": 9087, "task_index": 6}, {"db_idx": 9088, "episode_idx": 32, "frame_idx": 108, "global_frame_idx": 9088, "task_index": 6}, {"db_idx": 9089, "episode_idx": 32, 
"frame_idx": 109, "global_frame_idx": 9089, "task_index": 6}, {"db_idx": 9090, "episode_idx": 32, "frame_idx": 110, "global_frame_idx": 9090, "task_index": 6}, {"db_idx": 9091, "episode_idx": 32, "frame_idx": 111, "global_frame_idx": 9091, "task_index": 6}, {"db_idx": 9092, "episode_idx": 32, "frame_idx": 112, "global_frame_idx": 9092, "task_index": 6}, {"db_idx": 9093, "episode_idx": 32, "frame_idx": 113, "global_frame_idx": 9093, "task_index": 6}, {"db_idx": 9094, "episode_idx": 32, "frame_idx": 114, "global_frame_idx": 9094, "task_index": 6}, {"db_idx": 9095, "episode_idx": 32, "frame_idx": 115, "global_frame_idx": 9095, "task_index": 6}, {"db_idx": 9096, "episode_idx": 32, "frame_idx": 116, "global_frame_idx": 9096, "task_index": 6}, {"db_idx": 9097, "episode_idx": 32, "frame_idx": 117, "global_frame_idx": 9097, "task_index": 6}, {"db_idx": 9098, "episode_idx": 32, "frame_idx": 118, "global_frame_idx": 9098, "task_index": 6}, {"db_idx": 9099, "episode_idx": 32, "frame_idx": 119, "global_frame_idx": 9099, "task_index": 6}, {"db_idx": 9100, "episode_idx": 32, "frame_idx": 120, "global_frame_idx": 9100, "task_index": 6}, {"db_idx": 9101, "episode_idx": 32, "frame_idx": 121, "global_frame_idx": 9101, "task_index": 6}, {"db_idx": 9102, "episode_idx": 32, "frame_idx": 122, "global_frame_idx": 9102, "task_index": 6}, {"db_idx": 9103, "episode_idx": 32, "frame_idx": 123, "global_frame_idx": 9103, "task_index": 6}, {"db_idx": 9104, "episode_idx": 32, "frame_idx": 124, "global_frame_idx": 9104, "task_index": 6}, {"db_idx": 9105, "episode_idx": 32, "frame_idx": 125, "global_frame_idx": 9105, "task_index": 6}, {"db_idx": 9106, "episode_idx": 32, "frame_idx": 126, "global_frame_idx": 9106, "task_index": 6}, {"db_idx": 9107, "episode_idx": 32, "frame_idx": 127, "global_frame_idx": 9107, "task_index": 6}, {"db_idx": 9108, "episode_idx": 32, "frame_idx": 128, "global_frame_idx": 9108, "task_index": 6}, {"db_idx": 9109, "episode_idx": 32, "frame_idx": 129, "global_frame_idx": 
9109, "task_index": 6}, {"db_idx": 9110, "episode_idx": 32, "frame_idx": 130, "global_frame_idx": 9110, "task_index": 6}, {"db_idx": 9111, "episode_idx": 32, "frame_idx": 131, "global_frame_idx": 9111, "task_index": 6}, {"db_idx": 9112, "episode_idx": 32, "frame_idx": 132, "global_frame_idx": 9112, "task_index": 6}, {"db_idx": 9113, "episode_idx": 32, "frame_idx": 133, "global_frame_idx": 9113, "task_index": 6}, {"db_idx": 9114, "episode_idx": 32, "frame_idx": 134, "global_frame_idx": 9114, "task_index": 6}, {"db_idx": 9115, "episode_idx": 32, "frame_idx": 135, "global_frame_idx": 9115, "task_index": 6}, {"db_idx": 9116, "episode_idx": 32, "frame_idx": 136, "global_frame_idx": 9116, "task_index": 6}, {"db_idx": 9117, "episode_idx": 32, "frame_idx": 137, "global_frame_idx": 9117, "task_index": 6}, {"db_idx": 9118, "episode_idx": 32, "frame_idx": 138, "global_frame_idx": 9118, "task_index": 6}, {"db_idx": 9119, "episode_idx": 32, "frame_idx": 139, "global_frame_idx": 9119, "task_index": 6}, {"db_idx": 9120, "episode_idx": 32, "frame_idx": 140, "global_frame_idx": 9120, "task_index": 6}, {"db_idx": 9121, "episode_idx": 32, "frame_idx": 141, "global_frame_idx": 9121, "task_index": 6}, {"db_idx": 9122, "episode_idx": 32, "frame_idx": 142, "global_frame_idx": 9122, "task_index": 6}, {"db_idx": 9123, "episode_idx": 32, "frame_idx": 143, "global_frame_idx": 9123, "task_index": 6}, {"db_idx": 9124, "episode_idx": 32, "frame_idx": 144, "global_frame_idx": 9124, "task_index": 6}, {"db_idx": 9125, "episode_idx": 32, "frame_idx": 145, "global_frame_idx": 9125, "task_index": 6}, {"db_idx": 9126, "episode_idx": 32, "frame_idx": 146, "global_frame_idx": 9126, "task_index": 6}, {"db_idx": 9127, "episode_idx": 32, "frame_idx": 147, "global_frame_idx": 9127, "task_index": 6}, {"db_idx": 9128, "episode_idx": 32, "frame_idx": 148, "global_frame_idx": 9128, "task_index": 6}, {"db_idx": 9129, "episode_idx": 32, "frame_idx": 149, "global_frame_idx": 9129, "task_index": 6}, {"db_idx": 
9130, "episode_idx": 32, "frame_idx": 150, "global_frame_idx": 9130, "task_index": 6}, {"db_idx": 9131, "episode_idx": 32, "frame_idx": 151, "global_frame_idx": 9131, "task_index": 6}, {"db_idx": 9132, "episode_idx": 32, "frame_idx": 152, "global_frame_idx": 9132, "task_index": 6}, {"db_idx": 9133, "episode_idx": 32, "frame_idx": 153, "global_frame_idx": 9133, "task_index": 6}, {"db_idx": 9134, "episode_idx": 32, "frame_idx": 154, "global_frame_idx": 9134, "task_index": 6}, {"db_idx": 9135, "episode_idx": 32, "frame_idx": 155, "global_frame_idx": 9135, "task_index": 6}, {"db_idx": 9136, "episode_idx": 32, "frame_idx": 156, "global_frame_idx": 9136, "task_index": 6}, {"db_idx": 9137, "episode_idx": 32, "frame_idx": 157, "global_frame_idx": 9137, "task_index": 6}, {"db_idx": 9138, "episode_idx": 32, "frame_idx": 158, "global_frame_idx": 9138, "task_index": 6}, {"db_idx": 9139, "episode_idx": 32, "frame_idx": 159, "global_frame_idx": 9139, "task_index": 6}, {"db_idx": 9140, "episode_idx": 32, "frame_idx": 160, "global_frame_idx": 9140, "task_index": 6}, {"db_idx": 9141, "episode_idx": 32, "frame_idx": 161, "global_frame_idx": 9141, "task_index": 6}, {"db_idx": 9142, "episode_idx": 32, "frame_idx": 162, "global_frame_idx": 9142, "task_index": 6}, {"db_idx": 9143, "episode_idx": 32, "frame_idx": 163, "global_frame_idx": 9143, "task_index": 6}, {"db_idx": 9144, "episode_idx": 32, "frame_idx": 164, "global_frame_idx": 9144, "task_index": 6}, {"db_idx": 9145, "episode_idx": 32, "frame_idx": 165, "global_frame_idx": 9145, "task_index": 6}, {"db_idx": 9146, "episode_idx": 32, "frame_idx": 166, "global_frame_idx": 9146, "task_index": 6}, {"db_idx": 9147, "episode_idx": 32, "frame_idx": 167, "global_frame_idx": 9147, "task_index": 6}, {"db_idx": 9148, "episode_idx": 32, "frame_idx": 168, "global_frame_idx": 9148, "task_index": 6}, {"db_idx": 9149, "episode_idx": 32, "frame_idx": 169, "global_frame_idx": 9149, "task_index": 6}, {"db_idx": 9150, "episode_idx": 32, "frame_idx": 
170, "global_frame_idx": 9150, "task_index": 6}, {"db_idx": 9151, "episode_idx": 32, "frame_idx": 171, "global_frame_idx": 9151, "task_index": 6}, {"db_idx": 9152, "episode_idx": 32, "frame_idx": 172, "global_frame_idx": 9152, "task_index": 6}, {"db_idx": 9153, "episode_idx": 32, "frame_idx": 173, "global_frame_idx": 9153, "task_index": 6}, {"db_idx": 9154, "episode_idx": 32, "frame_idx": 174, "global_frame_idx": 9154, "task_index": 6}, {"db_idx": 9155, "episode_idx": 32, "frame_idx": 175, "global_frame_idx": 9155, "task_index": 6}, {"db_idx": 9156, "episode_idx": 32, "frame_idx": 176, "global_frame_idx": 9156, "task_index": 6}, {"db_idx": 9157, "episode_idx": 32, "frame_idx": 177, "global_frame_idx": 9157, "task_index": 6}, {"db_idx": 9158, "episode_idx": 32, "frame_idx": 178, "global_frame_idx": 9158, "task_index": 6}, {"db_idx": 9159, "episode_idx": 32, "frame_idx": 179, "global_frame_idx": 9159, "task_index": 6}, {"db_idx": 9160, "episode_idx": 32, "frame_idx": 180, "global_frame_idx": 9160, "task_index": 6}, {"db_idx": 9161, "episode_idx": 32, "frame_idx": 181, "global_frame_idx": 9161, "task_index": 6}, {"db_idx": 9162, "episode_idx": 32, "frame_idx": 182, "global_frame_idx": 9162, "task_index": 6}, {"db_idx": 9163, "episode_idx": 32, "frame_idx": 183, "global_frame_idx": 9163, "task_index": 6}, {"db_idx": 9164, "episode_idx": 32, "frame_idx": 184, "global_frame_idx": 9164, "task_index": 6}, {"db_idx": 9165, "episode_idx": 32, "frame_idx": 185, "global_frame_idx": 9165, "task_index": 6}, {"db_idx": 9166, "episode_idx": 32, "frame_idx": 186, "global_frame_idx": 9166, "task_index": 6}, {"db_idx": 9167, "episode_idx": 32, "frame_idx": 187, "global_frame_idx": 9167, "task_index": 6}, {"db_idx": 9168, "episode_idx": 32, "frame_idx": 188, "global_frame_idx": 9168, "task_index": 6}, {"db_idx": 9169, "episode_idx": 32, "frame_idx": 189, "global_frame_idx": 9169, "task_index": 6}, {"db_idx": 9170, "episode_idx": 32, "frame_idx": 190, "global_frame_idx": 9170, 
"task_index": 6}, {"db_idx": 9171, "episode_idx": 32, "frame_idx": 191, "global_frame_idx": 9171, "task_index": 6}, {"db_idx": 9172, "episode_idx": 32, "frame_idx": 192, "global_frame_idx": 9172, "task_index": 6}, {"db_idx": 9173, "episode_idx": 32, "frame_idx": 193, "global_frame_idx": 9173, "task_index": 6}, {"db_idx": 9174, "episode_idx": 32, "frame_idx": 194, "global_frame_idx": 9174, "task_index": 6}, {"db_idx": 9175, "episode_idx": 32, "frame_idx": 195, "global_frame_idx": 9175, "task_index": 6}, {"db_idx": 9176, "episode_idx": 32, "frame_idx": 196, "global_frame_idx": 9176, "task_index": 6}, {"db_idx": 9177, "episode_idx": 32, "frame_idx": 197, "global_frame_idx": 9177, "task_index": 6}, {"db_idx": 9178, "episode_idx": 32, "frame_idx": 198, "global_frame_idx": 9178, "task_index": 6}, {"db_idx": 9179, "episode_idx": 32, "frame_idx": 199, "global_frame_idx": 9179, "task_index": 6}, {"db_idx": 9180, "episode_idx": 32, "frame_idx": 200, "global_frame_idx": 9180, "task_index": 6}, {"db_idx": 9181, "episode_idx": 32, "frame_idx": 201, "global_frame_idx": 9181, "task_index": 6}, {"db_idx": 9182, "episode_idx": 32, "frame_idx": 202, "global_frame_idx": 9182, "task_index": 6}, {"db_idx": 9183, "episode_idx": 32, "frame_idx": 203, "global_frame_idx": 9183, "task_index": 6}, {"db_idx": 9184, "episode_idx": 32, "frame_idx": 204, "global_frame_idx": 9184, "task_index": 6}, {"db_idx": 9185, "episode_idx": 32, "frame_idx": 205, "global_frame_idx": 9185, "task_index": 6}, {"db_idx": 9186, "episode_idx": 32, "frame_idx": 206, "global_frame_idx": 9186, "task_index": 6}, {"db_idx": 9187, "episode_idx": 32, "frame_idx": 207, "global_frame_idx": 9187, "task_index": 6}, {"db_idx": 9188, "episode_idx": 32, "frame_idx": 208, "global_frame_idx": 9188, "task_index": 6}, {"db_idx": 9189, "episode_idx": 32, "frame_idx": 209, "global_frame_idx": 9189, "task_index": 6}, {"db_idx": 9190, "episode_idx": 32, "frame_idx": 210, "global_frame_idx": 9190, "task_index": 6}, {"db_idx": 9191, 
"episode_idx": 32, "frame_idx": 211, "global_frame_idx": 9191, "task_index": 6}, {"db_idx": 9192, "episode_idx": 32, "frame_idx": 212, "global_frame_idx": 9192, "task_index": 6}, {"db_idx": 9193, "episode_idx": 32, "frame_idx": 213, "global_frame_idx": 9193, "task_index": 6}, {"db_idx": 9194, "episode_idx": 32, "frame_idx": 214, "global_frame_idx": 9194, "task_index": 6}, {"db_idx": 9195, "episode_idx": 32, "frame_idx": 215, "global_frame_idx": 9195, "task_index": 6}, {"db_idx": 9196, "episode_idx": 32, "frame_idx": 216, "global_frame_idx": 9196, "task_index": 6}, {"db_idx": 9197, "episode_idx": 32, "frame_idx": 217, "global_frame_idx": 9197, "task_index": 6}, {"db_idx": 9198, "episode_idx": 32, "frame_idx": 218, "global_frame_idx": 9198, "task_index": 6}, {"db_idx": 9199, "episode_idx": 32, "frame_idx": 219, "global_frame_idx": 9199, "task_index": 6}, {"db_idx": 9200, "episode_idx": 32, "frame_idx": 220, "global_frame_idx": 9200, "task_index": 6}, {"db_idx": 9201, "episode_idx": 32, "frame_idx": 221, "global_frame_idx": 9201, "task_index": 6}, {"db_idx": 9202, "episode_idx": 32, "frame_idx": 222, "global_frame_idx": 9202, "task_index": 6}, {"db_idx": 9203, "episode_idx": 32, "frame_idx": 223, "global_frame_idx": 9203, "task_index": 6}, {"db_idx": 9204, "episode_idx": 32, "frame_idx": 224, "global_frame_idx": 9204, "task_index": 6}, {"db_idx": 9205, "episode_idx": 32, "frame_idx": 225, "global_frame_idx": 9205, "task_index": 6}, {"db_idx": 9206, "episode_idx": 32, "frame_idx": 226, "global_frame_idx": 9206, "task_index": 6}, {"db_idx": 9207, "episode_idx": 32, "frame_idx": 227, "global_frame_idx": 9207, "task_index": 6}, {"db_idx": 9208, "episode_idx": 32, "frame_idx": 228, "global_frame_idx": 9208, "task_index": 6}, {"db_idx": 9209, "episode_idx": 32, "frame_idx": 229, "global_frame_idx": 9209, "task_index": 6}, {"db_idx": 9210, "episode_idx": 32, "frame_idx": 230, "global_frame_idx": 9210, "task_index": 6}, {"db_idx": 9211, "episode_idx": 32, "frame_idx": 231, 
"global_frame_idx": 9211, "task_index": 6}, {"db_idx": 9212, "episode_idx": 32, "frame_idx": 232, "global_frame_idx": 9212, "task_index": 6}, {"db_idx": 9213, "episode_idx": 32, "frame_idx": 233, "global_frame_idx": 9213, "task_index": 6}, {"db_idx": 9214, "episode_idx": 32, "frame_idx": 234, "global_frame_idx": 9214, "task_index": 6}, {"db_idx": 9215, "episode_idx": 32, "frame_idx": 235, "global_frame_idx": 9215, "task_index": 6}, {"db_idx": 9216, "episode_idx": 32, "frame_idx": 236, "global_frame_idx": 9216, "task_index": 6}, {"db_idx": 9217, "episode_idx": 32, "frame_idx": 237, "global_frame_idx": 9217, "task_index": 6}, {"db_idx": 9218, "episode_idx": 32, "frame_idx": 238, "global_frame_idx": 9218, "task_index": 6}, {"db_idx": 9219, "episode_idx": 32, "frame_idx": 239, "global_frame_idx": 9219, "task_index": 6}, {"db_idx": 9220, "episode_idx": 32, "frame_idx": 240, "global_frame_idx": 9220, "task_index": 6}, {"db_idx": 9221, "episode_idx": 32, "frame_idx": 241, "global_frame_idx": 9221, "task_index": 6}, {"db_idx": 9222, "episode_idx": 32, "frame_idx": 242, "global_frame_idx": 9222, "task_index": 6}, {"db_idx": 9223, "episode_idx": 32, "frame_idx": 243, "global_frame_idx": 9223, "task_index": 6}, {"db_idx": 9224, "episode_idx": 32, "frame_idx": 244, "global_frame_idx": 9224, "task_index": 6}, {"db_idx": 9225, "episode_idx": 32, "frame_idx": 245, "global_frame_idx": 9225, "task_index": 6}, {"db_idx": 9226, "episode_idx": 32, "frame_idx": 246, "global_frame_idx": 9226, "task_index": 6}, {"db_idx": 9227, "episode_idx": 32, "frame_idx": 247, "global_frame_idx": 9227, "task_index": 6}, {"db_idx": 9228, "episode_idx": 32, "frame_idx": 248, "global_frame_idx": 9228, "task_index": 6}, {"db_idx": 9229, "episode_idx": 32, "frame_idx": 249, "global_frame_idx": 9229, "task_index": 6}, {"db_idx": 9230, "episode_idx": 32, "frame_idx": 250, "global_frame_idx": 9230, "task_index": 6}, {"db_idx": 9231, "episode_idx": 32, "frame_idx": 251, "global_frame_idx": 9231, "task_index": 
6}, {"db_idx": 9232, "episode_idx": 32, "frame_idx": 252, "global_frame_idx": 9232, "task_index": 6}, {"db_idx": 9233, "episode_idx": 32, "frame_idx": 253, "global_frame_idx": 9233, "task_index": 6}, {"db_idx": 9234, "episode_idx": 32, "frame_idx": 254, "global_frame_idx": 9234, "task_index": 6}, {"db_idx": 9235, "episode_idx": 32, "frame_idx": 255, "global_frame_idx": 9235, "task_index": 6}, {"db_idx": 9236, "episode_idx": 32, "frame_idx": 256, "global_frame_idx": 9236, "task_index": 6}, {"db_idx": 9237, "episode_idx": 32, "frame_idx": 257, "global_frame_idx": 9237, "task_index": 6}, {"db_idx": 9238, "episode_idx": 32, "frame_idx": 258, "global_frame_idx": 9238, "task_index": 6}, {"db_idx": 9239, "episode_idx": 32, "frame_idx": 259, "global_frame_idx": 9239, "task_index": 6}, {"db_idx": 9240, "episode_idx": 32, "frame_idx": 260, "global_frame_idx": 9240, "task_index": 6}, {"db_idx": 9241, "episode_idx": 32, "frame_idx": 261, "global_frame_idx": 9241, "task_index": 6}, {"db_idx": 9242, "episode_idx": 32, "frame_idx": 262, "global_frame_idx": 9242, "task_index": 6}, {"db_idx": 9243, "episode_idx": 32, "frame_idx": 263, "global_frame_idx": 9243, "task_index": 6}, {"db_idx": 9244, "episode_idx": 32, "frame_idx": 264, "global_frame_idx": 9244, "task_index": 6}, {"db_idx": 9245, "episode_idx": 32, "frame_idx": 265, "global_frame_idx": 9245, "task_index": 6}, {"db_idx": 9246, "episode_idx": 32, "frame_idx": 266, "global_frame_idx": 9246, "task_index": 6}, {"db_idx": 9247, "episode_idx": 33, "frame_idx": 0, "global_frame_idx": 9247, "task_index": 6}, {"db_idx": 9248, "episode_idx": 33, "frame_idx": 1, "global_frame_idx": 9248, "task_index": 6}, {"db_idx": 9249, "episode_idx": 33, "frame_idx": 2, "global_frame_idx": 9249, "task_index": 6}, {"db_idx": 9250, "episode_idx": 33, "frame_idx": 3, "global_frame_idx": 9250, "task_index": 6}, {"db_idx": 9251, "episode_idx": 33, "frame_idx": 4, "global_frame_idx": 9251, "task_index": 6}, {"db_idx": 9252, "episode_idx": 33, 
"frame_idx": 5, "global_frame_idx": 9252, "task_index": 6}, {"db_idx": 9253, "episode_idx": 33, "frame_idx": 6, "global_frame_idx": 9253, "task_index": 6}, {"db_idx": 9254, "episode_idx": 33, "frame_idx": 7, "global_frame_idx": 9254, "task_index": 6}, {"db_idx": 9255, "episode_idx": 33, "frame_idx": 8, "global_frame_idx": 9255, "task_index": 6}, {"db_idx": 9256, "episode_idx": 33, "frame_idx": 9, "global_frame_idx": 9256, "task_index": 6}, {"db_idx": 9257, "episode_idx": 33, "frame_idx": 10, "global_frame_idx": 9257, "task_index": 6}, {"db_idx": 9258, "episode_idx": 33, "frame_idx": 11, "global_frame_idx": 9258, "task_index": 6}, {"db_idx": 9259, "episode_idx": 33, "frame_idx": 12, "global_frame_idx": 9259, "task_index": 6}, {"db_idx": 9260, "episode_idx": 33, "frame_idx": 13, "global_frame_idx": 9260, "task_index": 6}, {"db_idx": 9261, "episode_idx": 33, "frame_idx": 14, "global_frame_idx": 9261, "task_index": 6}, {"db_idx": 9262, "episode_idx": 33, "frame_idx": 15, "global_frame_idx": 9262, "task_index": 6}, {"db_idx": 9263, "episode_idx": 33, "frame_idx": 16, "global_frame_idx": 9263, "task_index": 6}, {"db_idx": 9264, "episode_idx": 33, "frame_idx": 17, "global_frame_idx": 9264, "task_index": 6}, {"db_idx": 9265, "episode_idx": 33, "frame_idx": 18, "global_frame_idx": 9265, "task_index": 6}, {"db_idx": 9266, "episode_idx": 33, "frame_idx": 19, "global_frame_idx": 9266, "task_index": 6}, {"db_idx": 9267, "episode_idx": 33, "frame_idx": 20, "global_frame_idx": 9267, "task_index": 6}, {"db_idx": 9268, "episode_idx": 33, "frame_idx": 21, "global_frame_idx": 9268, "task_index": 6}, {"db_idx": 9269, "episode_idx": 33, "frame_idx": 22, "global_frame_idx": 9269, "task_index": 6}, {"db_idx": 9270, "episode_idx": 33, "frame_idx": 23, "global_frame_idx": 9270, "task_index": 6}, {"db_idx": 9271, "episode_idx": 33, "frame_idx": 24, "global_frame_idx": 9271, "task_index": 6}, {"db_idx": 9272, "episode_idx": 33, "frame_idx": 25, "global_frame_idx": 9272, "task_index": 6}, 
{"db_idx": 9273, "episode_idx": 33, "frame_idx": 26, "global_frame_idx": 9273, "task_index": 6}, {"db_idx": 9274, "episode_idx": 33, "frame_idx": 27, "global_frame_idx": 9274, "task_index": 6}, {"db_idx": 9275, "episode_idx": 33, "frame_idx": 28, "global_frame_idx": 9275, "task_index": 6}, {"db_idx": 9276, "episode_idx": 33, "frame_idx": 29, "global_frame_idx": 9276, "task_index": 6}, {"db_idx": 9277, "episode_idx": 33, "frame_idx": 30, "global_frame_idx": 9277, "task_index": 6}, {"db_idx": 9278, "episode_idx": 33, "frame_idx": 31, "global_frame_idx": 9278, "task_index": 6}, {"db_idx": 9279, "episode_idx": 33, "frame_idx": 32, "global_frame_idx": 9279, "task_index": 6}, {"db_idx": 9280, "episode_idx": 33, "frame_idx": 33, "global_frame_idx": 9280, "task_index": 6}, {"db_idx": 9281, "episode_idx": 33, "frame_idx": 34, "global_frame_idx": 9281, "task_index": 6}, {"db_idx": 9282, "episode_idx": 33, "frame_idx": 35, "global_frame_idx": 9282, "task_index": 6}, {"db_idx": 9283, "episode_idx": 33, "frame_idx": 36, "global_frame_idx": 9283, "task_index": 6}, {"db_idx": 9284, "episode_idx": 33, "frame_idx": 37, "global_frame_idx": 9284, "task_index": 6}, {"db_idx": 9285, "episode_idx": 33, "frame_idx": 38, "global_frame_idx": 9285, "task_index": 6}, {"db_idx": 9286, "episode_idx": 33, "frame_idx": 39, "global_frame_idx": 9286, "task_index": 6}, {"db_idx": 9287, "episode_idx": 33, "frame_idx": 40, "global_frame_idx": 9287, "task_index": 6}, {"db_idx": 9288, "episode_idx": 33, "frame_idx": 41, "global_frame_idx": 9288, "task_index": 6}, {"db_idx": 9289, "episode_idx": 33, "frame_idx": 42, "global_frame_idx": 9289, "task_index": 6}, {"db_idx": 9290, "episode_idx": 33, "frame_idx": 43, "global_frame_idx": 9290, "task_index": 6}, {"db_idx": 9291, "episode_idx": 33, "frame_idx": 44, "global_frame_idx": 9291, "task_index": 6}, {"db_idx": 9292, "episode_idx": 33, "frame_idx": 45, "global_frame_idx": 9292, "task_index": 6}, {"db_idx": 9293, "episode_idx": 33, "frame_idx": 46, 
"global_frame_idx": 9293, "task_index": 6}, {"db_idx": 9294, "episode_idx": 33, "frame_idx": 47, "global_frame_idx": 9294, "task_index": 6}, {"db_idx": 9295, "episode_idx": 33, "frame_idx": 48, "global_frame_idx": 9295, "task_index": 6}, {"db_idx": 9296, "episode_idx": 33, "frame_idx": 49, "global_frame_idx": 9296, "task_index": 6}, {"db_idx": 9297, "episode_idx": 33, "frame_idx": 50, "global_frame_idx": 9297, "task_index": 6}, {"db_idx": 9298, "episode_idx": 33, "frame_idx": 51, "global_frame_idx": 9298, "task_index": 6}, {"db_idx": 9299, "episode_idx": 33, "frame_idx": 52, "global_frame_idx": 9299, "task_index": 6}, {"db_idx": 9300, "episode_idx": 33, "frame_idx": 53, "global_frame_idx": 9300, "task_index": 6}, {"db_idx": 9301, "episode_idx": 33, "frame_idx": 54, "global_frame_idx": 9301, "task_index": 6}, {"db_idx": 9302, "episode_idx": 33, "frame_idx": 55, "global_frame_idx": 9302, "task_index": 6}, {"db_idx": 9303, "episode_idx": 33, "frame_idx": 56, "global_frame_idx": 9303, "task_index": 6}, {"db_idx": 9304, "episode_idx": 33, "frame_idx": 57, "global_frame_idx": 9304, "task_index": 6}, {"db_idx": 9305, "episode_idx": 33, "frame_idx": 58, "global_frame_idx": 9305, "task_index": 6}, {"db_idx": 9306, "episode_idx": 33, "frame_idx": 59, "global_frame_idx": 9306, "task_index": 6}, {"db_idx": 9307, "episode_idx": 33, "frame_idx": 60, "global_frame_idx": 9307, "task_index": 6}, {"db_idx": 9308, "episode_idx": 33, "frame_idx": 61, "global_frame_idx": 9308, "task_index": 6}, {"db_idx": 9309, "episode_idx": 33, "frame_idx": 62, "global_frame_idx": 9309, "task_index": 6}, {"db_idx": 9310, "episode_idx": 33, "frame_idx": 63, "global_frame_idx": 9310, "task_index": 6}, {"db_idx": 9311, "episode_idx": 33, "frame_idx": 64, "global_frame_idx": 9311, "task_index": 6}, {"db_idx": 9312, "episode_idx": 33, "frame_idx": 65, "global_frame_idx": 9312, "task_index": 6}, {"db_idx": 9313, "episode_idx": 33, "frame_idx": 66, "global_frame_idx": 9313, "task_index": 6}, {"db_idx": 
9314, "episode_idx": 33, "frame_idx": 67, "global_frame_idx": 9314, "task_index": 6}, {"db_idx": 9315, "episode_idx": 33, "frame_idx": 68, "global_frame_idx": 9315, "task_index": 6}, {"db_idx": 9316, "episode_idx": 33, "frame_idx": 69, "global_frame_idx": 9316, "task_index": 6}, {"db_idx": 9317, "episode_idx": 33, "frame_idx": 70, "global_frame_idx": 9317, "task_index": 6}, {"db_idx": 9318, "episode_idx": 33, "frame_idx": 71, "global_frame_idx": 9318, "task_index": 6}, {"db_idx": 9319, "episode_idx": 33, "frame_idx": 72, "global_frame_idx": 9319, "task_index": 6}, {"db_idx": 9320, "episode_idx": 33, "frame_idx": 73, "global_frame_idx": 9320, "task_index": 6}, {"db_idx": 9321, "episode_idx": 33, "frame_idx": 74, "global_frame_idx": 9321, "task_index": 6}, {"db_idx": 9322, "episode_idx": 33, "frame_idx": 75, "global_frame_idx": 9322, "task_index": 6}, {"db_idx": 9323, "episode_idx": 33, "frame_idx": 76, "global_frame_idx": 9323, "task_index": 6}, {"db_idx": 9324, "episode_idx": 33, "frame_idx": 77, "global_frame_idx": 9324, "task_index": 6}, {"db_idx": 9325, "episode_idx": 33, "frame_idx": 78, "global_frame_idx": 9325, "task_index": 6}, {"db_idx": 9326, "episode_idx": 33, "frame_idx": 79, "global_frame_idx": 9326, "task_index": 6}, {"db_idx": 9327, "episode_idx": 33, "frame_idx": 80, "global_frame_idx": 9327, "task_index": 6}, {"db_idx": 9328, "episode_idx": 33, "frame_idx": 81, "global_frame_idx": 9328, "task_index": 6}, {"db_idx": 9329, "episode_idx": 33, "frame_idx": 82, "global_frame_idx": 9329, "task_index": 6}, {"db_idx": 9330, "episode_idx": 33, "frame_idx": 83, "global_frame_idx": 9330, "task_index": 6}, {"db_idx": 9331, "episode_idx": 33, "frame_idx": 84, "global_frame_idx": 9331, "task_index": 6}, {"db_idx": 9332, "episode_idx": 33, "frame_idx": 85, "global_frame_idx": 9332, "task_index": 6}, {"db_idx": 9333, "episode_idx": 33, "frame_idx": 86, "global_frame_idx": 9333, "task_index": 6}, {"db_idx": 9334, "episode_idx": 33, "frame_idx": 87, 
"global_frame_idx": 9334, "task_index": 6}, {"db_idx": 9335, "episode_idx": 33, "frame_idx": 88, "global_frame_idx": 9335, "task_index": 6}, {"db_idx": 9336, "episode_idx": 33, "frame_idx": 89, "global_frame_idx": 9336, "task_index": 6}, {"db_idx": 9337, "episode_idx": 33, "frame_idx": 90, "global_frame_idx": 9337, "task_index": 6}, {"db_idx": 9338, "episode_idx": 33, "frame_idx": 91, "global_frame_idx": 9338, "task_index": 6}, {"db_idx": 9339, "episode_idx": 33, "frame_idx": 92, "global_frame_idx": 9339, "task_index": 6}, {"db_idx": 9340, "episode_idx": 33, "frame_idx": 93, "global_frame_idx": 9340, "task_index": 6}, {"db_idx": 9341, "episode_idx": 33, "frame_idx": 94, "global_frame_idx": 9341, "task_index": 6}, {"db_idx": 9342, "episode_idx": 33, "frame_idx": 95, "global_frame_idx": 9342, "task_index": 6}, {"db_idx": 9343, "episode_idx": 33, "frame_idx": 96, "global_frame_idx": 9343, "task_index": 6}, {"db_idx": 9344, "episode_idx": 33, "frame_idx": 97, "global_frame_idx": 9344, "task_index": 6}, {"db_idx": 9345, "episode_idx": 33, "frame_idx": 98, "global_frame_idx": 9345, "task_index": 6}, {"db_idx": 9346, "episode_idx": 33, "frame_idx": 99, "global_frame_idx": 9346, "task_index": 6}, {"db_idx": 9347, "episode_idx": 33, "frame_idx": 100, "global_frame_idx": 9347, "task_index": 6}, {"db_idx": 9348, "episode_idx": 33, "frame_idx": 101, "global_frame_idx": 9348, "task_index": 6}, {"db_idx": 9349, "episode_idx": 33, "frame_idx": 102, "global_frame_idx": 9349, "task_index": 6}, {"db_idx": 9350, "episode_idx": 33, "frame_idx": 103, "global_frame_idx": 9350, "task_index": 6}, {"db_idx": 9351, "episode_idx": 33, "frame_idx": 104, "global_frame_idx": 9351, "task_index": 6}, {"db_idx": 9352, "episode_idx": 33, "frame_idx": 105, "global_frame_idx": 9352, "task_index": 6}, {"db_idx": 9353, "episode_idx": 33, "frame_idx": 106, "global_frame_idx": 9353, "task_index": 6}, {"db_idx": 9354, "episode_idx": 33, "frame_idx": 107, "global_frame_idx": 9354, "task_index": 6}, 
{"db_idx": 9355, "episode_idx": 33, "frame_idx": 108, "global_frame_idx": 9355, "task_index": 6}, {"db_idx": 9356, "episode_idx": 33, "frame_idx": 109, "global_frame_idx": 9356, "task_index": 6}, {"db_idx": 9357, "episode_idx": 33, "frame_idx": 110, "global_frame_idx": 9357, "task_index": 6}, {"db_idx": 9358, "episode_idx": 33, "frame_idx": 111, "global_frame_idx": 9358, "task_index": 6}, {"db_idx": 9359, "episode_idx": 33, "frame_idx": 112, "global_frame_idx": 9359, "task_index": 6}, {"db_idx": 9360, "episode_idx": 33, "frame_idx": 113, "global_frame_idx": 9360, "task_index": 6}, {"db_idx": 9361, "episode_idx": 33, "frame_idx": 114, "global_frame_idx": 9361, "task_index": 6}, {"db_idx": 9362, "episode_idx": 33, "frame_idx": 115, "global_frame_idx": 9362, "task_index": 6}, {"db_idx": 9363, "episode_idx": 33, "frame_idx": 116, "global_frame_idx": 9363, "task_index": 6}, {"db_idx": 9364, "episode_idx": 33, "frame_idx": 117, "global_frame_idx": 9364, "task_index": 6}, {"db_idx": 9365, "episode_idx": 33, "frame_idx": 118, "global_frame_idx": 9365, "task_index": 6}, {"db_idx": 9366, "episode_idx": 33, "frame_idx": 119, "global_frame_idx": 9366, "task_index": 6}, {"db_idx": 9367, "episode_idx": 33, "frame_idx": 120, "global_frame_idx": 9367, "task_index": 6}, {"db_idx": 9368, "episode_idx": 33, "frame_idx": 121, "global_frame_idx": 9368, "task_index": 6}, {"db_idx": 9369, "episode_idx": 33, "frame_idx": 122, "global_frame_idx": 9369, "task_index": 6}, {"db_idx": 9370, "episode_idx": 33, "frame_idx": 123, "global_frame_idx": 9370, "task_index": 6}, {"db_idx": 9371, "episode_idx": 33, "frame_idx": 124, "global_frame_idx": 9371, "task_index": 6}, {"db_idx": 9372, "episode_idx": 33, "frame_idx": 125, "global_frame_idx": 9372, "task_index": 6}, {"db_idx": 9373, "episode_idx": 33, "frame_idx": 126, "global_frame_idx": 9373, "task_index": 6}, {"db_idx": 9374, "episode_idx": 33, "frame_idx": 127, "global_frame_idx": 9374, "task_index": 6}, {"db_idx": 9375, "episode_idx": 33, 
"frame_idx": 128, "global_frame_idx": 9375, "task_index": 6}, {"db_idx": 9376, "episode_idx": 33, "frame_idx": 129, "global_frame_idx": 9376, "task_index": 6}, {"db_idx": 9377, "episode_idx": 33, "frame_idx": 130, "global_frame_idx": 9377, "task_index": 6}, {"db_idx": 9378, "episode_idx": 33, "frame_idx": 131, "global_frame_idx": 9378, "task_index": 6}, {"db_idx": 9379, "episode_idx": 33, "frame_idx": 132, "global_frame_idx": 9379, "task_index": 6}, {"db_idx": 9380, "episode_idx": 33, "frame_idx": 133, "global_frame_idx": 9380, "task_index": 6}, {"db_idx": 9381, "episode_idx": 33, "frame_idx": 134, "global_frame_idx": 9381, "task_index": 6}, {"db_idx": 9382, "episode_idx": 33, "frame_idx": 135, "global_frame_idx": 9382, "task_index": 6}, {"db_idx": 9383, "episode_idx": 33, "frame_idx": 136, "global_frame_idx": 9383, "task_index": 6}, {"db_idx": 9384, "episode_idx": 33, "frame_idx": 137, "global_frame_idx": 9384, "task_index": 6}, {"db_idx": 9385, "episode_idx": 33, "frame_idx": 138, "global_frame_idx": 9385, "task_index": 6}, {"db_idx": 9386, "episode_idx": 33, "frame_idx": 139, "global_frame_idx": 9386, "task_index": 6}, {"db_idx": 9387, "episode_idx": 33, "frame_idx": 140, "global_frame_idx": 9387, "task_index": 6}, {"db_idx": 9388, "episode_idx": 33, "frame_idx": 141, "global_frame_idx": 9388, "task_index": 6}, {"db_idx": 9389, "episode_idx": 33, "frame_idx": 142, "global_frame_idx": 9389, "task_index": 6}, {"db_idx": 9390, "episode_idx": 33, "frame_idx": 143, "global_frame_idx": 9390, "task_index": 6}, {"db_idx": 9391, "episode_idx": 33, "frame_idx": 144, "global_frame_idx": 9391, "task_index": 6}, {"db_idx": 9392, "episode_idx": 33, "frame_idx": 145, "global_frame_idx": 9392, "task_index": 6}, {"db_idx": 9393, "episode_idx": 33, "frame_idx": 146, "global_frame_idx": 9393, "task_index": 6}, {"db_idx": 9394, "episode_idx": 33, "frame_idx": 147, "global_frame_idx": 9394, "task_index": 6}, {"db_idx": 9395, "episode_idx": 33, "frame_idx": 148, "global_frame_idx": 
9395, "task_index": 6}, {"db_idx": 9396, "episode_idx": 33, "frame_idx": 149, "global_frame_idx": 9396, "task_index": 6}, {"db_idx": 9397, "episode_idx": 33, "frame_idx": 150, "global_frame_idx": 9397, "task_index": 6}, {"db_idx": 9398, "episode_idx": 33, "frame_idx": 151, "global_frame_idx": 9398, "task_index": 6}, {"db_idx": 9399, "episode_idx": 33, "frame_idx": 152, "global_frame_idx": 9399, "task_index": 6}, {"db_idx": 9400, "episode_idx": 33, "frame_idx": 153, "global_frame_idx": 9400, "task_index": 6}, {"db_idx": 9401, "episode_idx": 33, "frame_idx": 154, "global_frame_idx": 9401, "task_index": 6}, {"db_idx": 9402, "episode_idx": 33, "frame_idx": 155, "global_frame_idx": 9402, "task_index": 6}, {"db_idx": 9403, "episode_idx": 33, "frame_idx": 156, "global_frame_idx": 9403, "task_index": 6}, {"db_idx": 9404, "episode_idx": 33, "frame_idx": 157, "global_frame_idx": 9404, "task_index": 6}, {"db_idx": 9405, "episode_idx": 33, "frame_idx": 158, "global_frame_idx": 9405, "task_index": 6}, {"db_idx": 9406, "episode_idx": 33, "frame_idx": 159, "global_frame_idx": 9406, "task_index": 6}, {"db_idx": 9407, "episode_idx": 33, "frame_idx": 160, "global_frame_idx": 9407, "task_index": 6}, {"db_idx": 9408, "episode_idx": 33, "frame_idx": 161, "global_frame_idx": 9408, "task_index": 6}, {"db_idx": 9409, "episode_idx": 33, "frame_idx": 162, "global_frame_idx": 9409, "task_index": 6}, {"db_idx": 9410, "episode_idx": 33, "frame_idx": 163, "global_frame_idx": 9410, "task_index": 6}, {"db_idx": 9411, "episode_idx": 33, "frame_idx": 164, "global_frame_idx": 9411, "task_index": 6}, {"db_idx": 9412, "episode_idx": 33, "frame_idx": 165, "global_frame_idx": 9412, "task_index": 6}, {"db_idx": 9413, "episode_idx": 33, "frame_idx": 166, "global_frame_idx": 9413, "task_index": 6}, {"db_idx": 9414, "episode_idx": 33, "frame_idx": 167, "global_frame_idx": 9414, "task_index": 6}, {"db_idx": 9415, "episode_idx": 33, "frame_idx": 168, "global_frame_idx": 9415, "task_index": 6}, {"db_idx": 
9416, "episode_idx": 33, "frame_idx": 169, "global_frame_idx": 9416, "task_index": 6}, {"db_idx": 9417, "episode_idx": 33, "frame_idx": 170, "global_frame_idx": 9417, "task_index": 6}, {"db_idx": 9418, "episode_idx": 33, "frame_idx": 171, "global_frame_idx": 9418, "task_index": 6}, {"db_idx": 9419, "episode_idx": 33, "frame_idx": 172, "global_frame_idx": 9419, "task_index": 6}, {"db_idx": 9420, "episode_idx": 33, "frame_idx": 173, "global_frame_idx": 9420, "task_index": 6}, {"db_idx": 9421, "episode_idx": 33, "frame_idx": 174, "global_frame_idx": 9421, "task_index": 6}, {"db_idx": 9422, "episode_idx": 33, "frame_idx": 175, "global_frame_idx": 9422, "task_index": 6}, {"db_idx": 9423, "episode_idx": 33, "frame_idx": 176, "global_frame_idx": 9423, "task_index": 6}, {"db_idx": 9424, "episode_idx": 33, "frame_idx": 177, "global_frame_idx": 9424, "task_index": 6}, {"db_idx": 9425, "episode_idx": 33, "frame_idx": 178, "global_frame_idx": 9425, "task_index": 6}, {"db_idx": 9426, "episode_idx": 33, "frame_idx": 179, "global_frame_idx": 9426, "task_index": 6}, {"db_idx": 9427, "episode_idx": 33, "frame_idx": 180, "global_frame_idx": 9427, "task_index": 6}, {"db_idx": 9428, "episode_idx": 33, "frame_idx": 181, "global_frame_idx": 9428, "task_index": 6}, {"db_idx": 9429, "episode_idx": 33, "frame_idx": 182, "global_frame_idx": 9429, "task_index": 6}, {"db_idx": 9430, "episode_idx": 33, "frame_idx": 183, "global_frame_idx": 9430, "task_index": 6}, {"db_idx": 9431, "episode_idx": 33, "frame_idx": 184, "global_frame_idx": 9431, "task_index": 6}, {"db_idx": 9432, "episode_idx": 33, "frame_idx": 185, "global_frame_idx": 9432, "task_index": 6}, {"db_idx": 9433, "episode_idx": 33, "frame_idx": 186, "global_frame_idx": 9433, "task_index": 6}, {"db_idx": 9434, "episode_idx": 33, "frame_idx": 187, "global_frame_idx": 9434, "task_index": 6}, {"db_idx": 9435, "episode_idx": 33, "frame_idx": 188, "global_frame_idx": 9435, "task_index": 6}, {"db_idx": 9436, "episode_idx": 33, "frame_idx": 
189, "global_frame_idx": 9436, "task_index": 6}, {"db_idx": 9437, "episode_idx": 33, "frame_idx": 190, "global_frame_idx": 9437, "task_index": 6}, {"db_idx": 9438, "episode_idx": 33, "frame_idx": 191, "global_frame_idx": 9438, "task_index": 6}, {"db_idx": 9439, "episode_idx": 33, "frame_idx": 192, "global_frame_idx": 9439, "task_index": 6}, {"db_idx": 9440, "episode_idx": 33, "frame_idx": 193, "global_frame_idx": 9440, "task_index": 6}, {"db_idx": 9441, "episode_idx": 33, "frame_idx": 194, "global_frame_idx": 9441, "task_index": 6}, {"db_idx": 9442, "episode_idx": 33, "frame_idx": 195, "global_frame_idx": 9442, "task_index": 6}, {"db_idx": 9443, "episode_idx": 33, "frame_idx": 196, "global_frame_idx": 9443, "task_index": 6}, {"db_idx": 9444, "episode_idx": 33, "frame_idx": 197, "global_frame_idx": 9444, "task_index": 6}, {"db_idx": 9445, "episode_idx": 33, "frame_idx": 198, "global_frame_idx": 9445, "task_index": 6}, {"db_idx": 9446, "episode_idx": 33, "frame_idx": 199, "global_frame_idx": 9446, "task_index": 6}, {"db_idx": 9447, "episode_idx": 33, "frame_idx": 200, "global_frame_idx": 9447, "task_index": 6}, {"db_idx": 9448, "episode_idx": 33, "frame_idx": 201, "global_frame_idx": 9448, "task_index": 6}, {"db_idx": 9449, "episode_idx": 33, "frame_idx": 202, "global_frame_idx": 9449, "task_index": 6}, {"db_idx": 9450, "episode_idx": 33, "frame_idx": 203, "global_frame_idx": 9450, "task_index": 6}, {"db_idx": 9451, "episode_idx": 33, "frame_idx": 204, "global_frame_idx": 9451, "task_index": 6}, {"db_idx": 9452, "episode_idx": 33, "frame_idx": 205, "global_frame_idx": 9452, "task_index": 6}, {"db_idx": 9453, "episode_idx": 33, "frame_idx": 206, "global_frame_idx": 9453, "task_index": 6}, {"db_idx": 9454, "episode_idx": 33, "frame_idx": 207, "global_frame_idx": 9454, "task_index": 6}, {"db_idx": 9455, "episode_idx": 33, "frame_idx": 208, "global_frame_idx": 9455, "task_index": 6}, {"db_idx": 9456, "episode_idx": 33, "frame_idx": 209, "global_frame_idx": 9456, 
"task_index": 6}, {"db_idx": 9457, "episode_idx": 33, "frame_idx": 210, "global_frame_idx": 9457, "task_index": 6}, {"db_idx": 9458, "episode_idx": 33, "frame_idx": 211, "global_frame_idx": 9458, "task_index": 6}, {"db_idx": 9459, "episode_idx": 33, "frame_idx": 212, "global_frame_idx": 9459, "task_index": 6}, {"db_idx": 9460, "episode_idx": 33, "frame_idx": 213, "global_frame_idx": 9460, "task_index": 6}, {"db_idx": 9461, "episode_idx": 33, "frame_idx": 214, "global_frame_idx": 9461, "task_index": 6}, {"db_idx": 9462, "episode_idx": 33, "frame_idx": 215, "global_frame_idx": 9462, "task_index": 6}, {"db_idx": 9463, "episode_idx": 33, "frame_idx": 216, "global_frame_idx": 9463, "task_index": 6}, {"db_idx": 9464, "episode_idx": 33, "frame_idx": 217, "global_frame_idx": 9464, "task_index": 6}, {"db_idx": 9465, "episode_idx": 33, "frame_idx": 218, "global_frame_idx": 9465, "task_index": 6}, {"db_idx": 9466, "episode_idx": 33, "frame_idx": 219, "global_frame_idx": 9466, "task_index": 6}, {"db_idx": 9467, "episode_idx": 33, "frame_idx": 220, "global_frame_idx": 9467, "task_index": 6}, {"db_idx": 9468, "episode_idx": 33, "frame_idx": 221, "global_frame_idx": 9468, "task_index": 6}, {"db_idx": 9469, "episode_idx": 33, "frame_idx": 222, "global_frame_idx": 9469, "task_index": 6}, {"db_idx": 9470, "episode_idx": 33, "frame_idx": 223, "global_frame_idx": 9470, "task_index": 6}, {"db_idx": 9471, "episode_idx": 33, "frame_idx": 224, "global_frame_idx": 9471, "task_index": 6}, {"db_idx": 9472, "episode_idx": 33, "frame_idx": 225, "global_frame_idx": 9472, "task_index": 6}, {"db_idx": 9473, "episode_idx": 33, "frame_idx": 226, "global_frame_idx": 9473, "task_index": 6}, {"db_idx": 9474, "episode_idx": 33, "frame_idx": 227, "global_frame_idx": 9474, "task_index": 6}, {"db_idx": 9475, "episode_idx": 33, "frame_idx": 228, "global_frame_idx": 9475, "task_index": 6}, {"db_idx": 9476, "episode_idx": 33, "frame_idx": 229, "global_frame_idx": 9476, "task_index": 6}, {"db_idx": 9477, 
"episode_idx": 33, "frame_idx": 230, "global_frame_idx": 9477, "task_index": 6}, {"db_idx": 9478, "episode_idx": 33, "frame_idx": 231, "global_frame_idx": 9478, "task_index": 6}, {"db_idx": 9479, "episode_idx": 33, "frame_idx": 232, "global_frame_idx": 9479, "task_index": 6}, {"db_idx": 9480, "episode_idx": 33, "frame_idx": 233, "global_frame_idx": 9480, "task_index": 6}, {"db_idx": 9481, "episode_idx": 33, "frame_idx": 234, "global_frame_idx": 9481, "task_index": 6}, {"db_idx": 9482, "episode_idx": 33, "frame_idx": 235, "global_frame_idx": 9482, "task_index": 6}, {"db_idx": 9483, "episode_idx": 33, "frame_idx": 236, "global_frame_idx": 9483, "task_index": 6}, {"db_idx": 9484, "episode_idx": 33, "frame_idx": 237, "global_frame_idx": 9484, "task_index": 6}, {"db_idx": 9485, "episode_idx": 33, "frame_idx": 238, "global_frame_idx": 9485, "task_index": 6}, {"db_idx": 9486, "episode_idx": 33, "frame_idx": 239, "global_frame_idx": 9486, "task_index": 6}, {"db_idx": 9487, "episode_idx": 33, "frame_idx": 240, "global_frame_idx": 9487, "task_index": 6}, {"db_idx": 9488, "episode_idx": 33, "frame_idx": 241, "global_frame_idx": 9488, "task_index": 6}, {"db_idx": 9489, "episode_idx": 33, "frame_idx": 242, "global_frame_idx": 9489, "task_index": 6}, {"db_idx": 9490, "episode_idx": 33, "frame_idx": 243, "global_frame_idx": 9490, "task_index": 6}, {"db_idx": 9491, "episode_idx": 33, "frame_idx": 244, "global_frame_idx": 9491, "task_index": 6}, {"db_idx": 9492, "episode_idx": 33, "frame_idx": 245, "global_frame_idx": 9492, "task_index": 6}, {"db_idx": 9493, "episode_idx": 33, "frame_idx": 246, "global_frame_idx": 9493, "task_index": 6}, {"db_idx": 9494, "episode_idx": 33, "frame_idx": 247, "global_frame_idx": 9494, "task_index": 6}, {"db_idx": 9495, "episode_idx": 33, "frame_idx": 248, "global_frame_idx": 9495, "task_index": 6}, {"db_idx": 9496, "episode_idx": 33, "frame_idx": 249, "global_frame_idx": 9496, "task_index": 6}, {"db_idx": 9497, "episode_idx": 33, "frame_idx": 250, 
"global_frame_idx": 9497, "task_index": 6}, {"db_idx": 9498, "episode_idx": 33, "frame_idx": 251, "global_frame_idx": 9498, "task_index": 6}, {"db_idx": 9499, "episode_idx": 33, "frame_idx": 252, "global_frame_idx": 9499, "task_index": 6}, {"db_idx": 9500, "episode_idx": 33, "frame_idx": 253, "global_frame_idx": 9500, "task_index": 6}, {"db_idx": 9501, "episode_idx": 33, "frame_idx": 254, "global_frame_idx": 9501, "task_index": 6}, {"db_idx": 9502, "episode_idx": 33, "frame_idx": 255, "global_frame_idx": 9502, "task_index": 6}, {"db_idx": 9503, "episode_idx": 33, "frame_idx": 256, "global_frame_idx": 9503, "task_index": 6}, {"db_idx": 9504, "episode_idx": 33, "frame_idx": 257, "global_frame_idx": 9504, "task_index": 6}, {"db_idx": 9505, "episode_idx": 33, "frame_idx": 258, "global_frame_idx": 9505, "task_index": 6}, {"db_idx": 9506, "episode_idx": 33, "frame_idx": 259, "global_frame_idx": 9506, "task_index": 6}, {"db_idx": 9507, "episode_idx": 33, "frame_idx": 260, "global_frame_idx": 9507, "task_index": 6}, {"db_idx": 9508, "episode_idx": 33, "frame_idx": 261, "global_frame_idx": 9508, "task_index": 6}, {"db_idx": 9509, "episode_idx": 33, "frame_idx": 262, "global_frame_idx": 9509, "task_index": 6}, {"db_idx": 9510, "episode_idx": 33, "frame_idx": 263, "global_frame_idx": 9510, "task_index": 6}, {"db_idx": 9511, "episode_idx": 33, "frame_idx": 264, "global_frame_idx": 9511, "task_index": 6}, {"db_idx": 9512, "episode_idx": 33, "frame_idx": 265, "global_frame_idx": 9512, "task_index": 6}, {"db_idx": 9513, "episode_idx": 33, "frame_idx": 266, "global_frame_idx": 9513, "task_index": 6}, {"db_idx": 9514, "episode_idx": 33, "frame_idx": 267, "global_frame_idx": 9514, "task_index": 6}, {"db_idx": 9515, "episode_idx": 33, "frame_idx": 268, "global_frame_idx": 9515, "task_index": 6}, {"db_idx": 9516, "episode_idx": 33, "frame_idx": 269, "global_frame_idx": 9516, "task_index": 6}, {"db_idx": 9517, "episode_idx": 33, "frame_idx": 270, "global_frame_idx": 9517, "task_index": 
6}, {"db_idx": 9518, "episode_idx": 33, "frame_idx": 271, "global_frame_idx": 9518, "task_index": 6}, {"db_idx": 9519, "episode_idx": 33, "frame_idx": 272, "global_frame_idx": 9519, "task_index": 6}, {"db_idx": 9520, "episode_idx": 33, "frame_idx": 273, "global_frame_idx": 9520, "task_index": 6}, {"db_idx": 9521, "episode_idx": 33, "frame_idx": 274, "global_frame_idx": 9521, "task_index": 6}, {"db_idx": 9522, "episode_idx": 33, "frame_idx": 275, "global_frame_idx": 9522, "task_index": 6}, {"db_idx": 9523, "episode_idx": 33, "frame_idx": 276, "global_frame_idx": 9523, "task_index": 6}, {"db_idx": 9524, "episode_idx": 33, "frame_idx": 277, "global_frame_idx": 9524, "task_index": 6}, {"db_idx": 9525, "episode_idx": 33, "frame_idx": 278, "global_frame_idx": 9525, "task_index": 6}, {"db_idx": 9526, "episode_idx": 33, "frame_idx": 279, "global_frame_idx": 9526, "task_index": 6}, {"db_idx": 9527, "episode_idx": 33, "frame_idx": 280, "global_frame_idx": 9527, "task_index": 6}, {"db_idx": 9528, "episode_idx": 33, "frame_idx": 281, "global_frame_idx": 9528, "task_index": 6}, {"db_idx": 9529, "episode_idx": 33, "frame_idx": 282, "global_frame_idx": 9529, "task_index": 6}, {"db_idx": 9530, "episode_idx": 33, "frame_idx": 283, "global_frame_idx": 9530, "task_index": 6}, {"db_idx": 9531, "episode_idx": 33, "frame_idx": 284, "global_frame_idx": 9531, "task_index": 6}, {"db_idx": 9532, "episode_idx": 33, "frame_idx": 285, "global_frame_idx": 9532, "task_index": 6}, {"db_idx": 9533, "episode_idx": 33, "frame_idx": 286, "global_frame_idx": 9533, "task_index": 6}, {"db_idx": 9534, "episode_idx": 33, "frame_idx": 287, "global_frame_idx": 9534, "task_index": 6}, {"db_idx": 9535, "episode_idx": 33, "frame_idx": 288, "global_frame_idx": 9535, "task_index": 6}, {"db_idx": 9536, "episode_idx": 33, "frame_idx": 289, "global_frame_idx": 9536, "task_index": 6}, {"db_idx": 9537, "episode_idx": 33, "frame_idx": 290, "global_frame_idx": 9537, "task_index": 6}, {"db_idx": 9538, "episode_idx": 33, 
"frame_idx": 291, "global_frame_idx": 9538, "task_index": 6}, {"db_idx": 9539, "episode_idx": 33, "frame_idx": 292, "global_frame_idx": 9539, "task_index": 6}, {"db_idx": 9540, "episode_idx": 33, "frame_idx": 293, "global_frame_idx": 9540, "task_index": 6}, {"db_idx": 9541, "episode_idx": 33, "frame_idx": 294, "global_frame_idx": 9541, "task_index": 6}, {"db_idx": 9542, "episode_idx": 33, "frame_idx": 295, "global_frame_idx": 9542, "task_index": 6}, {"db_idx": 9543, "episode_idx": 33, "frame_idx": 296, "global_frame_idx": 9543, "task_index": 6}, {"db_idx": 9544, "episode_idx": 33, "frame_idx": 297, "global_frame_idx": 9544, "task_index": 6}, {"db_idx": 9545, "episode_idx": 33, "frame_idx": 298, "global_frame_idx": 9545, "task_index": 6}, {"db_idx": 9546, "episode_idx": 34, "frame_idx": 0, "global_frame_idx": 9546, "task_index": 6}, {"db_idx": 9547, "episode_idx": 34, "frame_idx": 1, "global_frame_idx": 9547, "task_index": 6}, {"db_idx": 9548, "episode_idx": 34, "frame_idx": 2, "global_frame_idx": 9548, "task_index": 6}, {"db_idx": 9549, "episode_idx": 34, "frame_idx": 3, "global_frame_idx": 9549, "task_index": 6}, {"db_idx": 9550, "episode_idx": 34, "frame_idx": 4, "global_frame_idx": 9550, "task_index": 6}, {"db_idx": 9551, "episode_idx": 34, "frame_idx": 5, "global_frame_idx": 9551, "task_index": 6}, {"db_idx": 9552, "episode_idx": 34, "frame_idx": 6, "global_frame_idx": 9552, "task_index": 6}, {"db_idx": 9553, "episode_idx": 34, "frame_idx": 7, "global_frame_idx": 9553, "task_index": 6}, {"db_idx": 9554, "episode_idx": 34, "frame_idx": 8, "global_frame_idx": 9554, "task_index": 6}, {"db_idx": 9555, "episode_idx": 34, "frame_idx": 9, "global_frame_idx": 9555, "task_index": 6}, {"db_idx": 9556, "episode_idx": 34, "frame_idx": 10, "global_frame_idx": 9556, "task_index": 6}, {"db_idx": 9557, "episode_idx": 34, "frame_idx": 11, "global_frame_idx": 9557, "task_index": 6}, {"db_idx": 9558, "episode_idx": 34, "frame_idx": 12, "global_frame_idx": 9558, "task_index": 6}, 
{"db_idx": 9559, "episode_idx": 34, "frame_idx": 13, "global_frame_idx": 9559, "task_index": 6}, {"db_idx": 9560, "episode_idx": 34, "frame_idx": 14, "global_frame_idx": 9560, "task_index": 6}, {"db_idx": 9561, "episode_idx": 34, "frame_idx": 15, "global_frame_idx": 9561, "task_index": 6}, {"db_idx": 9562, "episode_idx": 34, "frame_idx": 16, "global_frame_idx": 9562, "task_index": 6}, {"db_idx": 9563, "episode_idx": 34, "frame_idx": 17, "global_frame_idx": 9563, "task_index": 6}, {"db_idx": 9564, "episode_idx": 34, "frame_idx": 18, "global_frame_idx": 9564, "task_index": 6}, {"db_idx": 9565, "episode_idx": 34, "frame_idx": 19, "global_frame_idx": 9565, "task_index": 6}, {"db_idx": 9566, "episode_idx": 34, "frame_idx": 20, "global_frame_idx": 9566, "task_index": 6}, {"db_idx": 9567, "episode_idx": 34, "frame_idx": 21, "global_frame_idx": 9567, "task_index": 6}, {"db_idx": 9568, "episode_idx": 34, "frame_idx": 22, "global_frame_idx": 9568, "task_index": 6}, {"db_idx": 9569, "episode_idx": 34, "frame_idx": 23, "global_frame_idx": 9569, "task_index": 6}, {"db_idx": 9570, "episode_idx": 34, "frame_idx": 24, "global_frame_idx": 9570, "task_index": 6}, {"db_idx": 9571, "episode_idx": 34, "frame_idx": 25, "global_frame_idx": 9571, "task_index": 6}, {"db_idx": 9572, "episode_idx": 34, "frame_idx": 26, "global_frame_idx": 9572, "task_index": 6}, {"db_idx": 9573, "episode_idx": 34, "frame_idx": 27, "global_frame_idx": 9573, "task_index": 6}, {"db_idx": 9574, "episode_idx": 34, "frame_idx": 28, "global_frame_idx": 9574, "task_index": 6}, {"db_idx": 9575, "episode_idx": 34, "frame_idx": 29, "global_frame_idx": 9575, "task_index": 6}, {"db_idx": 9576, "episode_idx": 34, "frame_idx": 30, "global_frame_idx": 9576, "task_index": 6}, {"db_idx": 9577, "episode_idx": 34, "frame_idx": 31, "global_frame_idx": 9577, "task_index": 6}, {"db_idx": 9578, "episode_idx": 34, "frame_idx": 32, "global_frame_idx": 9578, "task_index": 6}, {"db_idx": 9579, "episode_idx": 34, "frame_idx": 33, 
"global_frame_idx": 9579, "task_index": 6}, {"db_idx": 9580, "episode_idx": 34, "frame_idx": 34, "global_frame_idx": 9580, "task_index": 6}, {"db_idx": 9581, "episode_idx": 34, "frame_idx": 35, "global_frame_idx": 9581, "task_index": 6}, {"db_idx": 9582, "episode_idx": 34, "frame_idx": 36, "global_frame_idx": 9582, "task_index": 6}, {"db_idx": 9583, "episode_idx": 34, "frame_idx": 37, "global_frame_idx": 9583, "task_index": 6}, {"db_idx": 9584, "episode_idx": 34, "frame_idx": 38, "global_frame_idx": 9584, "task_index": 6}, {"db_idx": 9585, "episode_idx": 34, "frame_idx": 39, "global_frame_idx": 9585, "task_index": 6}, {"db_idx": 9586, "episode_idx": 34, "frame_idx": 40, "global_frame_idx": 9586, "task_index": 6}, {"db_idx": 9587, "episode_idx": 34, "frame_idx": 41, "global_frame_idx": 9587, "task_index": 6}, {"db_idx": 9588, "episode_idx": 34, "frame_idx": 42, "global_frame_idx": 9588, "task_index": 6}, {"db_idx": 9589, "episode_idx": 34, "frame_idx": 43, "global_frame_idx": 9589, "task_index": 6}, {"db_idx": 9590, "episode_idx": 34, "frame_idx": 44, "global_frame_idx": 9590, "task_index": 6}, {"db_idx": 9591, "episode_idx": 34, "frame_idx": 45, "global_frame_idx": 9591, "task_index": 6}, {"db_idx": 9592, "episode_idx": 34, "frame_idx": 46, "global_frame_idx": 9592, "task_index": 6}, {"db_idx": 9593, "episode_idx": 34, "frame_idx": 47, "global_frame_idx": 9593, "task_index": 6}, {"db_idx": 9594, "episode_idx": 34, "frame_idx": 48, "global_frame_idx": 9594, "task_index": 6}, {"db_idx": 9595, "episode_idx": 34, "frame_idx": 49, "global_frame_idx": 9595, "task_index": 6}, {"db_idx": 9596, "episode_idx": 34, "frame_idx": 50, "global_frame_idx": 9596, "task_index": 6}, {"db_idx": 9597, "episode_idx": 34, "frame_idx": 51, "global_frame_idx": 9597, "task_index": 6}, {"db_idx": 9598, "episode_idx": 34, "frame_idx": 52, "global_frame_idx": 9598, "task_index": 6}, {"db_idx": 9599, "episode_idx": 34, "frame_idx": 53, "global_frame_idx": 9599, "task_index": 6}, {"db_idx": 
9600, "episode_idx": 34, "frame_idx": 54, "global_frame_idx": 9600, "task_index": 6}, {"db_idx": 9601, "episode_idx": 34, "frame_idx": 55, "global_frame_idx": 9601, "task_index": 6}, {"db_idx": 9602, "episode_idx": 34, "frame_idx": 56, "global_frame_idx": 9602, "task_index": 6}, {"db_idx": 9603, "episode_idx": 34, "frame_idx": 57, "global_frame_idx": 9603, "task_index": 6}, {"db_idx": 9604, "episode_idx": 34, "frame_idx": 58, "global_frame_idx": 9604, "task_index": 6}, {"db_idx": 9605, "episode_idx": 34, "frame_idx": 59, "global_frame_idx": 9605, "task_index": 6}, {"db_idx": 9606, "episode_idx": 34, "frame_idx": 60, "global_frame_idx": 9606, "task_index": 6}, {"db_idx": 9607, "episode_idx": 34, "frame_idx": 61, "global_frame_idx": 9607, "task_index": 6}, {"db_idx": 9608, "episode_idx": 34, "frame_idx": 62, "global_frame_idx": 9608, "task_index": 6}, {"db_idx": 9609, "episode_idx": 34, "frame_idx": 63, "global_frame_idx": 9609, "task_index": 6}, {"db_idx": 9610, "episode_idx": 34, "frame_idx": 64, "global_frame_idx": 9610, "task_index": 6}, {"db_idx": 9611, "episode_idx": 34, "frame_idx": 65, "global_frame_idx": 9611, "task_index": 6}, {"db_idx": 9612, "episode_idx": 34, "frame_idx": 66, "global_frame_idx": 9612, "task_index": 6}, {"db_idx": 9613, "episode_idx": 34, "frame_idx": 67, "global_frame_idx": 9613, "task_index": 6}, {"db_idx": 9614, "episode_idx": 34, "frame_idx": 68, "global_frame_idx": 9614, "task_index": 6}, {"db_idx": 9615, "episode_idx": 34, "frame_idx": 69, "global_frame_idx": 9615, "task_index": 6}, {"db_idx": 9616, "episode_idx": 34, "frame_idx": 70, "global_frame_idx": 9616, "task_index": 6}, {"db_idx": 9617, "episode_idx": 34, "frame_idx": 71, "global_frame_idx": 9617, "task_index": 6}, {"db_idx": 9618, "episode_idx": 34, "frame_idx": 72, "global_frame_idx": 9618, "task_index": 6}, {"db_idx": 9619, "episode_idx": 34, "frame_idx": 73, "global_frame_idx": 9619, "task_index": 6}, {"db_idx": 9620, "episode_idx": 34, "frame_idx": 74, 
"global_frame_idx": 9620, "task_index": 6}, {"db_idx": 9621, "episode_idx": 34, "frame_idx": 75, "global_frame_idx": 9621, "task_index": 6}, {"db_idx": 9622, "episode_idx": 34, "frame_idx": 76, "global_frame_idx": 9622, "task_index": 6}, {"db_idx": 9623, "episode_idx": 34, "frame_idx": 77, "global_frame_idx": 9623, "task_index": 6}, {"db_idx": 9624, "episode_idx": 34, "frame_idx": 78, "global_frame_idx": 9624, "task_index": 6}, {"db_idx": 9625, "episode_idx": 34, "frame_idx": 79, "global_frame_idx": 9625, "task_index": 6}, {"db_idx": 9626, "episode_idx": 34, "frame_idx": 80, "global_frame_idx": 9626, "task_index": 6}, {"db_idx": 9627, "episode_idx": 34, "frame_idx": 81, "global_frame_idx": 9627, "task_index": 6}, {"db_idx": 9628, "episode_idx": 34, "frame_idx": 82, "global_frame_idx": 9628, "task_index": 6}, {"db_idx": 9629, "episode_idx": 34, "frame_idx": 83, "global_frame_idx": 9629, "task_index": 6}, {"db_idx": 9630, "episode_idx": 34, "frame_idx": 84, "global_frame_idx": 9630, "task_index": 6}, {"db_idx": 9631, "episode_idx": 34, "frame_idx": 85, "global_frame_idx": 9631, "task_index": 6}, {"db_idx": 9632, "episode_idx": 34, "frame_idx": 86, "global_frame_idx": 9632, "task_index": 6}, {"db_idx": 9633, "episode_idx": 34, "frame_idx": 87, "global_frame_idx": 9633, "task_index": 6}, {"db_idx": 9634, "episode_idx": 34, "frame_idx": 88, "global_frame_idx": 9634, "task_index": 6}, {"db_idx": 9635, "episode_idx": 34, "frame_idx": 89, "global_frame_idx": 9635, "task_index": 6}, {"db_idx": 9636, "episode_idx": 34, "frame_idx": 90, "global_frame_idx": 9636, "task_index": 6}, {"db_idx": 9637, "episode_idx": 34, "frame_idx": 91, "global_frame_idx": 9637, "task_index": 6}, {"db_idx": 9638, "episode_idx": 34, "frame_idx": 92, "global_frame_idx": 9638, "task_index": 6}, {"db_idx": 9639, "episode_idx": 34, "frame_idx": 93, "global_frame_idx": 9639, "task_index": 6}, {"db_idx": 9640, "episode_idx": 34, "frame_idx": 94, "global_frame_idx": 9640, "task_index": 6}, {"db_idx": 
9641, "episode_idx": 34, "frame_idx": 95, "global_frame_idx": 9641, "task_index": 6}, {"db_idx": 9642, "episode_idx": 34, "frame_idx": 96, "global_frame_idx": 9642, "task_index": 6}, {"db_idx": 9643, "episode_idx": 34, "frame_idx": 97, "global_frame_idx": 9643, "task_index": 6}, {"db_idx": 9644, "episode_idx": 34, "frame_idx": 98, "global_frame_idx": 9644, "task_index": 6}, {"db_idx": 9645, "episode_idx": 34, "frame_idx": 99, "global_frame_idx": 9645, "task_index": 6}, {"db_idx": 9646, "episode_idx": 34, "frame_idx": 100, "global_frame_idx": 9646, "task_index": 6}, {"db_idx": 9647, "episode_idx": 34, "frame_idx": 101, "global_frame_idx": 9647, "task_index": 6}, {"db_idx": 9648, "episode_idx": 34, "frame_idx": 102, "global_frame_idx": 9648, "task_index": 6}, {"db_idx": 9649, "episode_idx": 34, "frame_idx": 103, "global_frame_idx": 9649, "task_index": 6}, {"db_idx": 9650, "episode_idx": 34, "frame_idx": 104, "global_frame_idx": 9650, "task_index": 6}, {"db_idx": 9651, "episode_idx": 34, "frame_idx": 105, "global_frame_idx": 9651, "task_index": 6}, {"db_idx": 9652, "episode_idx": 34, "frame_idx": 106, "global_frame_idx": 9652, "task_index": 6}, {"db_idx": 9653, "episode_idx": 34, "frame_idx": 107, "global_frame_idx": 9653, "task_index": 6}, {"db_idx": 9654, "episode_idx": 34, "frame_idx": 108, "global_frame_idx": 9654, "task_index": 6}, {"db_idx": 9655, "episode_idx": 34, "frame_idx": 109, "global_frame_idx": 9655, "task_index": 6}, {"db_idx": 9656, "episode_idx": 34, "frame_idx": 110, "global_frame_idx": 9656, "task_index": 6}, {"db_idx": 9657, "episode_idx": 34, "frame_idx": 111, "global_frame_idx": 9657, "task_index": 6}, {"db_idx": 9658, "episode_idx": 34, "frame_idx": 112, "global_frame_idx": 9658, "task_index": 6}, {"db_idx": 9659, "episode_idx": 34, "frame_idx": 113, "global_frame_idx": 9659, "task_index": 6}, {"db_idx": 9660, "episode_idx": 34, "frame_idx": 114, "global_frame_idx": 9660, "task_index": 6}, {"db_idx": 9661, "episode_idx": 34, "frame_idx": 115, 
"global_frame_idx": 9661, "task_index": 6}, {"db_idx": 9662, "episode_idx": 34, "frame_idx": 116, "global_frame_idx": 9662, "task_index": 6}, {"db_idx": 9663, "episode_idx": 34, "frame_idx": 117, "global_frame_idx": 9663, "task_index": 6}, {"db_idx": 9664, "episode_idx": 34, "frame_idx": 118, "global_frame_idx": 9664, "task_index": 6}, {"db_idx": 9665, "episode_idx": 34, "frame_idx": 119, "global_frame_idx": 9665, "task_index": 6}, {"db_idx": 9666, "episode_idx": 34, "frame_idx": 120, "global_frame_idx": 9666, "task_index": 6}, {"db_idx": 9667, "episode_idx": 34, "frame_idx": 121, "global_frame_idx": 9667, "task_index": 6}, {"db_idx": 9668, "episode_idx": 34, "frame_idx": 122, "global_frame_idx": 9668, "task_index": 6}, {"db_idx": 9669, "episode_idx": 34, "frame_idx": 123, "global_frame_idx": 9669, "task_index": 6}, {"db_idx": 9670, "episode_idx": 34, "frame_idx": 124, "global_frame_idx": 9670, "task_index": 6}, {"db_idx": 9671, "episode_idx": 34, "frame_idx": 125, "global_frame_idx": 9671, "task_index": 6}, {"db_idx": 9672, "episode_idx": 34, "frame_idx": 126, "global_frame_idx": 9672, "task_index": 6}, {"db_idx": 9673, "episode_idx": 34, "frame_idx": 127, "global_frame_idx": 9673, "task_index": 6}, {"db_idx": 9674, "episode_idx": 34, "frame_idx": 128, "global_frame_idx": 9674, "task_index": 6}, {"db_idx": 9675, "episode_idx": 34, "frame_idx": 129, "global_frame_idx": 9675, "task_index": 6}, {"db_idx": 9676, "episode_idx": 34, "frame_idx": 130, "global_frame_idx": 9676, "task_index": 6}, {"db_idx": 9677, "episode_idx": 34, "frame_idx": 131, "global_frame_idx": 9677, "task_index": 6}, {"db_idx": 9678, "episode_idx": 34, "frame_idx": 132, "global_frame_idx": 9678, "task_index": 6}, {"db_idx": 9679, "episode_idx": 34, "frame_idx": 133, "global_frame_idx": 9679, "task_index": 6}, {"db_idx": 9680, "episode_idx": 34, "frame_idx": 134, "global_frame_idx": 9680, "task_index": 6}, {"db_idx": 9681, "episode_idx": 34, "frame_idx": 135, "global_frame_idx": 9681, "task_index": 
6}, {"db_idx": 9682, "episode_idx": 34, "frame_idx": 136, "global_frame_idx": 9682, "task_index": 6}, {"db_idx": 9683, "episode_idx": 34, "frame_idx": 137, "global_frame_idx": 9683, "task_index": 6}, {"db_idx": 9684, "episode_idx": 34, "frame_idx": 138, "global_frame_idx": 9684, "task_index": 6}, {"db_idx": 9685, "episode_idx": 34, "frame_idx": 139, "global_frame_idx": 9685, "task_index": 6}, {"db_idx": 9686, "episode_idx": 34, "frame_idx": 140, "global_frame_idx": 9686, "task_index": 6}, {"db_idx": 9687, "episode_idx": 34, "frame_idx": 141, "global_frame_idx": 9687, "task_index": 6}, {"db_idx": 9688, "episode_idx": 34, "frame_idx": 142, "global_frame_idx": 9688, "task_index": 6}, {"db_idx": 9689, "episode_idx": 34, "frame_idx": 143, "global_frame_idx": 9689, "task_index": 6}, {"db_idx": 9690, "episode_idx": 34, "frame_idx": 144, "global_frame_idx": 9690, "task_index": 6}, {"db_idx": 9691, "episode_idx": 34, "frame_idx": 145, "global_frame_idx": 9691, "task_index": 6}, {"db_idx": 9692, "episode_idx": 34, "frame_idx": 146, "global_frame_idx": 9692, "task_index": 6}, {"db_idx": 9693, "episode_idx": 34, "frame_idx": 147, "global_frame_idx": 9693, "task_index": 6}, {"db_idx": 9694, "episode_idx": 34, "frame_idx": 148, "global_frame_idx": 9694, "task_index": 6}, {"db_idx": 9695, "episode_idx": 34, "frame_idx": 149, "global_frame_idx": 9695, "task_index": 6}, {"db_idx": 9696, "episode_idx": 34, "frame_idx": 150, "global_frame_idx": 9696, "task_index": 6}, {"db_idx": 9697, "episode_idx": 34, "frame_idx": 151, "global_frame_idx": 9697, "task_index": 6}, {"db_idx": 9698, "episode_idx": 34, "frame_idx": 152, "global_frame_idx": 9698, "task_index": 6}, {"db_idx": 9699, "episode_idx": 34, "frame_idx": 153, "global_frame_idx": 9699, "task_index": 6}, {"db_idx": 9700, "episode_idx": 34, "frame_idx": 154, "global_frame_idx": 9700, "task_index": 6}, {"db_idx": 9701, "episode_idx": 34, "frame_idx": 155, "global_frame_idx": 9701, "task_index": 6}, {"db_idx": 9702, "episode_idx": 34, 
"frame_idx": 156, "global_frame_idx": 9702, "task_index": 6}, {"db_idx": 9703, "episode_idx": 34, "frame_idx": 157, "global_frame_idx": 9703, "task_index": 6}, {"db_idx": 9704, "episode_idx": 34, "frame_idx": 158, "global_frame_idx": 9704, "task_index": 6}, {"db_idx": 9705, "episode_idx": 34, "frame_idx": 159, "global_frame_idx": 9705, "task_index": 6}, {"db_idx": 9706, "episode_idx": 34, "frame_idx": 160, "global_frame_idx": 9706, "task_index": 6}, {"db_idx": 9707, "episode_idx": 34, "frame_idx": 161, "global_frame_idx": 9707, "task_index": 6}, {"db_idx": 9708, "episode_idx": 34, "frame_idx": 162, "global_frame_idx": 9708, "task_index": 6}, {"db_idx": 9709, "episode_idx": 34, "frame_idx": 163, "global_frame_idx": 9709, "task_index": 6}, {"db_idx": 9710, "episode_idx": 34, "frame_idx": 164, "global_frame_idx": 9710, "task_index": 6}, {"db_idx": 9711, "episode_idx": 34, "frame_idx": 165, "global_frame_idx": 9711, "task_index": 6}, {"db_idx": 9712, "episode_idx": 34, "frame_idx": 166, "global_frame_idx": 9712, "task_index": 6}, {"db_idx": 9713, "episode_idx": 34, "frame_idx": 167, "global_frame_idx": 9713, "task_index": 6}, {"db_idx": 9714, "episode_idx": 34, "frame_idx": 168, "global_frame_idx": 9714, "task_index": 6}, {"db_idx": 9715, "episode_idx": 34, "frame_idx": 169, "global_frame_idx": 9715, "task_index": 6}, {"db_idx": 9716, "episode_idx": 34, "frame_idx": 170, "global_frame_idx": 9716, "task_index": 6}, {"db_idx": 9717, "episode_idx": 34, "frame_idx": 171, "global_frame_idx": 9717, "task_index": 6}, {"db_idx": 9718, "episode_idx": 34, "frame_idx": 172, "global_frame_idx": 9718, "task_index": 6}, {"db_idx": 9719, "episode_idx": 34, "frame_idx": 173, "global_frame_idx": 9719, "task_index": 6}, {"db_idx": 9720, "episode_idx": 34, "frame_idx": 174, "global_frame_idx": 9720, "task_index": 6}, {"db_idx": 9721, "episode_idx": 34, "frame_idx": 175, "global_frame_idx": 9721, "task_index": 6}, {"db_idx": 9722, "episode_idx": 34, "frame_idx": 176, "global_frame_idx": 
9722, "task_index": 6}, {"db_idx": 9723, "episode_idx": 34, "frame_idx": 177, "global_frame_idx": 9723, "task_index": 6}, {"db_idx": 9724, "episode_idx": 34, "frame_idx": 178, "global_frame_idx": 9724, "task_index": 6}, {"db_idx": 9725, "episode_idx": 34, "frame_idx": 179, "global_frame_idx": 9725, "task_index": 6}, {"db_idx": 9726, "episode_idx": 34, "frame_idx": 180, "global_frame_idx": 9726, "task_index": 6}, {"db_idx": 9727, "episode_idx": 34, "frame_idx": 181, "global_frame_idx": 9727, "task_index": 6}, {"db_idx": 9728, "episode_idx": 34, "frame_idx": 182, "global_frame_idx": 9728, "task_index": 6}, {"db_idx": 9729, "episode_idx": 34, "frame_idx": 183, "global_frame_idx": 9729, "task_index": 6}, {"db_idx": 9730, "episode_idx": 34, "frame_idx": 184, "global_frame_idx": 9730, "task_index": 6}, {"db_idx": 9731, "episode_idx": 34, "frame_idx": 185, "global_frame_idx": 9731, "task_index": 6}, {"db_idx": 9732, "episode_idx": 34, "frame_idx": 186, "global_frame_idx": 9732, "task_index": 6}, {"db_idx": 9733, "episode_idx": 34, "frame_idx": 187, "global_frame_idx": 9733, "task_index": 6}, {"db_idx": 9734, "episode_idx": 34, "frame_idx": 188, "global_frame_idx": 9734, "task_index": 6}, {"db_idx": 9735, "episode_idx": 34, "frame_idx": 189, "global_frame_idx": 9735, "task_index": 6}, {"db_idx": 9736, "episode_idx": 34, "frame_idx": 190, "global_frame_idx": 9736, "task_index": 6}, {"db_idx": 9737, "episode_idx": 34, "frame_idx": 191, "global_frame_idx": 9737, "task_index": 6}, {"db_idx": 9738, "episode_idx": 34, "frame_idx": 192, "global_frame_idx": 9738, "task_index": 6}, {"db_idx": 9739, "episode_idx": 34, "frame_idx": 193, "global_frame_idx": 9739, "task_index": 6}, {"db_idx": 9740, "episode_idx": 34, "frame_idx": 194, "global_frame_idx": 9740, "task_index": 6}, {"db_idx": 9741, "episode_idx": 34, "frame_idx": 195, "global_frame_idx": 9741, "task_index": 6}, {"db_idx": 9742, "episode_idx": 34, "frame_idx": 196, "global_frame_idx": 9742, "task_index": 6}, {"db_idx": 
9743, "episode_idx": 34, "frame_idx": 197, "global_frame_idx": 9743, "task_index": 6}, {"db_idx": 9744, "episode_idx": 34, "frame_idx": 198, "global_frame_idx": 9744, "task_index": 6}, {"db_idx": 9745, "episode_idx": 34, "frame_idx": 199, "global_frame_idx": 9745, "task_index": 6}, {"db_idx": 9746, "episode_idx": 34, "frame_idx": 200, "global_frame_idx": 9746, "task_index": 6}, {"db_idx": 9747, "episode_idx": 34, "frame_idx": 201, "global_frame_idx": 9747, "task_index": 6}, {"db_idx": 9748, "episode_idx": 34, "frame_idx": 202, "global_frame_idx": 9748, "task_index": 6}, {"db_idx": 9749, "episode_idx": 34, "frame_idx": 203, "global_frame_idx": 9749, "task_index": 6}, {"db_idx": 9750, "episode_idx": 34, "frame_idx": 204, "global_frame_idx": 9750, "task_index": 6}, {"db_idx": 9751, "episode_idx": 34, "frame_idx": 205, "global_frame_idx": 9751, "task_index": 6}, {"db_idx": 9752, "episode_idx": 34, "frame_idx": 206, "global_frame_idx": 9752, "task_index": 6}, {"db_idx": 9753, "episode_idx": 34, "frame_idx": 207, "global_frame_idx": 9753, "task_index": 6}, {"db_idx": 9754, "episode_idx": 34, "frame_idx": 208, "global_frame_idx": 9754, "task_index": 6}, {"db_idx": 9755, "episode_idx": 34, "frame_idx": 209, "global_frame_idx": 9755, "task_index": 6}, {"db_idx": 9756, "episode_idx": 34, "frame_idx": 210, "global_frame_idx": 9756, "task_index": 6}, {"db_idx": 9757, "episode_idx": 34, "frame_idx": 211, "global_frame_idx": 9757, "task_index": 6}, {"db_idx": 9758, "episode_idx": 34, "frame_idx": 212, "global_frame_idx": 9758, "task_index": 6}, {"db_idx": 9759, "episode_idx": 34, "frame_idx": 213, "global_frame_idx": 9759, "task_index": 6}, {"db_idx": 9760, "episode_idx": 34, "frame_idx": 214, "global_frame_idx": 9760, "task_index": 6}, {"db_idx": 9761, "episode_idx": 34, "frame_idx": 215, "global_frame_idx": 9761, "task_index": 6}, {"db_idx": 9762, "episode_idx": 34, "frame_idx": 216, "global_frame_idx": 9762, "task_index": 6}, {"db_idx": 9763, "episode_idx": 34, "frame_idx": 
217, "global_frame_idx": 9763, "task_index": 6}, {"db_idx": 9764, "episode_idx": 34, "frame_idx": 218, "global_frame_idx": 9764, "task_index": 6}, {"db_idx": 9765, "episode_idx": 34, "frame_idx": 219, "global_frame_idx": 9765, "task_index": 6}, {"db_idx": 9766, "episode_idx": 34, "frame_idx": 220, "global_frame_idx": 9766, "task_index": 6}, {"db_idx": 9767, "episode_idx": 34, "frame_idx": 221, "global_frame_idx": 9767, "task_index": 6}, {"db_idx": 9768, "episode_idx": 34, "frame_idx": 222, "global_frame_idx": 9768, "task_index": 6}, {"db_idx": 9769, "episode_idx": 34, "frame_idx": 223, "global_frame_idx": 9769, "task_index": 6}, {"db_idx": 9770, "episode_idx": 34, "frame_idx": 224, "global_frame_idx": 9770, "task_index": 6}, {"db_idx": 9771, "episode_idx": 34, "frame_idx": 225, "global_frame_idx": 9771, "task_index": 6}, {"db_idx": 9772, "episode_idx": 34, "frame_idx": 226, "global_frame_idx": 9772, "task_index": 6}, {"db_idx": 9773, "episode_idx": 34, "frame_idx": 227, "global_frame_idx": 9773, "task_index": 6}, {"db_idx": 9774, "episode_idx": 34, "frame_idx": 228, "global_frame_idx": 9774, "task_index": 6}, {"db_idx": 9775, "episode_idx": 34, "frame_idx": 229, "global_frame_idx": 9775, "task_index": 6}, {"db_idx": 9776, "episode_idx": 34, "frame_idx": 230, "global_frame_idx": 9776, "task_index": 6}, {"db_idx": 9777, "episode_idx": 34, "frame_idx": 231, "global_frame_idx": 9777, "task_index": 6}, {"db_idx": 9778, "episode_idx": 34, "frame_idx": 232, "global_frame_idx": 9778, "task_index": 6}, {"db_idx": 9779, "episode_idx": 34, "frame_idx": 233, "global_frame_idx": 9779, "task_index": 6}, {"db_idx": 9780, "episode_idx": 34, "frame_idx": 234, "global_frame_idx": 9780, "task_index": 6}, {"db_idx": 9781, "episode_idx": 34, "frame_idx": 235, "global_frame_idx": 9781, "task_index": 6}, {"db_idx": 9782, "episode_idx": 34, "frame_idx": 236, "global_frame_idx": 9782, "task_index": 6}, {"db_idx": 9783, "episode_idx": 34, "frame_idx": 237, "global_frame_idx": 9783, 
"task_index": 6}, {"db_idx": 9784, "episode_idx": 34, "frame_idx": 238, "global_frame_idx": 9784, "task_index": 6}, {"db_idx": 9785, "episode_idx": 34, "frame_idx": 239, "global_frame_idx": 9785, "task_index": 6}, {"db_idx": 9786, "episode_idx": 34, "frame_idx": 240, "global_frame_idx": 9786, "task_index": 6}, {"db_idx": 9787, "episode_idx": 34, "frame_idx": 241, "global_frame_idx": 9787, "task_index": 6}, {"db_idx": 9788, "episode_idx": 34, "frame_idx": 242, "global_frame_idx": 9788, "task_index": 6}, {"db_idx": 9789, "episode_idx": 34, "frame_idx": 243, "global_frame_idx": 9789, "task_index": 6}, {"db_idx": 9790, "episode_idx": 34, "frame_idx": 244, "global_frame_idx": 9790, "task_index": 6}, {"db_idx": 9791, "episode_idx": 34, "frame_idx": 245, "global_frame_idx": 9791, "task_index": 6}, {"db_idx": 9792, "episode_idx": 34, "frame_idx": 246, "global_frame_idx": 9792, "task_index": 6}, {"db_idx": 9793, "episode_idx": 34, "frame_idx": 247, "global_frame_idx": 9793, "task_index": 6}, {"db_idx": 9794, "episode_idx": 34, "frame_idx": 248, "global_frame_idx": 9794, "task_index": 6}, {"db_idx": 9795, "episode_idx": 34, "frame_idx": 249, "global_frame_idx": 9795, "task_index": 6}, {"db_idx": 9796, "episode_idx": 34, "frame_idx": 250, "global_frame_idx": 9796, "task_index": 6}, {"db_idx": 9797, "episode_idx": 34, "frame_idx": 251, "global_frame_idx": 9797, "task_index": 6}, {"db_idx": 9798, "episode_idx": 34, "frame_idx": 252, "global_frame_idx": 9798, "task_index": 6}, {"db_idx": 9799, "episode_idx": 34, "frame_idx": 253, "global_frame_idx": 9799, "task_index": 6}, {"db_idx": 9800, "episode_idx": 34, "frame_idx": 254, "global_frame_idx": 9800, "task_index": 6}, {"db_idx": 9801, "episode_idx": 34, "frame_idx": 255, "global_frame_idx": 9801, "task_index": 6}, {"db_idx": 9802, "episode_idx": 34, "frame_idx": 256, "global_frame_idx": 9802, "task_index": 6}, {"db_idx": 9803, "episode_idx": 34, "frame_idx": 257, "global_frame_idx": 9803, "task_index": 6}, {"db_idx": 9804, 
"episode_idx": 34, "frame_idx": 258, "global_frame_idx": 9804, "task_index": 6}, {"db_idx": 9805, "episode_idx": 34, "frame_idx": 259, "global_frame_idx": 9805, "task_index": 6}, {"db_idx": 9806, "episode_idx": 34, "frame_idx": 260, "global_frame_idx": 9806, "task_index": 6}, {"db_idx": 9807, "episode_idx": 34, "frame_idx": 261, "global_frame_idx": 9807, "task_index": 6}, {"db_idx": 9808, "episode_idx": 34, "frame_idx": 262, "global_frame_idx": 9808, "task_index": 6}, {"db_idx": 9809, "episode_idx": 34, "frame_idx": 263, "global_frame_idx": 9809, "task_index": 6}, {"db_idx": 9810, "episode_idx": 34, "frame_idx": 264, "global_frame_idx": 9810, "task_index": 6}, {"db_idx": 9811, "episode_idx": 34, "frame_idx": 265, "global_frame_idx": 9811, "task_index": 6}, {"db_idx": 9812, "episode_idx": 34, "frame_idx": 266, "global_frame_idx": 9812, "task_index": 6}, {"db_idx": 9813, "episode_idx": 34, "frame_idx": 267, "global_frame_idx": 9813, "task_index": 6}, {"db_idx": 9814, "episode_idx": 34, "frame_idx": 268, "global_frame_idx": 9814, "task_index": 6}, {"db_idx": 9815, "episode_idx": 34, "frame_idx": 269, "global_frame_idx": 9815, "task_index": 6}, {"db_idx": 9816, "episode_idx": 34, "frame_idx": 270, "global_frame_idx": 9816, "task_index": 6}, {"db_idx": 9817, "episode_idx": 34, "frame_idx": 271, "global_frame_idx": 9817, "task_index": 6}, {"db_idx": 9818, "episode_idx": 34, "frame_idx": 272, "global_frame_idx": 9818, "task_index": 6}, {"db_idx": 9819, "episode_idx": 34, "frame_idx": 273, "global_frame_idx": 9819, "task_index": 6}, {"db_idx": 9820, "episode_idx": 34, "frame_idx": 274, "global_frame_idx": 9820, "task_index": 6}, {"db_idx": 9821, "episode_idx": 34, "frame_idx": 275, "global_frame_idx": 9821, "task_index": 6}, {"db_idx": 9822, "episode_idx": 34, "frame_idx": 276, "global_frame_idx": 9822, "task_index": 6}, {"db_idx": 9823, "episode_idx": 34, "frame_idx": 277, "global_frame_idx": 9823, "task_index": 6}, {"db_idx": 9824, "episode_idx": 35, "frame_idx": 0, 
"global_frame_idx": 9824, "task_index": 7}, {"db_idx": 9825, "episode_idx": 35, "frame_idx": 1, "global_frame_idx": 9825, "task_index": 7}, {"db_idx": 9826, "episode_idx": 35, "frame_idx": 2, "global_frame_idx": 9826, "task_index": 7}, {"db_idx": 9827, "episode_idx": 35, "frame_idx": 3, "global_frame_idx": 9827, "task_index": 7}, {"db_idx": 9828, "episode_idx": 35, "frame_idx": 4, "global_frame_idx": 9828, "task_index": 7}, {"db_idx": 9829, "episode_idx": 35, "frame_idx": 5, "global_frame_idx": 9829, "task_index": 7}, {"db_idx": 9830, "episode_idx": 35, "frame_idx": 6, "global_frame_idx": 9830, "task_index": 7}, {"db_idx": 9831, "episode_idx": 35, "frame_idx": 7, "global_frame_idx": 9831, "task_index": 7}, {"db_idx": 9832, "episode_idx": 35, "frame_idx": 8, "global_frame_idx": 9832, "task_index": 7}, {"db_idx": 9833, "episode_idx": 35, "frame_idx": 9, "global_frame_idx": 9833, "task_index": 7}, {"db_idx": 9834, "episode_idx": 35, "frame_idx": 10, "global_frame_idx": 9834, "task_index": 7}, {"db_idx": 9835, "episode_idx": 35, "frame_idx": 11, "global_frame_idx": 9835, "task_index": 7}, {"db_idx": 9836, "episode_idx": 35, "frame_idx": 12, "global_frame_idx": 9836, "task_index": 7}, {"db_idx": 9837, "episode_idx": 35, "frame_idx": 13, "global_frame_idx": 9837, "task_index": 7}, {"db_idx": 9838, "episode_idx": 35, "frame_idx": 14, "global_frame_idx": 9838, "task_index": 7}, {"db_idx": 9839, "episode_idx": 35, "frame_idx": 15, "global_frame_idx": 9839, "task_index": 7}, {"db_idx": 9840, "episode_idx": 35, "frame_idx": 16, "global_frame_idx": 9840, "task_index": 7}, {"db_idx": 9841, "episode_idx": 35, "frame_idx": 17, "global_frame_idx": 9841, "task_index": 7}, {"db_idx": 9842, "episode_idx": 35, "frame_idx": 18, "global_frame_idx": 9842, "task_index": 7}, {"db_idx": 9843, "episode_idx": 35, "frame_idx": 19, "global_frame_idx": 9843, "task_index": 7}, {"db_idx": 9844, "episode_idx": 35, "frame_idx": 20, "global_frame_idx": 9844, "task_index": 7}, {"db_idx": 9845, 
"episode_idx": 35, "frame_idx": 21, "global_frame_idx": 9845, "task_index": 7}, {"db_idx": 9846, "episode_idx": 35, "frame_idx": 22, "global_frame_idx": 9846, "task_index": 7}, {"db_idx": 9847, "episode_idx": 35, "frame_idx": 23, "global_frame_idx": 9847, "task_index": 7}, {"db_idx": 9848, "episode_idx": 35, "frame_idx": 24, "global_frame_idx": 9848, "task_index": 7}, {"db_idx": 9849, "episode_idx": 35, "frame_idx": 25, "global_frame_idx": 9849, "task_index": 7}, {"db_idx": 9850, "episode_idx": 35, "frame_idx": 26, "global_frame_idx": 9850, "task_index": 7}, {"db_idx": 9851, "episode_idx": 35, "frame_idx": 27, "global_frame_idx": 9851, "task_index": 7}, {"db_idx": 9852, "episode_idx": 35, "frame_idx": 28, "global_frame_idx": 9852, "task_index": 7}, {"db_idx": 9853, "episode_idx": 35, "frame_idx": 29, "global_frame_idx": 9853, "task_index": 7}, {"db_idx": 9854, "episode_idx": 35, "frame_idx": 30, "global_frame_idx": 9854, "task_index": 7}, {"db_idx": 9855, "episode_idx": 35, "frame_idx": 31, "global_frame_idx": 9855, "task_index": 7}, {"db_idx": 9856, "episode_idx": 35, "frame_idx": 32, "global_frame_idx": 9856, "task_index": 7}, {"db_idx": 9857, "episode_idx": 35, "frame_idx": 33, "global_frame_idx": 9857, "task_index": 7}, {"db_idx": 9858, "episode_idx": 35, "frame_idx": 34, "global_frame_idx": 9858, "task_index": 7}, {"db_idx": 9859, "episode_idx": 35, "frame_idx": 35, "global_frame_idx": 9859, "task_index": 7}, {"db_idx": 9860, "episode_idx": 35, "frame_idx": 36, "global_frame_idx": 9860, "task_index": 7}, {"db_idx": 9861, "episode_idx": 35, "frame_idx": 37, "global_frame_idx": 9861, "task_index": 7}, {"db_idx": 9862, "episode_idx": 35, "frame_idx": 38, "global_frame_idx": 9862, "task_index": 7}, {"db_idx": 9863, "episode_idx": 35, "frame_idx": 39, "global_frame_idx": 9863, "task_index": 7}, {"db_idx": 9864, "episode_idx": 35, "frame_idx": 40, "global_frame_idx": 9864, "task_index": 7}, {"db_idx": 9865, "episode_idx": 35, "frame_idx": 41, "global_frame_idx": 
9865, "task_index": 7}, {"db_idx": 9866, "episode_idx": 35, "frame_idx": 42, "global_frame_idx": 9866, "task_index": 7}, {"db_idx": 9867, "episode_idx": 35, "frame_idx": 43, "global_frame_idx": 9867, "task_index": 7}, {"db_idx": 9868, "episode_idx": 35, "frame_idx": 44, "global_frame_idx": 9868, "task_index": 7}, {"db_idx": 9869, "episode_idx": 35, "frame_idx": 45, "global_frame_idx": 9869, "task_index": 7}, {"db_idx": 9870, "episode_idx": 35, "frame_idx": 46, "global_frame_idx": 9870, "task_index": 7}, {"db_idx": 9871, "episode_idx": 35, "frame_idx": 47, "global_frame_idx": 9871, "task_index": 7}, {"db_idx": 9872, "episode_idx": 35, "frame_idx": 48, "global_frame_idx": 9872, "task_index": 7}, {"db_idx": 9873, "episode_idx": 35, "frame_idx": 49, "global_frame_idx": 9873, "task_index": 7}, {"db_idx": 9874, "episode_idx": 35, "frame_idx": 50, "global_frame_idx": 9874, "task_index": 7}, {"db_idx": 9875, "episode_idx": 35, "frame_idx": 51, "global_frame_idx": 9875, "task_index": 7}, {"db_idx": 9876, "episode_idx": 35, "frame_idx": 52, "global_frame_idx": 9876, "task_index": 7}, {"db_idx": 9877, "episode_idx": 35, "frame_idx": 53, "global_frame_idx": 9877, "task_index": 7}, {"db_idx": 9878, "episode_idx": 35, "frame_idx": 54, "global_frame_idx": 9878, "task_index": 7}, {"db_idx": 9879, "episode_idx": 35, "frame_idx": 55, "global_frame_idx": 9879, "task_index": 7}, {"db_idx": 9880, "episode_idx": 35, "frame_idx": 56, "global_frame_idx": 9880, "task_index": 7}, {"db_idx": 9881, "episode_idx": 35, "frame_idx": 57, "global_frame_idx": 9881, "task_index": 7}, {"db_idx": 9882, "episode_idx": 35, "frame_idx": 58, "global_frame_idx": 9882, "task_index": 7}, {"db_idx": 9883, "episode_idx": 35, "frame_idx": 59, "global_frame_idx": 9883, "task_index": 7}, {"db_idx": 9884, "episode_idx": 35, "frame_idx": 60, "global_frame_idx": 9884, "task_index": 7}, {"db_idx": 9885, "episode_idx": 35, "frame_idx": 61, "global_frame_idx": 9885, "task_index": 7}, {"db_idx": 9886, "episode_idx": 35, 
"frame_idx": 62, "global_frame_idx": 9886, "task_index": 7}, {"db_idx": 9887, "episode_idx": 35, "frame_idx": 63, "global_frame_idx": 9887, "task_index": 7}, {"db_idx": 9888, "episode_idx": 35, "frame_idx": 64, "global_frame_idx": 9888, "task_index": 7}, {"db_idx": 9889, "episode_idx": 35, "frame_idx": 65, "global_frame_idx": 9889, "task_index": 7}, {"db_idx": 9890, "episode_idx": 35, "frame_idx": 66, "global_frame_idx": 9890, "task_index": 7}, {"db_idx": 9891, "episode_idx": 35, "frame_idx": 67, "global_frame_idx": 9891, "task_index": 7}, {"db_idx": 9892, "episode_idx": 35, "frame_idx": 68, "global_frame_idx": 9892, "task_index": 7}, {"db_idx": 9893, "episode_idx": 35, "frame_idx": 69, "global_frame_idx": 9893, "task_index": 7}, {"db_idx": 9894, "episode_idx": 35, "frame_idx": 70, "global_frame_idx": 9894, "task_index": 7}, {"db_idx": 9895, "episode_idx": 35, "frame_idx": 71, "global_frame_idx": 9895, "task_index": 7}, {"db_idx": 9896, "episode_idx": 35, "frame_idx": 72, "global_frame_idx": 9896, "task_index": 7}, {"db_idx": 9897, "episode_idx": 35, "frame_idx": 73, "global_frame_idx": 9897, "task_index": 7}, {"db_idx": 9898, "episode_idx": 35, "frame_idx": 74, "global_frame_idx": 9898, "task_index": 7}, {"db_idx": 9899, "episode_idx": 35, "frame_idx": 75, "global_frame_idx": 9899, "task_index": 7}, {"db_idx": 9900, "episode_idx": 35, "frame_idx": 76, "global_frame_idx": 9900, "task_index": 7}, {"db_idx": 9901, "episode_idx": 35, "frame_idx": 77, "global_frame_idx": 9901, "task_index": 7}, {"db_idx": 9902, "episode_idx": 35, "frame_idx": 78, "global_frame_idx": 9902, "task_index": 7}, {"db_idx": 9903, "episode_idx": 35, "frame_idx": 79, "global_frame_idx": 9903, "task_index": 7}, {"db_idx": 9904, "episode_idx": 35, "frame_idx": 80, "global_frame_idx": 9904, "task_index": 7}, {"db_idx": 9905, "episode_idx": 35, "frame_idx": 81, "global_frame_idx": 9905, "task_index": 7}, {"db_idx": 9906, "episode_idx": 35, "frame_idx": 82, "global_frame_idx": 9906, "task_index": 
7}, {"db_idx": 9907, "episode_idx": 35, "frame_idx": 83, "global_frame_idx": 9907, "task_index": 7}, {"db_idx": 9908, "episode_idx": 35, "frame_idx": 84, "global_frame_idx": 9908, "task_index": 7}, {"db_idx": 9909, "episode_idx": 35, "frame_idx": 85, "global_frame_idx": 9909, "task_index": 7}, {"db_idx": 9910, "episode_idx": 35, "frame_idx": 86, "global_frame_idx": 9910, "task_index": 7}, {"db_idx": 9911, "episode_idx": 35, "frame_idx": 87, "global_frame_idx": 9911, "task_index": 7}, {"db_idx": 9912, "episode_idx": 35, "frame_idx": 88, "global_frame_idx": 9912, "task_index": 7}, {"db_idx": 9913, "episode_idx": 35, "frame_idx": 89, "global_frame_idx": 9913, "task_index": 7}, {"db_idx": 9914, "episode_idx": 35, "frame_idx": 90, "global_frame_idx": 9914, "task_index": 7}, {"db_idx": 9915, "episode_idx": 35, "frame_idx": 91, "global_frame_idx": 9915, "task_index": 7}, {"db_idx": 9916, "episode_idx": 35, "frame_idx": 92, "global_frame_idx": 9916, "task_index": 7}, {"db_idx": 9917, "episode_idx": 35, "frame_idx": 93, "global_frame_idx": 9917, "task_index": 7}, {"db_idx": 9918, "episode_idx": 35, "frame_idx": 94, "global_frame_idx": 9918, "task_index": 7}, {"db_idx": 9919, "episode_idx": 35, "frame_idx": 95, "global_frame_idx": 9919, "task_index": 7}, {"db_idx": 9920, "episode_idx": 35, "frame_idx": 96, "global_frame_idx": 9920, "task_index": 7}, {"db_idx": 9921, "episode_idx": 35, "frame_idx": 97, "global_frame_idx": 9921, "task_index": 7}, {"db_idx": 9922, "episode_idx": 35, "frame_idx": 98, "global_frame_idx": 9922, "task_index": 7}, {"db_idx": 9923, "episode_idx": 35, "frame_idx": 99, "global_frame_idx": 9923, "task_index": 7}, {"db_idx": 9924, "episode_idx": 35, "frame_idx": 100, "global_frame_idx": 9924, "task_index": 7}, {"db_idx": 9925, "episode_idx": 35, "frame_idx": 101, "global_frame_idx": 9925, "task_index": 7}, {"db_idx": 9926, "episode_idx": 35, "frame_idx": 102, "global_frame_idx": 9926, "task_index": 7}, {"db_idx": 9927, "episode_idx": 35, "frame_idx": 
103, "global_frame_idx": 9927, "task_index": 7}, {"db_idx": 9928, "episode_idx": 35, "frame_idx": 104, "global_frame_idx": 9928, "task_index": 7}, {"db_idx": 9929, "episode_idx": 35, "frame_idx": 105, "global_frame_idx": 9929, "task_index": 7}, {"db_idx": 9930, "episode_idx": 35, "frame_idx": 106, "global_frame_idx": 9930, "task_index": 7}, {"db_idx": 9931, "episode_idx": 35, "frame_idx": 107, "global_frame_idx": 9931, "task_index": 7}, {"db_idx": 9932, "episode_idx": 35, "frame_idx": 108, "global_frame_idx": 9932, "task_index": 7}, {"db_idx": 9933, "episode_idx": 35, "frame_idx": 109, "global_frame_idx": 9933, "task_index": 7}, {"db_idx": 9934, "episode_idx": 35, "frame_idx": 110, "global_frame_idx": 9934, "task_index": 7}, {"db_idx": 9935, "episode_idx": 35, "frame_idx": 111, "global_frame_idx": 9935, "task_index": 7}, {"db_idx": 9936, "episode_idx": 35, "frame_idx": 112, "global_frame_idx": 9936, "task_index": 7}, {"db_idx": 9937, "episode_idx": 35, "frame_idx": 113, "global_frame_idx": 9937, "task_index": 7}, {"db_idx": 9938, "episode_idx": 35, "frame_idx": 114, "global_frame_idx": 9938, "task_index": 7}, {"db_idx": 9939, "episode_idx": 35, "frame_idx": 115, "global_frame_idx": 9939, "task_index": 7}, {"db_idx": 9940, "episode_idx": 35, "frame_idx": 116, "global_frame_idx": 9940, "task_index": 7}, {"db_idx": 9941, "episode_idx": 35, "frame_idx": 117, "global_frame_idx": 9941, "task_index": 7}, {"db_idx": 9942, "episode_idx": 35, "frame_idx": 118, "global_frame_idx": 9942, "task_index": 7}, {"db_idx": 9943, "episode_idx": 35, "frame_idx": 119, "global_frame_idx": 9943, "task_index": 7}, {"db_idx": 9944, "episode_idx": 35, "frame_idx": 120, "global_frame_idx": 9944, "task_index": 7}, {"db_idx": 9945, "episode_idx": 35, "frame_idx": 121, "global_frame_idx": 9945, "task_index": 7}, {"db_idx": 9946, "episode_idx": 35, "frame_idx": 122, "global_frame_idx": 9946, "task_index": 7}, {"db_idx": 9947, "episode_idx": 35, "frame_idx": 123, "global_frame_idx": 9947, 
"task_index": 7}, {"db_idx": 9948, "episode_idx": 35, "frame_idx": 124, "global_frame_idx": 9948, "task_index": 7}, {"db_idx": 9949, "episode_idx": 35, "frame_idx": 125, "global_frame_idx": 9949, "task_index": 7}, {"db_idx": 9950, "episode_idx": 35, "frame_idx": 126, "global_frame_idx": 9950, "task_index": 7}, {"db_idx": 9951, "episode_idx": 35, "frame_idx": 127, "global_frame_idx": 9951, "task_index": 7}, {"db_idx": 9952, "episode_idx": 35, "frame_idx": 128, "global_frame_idx": 9952, "task_index": 7}, {"db_idx": 9953, "episode_idx": 35, "frame_idx": 129, "global_frame_idx": 9953, "task_index": 7}, {"db_idx": 9954, "episode_idx": 35, "frame_idx": 130, "global_frame_idx": 9954, "task_index": 7}, {"db_idx": 9955, "episode_idx": 35, "frame_idx": 131, "global_frame_idx": 9955, "task_index": 7}, {"db_idx": 9956, "episode_idx": 35, "frame_idx": 132, "global_frame_idx": 9956, "task_index": 7}, {"db_idx": 9957, "episode_idx": 35, "frame_idx": 133, "global_frame_idx": 9957, "task_index": 7}, {"db_idx": 9958, "episode_idx": 35, "frame_idx": 134, "global_frame_idx": 9958, "task_index": 7}, {"db_idx": 9959, "episode_idx": 35, "frame_idx": 135, "global_frame_idx": 9959, "task_index": 7}, {"db_idx": 9960, "episode_idx": 35, "frame_idx": 136, "global_frame_idx": 9960, "task_index": 7}, {"db_idx": 9961, "episode_idx": 35, "frame_idx": 137, "global_frame_idx": 9961, "task_index": 7}, {"db_idx": 9962, "episode_idx": 35, "frame_idx": 138, "global_frame_idx": 9962, "task_index": 7}, {"db_idx": 9963, "episode_idx": 35, "frame_idx": 139, "global_frame_idx": 9963, "task_index": 7}, {"db_idx": 9964, "episode_idx": 35, "frame_idx": 140, "global_frame_idx": 9964, "task_index": 7}, {"db_idx": 9965, "episode_idx": 35, "frame_idx": 141, "global_frame_idx": 9965, "task_index": 7}, {"db_idx": 9966, "episode_idx": 35, "frame_idx": 142, "global_frame_idx": 9966, "task_index": 7}, {"db_idx": 9967, "episode_idx": 35, "frame_idx": 143, "global_frame_idx": 9967, "task_index": 7}, {"db_idx": 9968, 
"episode_idx": 35, "frame_idx": 144, "global_frame_idx": 9968, "task_index": 7}, {"db_idx": 9969, "episode_idx": 35, "frame_idx": 145, "global_frame_idx": 9969, "task_index": 7}, {"db_idx": 9970, "episode_idx": 35, "frame_idx": 146, "global_frame_idx": 9970, "task_index": 7}, {"db_idx": 9971, "episode_idx": 35, "frame_idx": 147, "global_frame_idx": 9971, "task_index": 7}, {"db_idx": 9972, "episode_idx": 35, "frame_idx": 148, "global_frame_idx": 9972, "task_index": 7}, {"db_idx": 9973, "episode_idx": 35, "frame_idx": 149, "global_frame_idx": 9973, "task_index": 7}, {"db_idx": 9974, "episode_idx": 35, "frame_idx": 150, "global_frame_idx": 9974, "task_index": 7}, {"db_idx": 9975, "episode_idx": 35, "frame_idx": 151, "global_frame_idx": 9975, "task_index": 7}, {"db_idx": 9976, "episode_idx": 35, "frame_idx": 152, "global_frame_idx": 9976, "task_index": 7}, {"db_idx": 9977, "episode_idx": 35, "frame_idx": 153, "global_frame_idx": 9977, "task_index": 7}, {"db_idx": 9978, "episode_idx": 35, "frame_idx": 154, "global_frame_idx": 9978, "task_index": 7}, {"db_idx": 9979, "episode_idx": 35, "frame_idx": 155, "global_frame_idx": 9979, "task_index": 7}, {"db_idx": 9980, "episode_idx": 35, "frame_idx": 156, "global_frame_idx": 9980, "task_index": 7}, {"db_idx": 9981, "episode_idx": 35, "frame_idx": 157, "global_frame_idx": 9981, "task_index": 7}, {"db_idx": 9982, "episode_idx": 35, "frame_idx": 158, "global_frame_idx": 9982, "task_index": 7}, {"db_idx": 9983, "episode_idx": 35, "frame_idx": 159, "global_frame_idx": 9983, "task_index": 7}, {"db_idx": 9984, "episode_idx": 35, "frame_idx": 160, "global_frame_idx": 9984, "task_index": 7}, {"db_idx": 9985, "episode_idx": 35, "frame_idx": 161, "global_frame_idx": 9985, "task_index": 7}, {"db_idx": 9986, "episode_idx": 35, "frame_idx": 162, "global_frame_idx": 9986, "task_index": 7}, {"db_idx": 9987, "episode_idx": 35, "frame_idx": 163, "global_frame_idx": 9987, "task_index": 7}, {"db_idx": 9988, "episode_idx": 35, "frame_idx": 164, 
"global_frame_idx": 9988, "task_index": 7}, {"db_idx": 9989, "episode_idx": 35, "frame_idx": 165, "global_frame_idx": 9989, "task_index": 7}, {"db_idx": 9990, "episode_idx": 35, "frame_idx": 166, "global_frame_idx": 9990, "task_index": 7}, {"db_idx": 9991, "episode_idx": 35, "frame_idx": 167, "global_frame_idx": 9991, "task_index": 7}, {"db_idx": 9992, "episode_idx": 35, "frame_idx": 168, "global_frame_idx": 9992, "task_index": 7}, {"db_idx": 9993, "episode_idx": 35, "frame_idx": 169, "global_frame_idx": 9993, "task_index": 7}, {"db_idx": 9994, "episode_idx": 35, "frame_idx": 170, "global_frame_idx": 9994, "task_index": 7}, {"db_idx": 9995, "episode_idx": 35, "frame_idx": 171, "global_frame_idx": 9995, "task_index": 7}, {"db_idx": 9996, "episode_idx": 35, "frame_idx": 172, "global_frame_idx": 9996, "task_index": 7}, {"db_idx": 9997, "episode_idx": 35, "frame_idx": 173, "global_frame_idx": 9997, "task_index": 7}, {"db_idx": 9998, "episode_idx": 35, "frame_idx": 174, "global_frame_idx": 9998, "task_index": 7}, {"db_idx": 9999, "episode_idx": 35, "frame_idx": 175, "global_frame_idx": 9999, "task_index": 7}, {"db_idx": 10000, "episode_idx": 35, "frame_idx": 176, "global_frame_idx": 10000, "task_index": 7}, {"db_idx": 10001, "episode_idx": 35, "frame_idx": 177, "global_frame_idx": 10001, "task_index": 7}, {"db_idx": 10002, "episode_idx": 35, "frame_idx": 178, "global_frame_idx": 10002, "task_index": 7}, {"db_idx": 10003, "episode_idx": 35, "frame_idx": 179, "global_frame_idx": 10003, "task_index": 7}, {"db_idx": 10004, "episode_idx": 35, "frame_idx": 180, "global_frame_idx": 10004, "task_index": 7}, {"db_idx": 10005, "episode_idx": 35, "frame_idx": 181, "global_frame_idx": 10005, "task_index": 7}, {"db_idx": 10006, "episode_idx": 35, "frame_idx": 182, "global_frame_idx": 10006, "task_index": 7}, {"db_idx": 10007, "episode_idx": 35, "frame_idx": 183, "global_frame_idx": 10007, "task_index": 7}, {"db_idx": 10008, "episode_idx": 35, "frame_idx": 184, "global_frame_idx": 
10008, "task_index": 7}, {"db_idx": 10009, "episode_idx": 35, "frame_idx": 185, "global_frame_idx": 10009, "task_index": 7}, {"db_idx": 10010, "episode_idx": 35, "frame_idx": 186, "global_frame_idx": 10010, "task_index": 7}, {"db_idx": 10011, "episode_idx": 35, "frame_idx": 187, "global_frame_idx": 10011, "task_index": 7}, {"db_idx": 10012, "episode_idx": 35, "frame_idx": 188, "global_frame_idx": 10012, "task_index": 7}, {"db_idx": 10013, "episode_idx": 35, "frame_idx": 189, "global_frame_idx": 10013, "task_index": 7}, {"db_idx": 10014, "episode_idx": 35, "frame_idx": 190, "global_frame_idx": 10014, "task_index": 7}, {"db_idx": 10015, "episode_idx": 35, "frame_idx": 191, "global_frame_idx": 10015, "task_index": 7}, {"db_idx": 10016, "episode_idx": 35, "frame_idx": 192, "global_frame_idx": 10016, "task_index": 7}, {"db_idx": 10017, "episode_idx": 35, "frame_idx": 193, "global_frame_idx": 10017, "task_index": 7}, {"db_idx": 10018, "episode_idx": 35, "frame_idx": 194, "global_frame_idx": 10018, "task_index": 7}, {"db_idx": 10019, "episode_idx": 35, "frame_idx": 195, "global_frame_idx": 10019, "task_index": 7}, {"db_idx": 10020, "episode_idx": 35, "frame_idx": 196, "global_frame_idx": 10020, "task_index": 7}, {"db_idx": 10021, "episode_idx": 35, "frame_idx": 197, "global_frame_idx": 10021, "task_index": 7}, {"db_idx": 10022, "episode_idx": 35, "frame_idx": 198, "global_frame_idx": 10022, "task_index": 7}, {"db_idx": 10023, "episode_idx": 35, "frame_idx": 199, "global_frame_idx": 10023, "task_index": 7}, {"db_idx": 10024, "episode_idx": 35, "frame_idx": 200, "global_frame_idx": 10024, "task_index": 7}, {"db_idx": 10025, "episode_idx": 35, "frame_idx": 201, "global_frame_idx": 10025, "task_index": 7}, {"db_idx": 10026, "episode_idx": 35, "frame_idx": 202, "global_frame_idx": 10026, "task_index": 7}, {"db_idx": 10027, "episode_idx": 35, "frame_idx": 203, "global_frame_idx": 10027, "task_index": 7}, {"db_idx": 10028, "episode_idx": 35, "frame_idx": 204, "global_frame_idx": 
10028, "task_index": 7}, {"db_idx": 10029, "episode_idx": 35, "frame_idx": 205, "global_frame_idx": 10029, "task_index": 7}, {"db_idx": 10030, "episode_idx": 35, "frame_idx": 206, "global_frame_idx": 10030, "task_index": 7}, {"db_idx": 10031, "episode_idx": 35, "frame_idx": 207, "global_frame_idx": 10031, "task_index": 7}, {"db_idx": 10032, "episode_idx": 35, "frame_idx": 208, "global_frame_idx": 10032, "task_index": 7}, {"db_idx": 10033, "episode_idx": 35, "frame_idx": 209, "global_frame_idx": 10033, "task_index": 7}, {"db_idx": 10034, "episode_idx": 35, "frame_idx": 210, "global_frame_idx": 10034, "task_index": 7}, {"db_idx": 10035, "episode_idx": 35, "frame_idx": 211, "global_frame_idx": 10035, "task_index": 7}, {"db_idx": 10036, "episode_idx": 35, "frame_idx": 212, "global_frame_idx": 10036, "task_index": 7}, {"db_idx": 10037, "episode_idx": 35, "frame_idx": 213, "global_frame_idx": 10037, "task_index": 7}, {"db_idx": 10038, "episode_idx": 35, "frame_idx": 214, "global_frame_idx": 10038, "task_index": 7}, {"db_idx": 10039, "episode_idx": 35, "frame_idx": 215, "global_frame_idx": 10039, "task_index": 7}, {"db_idx": 10040, "episode_idx": 35, "frame_idx": 216, "global_frame_idx": 10040, "task_index": 7}, {"db_idx": 10041, "episode_idx": 35, "frame_idx": 217, "global_frame_idx": 10041, "task_index": 7}, {"db_idx": 10042, "episode_idx": 35, "frame_idx": 218, "global_frame_idx": 10042, "task_index": 7}, {"db_idx": 10043, "episode_idx": 35, "frame_idx": 219, "global_frame_idx": 10043, "task_index": 7}, {"db_idx": 10044, "episode_idx": 35, "frame_idx": 220, "global_frame_idx": 10044, "task_index": 7}, {"db_idx": 10045, "episode_idx": 35, "frame_idx": 221, "global_frame_idx": 10045, "task_index": 7}, {"db_idx": 10046, "episode_idx": 35, "frame_idx": 222, "global_frame_idx": 10046, "task_index": 7}, {"db_idx": 10047, "episode_idx": 35, "frame_idx": 223, "global_frame_idx": 10047, "task_index": 7}, {"db_idx": 10048, "episode_idx": 35, "frame_idx": 224, "global_frame_idx": 
10048, "task_index": 7}, {"db_idx": 10049, "episode_idx": 36, "frame_idx": 0, "global_frame_idx": 10049, "task_index": 7}, {"db_idx": 10050, "episode_idx": 36, "frame_idx": 1, "global_frame_idx": 10050, "task_index": 7}, {"db_idx": 10051, "episode_idx": 36, "frame_idx": 2, "global_frame_idx": 10051, "task_index": 7}, {"db_idx": 10052, "episode_idx": 36, "frame_idx": 3, "global_frame_idx": 10052, "task_index": 7}, {"db_idx": 10053, "episode_idx": 36, "frame_idx": 4, "global_frame_idx": 10053, "task_index": 7}, {"db_idx": 10054, "episode_idx": 36, "frame_idx": 5, "global_frame_idx": 10054, "task_index": 7}, {"db_idx": 10055, "episode_idx": 36, "frame_idx": 6, "global_frame_idx": 10055, "task_index": 7}, {"db_idx": 10056, "episode_idx": 36, "frame_idx": 7, "global_frame_idx": 10056, "task_index": 7}, {"db_idx": 10057, "episode_idx": 36, "frame_idx": 8, "global_frame_idx": 10057, "task_index": 7}, {"db_idx": 10058, "episode_idx": 36, "frame_idx": 9, "global_frame_idx": 10058, "task_index": 7}, {"db_idx": 10059, "episode_idx": 36, "frame_idx": 10, "global_frame_idx": 10059, "task_index": 7}, {"db_idx": 10060, "episode_idx": 36, "frame_idx": 11, "global_frame_idx": 10060, "task_index": 7}, {"db_idx": 10061, "episode_idx": 36, "frame_idx": 12, "global_frame_idx": 10061, "task_index": 7}, {"db_idx": 10062, "episode_idx": 36, "frame_idx": 13, "global_frame_idx": 10062, "task_index": 7}, {"db_idx": 10063, "episode_idx": 36, "frame_idx": 14, "global_frame_idx": 10063, "task_index": 7}, {"db_idx": 10064, "episode_idx": 36, "frame_idx": 15, "global_frame_idx": 10064, "task_index": 7}, {"db_idx": 10065, "episode_idx": 36, "frame_idx": 16, "global_frame_idx": 10065, "task_index": 7}, {"db_idx": 10066, "episode_idx": 36, "frame_idx": 17, "global_frame_idx": 10066, "task_index": 7}, {"db_idx": 10067, "episode_idx": 36, "frame_idx": 18, "global_frame_idx": 10067, "task_index": 7}, {"db_idx": 10068, "episode_idx": 36, "frame_idx": 19, "global_frame_idx": 10068, "task_index": 7}, 
{"db_idx": 10069, "episode_idx": 36, "frame_idx": 20, "global_frame_idx": 10069, "task_index": 7}, {"db_idx": 10070, "episode_idx": 36, "frame_idx": 21, "global_frame_idx": 10070, "task_index": 7}, {"db_idx": 10071, "episode_idx": 36, "frame_idx": 22, "global_frame_idx": 10071, "task_index": 7}, {"db_idx": 10072, "episode_idx": 36, "frame_idx": 23, "global_frame_idx": 10072, "task_index": 7}, {"db_idx": 10073, "episode_idx": 36, "frame_idx": 24, "global_frame_idx": 10073, "task_index": 7}, {"db_idx": 10074, "episode_idx": 36, "frame_idx": 25, "global_frame_idx": 10074, "task_index": 7}, {"db_idx": 10075, "episode_idx": 36, "frame_idx": 26, "global_frame_idx": 10075, "task_index": 7}, {"db_idx": 10076, "episode_idx": 36, "frame_idx": 27, "global_frame_idx": 10076, "task_index": 7}, {"db_idx": 10077, "episode_idx": 36, "frame_idx": 28, "global_frame_idx": 10077, "task_index": 7}, {"db_idx": 10078, "episode_idx": 36, "frame_idx": 29, "global_frame_idx": 10078, "task_index": 7}, {"db_idx": 10079, "episode_idx": 36, "frame_idx": 30, "global_frame_idx": 10079, "task_index": 7}, {"db_idx": 10080, "episode_idx": 36, "frame_idx": 31, "global_frame_idx": 10080, "task_index": 7}, {"db_idx": 10081, "episode_idx": 36, "frame_idx": 32, "global_frame_idx": 10081, "task_index": 7}, {"db_idx": 10082, "episode_idx": 36, "frame_idx": 33, "global_frame_idx": 10082, "task_index": 7}, {"db_idx": 10083, "episode_idx": 36, "frame_idx": 34, "global_frame_idx": 10083, "task_index": 7}, {"db_idx": 10084, "episode_idx": 36, "frame_idx": 35, "global_frame_idx": 10084, "task_index": 7}, {"db_idx": 10085, "episode_idx": 36, "frame_idx": 36, "global_frame_idx": 10085, "task_index": 7}, {"db_idx": 10086, "episode_idx": 36, "frame_idx": 37, "global_frame_idx": 10086, "task_index": 7}, {"db_idx": 10087, "episode_idx": 36, "frame_idx": 38, "global_frame_idx": 10087, "task_index": 7}, {"db_idx": 10088, "episode_idx": 36, "frame_idx": 39, "global_frame_idx": 10088, "task_index": 7}, {"db_idx": 10089, 
"episode_idx": 36, "frame_idx": 40, "global_frame_idx": 10089, "task_index": 7}, {"db_idx": 10090, "episode_idx": 36, "frame_idx": 41, "global_frame_idx": 10090, "task_index": 7}, {"db_idx": 10091, "episode_idx": 36, "frame_idx": 42, "global_frame_idx": 10091, "task_index": 7}, {"db_idx": 10092, "episode_idx": 36, "frame_idx": 43, "global_frame_idx": 10092, "task_index": 7}, {"db_idx": 10093, "episode_idx": 36, "frame_idx": 44, "global_frame_idx": 10093, "task_index": 7}, {"db_idx": 10094, "episode_idx": 36, "frame_idx": 45, "global_frame_idx": 10094, "task_index": 7}, {"db_idx": 10095, "episode_idx": 36, "frame_idx": 46, "global_frame_idx": 10095, "task_index": 7}, {"db_idx": 10096, "episode_idx": 36, "frame_idx": 47, "global_frame_idx": 10096, "task_index": 7}, {"db_idx": 10097, "episode_idx": 36, "frame_idx": 48, "global_frame_idx": 10097, "task_index": 7}, {"db_idx": 10098, "episode_idx": 36, "frame_idx": 49, "global_frame_idx": 10098, "task_index": 7}, {"db_idx": 10099, "episode_idx": 36, "frame_idx": 50, "global_frame_idx": 10099, "task_index": 7}, {"db_idx": 10100, "episode_idx": 36, "frame_idx": 51, "global_frame_idx": 10100, "task_index": 7}, {"db_idx": 10101, "episode_idx": 36, "frame_idx": 52, "global_frame_idx": 10101, "task_index": 7}, {"db_idx": 10102, "episode_idx": 36, "frame_idx": 53, "global_frame_idx": 10102, "task_index": 7}, {"db_idx": 10103, "episode_idx": 36, "frame_idx": 54, "global_frame_idx": 10103, "task_index": 7}, {"db_idx": 10104, "episode_idx": 36, "frame_idx": 55, "global_frame_idx": 10104, "task_index": 7}, {"db_idx": 10105, "episode_idx": 36, "frame_idx": 56, "global_frame_idx": 10105, "task_index": 7}, {"db_idx": 10106, "episode_idx": 36, "frame_idx": 57, "global_frame_idx": 10106, "task_index": 7}, {"db_idx": 10107, "episode_idx": 36, "frame_idx": 58, "global_frame_idx": 10107, "task_index": 7}, {"db_idx": 10108, "episode_idx": 36, "frame_idx": 59, "global_frame_idx": 10108, "task_index": 7}, {"db_idx": 10109, "episode_idx": 36, 
"frame_idx": 60, "global_frame_idx": 10109, "task_index": 7}, {"db_idx": 10110, "episode_idx": 36, "frame_idx": 61, "global_frame_idx": 10110, "task_index": 7}, {"db_idx": 10111, "episode_idx": 36, "frame_idx": 62, "global_frame_idx": 10111, "task_index": 7}, {"db_idx": 10112, "episode_idx": 36, "frame_idx": 63, "global_frame_idx": 10112, "task_index": 7}, {"db_idx": 10113, "episode_idx": 36, "frame_idx": 64, "global_frame_idx": 10113, "task_index": 7}, {"db_idx": 10114, "episode_idx": 36, "frame_idx": 65, "global_frame_idx": 10114, "task_index": 7}, {"db_idx": 10115, "episode_idx": 36, "frame_idx": 66, "global_frame_idx": 10115, "task_index": 7}, {"db_idx": 10116, "episode_idx": 36, "frame_idx": 67, "global_frame_idx": 10116, "task_index": 7}, {"db_idx": 10117, "episode_idx": 36, "frame_idx": 68, "global_frame_idx": 10117, "task_index": 7}, {"db_idx": 10118, "episode_idx": 36, "frame_idx": 69, "global_frame_idx": 10118, "task_index": 7}, {"db_idx": 10119, "episode_idx": 36, "frame_idx": 70, "global_frame_idx": 10119, "task_index": 7}, {"db_idx": 10120, "episode_idx": 36, "frame_idx": 71, "global_frame_idx": 10120, "task_index": 7}, {"db_idx": 10121, "episode_idx": 36, "frame_idx": 72, "global_frame_idx": 10121, "task_index": 7}, {"db_idx": 10122, "episode_idx": 36, "frame_idx": 73, "global_frame_idx": 10122, "task_index": 7}, {"db_idx": 10123, "episode_idx": 36, "frame_idx": 74, "global_frame_idx": 10123, "task_index": 7}, {"db_idx": 10124, "episode_idx": 36, "frame_idx": 75, "global_frame_idx": 10124, "task_index": 7}, {"db_idx": 10125, "episode_idx": 36, "frame_idx": 76, "global_frame_idx": 10125, "task_index": 7}, {"db_idx": 10126, "episode_idx": 36, "frame_idx": 77, "global_frame_idx": 10126, "task_index": 7}, {"db_idx": 10127, "episode_idx": 36, "frame_idx": 78, "global_frame_idx": 10127, "task_index": 7}, {"db_idx": 10128, "episode_idx": 36, "frame_idx": 79, "global_frame_idx": 10128, "task_index": 7}, {"db_idx": 10129, "episode_idx": 36, "frame_idx": 80, 
"global_frame_idx": 10129, "task_index": 7}, {"db_idx": 10130, "episode_idx": 36, "frame_idx": 81, "global_frame_idx": 10130, "task_index": 7}, {"db_idx": 10131, "episode_idx": 36, "frame_idx": 82, "global_frame_idx": 10131, "task_index": 7}, {"db_idx": 10132, "episode_idx": 36, "frame_idx": 83, "global_frame_idx": 10132, "task_index": 7}, {"db_idx": 10133, "episode_idx": 36, "frame_idx": 84, "global_frame_idx": 10133, "task_index": 7}, {"db_idx": 10134, "episode_idx": 36, "frame_idx": 85, "global_frame_idx": 10134, "task_index": 7}, {"db_idx": 10135, "episode_idx": 36, "frame_idx": 86, "global_frame_idx": 10135, "task_index": 7}, {"db_idx": 10136, "episode_idx": 36, "frame_idx": 87, "global_frame_idx": 10136, "task_index": 7}, {"db_idx": 10137, "episode_idx": 36, "frame_idx": 88, "global_frame_idx": 10137, "task_index": 7}, {"db_idx": 10138, "episode_idx": 36, "frame_idx": 89, "global_frame_idx": 10138, "task_index": 7}, {"db_idx": 10139, "episode_idx": 36, "frame_idx": 90, "global_frame_idx": 10139, "task_index": 7}, {"db_idx": 10140, "episode_idx": 36, "frame_idx": 91, "global_frame_idx": 10140, "task_index": 7}, {"db_idx": 10141, "episode_idx": 36, "frame_idx": 92, "global_frame_idx": 10141, "task_index": 7}, {"db_idx": 10142, "episode_idx": 36, "frame_idx": 93, "global_frame_idx": 10142, "task_index": 7}, {"db_idx": 10143, "episode_idx": 36, "frame_idx": 94, "global_frame_idx": 10143, "task_index": 7}, {"db_idx": 10144, "episode_idx": 36, "frame_idx": 95, "global_frame_idx": 10144, "task_index": 7}, {"db_idx": 10145, "episode_idx": 36, "frame_idx": 96, "global_frame_idx": 10145, "task_index": 7}, {"db_idx": 10146, "episode_idx": 36, "frame_idx": 97, "global_frame_idx": 10146, "task_index": 7}, {"db_idx": 10147, "episode_idx": 36, "frame_idx": 98, "global_frame_idx": 10147, "task_index": 7}, {"db_idx": 10148, "episode_idx": 36, "frame_idx": 99, "global_frame_idx": 10148, "task_index": 7}, {"db_idx": 10149, "episode_idx": 36, "frame_idx": 100, 
"global_frame_idx": 10149, "task_index": 7}, {"db_idx": 10150, "episode_idx": 36, "frame_idx": 101, "global_frame_idx": 10150, "task_index": 7}, {"db_idx": 10151, "episode_idx": 36, "frame_idx": 102, "global_frame_idx": 10151, "task_index": 7}, {"db_idx": 10152, "episode_idx": 36, "frame_idx": 103, "global_frame_idx": 10152, "task_index": 7}, {"db_idx": 10153, "episode_idx": 36, "frame_idx": 104, "global_frame_idx": 10153, "task_index": 7}, {"db_idx": 10154, "episode_idx": 36, "frame_idx": 105, "global_frame_idx": 10154, "task_index": 7}, {"db_idx": 10155, "episode_idx": 36, "frame_idx": 106, "global_frame_idx": 10155, "task_index": 7}, {"db_idx": 10156, "episode_idx": 36, "frame_idx": 107, "global_frame_idx": 10156, "task_index": 7}, {"db_idx": 10157, "episode_idx": 36, "frame_idx": 108, "global_frame_idx": 10157, "task_index": 7}, {"db_idx": 10158, "episode_idx": 36, "frame_idx": 109, "global_frame_idx": 10158, "task_index": 7}, {"db_idx": 10159, "episode_idx": 36, "frame_idx": 110, "global_frame_idx": 10159, "task_index": 7}, {"db_idx": 10160, "episode_idx": 36, "frame_idx": 111, "global_frame_idx": 10160, "task_index": 7}, {"db_idx": 10161, "episode_idx": 36, "frame_idx": 112, "global_frame_idx": 10161, "task_index": 7}, {"db_idx": 10162, "episode_idx": 36, "frame_idx": 113, "global_frame_idx": 10162, "task_index": 7}, {"db_idx": 10163, "episode_idx": 36, "frame_idx": 114, "global_frame_idx": 10163, "task_index": 7}, {"db_idx": 10164, "episode_idx": 36, "frame_idx": 115, "global_frame_idx": 10164, "task_index": 7}, {"db_idx": 10165, "episode_idx": 36, "frame_idx": 116, "global_frame_idx": 10165, "task_index": 7}, {"db_idx": 10166, "episode_idx": 36, "frame_idx": 117, "global_frame_idx": 10166, "task_index": 7}, {"db_idx": 10167, "episode_idx": 36, "frame_idx": 118, "global_frame_idx": 10167, "task_index": 7}, {"db_idx": 10168, "episode_idx": 36, "frame_idx": 119, "global_frame_idx": 10168, "task_index": 7}, {"db_idx": 10169, "episode_idx": 36, "frame_idx": 120, 
"global_frame_idx": 10169, "task_index": 7}, {"db_idx": 10170, "episode_idx": 36, "frame_idx": 121, "global_frame_idx": 10170, "task_index": 7}, {"db_idx": 10171, "episode_idx": 36, "frame_idx": 122, "global_frame_idx": 10171, "task_index": 7}, {"db_idx": 10172, "episode_idx": 36, "frame_idx": 123, "global_frame_idx": 10172, "task_index": 7}, {"db_idx": 10173, "episode_idx": 36, "frame_idx": 124, "global_frame_idx": 10173, "task_index": 7}, {"db_idx": 10174, "episode_idx": 36, "frame_idx": 125, "global_frame_idx": 10174, "task_index": 7}, {"db_idx": 10175, "episode_idx": 36, "frame_idx": 126, "global_frame_idx": 10175, "task_index": 7}, {"db_idx": 10176, "episode_idx": 36, "frame_idx": 127, "global_frame_idx": 10176, "task_index": 7}, {"db_idx": 10177, "episode_idx": 36, "frame_idx": 128, "global_frame_idx": 10177, "task_index": 7}, {"db_idx": 10178, "episode_idx": 36, "frame_idx": 129, "global_frame_idx": 10178, "task_index": 7}, {"db_idx": 10179, "episode_idx": 36, "frame_idx": 130, "global_frame_idx": 10179, "task_index": 7}, {"db_idx": 10180, "episode_idx": 36, "frame_idx": 131, "global_frame_idx": 10180, "task_index": 7}, {"db_idx": 10181, "episode_idx": 36, "frame_idx": 132, "global_frame_idx": 10181, "task_index": 7}, {"db_idx": 10182, "episode_idx": 36, "frame_idx": 133, "global_frame_idx": 10182, "task_index": 7}, {"db_idx": 10183, "episode_idx": 36, "frame_idx": 134, "global_frame_idx": 10183, "task_index": 7}, {"db_idx": 10184, "episode_idx": 36, "frame_idx": 135, "global_frame_idx": 10184, "task_index": 7}, {"db_idx": 10185, "episode_idx": 36, "frame_idx": 136, "global_frame_idx": 10185, "task_index": 7}, {"db_idx": 10186, "episode_idx": 36, "frame_idx": 137, "global_frame_idx": 10186, "task_index": 7}, {"db_idx": 10187, "episode_idx": 36, "frame_idx": 138, "global_frame_idx": 10187, "task_index": 7}, {"db_idx": 10188, "episode_idx": 36, "frame_idx": 139, "global_frame_idx": 10188, "task_index": 7}, {"db_idx": 10189, "episode_idx": 36, "frame_idx": 140, 
"global_frame_idx": 10189, "task_index": 7}, {"db_idx": 10190, "episode_idx": 36, "frame_idx": 141, "global_frame_idx": 10190, "task_index": 7}, {"db_idx": 10191, "episode_idx": 36, "frame_idx": 142, "global_frame_idx": 10191, "task_index": 7}, {"db_idx": 10192, "episode_idx": 36, "frame_idx": 143, "global_frame_idx": 10192, "task_index": 7}, {"db_idx": 10193, "episode_idx": 36, "frame_idx": 144, "global_frame_idx": 10193, "task_index": 7}, {"db_idx": 10194, "episode_idx": 36, "frame_idx": 145, "global_frame_idx": 10194, "task_index": 7}, {"db_idx": 10195, "episode_idx": 36, "frame_idx": 146, "global_frame_idx": 10195, "task_index": 7}, {"db_idx": 10196, "episode_idx": 36, "frame_idx": 147, "global_frame_idx": 10196, "task_index": 7}, {"db_idx": 10197, "episode_idx": 36, "frame_idx": 148, "global_frame_idx": 10197, "task_index": 7}, {"db_idx": 10198, "episode_idx": 36, "frame_idx": 149, "global_frame_idx": 10198, "task_index": 7}, {"db_idx": 10199, "episode_idx": 36, "frame_idx": 150, "global_frame_idx": 10199, "task_index": 7}, {"db_idx": 10200, "episode_idx": 36, "frame_idx": 151, "global_frame_idx": 10200, "task_index": 7}, {"db_idx": 10201, "episode_idx": 36, "frame_idx": 152, "global_frame_idx": 10201, "task_index": 7}, {"db_idx": 10202, "episode_idx": 36, "frame_idx": 153, "global_frame_idx": 10202, "task_index": 7}, {"db_idx": 10203, "episode_idx": 36, "frame_idx": 154, "global_frame_idx": 10203, "task_index": 7}, {"db_idx": 10204, "episode_idx": 36, "frame_idx": 155, "global_frame_idx": 10204, "task_index": 7}, {"db_idx": 10205, "episode_idx": 36, "frame_idx": 156, "global_frame_idx": 10205, "task_index": 7}, {"db_idx": 10206, "episode_idx": 36, "frame_idx": 157, "global_frame_idx": 10206, "task_index": 7}, {"db_idx": 10207, "episode_idx": 36, "frame_idx": 158, "global_frame_idx": 10207, "task_index": 7}, {"db_idx": 10208, "episode_idx": 36, "frame_idx": 159, "global_frame_idx": 10208, "task_index": 7}, {"db_idx": 10209, "episode_idx": 36, "frame_idx": 160, 
"global_frame_idx": 10209, "task_index": 7}, {"db_idx": 10210, "episode_idx": 36, "frame_idx": 161, "global_frame_idx": 10210, "task_index": 7}, {"db_idx": 10211, "episode_idx": 36, "frame_idx": 162, "global_frame_idx": 10211, "task_index": 7}, {"db_idx": 10212, "episode_idx": 36, "frame_idx": 163, "global_frame_idx": 10212, "task_index": 7}, {"db_idx": 10213, "episode_idx": 36, "frame_idx": 164, "global_frame_idx": 10213, "task_index": 7}, {"db_idx": 10214, "episode_idx": 36, "frame_idx": 165, "global_frame_idx": 10214, "task_index": 7}, {"db_idx": 10215, "episode_idx": 36, "frame_idx": 166, "global_frame_idx": 10215, "task_index": 7}, {"db_idx": 10216, "episode_idx": 36, "frame_idx": 167, "global_frame_idx": 10216, "task_index": 7}, {"db_idx": 10217, "episode_idx": 36, "frame_idx": 168, "global_frame_idx": 10217, "task_index": 7}, {"db_idx": 10218, "episode_idx": 36, "frame_idx": 169, "global_frame_idx": 10218, "task_index": 7}, {"db_idx": 10219, "episode_idx": 36, "frame_idx": 170, "global_frame_idx": 10219, "task_index": 7}, {"db_idx": 10220, "episode_idx": 36, "frame_idx": 171, "global_frame_idx": 10220, "task_index": 7}, {"db_idx": 10221, "episode_idx": 36, "frame_idx": 172, "global_frame_idx": 10221, "task_index": 7}, {"db_idx": 10222, "episode_idx": 36, "frame_idx": 173, "global_frame_idx": 10222, "task_index": 7}, {"db_idx": 10223, "episode_idx": 36, "frame_idx": 174, "global_frame_idx": 10223, "task_index": 7}, {"db_idx": 10224, "episode_idx": 36, "frame_idx": 175, "global_frame_idx": 10224, "task_index": 7}, {"db_idx": 10225, "episode_idx": 36, "frame_idx": 176, "global_frame_idx": 10225, "task_index": 7}, {"db_idx": 10226, "episode_idx": 36, "frame_idx": 177, "global_frame_idx": 10226, "task_index": 7}, {"db_idx": 10227, "episode_idx": 36, "frame_idx": 178, "global_frame_idx": 10227, "task_index": 7}, {"db_idx": 10228, "episode_idx": 36, "frame_idx": 179, "global_frame_idx": 10228, "task_index": 7}, {"db_idx": 10229, "episode_idx": 36, "frame_idx": 180, 
"global_frame_idx": 10229, "task_index": 7}, {"db_idx": 10230, "episode_idx": 36, "frame_idx": 181, "global_frame_idx": 10230, "task_index": 7}, {"db_idx": 10231, "episode_idx": 36, "frame_idx": 182, "global_frame_idx": 10231, "task_index": 7}, {"db_idx": 10232, "episode_idx": 36, "frame_idx": 183, "global_frame_idx": 10232, "task_index": 7}, {"db_idx": 10233, "episode_idx": 36, "frame_idx": 184, "global_frame_idx": 10233, "task_index": 7}, {"db_idx": 10234, "episode_idx": 36, "frame_idx": 185, "global_frame_idx": 10234, "task_index": 7}, {"db_idx": 10235, "episode_idx": 36, "frame_idx": 186, "global_frame_idx": 10235, "task_index": 7}, {"db_idx": 10236, "episode_idx": 36, "frame_idx": 187, "global_frame_idx": 10236, "task_index": 7}, {"db_idx": 10237, "episode_idx": 36, "frame_idx": 188, "global_frame_idx": 10237, "task_index": 7}, {"db_idx": 10238, "episode_idx": 36, "frame_idx": 189, "global_frame_idx": 10238, "task_index": 7}, {"db_idx": 10239, "episode_idx": 36, "frame_idx": 190, "global_frame_idx": 10239, "task_index": 7}, {"db_idx": 10240, "episode_idx": 36, "frame_idx": 191, "global_frame_idx": 10240, "task_index": 7}, {"db_idx": 10241, "episode_idx": 36, "frame_idx": 192, "global_frame_idx": 10241, "task_index": 7}, {"db_idx": 10242, "episode_idx": 36, "frame_idx": 193, "global_frame_idx": 10242, "task_index": 7}, {"db_idx": 10243, "episode_idx": 36, "frame_idx": 194, "global_frame_idx": 10243, "task_index": 7}, {"db_idx": 10244, "episode_idx": 36, "frame_idx": 195, "global_frame_idx": 10244, "task_index": 7}, {"db_idx": 10245, "episode_idx": 36, "frame_idx": 196, "global_frame_idx": 10245, "task_index": 7}, {"db_idx": 10246, "episode_idx": 36, "frame_idx": 197, "global_frame_idx": 10246, "task_index": 7}, {"db_idx": 10247, "episode_idx": 36, "frame_idx": 198, "global_frame_idx": 10247, "task_index": 7}, {"db_idx": 10248, "episode_idx": 36, "frame_idx": 199, "global_frame_idx": 10248, "task_index": 7}, {"db_idx": 10249, "episode_idx": 36, "frame_idx": 200, 
"global_frame_idx": 10249, "task_index": 7}, {"db_idx": 10250, "episode_idx": 36, "frame_idx": 201, "global_frame_idx": 10250, "task_index": 7}, {"db_idx": 10251, "episode_idx": 36, "frame_idx": 202, "global_frame_idx": 10251, "task_index": 7}, {"db_idx": 10252, "episode_idx": 36, "frame_idx": 203, "global_frame_idx": 10252, "task_index": 7}, {"db_idx": 10253, "episode_idx": 36, "frame_idx": 204, "global_frame_idx": 10253, "task_index": 7}, {"db_idx": 10254, "episode_idx": 36, "frame_idx": 205, "global_frame_idx": 10254, "task_index": 7}, {"db_idx": 10255, "episode_idx": 36, "frame_idx": 206, "global_frame_idx": 10255, "task_index": 7}, {"db_idx": 10256, "episode_idx": 36, "frame_idx": 207, "global_frame_idx": 10256, "task_index": 7}, {"db_idx": 10257, "episode_idx": 36, "frame_idx": 208, "global_frame_idx": 10257, "task_index": 7}, {"db_idx": 10258, "episode_idx": 36, "frame_idx": 209, "global_frame_idx": 10258, "task_index": 7}, {"db_idx": 10259, "episode_idx": 36, "frame_idx": 210, "global_frame_idx": 10259, "task_index": 7}, {"db_idx": 10260, "episode_idx": 36, "frame_idx": 211, "global_frame_idx": 10260, "task_index": 7}, {"db_idx": 10261, "episode_idx": 36, "frame_idx": 212, "global_frame_idx": 10261, "task_index": 7}, {"db_idx": 10262, "episode_idx": 36, "frame_idx": 213, "global_frame_idx": 10262, "task_index": 7}, {"db_idx": 10263, "episode_idx": 36, "frame_idx": 214, "global_frame_idx": 10263, "task_index": 7}, {"db_idx": 10264, "episode_idx": 36, "frame_idx": 215, "global_frame_idx": 10264, "task_index": 7}, {"db_idx": 10265, "episode_idx": 36, "frame_idx": 216, "global_frame_idx": 10265, "task_index": 7}, {"db_idx": 10266, "episode_idx": 36, "frame_idx": 217, "global_frame_idx": 10266, "task_index": 7}, {"db_idx": 10267, "episode_idx": 36, "frame_idx": 218, "global_frame_idx": 10267, "task_index": 7}, {"db_idx": 10268, "episode_idx": 36, "frame_idx": 219, "global_frame_idx": 10268, "task_index": 7}, {"db_idx": 10269, "episode_idx": 36, "frame_idx": 220, 
"global_frame_idx": 10269, "task_index": 7}, {"db_idx": 10270, "episode_idx": 36, "frame_idx": 221, "global_frame_idx": 10270, "task_index": 7}, {"db_idx": 10271, "episode_idx": 36, "frame_idx": 222, "global_frame_idx": 10271, "task_index": 7}, {"db_idx": 10272, "episode_idx": 36, "frame_idx": 223, "global_frame_idx": 10272, "task_index": 7}, {"db_idx": 10273, "episode_idx": 36, "frame_idx": 224, "global_frame_idx": 10273, "task_index": 7}, {"db_idx": 10274, "episode_idx": 36, "frame_idx": 225, "global_frame_idx": 10274, "task_index": 7}, {"db_idx": 10275, "episode_idx": 36, "frame_idx": 226, "global_frame_idx": 10275, "task_index": 7}, {"db_idx": 10276, "episode_idx": 36, "frame_idx": 227, "global_frame_idx": 10276, "task_index": 7}, {"db_idx": 10277, "episode_idx": 36, "frame_idx": 228, "global_frame_idx": 10277, "task_index": 7}, {"db_idx": 10278, "episode_idx": 36, "frame_idx": 229, "global_frame_idx": 10278, "task_index": 7}, {"db_idx": 10279, "episode_idx": 36, "frame_idx": 230, "global_frame_idx": 10279, "task_index": 7}, {"db_idx": 10280, "episode_idx": 36, "frame_idx": 231, "global_frame_idx": 10280, "task_index": 7}, {"db_idx": 10281, "episode_idx": 36, "frame_idx": 232, "global_frame_idx": 10281, "task_index": 7}, {"db_idx": 10282, "episode_idx": 37, "frame_idx": 0, "global_frame_idx": 10282, "task_index": 7}, {"db_idx": 10283, "episode_idx": 37, "frame_idx": 1, "global_frame_idx": 10283, "task_index": 7}, {"db_idx": 10284, "episode_idx": 37, "frame_idx": 2, "global_frame_idx": 10284, "task_index": 7}, {"db_idx": 10285, "episode_idx": 37, "frame_idx": 3, "global_frame_idx": 10285, "task_index": 7}, {"db_idx": 10286, "episode_idx": 37, "frame_idx": 4, "global_frame_idx": 10286, "task_index": 7}, {"db_idx": 10287, "episode_idx": 37, "frame_idx": 5, "global_frame_idx": 10287, "task_index": 7}, {"db_idx": 10288, "episode_idx": 37, "frame_idx": 6, "global_frame_idx": 10288, "task_index": 7}, {"db_idx": 10289, "episode_idx": 37, "frame_idx": 7, 
"global_frame_idx": 10289, "task_index": 7}, {"db_idx": 10290, "episode_idx": 37, "frame_idx": 8, "global_frame_idx": 10290, "task_index": 7}, {"db_idx": 10291, "episode_idx": 37, "frame_idx": 9, "global_frame_idx": 10291, "task_index": 7}, {"db_idx": 10292, "episode_idx": 37, "frame_idx": 10, "global_frame_idx": 10292, "task_index": 7}, {"db_idx": 10293, "episode_idx": 37, "frame_idx": 11, "global_frame_idx": 10293, "task_index": 7}, {"db_idx": 10294, "episode_idx": 37, "frame_idx": 12, "global_frame_idx": 10294, "task_index": 7}, {"db_idx": 10295, "episode_idx": 37, "frame_idx": 13, "global_frame_idx": 10295, "task_index": 7}, {"db_idx": 10296, "episode_idx": 37, "frame_idx": 14, "global_frame_idx": 10296, "task_index": 7}, {"db_idx": 10297, "episode_idx": 37, "frame_idx": 15, "global_frame_idx": 10297, "task_index": 7}, {"db_idx": 10298, "episode_idx": 37, "frame_idx": 16, "global_frame_idx": 10298, "task_index": 7}, {"db_idx": 10299, "episode_idx": 37, "frame_idx": 17, "global_frame_idx": 10299, "task_index": 7}, {"db_idx": 10300, "episode_idx": 37, "frame_idx": 18, "global_frame_idx": 10300, "task_index": 7}, {"db_idx": 10301, "episode_idx": 37, "frame_idx": 19, "global_frame_idx": 10301, "task_index": 7}, {"db_idx": 10302, "episode_idx": 37, "frame_idx": 20, "global_frame_idx": 10302, "task_index": 7}, {"db_idx": 10303, "episode_idx": 37, "frame_idx": 21, "global_frame_idx": 10303, "task_index": 7}, {"db_idx": 10304, "episode_idx": 37, "frame_idx": 22, "global_frame_idx": 10304, "task_index": 7}, {"db_idx": 10305, "episode_idx": 37, "frame_idx": 23, "global_frame_idx": 10305, "task_index": 7}, {"db_idx": 10306, "episode_idx": 37, "frame_idx": 24, "global_frame_idx": 10306, "task_index": 7}, {"db_idx": 10307, "episode_idx": 37, "frame_idx": 25, "global_frame_idx": 10307, "task_index": 7}, {"db_idx": 10308, "episode_idx": 37, "frame_idx": 26, "global_frame_idx": 10308, "task_index": 7}, {"db_idx": 10309, "episode_idx": 37, "frame_idx": 27, "global_frame_idx": 
10309, "task_index": 7}, {"db_idx": 10310, "episode_idx": 37, "frame_idx": 28, "global_frame_idx": 10310, "task_index": 7}, {"db_idx": 10311, "episode_idx": 37, "frame_idx": 29, "global_frame_idx": 10311, "task_index": 7}, {"db_idx": 10312, "episode_idx": 37, "frame_idx": 30, "global_frame_idx": 10312, "task_index": 7}, {"db_idx": 10313, "episode_idx": 37, "frame_idx": 31, "global_frame_idx": 10313, "task_index": 7}, {"db_idx": 10314, "episode_idx": 37, "frame_idx": 32, "global_frame_idx": 10314, "task_index": 7}, {"db_idx": 10315, "episode_idx": 37, "frame_idx": 33, "global_frame_idx": 10315, "task_index": 7}, {"db_idx": 10316, "episode_idx": 37, "frame_idx": 34, "global_frame_idx": 10316, "task_index": 7}, {"db_idx": 10317, "episode_idx": 37, "frame_idx": 35, "global_frame_idx": 10317, "task_index": 7}, {"db_idx": 10318, "episode_idx": 37, "frame_idx": 36, "global_frame_idx": 10318, "task_index": 7}, {"db_idx": 10319, "episode_idx": 37, "frame_idx": 37, "global_frame_idx": 10319, "task_index": 7}, {"db_idx": 10320, "episode_idx": 37, "frame_idx": 38, "global_frame_idx": 10320, "task_index": 7}, {"db_idx": 10321, "episode_idx": 37, "frame_idx": 39, "global_frame_idx": 10321, "task_index": 7}, {"db_idx": 10322, "episode_idx": 37, "frame_idx": 40, "global_frame_idx": 10322, "task_index": 7}, {"db_idx": 10323, "episode_idx": 37, "frame_idx": 41, "global_frame_idx": 10323, "task_index": 7}, {"db_idx": 10324, "episode_idx": 37, "frame_idx": 42, "global_frame_idx": 10324, "task_index": 7}, {"db_idx": 10325, "episode_idx": 37, "frame_idx": 43, "global_frame_idx": 10325, "task_index": 7}, {"db_idx": 10326, "episode_idx": 37, "frame_idx": 44, "global_frame_idx": 10326, "task_index": 7}, {"db_idx": 10327, "episode_idx": 37, "frame_idx": 45, "global_frame_idx": 10327, "task_index": 7}, {"db_idx": 10328, "episode_idx": 37, "frame_idx": 46, "global_frame_idx": 10328, "task_index": 7}, {"db_idx": 10329, "episode_idx": 37, "frame_idx": 47, "global_frame_idx": 10329, 
"task_index": 7}, {"db_idx": 10330, "episode_idx": 37, "frame_idx": 48, "global_frame_idx": 10330, "task_index": 7}, {"db_idx": 10331, "episode_idx": 37, "frame_idx": 49, "global_frame_idx": 10331, "task_index": 7}, {"db_idx": 10332, "episode_idx": 37, "frame_idx": 50, "global_frame_idx": 10332, "task_index": 7}, {"db_idx": 10333, "episode_idx": 37, "frame_idx": 51, "global_frame_idx": 10333, "task_index": 7}, {"db_idx": 10334, "episode_idx": 37, "frame_idx": 52, "global_frame_idx": 10334, "task_index": 7}, {"db_idx": 10335, "episode_idx": 37, "frame_idx": 53, "global_frame_idx": 10335, "task_index": 7}, {"db_idx": 10336, "episode_idx": 37, "frame_idx": 54, "global_frame_idx": 10336, "task_index": 7}, {"db_idx": 10337, "episode_idx": 37, "frame_idx": 55, "global_frame_idx": 10337, "task_index": 7}, {"db_idx": 10338, "episode_idx": 37, "frame_idx": 56, "global_frame_idx": 10338, "task_index": 7}, {"db_idx": 10339, "episode_idx": 37, "frame_idx": 57, "global_frame_idx": 10339, "task_index": 7}, {"db_idx": 10340, "episode_idx": 37, "frame_idx": 58, "global_frame_idx": 10340, "task_index": 7}, {"db_idx": 10341, "episode_idx": 37, "frame_idx": 59, "global_frame_idx": 10341, "task_index": 7}, {"db_idx": 10342, "episode_idx": 37, "frame_idx": 60, "global_frame_idx": 10342, "task_index": 7}, {"db_idx": 10343, "episode_idx": 37, "frame_idx": 61, "global_frame_idx": 10343, "task_index": 7}, {"db_idx": 10344, "episode_idx": 37, "frame_idx": 62, "global_frame_idx": 10344, "task_index": 7}, {"db_idx": 10345, "episode_idx": 37, "frame_idx": 63, "global_frame_idx": 10345, "task_index": 7}, {"db_idx": 10346, "episode_idx": 37, "frame_idx": 64, "global_frame_idx": 10346, "task_index": 7}, {"db_idx": 10347, "episode_idx": 37, "frame_idx": 65, "global_frame_idx": 10347, "task_index": 7}, {"db_idx": 10348, "episode_idx": 37, "frame_idx": 66, "global_frame_idx": 10348, "task_index": 7}, {"db_idx": 10349, "episode_idx": 37, "frame_idx": 67, "global_frame_idx": 10349, "task_index": 7}, 
{"db_idx": 10350, "episode_idx": 37, "frame_idx": 68, "global_frame_idx": 10350, "task_index": 7}, {"db_idx": 10351, "episode_idx": 37, "frame_idx": 69, "global_frame_idx": 10351, "task_index": 7}, {"db_idx": 10352, "episode_idx": 37, "frame_idx": 70, "global_frame_idx": 10352, "task_index": 7}, {"db_idx": 10353, "episode_idx": 37, "frame_idx": 71, "global_frame_idx": 10353, "task_index": 7}, {"db_idx": 10354, "episode_idx": 37, "frame_idx": 72, "global_frame_idx": 10354, "task_index": 7}, {"db_idx": 10355, "episode_idx": 37, "frame_idx": 73, "global_frame_idx": 10355, "task_index": 7}, {"db_idx": 10356, "episode_idx": 37, "frame_idx": 74, "global_frame_idx": 10356, "task_index": 7}, {"db_idx": 10357, "episode_idx": 37, "frame_idx": 75, "global_frame_idx": 10357, "task_index": 7}, {"db_idx": 10358, "episode_idx": 37, "frame_idx": 76, "global_frame_idx": 10358, "task_index": 7}, {"db_idx": 10359, "episode_idx": 37, "frame_idx": 77, "global_frame_idx": 10359, "task_index": 7}, {"db_idx": 10360, "episode_idx": 37, "frame_idx": 78, "global_frame_idx": 10360, "task_index": 7}, {"db_idx": 10361, "episode_idx": 37, "frame_idx": 79, "global_frame_idx": 10361, "task_index": 7}, {"db_idx": 10362, "episode_idx": 37, "frame_idx": 80, "global_frame_idx": 10362, "task_index": 7}, {"db_idx": 10363, "episode_idx": 37, "frame_idx": 81, "global_frame_idx": 10363, "task_index": 7}, {"db_idx": 10364, "episode_idx": 37, "frame_idx": 82, "global_frame_idx": 10364, "task_index": 7}, {"db_idx": 10365, "episode_idx": 37, "frame_idx": 83, "global_frame_idx": 10365, "task_index": 7}, {"db_idx": 10366, "episode_idx": 37, "frame_idx": 84, "global_frame_idx": 10366, "task_index": 7}, {"db_idx": 10367, "episode_idx": 37, "frame_idx": 85, "global_frame_idx": 10367, "task_index": 7}, {"db_idx": 10368, "episode_idx": 37, "frame_idx": 86, "global_frame_idx": 10368, "task_index": 7}, {"db_idx": 10369, "episode_idx": 37, "frame_idx": 87, "global_frame_idx": 10369, "task_index": 7}, {"db_idx": 10370, 
"episode_idx": 37, "frame_idx": 88, "global_frame_idx": 10370, "task_index": 7}, {"db_idx": 10371, "episode_idx": 37, "frame_idx": 89, "global_frame_idx": 10371, "task_index": 7}, {"db_idx": 10372, "episode_idx": 37, "frame_idx": 90, "global_frame_idx": 10372, "task_index": 7}, {"db_idx": 10373, "episode_idx": 37, "frame_idx": 91, "global_frame_idx": 10373, "task_index": 7}, {"db_idx": 10374, "episode_idx": 37, "frame_idx": 92, "global_frame_idx": 10374, "task_index": 7}, {"db_idx": 10375, "episode_idx": 37, "frame_idx": 93, "global_frame_idx": 10375, "task_index": 7}, {"db_idx": 10376, "episode_idx": 37, "frame_idx": 94, "global_frame_idx": 10376, "task_index": 7}, {"db_idx": 10377, "episode_idx": 37, "frame_idx": 95, "global_frame_idx": 10377, "task_index": 7}, {"db_idx": 10378, "episode_idx": 37, "frame_idx": 96, "global_frame_idx": 10378, "task_index": 7}, {"db_idx": 10379, "episode_idx": 37, "frame_idx": 97, "global_frame_idx": 10379, "task_index": 7}, {"db_idx": 10380, "episode_idx": 37, "frame_idx": 98, "global_frame_idx": 10380, "task_index": 7}, {"db_idx": 10381, "episode_idx": 37, "frame_idx": 99, "global_frame_idx": 10381, "task_index": 7}, {"db_idx": 10382, "episode_idx": 37, "frame_idx": 100, "global_frame_idx": 10382, "task_index": 7}, {"db_idx": 10383, "episode_idx": 37, "frame_idx": 101, "global_frame_idx": 10383, "task_index": 7}, {"db_idx": 10384, "episode_idx": 37, "frame_idx": 102, "global_frame_idx": 10384, "task_index": 7}, {"db_idx": 10385, "episode_idx": 37, "frame_idx": 103, "global_frame_idx": 10385, "task_index": 7}, {"db_idx": 10386, "episode_idx": 37, "frame_idx": 104, "global_frame_idx": 10386, "task_index": 7}, {"db_idx": 10387, "episode_idx": 37, "frame_idx": 105, "global_frame_idx": 10387, "task_index": 7}, {"db_idx": 10388, "episode_idx": 37, "frame_idx": 106, "global_frame_idx": 10388, "task_index": 7}, {"db_idx": 10389, "episode_idx": 37, "frame_idx": 107, "global_frame_idx": 10389, "task_index": 7}, {"db_idx": 10390, 
"episode_idx": 37, "frame_idx": 108, "global_frame_idx": 10390, "task_index": 7}, {"db_idx": 10391, "episode_idx": 37, "frame_idx": 109, "global_frame_idx": 10391, "task_index": 7}, {"db_idx": 10392, "episode_idx": 37, "frame_idx": 110, "global_frame_idx": 10392, "task_index": 7}, {"db_idx": 10393, "episode_idx": 37, "frame_idx": 111, "global_frame_idx": 10393, "task_index": 7}, {"db_idx": 10394, "episode_idx": 37, "frame_idx": 112, "global_frame_idx": 10394, "task_index": 7}, {"db_idx": 10395, "episode_idx": 37, "frame_idx": 113, "global_frame_idx": 10395, "task_index": 7}, {"db_idx": 10396, "episode_idx": 37, "frame_idx": 114, "global_frame_idx": 10396, "task_index": 7}, {"db_idx": 10397, "episode_idx": 37, "frame_idx": 115, "global_frame_idx": 10397, "task_index": 7}, {"db_idx": 10398, "episode_idx": 37, "frame_idx": 116, "global_frame_idx": 10398, "task_index": 7}, {"db_idx": 10399, "episode_idx": 37, "frame_idx": 117, "global_frame_idx": 10399, "task_index": 7}, {"db_idx": 10400, "episode_idx": 37, "frame_idx": 118, "global_frame_idx": 10400, "task_index": 7}, {"db_idx": 10401, "episode_idx": 37, "frame_idx": 119, "global_frame_idx": 10401, "task_index": 7}, {"db_idx": 10402, "episode_idx": 37, "frame_idx": 120, "global_frame_idx": 10402, "task_index": 7}, {"db_idx": 10403, "episode_idx": 37, "frame_idx": 121, "global_frame_idx": 10403, "task_index": 7}, {"db_idx": 10404, "episode_idx": 37, "frame_idx": 122, "global_frame_idx": 10404, "task_index": 7}, {"db_idx": 10405, "episode_idx": 37, "frame_idx": 123, "global_frame_idx": 10405, "task_index": 7}, {"db_idx": 10406, "episode_idx": 37, "frame_idx": 124, "global_frame_idx": 10406, "task_index": 7}, {"db_idx": 10407, "episode_idx": 37, "frame_idx": 125, "global_frame_idx": 10407, "task_index": 7}, {"db_idx": 10408, "episode_idx": 37, "frame_idx": 126, "global_frame_idx": 10408, "task_index": 7}, {"db_idx": 10409, "episode_idx": 37, "frame_idx": 127, "global_frame_idx": 10409, "task_index": 7}, {"db_idx": 10410, 
"episode_idx": 37, "frame_idx": 128, "global_frame_idx": 10410, "task_index": 7}, {"db_idx": 10411, "episode_idx": 37, "frame_idx": 129, "global_frame_idx": 10411, "task_index": 7}, {"db_idx": 10412, "episode_idx": 37, "frame_idx": 130, "global_frame_idx": 10412, "task_index": 7}, {"db_idx": 10413, "episode_idx": 37, "frame_idx": 131, "global_frame_idx": 10413, "task_index": 7}, {"db_idx": 10414, "episode_idx": 37, "frame_idx": 132, "global_frame_idx": 10414, "task_index": 7}, {"db_idx": 10415, "episode_idx": 37, "frame_idx": 133, "global_frame_idx": 10415, "task_index": 7}, {"db_idx": 10416, "episode_idx": 37, "frame_idx": 134, "global_frame_idx": 10416, "task_index": 7}, {"db_idx": 10417, "episode_idx": 37, "frame_idx": 135, "global_frame_idx": 10417, "task_index": 7}, {"db_idx": 10418, "episode_idx": 37, "frame_idx": 136, "global_frame_idx": 10418, "task_index": 7}, {"db_idx": 10419, "episode_idx": 37, "frame_idx": 137, "global_frame_idx": 10419, "task_index": 7}, {"db_idx": 10420, "episode_idx": 37, "frame_idx": 138, "global_frame_idx": 10420, "task_index": 7}, {"db_idx": 10421, "episode_idx": 37, "frame_idx": 139, "global_frame_idx": 10421, "task_index": 7}, {"db_idx": 10422, "episode_idx": 37, "frame_idx": 140, "global_frame_idx": 10422, "task_index": 7}, {"db_idx": 10423, "episode_idx": 37, "frame_idx": 141, "global_frame_idx": 10423, "task_index": 7}, {"db_idx": 10424, "episode_idx": 37, "frame_idx": 142, "global_frame_idx": 10424, "task_index": 7}, {"db_idx": 10425, "episode_idx": 37, "frame_idx": 143, "global_frame_idx": 10425, "task_index": 7}, {"db_idx": 10426, "episode_idx": 37, "frame_idx": 144, "global_frame_idx": 10426, "task_index": 7}, {"db_idx": 10427, "episode_idx": 37, "frame_idx": 145, "global_frame_idx": 10427, "task_index": 7}, {"db_idx": 10428, "episode_idx": 37, "frame_idx": 146, "global_frame_idx": 10428, "task_index": 7}, {"db_idx": 10429, "episode_idx": 37, "frame_idx": 147, "global_frame_idx": 10429, "task_index": 7}, {"db_idx": 10430, 
"episode_idx": 37, "frame_idx": 148, "global_frame_idx": 10430, "task_index": 7}, {"db_idx": 10431, "episode_idx": 37, "frame_idx": 149, "global_frame_idx": 10431, "task_index": 7}, {"db_idx": 10432, "episode_idx": 37, "frame_idx": 150, "global_frame_idx": 10432, "task_index": 7}, {"db_idx": 10433, "episode_idx": 37, "frame_idx": 151, "global_frame_idx": 10433, "task_index": 7}, {"db_idx": 10434, "episode_idx": 37, "frame_idx": 152, "global_frame_idx": 10434, "task_index": 7}, {"db_idx": 10435, "episode_idx": 37, "frame_idx": 153, "global_frame_idx": 10435, "task_index": 7}, {"db_idx": 10436, "episode_idx": 37, "frame_idx": 154, "global_frame_idx": 10436, "task_index": 7}, {"db_idx": 10437, "episode_idx": 37, "frame_idx": 155, "global_frame_idx": 10437, "task_index": 7}, {"db_idx": 10438, "episode_idx": 37, "frame_idx": 156, "global_frame_idx": 10438, "task_index": 7}, {"db_idx": 10439, "episode_idx": 37, "frame_idx": 157, "global_frame_idx": 10439, "task_index": 7}, {"db_idx": 10440, "episode_idx": 37, "frame_idx": 158, "global_frame_idx": 10440, "task_index": 7}, {"db_idx": 10441, "episode_idx": 37, "frame_idx": 159, "global_frame_idx": 10441, "task_index": 7}, {"db_idx": 10442, "episode_idx": 37, "frame_idx": 160, "global_frame_idx": 10442, "task_index": 7}, {"db_idx": 10443, "episode_idx": 37, "frame_idx": 161, "global_frame_idx": 10443, "task_index": 7}, {"db_idx": 10444, "episode_idx": 37, "frame_idx": 162, "global_frame_idx": 10444, "task_index": 7}, {"db_idx": 10445, "episode_idx": 37, "frame_idx": 163, "global_frame_idx": 10445, "task_index": 7}, {"db_idx": 10446, "episode_idx": 37, "frame_idx": 164, "global_frame_idx": 10446, "task_index": 7}, {"db_idx": 10447, "episode_idx": 37, "frame_idx": 165, "global_frame_idx": 10447, "task_index": 7}, {"db_idx": 10448, "episode_idx": 37, "frame_idx": 166, "global_frame_idx": 10448, "task_index": 7}, {"db_idx": 10449, "episode_idx": 37, "frame_idx": 167, "global_frame_idx": 10449, "task_index": 7}, {"db_idx": 10450, 
"episode_idx": 37, "frame_idx": 168, "global_frame_idx": 10450, "task_index": 7}, {"db_idx": 10451, "episode_idx": 37, "frame_idx": 169, "global_frame_idx": 10451, "task_index": 7}, {"db_idx": 10452, "episode_idx": 37, "frame_idx": 170, "global_frame_idx": 10452, "task_index": 7}, {"db_idx": 10453, "episode_idx": 37, "frame_idx": 171, "global_frame_idx": 10453, "task_index": 7}, {"db_idx": 10454, "episode_idx": 37, "frame_idx": 172, "global_frame_idx": 10454, "task_index": 7}, {"db_idx": 10455, "episode_idx": 37, "frame_idx": 173, "global_frame_idx": 10455, "task_index": 7}, {"db_idx": 10456, "episode_idx": 37, "frame_idx": 174, "global_frame_idx": 10456, "task_index": 7}, {"db_idx": 10457, "episode_idx": 37, "frame_idx": 175, "global_frame_idx": 10457, "task_index": 7}, {"db_idx": 10458, "episode_idx": 37, "frame_idx": 176, "global_frame_idx": 10458, "task_index": 7}, {"db_idx": 10459, "episode_idx": 37, "frame_idx": 177, "global_frame_idx": 10459, "task_index": 7}, {"db_idx": 10460, "episode_idx": 37, "frame_idx": 178, "global_frame_idx": 10460, "task_index": 7}, {"db_idx": 10461, "episode_idx": 37, "frame_idx": 179, "global_frame_idx": 10461, "task_index": 7}, {"db_idx": 10462, "episode_idx": 37, "frame_idx": 180, "global_frame_idx": 10462, "task_index": 7}, {"db_idx": 10463, "episode_idx": 37, "frame_idx": 181, "global_frame_idx": 10463, "task_index": 7}, {"db_idx": 10464, "episode_idx": 37, "frame_idx": 182, "global_frame_idx": 10464, "task_index": 7}, {"db_idx": 10465, "episode_idx": 37, "frame_idx": 183, "global_frame_idx": 10465, "task_index": 7}, {"db_idx": 10466, "episode_idx": 37, "frame_idx": 184, "global_frame_idx": 10466, "task_index": 7}, {"db_idx": 10467, "episode_idx": 37, "frame_idx": 185, "global_frame_idx": 10467, "task_index": 7}, {"db_idx": 10468, "episode_idx": 37, "frame_idx": 186, "global_frame_idx": 10468, "task_index": 7}, {"db_idx": 10469, "episode_idx": 37, "frame_idx": 187, "global_frame_idx": 10469, "task_index": 7}, {"db_idx": 10470, 
"episode_idx": 37, "frame_idx": 188, "global_frame_idx": 10470, "task_index": 7}, {"db_idx": 10471, "episode_idx": 37, "frame_idx": 189, "global_frame_idx": 10471, "task_index": 7}, {"db_idx": 10472, "episode_idx": 37, "frame_idx": 190, "global_frame_idx": 10472, "task_index": 7}, {"db_idx": 10473, "episode_idx": 37, "frame_idx": 191, "global_frame_idx": 10473, "task_index": 7}, {"db_idx": 10474, "episode_idx": 37, "frame_idx": 192, "global_frame_idx": 10474, "task_index": 7}, {"db_idx": 10475, "episode_idx": 37, "frame_idx": 193, "global_frame_idx": 10475, "task_index": 7}, {"db_idx": 10476, "episode_idx": 37, "frame_idx": 194, "global_frame_idx": 10476, "task_index": 7}, {"db_idx": 10477, "episode_idx": 37, "frame_idx": 195, "global_frame_idx": 10477, "task_index": 7}, {"db_idx": 10478, "episode_idx": 37, "frame_idx": 196, "global_frame_idx": 10478, "task_index": 7}, {"db_idx": 10479, "episode_idx": 37, "frame_idx": 197, "global_frame_idx": 10479, "task_index": 7}, {"db_idx": 10480, "episode_idx": 37, "frame_idx": 198, "global_frame_idx": 10480, "task_index": 7}, {"db_idx": 10481, "episode_idx": 37, "frame_idx": 199, "global_frame_idx": 10481, "task_index": 7}, {"db_idx": 10482, "episode_idx": 37, "frame_idx": 200, "global_frame_idx": 10482, "task_index": 7}, {"db_idx": 10483, "episode_idx": 37, "frame_idx": 201, "global_frame_idx": 10483, "task_index": 7}, {"db_idx": 10484, "episode_idx": 37, "frame_idx": 202, "global_frame_idx": 10484, "task_index": 7}, {"db_idx": 10485, "episode_idx": 37, "frame_idx": 203, "global_frame_idx": 10485, "task_index": 7}, {"db_idx": 10486, "episode_idx": 37, "frame_idx": 204, "global_frame_idx": 10486, "task_index": 7}, {"db_idx": 10487, "episode_idx": 37, "frame_idx": 205, "global_frame_idx": 10487, "task_index": 7}, {"db_idx": 10488, "episode_idx": 37, "frame_idx": 206, "global_frame_idx": 10488, "task_index": 7}, {"db_idx": 10489, "episode_idx": 37, "frame_idx": 207, "global_frame_idx": 10489, "task_index": 7}, {"db_idx": 10490, 
"episode_idx": 37, "frame_idx": 208, "global_frame_idx": 10490, "task_index": 7}, {"db_idx": 10491, "episode_idx": 37, "frame_idx": 209, "global_frame_idx": 10491, "task_index": 7}, {"db_idx": 10492, "episode_idx": 37, "frame_idx": 210, "global_frame_idx": 10492, "task_index": 7}, {"db_idx": 10493, "episode_idx": 37, "frame_idx": 211, "global_frame_idx": 10493, "task_index": 7}, {"db_idx": 10494, "episode_idx": 37, "frame_idx": 212, "global_frame_idx": 10494, "task_index": 7}, {"db_idx": 10495, "episode_idx": 37, "frame_idx": 213, "global_frame_idx": 10495, "task_index": 7}, {"db_idx": 10496, "episode_idx": 37, "frame_idx": 214, "global_frame_idx": 10496, "task_index": 7}, {"db_idx": 10497, "episode_idx": 37, "frame_idx": 215, "global_frame_idx": 10497, "task_index": 7}, {"db_idx": 10498, "episode_idx": 37, "frame_idx": 216, "global_frame_idx": 10498, "task_index": 7}, {"db_idx": 10499, "episode_idx": 37, "frame_idx": 217, "global_frame_idx": 10499, "task_index": 7}, {"db_idx": 10500, "episode_idx": 37, "frame_idx": 218, "global_frame_idx": 10500, "task_index": 7}, {"db_idx": 10501, "episode_idx": 37, "frame_idx": 219, "global_frame_idx": 10501, "task_index": 7}, {"db_idx": 10502, "episode_idx": 37, "frame_idx": 220, "global_frame_idx": 10502, "task_index": 7}, {"db_idx": 10503, "episode_idx": 37, "frame_idx": 221, "global_frame_idx": 10503, "task_index": 7}, {"db_idx": 10504, "episode_idx": 37, "frame_idx": 222, "global_frame_idx": 10504, "task_index": 7}, {"db_idx": 10505, "episode_idx": 37, "frame_idx": 223, "global_frame_idx": 10505, "task_index": 7}, {"db_idx": 10506, "episode_idx": 37, "frame_idx": 224, "global_frame_idx": 10506, "task_index": 7}, {"db_idx": 10507, "episode_idx": 37, "frame_idx": 225, "global_frame_idx": 10507, "task_index": 7}, {"db_idx": 10508, "episode_idx": 37, "frame_idx": 226, "global_frame_idx": 10508, "task_index": 7}, {"db_idx": 10509, "episode_idx": 37, "frame_idx": 227, "global_frame_idx": 10509, "task_index": 7}, {"db_idx": 10510, 
"episode_idx": 37, "frame_idx": 228, "global_frame_idx": 10510, "task_index": 7}, {"db_idx": 10511, "episode_idx": 37, "frame_idx": 229, "global_frame_idx": 10511, "task_index": 7}, {"db_idx": 10512, "episode_idx": 37, "frame_idx": 230, "global_frame_idx": 10512, "task_index": 7}, {"db_idx": 10513, "episode_idx": 37, "frame_idx": 231, "global_frame_idx": 10513, "task_index": 7}, {"db_idx": 10514, "episode_idx": 37, "frame_idx": 232, "global_frame_idx": 10514, "task_index": 7}, {"db_idx": 10515, "episode_idx": 38, "frame_idx": 0, "global_frame_idx": 10515, "task_index": 7}, {"db_idx": 10516, "episode_idx": 38, "frame_idx": 1, "global_frame_idx": 10516, "task_index": 7}, {"db_idx": 10517, "episode_idx": 38, "frame_idx": 2, "global_frame_idx": 10517, "task_index": 7}, {"db_idx": 10518, "episode_idx": 38, "frame_idx": 3, "global_frame_idx": 10518, "task_index": 7}, {"db_idx": 10519, "episode_idx": 38, "frame_idx": 4, "global_frame_idx": 10519, "task_index": 7}, {"db_idx": 10520, "episode_idx": 38, "frame_idx": 5, "global_frame_idx": 10520, "task_index": 7}, {"db_idx": 10521, "episode_idx": 38, "frame_idx": 6, "global_frame_idx": 10521, "task_index": 7}, {"db_idx": 10522, "episode_idx": 38, "frame_idx": 7, "global_frame_idx": 10522, "task_index": 7}, {"db_idx": 10523, "episode_idx": 38, "frame_idx": 8, "global_frame_idx": 10523, "task_index": 7}, {"db_idx": 10524, "episode_idx": 38, "frame_idx": 9, "global_frame_idx": 10524, "task_index": 7}, {"db_idx": 10525, "episode_idx": 38, "frame_idx": 10, "global_frame_idx": 10525, "task_index": 7}, {"db_idx": 10526, "episode_idx": 38, "frame_idx": 11, "global_frame_idx": 10526, "task_index": 7}, {"db_idx": 10527, "episode_idx": 38, "frame_idx": 12, "global_frame_idx": 10527, "task_index": 7}, {"db_idx": 10528, "episode_idx": 38, "frame_idx": 13, "global_frame_idx": 10528, "task_index": 7}, {"db_idx": 10529, "episode_idx": 38, "frame_idx": 14, "global_frame_idx": 10529, "task_index": 7}, {"db_idx": 10530, "episode_idx": 38, 
"frame_idx": 15, "global_frame_idx": 10530, "task_index": 7}, {"db_idx": 10531, "episode_idx": 38, "frame_idx": 16, "global_frame_idx": 10531, "task_index": 7}, {"db_idx": 10532, "episode_idx": 38, "frame_idx": 17, "global_frame_idx": 10532, "task_index": 7}, {"db_idx": 10533, "episode_idx": 38, "frame_idx": 18, "global_frame_idx": 10533, "task_index": 7}, {"db_idx": 10534, "episode_idx": 38, "frame_idx": 19, "global_frame_idx": 10534, "task_index": 7}, {"db_idx": 10535, "episode_idx": 38, "frame_idx": 20, "global_frame_idx": 10535, "task_index": 7}, {"db_idx": 10536, "episode_idx": 38, "frame_idx": 21, "global_frame_idx": 10536, "task_index": 7}, {"db_idx": 10537, "episode_idx": 38, "frame_idx": 22, "global_frame_idx": 10537, "task_index": 7}, {"db_idx": 10538, "episode_idx": 38, "frame_idx": 23, "global_frame_idx": 10538, "task_index": 7}, {"db_idx": 10539, "episode_idx": 38, "frame_idx": 24, "global_frame_idx": 10539, "task_index": 7}, {"db_idx": 10540, "episode_idx": 38, "frame_idx": 25, "global_frame_idx": 10540, "task_index": 7}, {"db_idx": 10541, "episode_idx": 38, "frame_idx": 26, "global_frame_idx": 10541, "task_index": 7}, {"db_idx": 10542, "episode_idx": 38, "frame_idx": 27, "global_frame_idx": 10542, "task_index": 7}, {"db_idx": 10543, "episode_idx": 38, "frame_idx": 28, "global_frame_idx": 10543, "task_index": 7}, {"db_idx": 10544, "episode_idx": 38, "frame_idx": 29, "global_frame_idx": 10544, "task_index": 7}, {"db_idx": 10545, "episode_idx": 38, "frame_idx": 30, "global_frame_idx": 10545, "task_index": 7}, {"db_idx": 10546, "episode_idx": 38, "frame_idx": 31, "global_frame_idx": 10546, "task_index": 7}, {"db_idx": 10547, "episode_idx": 38, "frame_idx": 32, "global_frame_idx": 10547, "task_index": 7}, {"db_idx": 10548, "episode_idx": 38, "frame_idx": 33, "global_frame_idx": 10548, "task_index": 7}, {"db_idx": 10549, "episode_idx": 38, "frame_idx": 34, "global_frame_idx": 10549, "task_index": 7}, {"db_idx": 10550, "episode_idx": 38, "frame_idx": 35, 
"global_frame_idx": 10550, "task_index": 7}, {"db_idx": 10551, "episode_idx": 38, "frame_idx": 36, "global_frame_idx": 10551, "task_index": 7}, {"db_idx": 10552, "episode_idx": 38, "frame_idx": 37, "global_frame_idx": 10552, "task_index": 7}, {"db_idx": 10553, "episode_idx": 38, "frame_idx": 38, "global_frame_idx": 10553, "task_index": 7}, {"db_idx": 10554, "episode_idx": 38, "frame_idx": 39, "global_frame_idx": 10554, "task_index": 7}, {"db_idx": 10555, "episode_idx": 38, "frame_idx": 40, "global_frame_idx": 10555, "task_index": 7}, {"db_idx": 10556, "episode_idx": 38, "frame_idx": 41, "global_frame_idx": 10556, "task_index": 7}, {"db_idx": 10557, "episode_idx": 38, "frame_idx": 42, "global_frame_idx": 10557, "task_index": 7}, {"db_idx": 10558, "episode_idx": 38, "frame_idx": 43, "global_frame_idx": 10558, "task_index": 7}, {"db_idx": 10559, "episode_idx": 38, "frame_idx": 44, "global_frame_idx": 10559, "task_index": 7}, {"db_idx": 10560, "episode_idx": 38, "frame_idx": 45, "global_frame_idx": 10560, "task_index": 7}, {"db_idx": 10561, "episode_idx": 38, "frame_idx": 46, "global_frame_idx": 10561, "task_index": 7}, {"db_idx": 10562, "episode_idx": 38, "frame_idx": 47, "global_frame_idx": 10562, "task_index": 7}, {"db_idx": 10563, "episode_idx": 38, "frame_idx": 48, "global_frame_idx": 10563, "task_index": 7}, {"db_idx": 10564, "episode_idx": 38, "frame_idx": 49, "global_frame_idx": 10564, "task_index": 7}, {"db_idx": 10565, "episode_idx": 38, "frame_idx": 50, "global_frame_idx": 10565, "task_index": 7}, {"db_idx": 10566, "episode_idx": 38, "frame_idx": 51, "global_frame_idx": 10566, "task_index": 7}, {"db_idx": 10567, "episode_idx": 38, "frame_idx": 52, "global_frame_idx": 10567, "task_index": 7}, {"db_idx": 10568, "episode_idx": 38, "frame_idx": 53, "global_frame_idx": 10568, "task_index": 7}, {"db_idx": 10569, "episode_idx": 38, "frame_idx": 54, "global_frame_idx": 10569, "task_index": 7}, {"db_idx": 10570, "episode_idx": 38, "frame_idx": 55, "global_frame_idx": 
10570, "task_index": 7}, {"db_idx": 10571, "episode_idx": 38, "frame_idx": 56, "global_frame_idx": 10571, "task_index": 7}, {"db_idx": 10572, "episode_idx": 38, "frame_idx": 57, "global_frame_idx": 10572, "task_index": 7}, {"db_idx": 10573, "episode_idx": 38, "frame_idx": 58, "global_frame_idx": 10573, "task_index": 7}, {"db_idx": 10574, "episode_idx": 38, "frame_idx": 59, "global_frame_idx": 10574, "task_index": 7}, {"db_idx": 10575, "episode_idx": 38, "frame_idx": 60, "global_frame_idx": 10575, "task_index": 7}, {"db_idx": 10576, "episode_idx": 38, "frame_idx": 61, "global_frame_idx": 10576, "task_index": 7}, {"db_idx": 10577, "episode_idx": 38, "frame_idx": 62, "global_frame_idx": 10577, "task_index": 7}, {"db_idx": 10578, "episode_idx": 38, "frame_idx": 63, "global_frame_idx": 10578, "task_index": 7}, {"db_idx": 10579, "episode_idx": 38, "frame_idx": 64, "global_frame_idx": 10579, "task_index": 7}, {"db_idx": 10580, "episode_idx": 38, "frame_idx": 65, "global_frame_idx": 10580, "task_index": 7}, {"db_idx": 10581, "episode_idx": 38, "frame_idx": 66, "global_frame_idx": 10581, "task_index": 7}, {"db_idx": 10582, "episode_idx": 38, "frame_idx": 67, "global_frame_idx": 10582, "task_index": 7}, {"db_idx": 10583, "episode_idx": 38, "frame_idx": 68, "global_frame_idx": 10583, "task_index": 7}, {"db_idx": 10584, "episode_idx": 38, "frame_idx": 69, "global_frame_idx": 10584, "task_index": 7}, {"db_idx": 10585, "episode_idx": 38, "frame_idx": 70, "global_frame_idx": 10585, "task_index": 7}, {"db_idx": 10586, "episode_idx": 38, "frame_idx": 71, "global_frame_idx": 10586, "task_index": 7}, {"db_idx": 10587, "episode_idx": 38, "frame_idx": 72, "global_frame_idx": 10587, "task_index": 7}, {"db_idx": 10588, "episode_idx": 38, "frame_idx": 73, "global_frame_idx": 10588, "task_index": 7}, {"db_idx": 10589, "episode_idx": 38, "frame_idx": 74, "global_frame_idx": 10589, "task_index": 7}, {"db_idx": 10590, "episode_idx": 38, "frame_idx": 75, "global_frame_idx": 10590, 
"task_index": 7}, {"db_idx": 10591, "episode_idx": 38, "frame_idx": 76, "global_frame_idx": 10591, "task_index": 7}, {"db_idx": 10592, "episode_idx": 38, "frame_idx": 77, "global_frame_idx": 10592, "task_index": 7}, {"db_idx": 10593, "episode_idx": 38, "frame_idx": 78, "global_frame_idx": 10593, "task_index": 7}, {"db_idx": 10594, "episode_idx": 38, "frame_idx": 79, "global_frame_idx": 10594, "task_index": 7}, {"db_idx": 10595, "episode_idx": 38, "frame_idx": 80, "global_frame_idx": 10595, "task_index": 7}, {"db_idx": 10596, "episode_idx": 38, "frame_idx": 81, "global_frame_idx": 10596, "task_index": 7}, {"db_idx": 10597, "episode_idx": 38, "frame_idx": 82, "global_frame_idx": 10597, "task_index": 7}, {"db_idx": 10598, "episode_idx": 38, "frame_idx": 83, "global_frame_idx": 10598, "task_index": 7}, {"db_idx": 10599, "episode_idx": 38, "frame_idx": 84, "global_frame_idx": 10599, "task_index": 7}, {"db_idx": 10600, "episode_idx": 38, "frame_idx": 85, "global_frame_idx": 10600, "task_index": 7}, {"db_idx": 10601, "episode_idx": 38, "frame_idx": 86, "global_frame_idx": 10601, "task_index": 7}, {"db_idx": 10602, "episode_idx": 38, "frame_idx": 87, "global_frame_idx": 10602, "task_index": 7}, {"db_idx": 10603, "episode_idx": 38, "frame_idx": 88, "global_frame_idx": 10603, "task_index": 7}, {"db_idx": 10604, "episode_idx": 38, "frame_idx": 89, "global_frame_idx": 10604, "task_index": 7}, {"db_idx": 10605, "episode_idx": 38, "frame_idx": 90, "global_frame_idx": 10605, "task_index": 7}, {"db_idx": 10606, "episode_idx": 38, "frame_idx": 91, "global_frame_idx": 10606, "task_index": 7}, {"db_idx": 10607, "episode_idx": 38, "frame_idx": 92, "global_frame_idx": 10607, "task_index": 7}, {"db_idx": 10608, "episode_idx": 38, "frame_idx": 93, "global_frame_idx": 10608, "task_index": 7}, {"db_idx": 10609, "episode_idx": 38, "frame_idx": 94, "global_frame_idx": 10609, "task_index": 7}, {"db_idx": 10610, "episode_idx": 38, "frame_idx": 95, "global_frame_idx": 10610, "task_index": 7}, 
{"db_idx": 10611, "episode_idx": 38, "frame_idx": 96, "global_frame_idx": 10611, "task_index": 7}, {"db_idx": 10612, "episode_idx": 38, "frame_idx": 97, "global_frame_idx": 10612, "task_index": 7}, {"db_idx": 10613, "episode_idx": 38, "frame_idx": 98, "global_frame_idx": 10613, "task_index": 7}, {"db_idx": 10614, "episode_idx": 38, "frame_idx": 99, "global_frame_idx": 10614, "task_index": 7}, {"db_idx": 10615, "episode_idx": 38, "frame_idx": 100, "global_frame_idx": 10615, "task_index": 7}, {"db_idx": 10616, "episode_idx": 38, "frame_idx": 101, "global_frame_idx": 10616, "task_index": 7}, {"db_idx": 10617, "episode_idx": 38, "frame_idx": 102, "global_frame_idx": 10617, "task_index": 7}, {"db_idx": 10618, "episode_idx": 38, "frame_idx": 103, "global_frame_idx": 10618, "task_index": 7}, {"db_idx": 10619, "episode_idx": 38, "frame_idx": 104, "global_frame_idx": 10619, "task_index": 7}, {"db_idx": 10620, "episode_idx": 38, "frame_idx": 105, "global_frame_idx": 10620, "task_index": 7}, {"db_idx": 10621, "episode_idx": 38, "frame_idx": 106, "global_frame_idx": 10621, "task_index": 7}, {"db_idx": 10622, "episode_idx": 38, "frame_idx": 107, "global_frame_idx": 10622, "task_index": 7}, {"db_idx": 10623, "episode_idx": 38, "frame_idx": 108, "global_frame_idx": 10623, "task_index": 7}, {"db_idx": 10624, "episode_idx": 38, "frame_idx": 109, "global_frame_idx": 10624, "task_index": 7}, {"db_idx": 10625, "episode_idx": 38, "frame_idx": 110, "global_frame_idx": 10625, "task_index": 7}, {"db_idx": 10626, "episode_idx": 38, "frame_idx": 111, "global_frame_idx": 10626, "task_index": 7}, {"db_idx": 10627, "episode_idx": 38, "frame_idx": 112, "global_frame_idx": 10627, "task_index": 7}, {"db_idx": 10628, "episode_idx": 38, "frame_idx": 113, "global_frame_idx": 10628, "task_index": 7}, {"db_idx": 10629, "episode_idx": 38, "frame_idx": 114, "global_frame_idx": 10629, "task_index": 7}, {"db_idx": 10630, "episode_idx": 38, "frame_idx": 115, "global_frame_idx": 10630, "task_index": 7}, 
{"db_idx": 10631, "episode_idx": 38, "frame_idx": 116, "global_frame_idx": 10631, "task_index": 7}, {"db_idx": 10632, "episode_idx": 38, "frame_idx": 117, "global_frame_idx": 10632, "task_index": 7}, {"db_idx": 10633, "episode_idx": 38, "frame_idx": 118, "global_frame_idx": 10633, "task_index": 7}, {"db_idx": 10634, "episode_idx": 38, "frame_idx": 119, "global_frame_idx": 10634, "task_index": 7}, {"db_idx": 10635, "episode_idx": 38, "frame_idx": 120, "global_frame_idx": 10635, "task_index": 7}, {"db_idx": 10636, "episode_idx": 38, "frame_idx": 121, "global_frame_idx": 10636, "task_index": 7}, {"db_idx": 10637, "episode_idx": 38, "frame_idx": 122, "global_frame_idx": 10637, "task_index": 7}, {"db_idx": 10638, "episode_idx": 38, "frame_idx": 123, "global_frame_idx": 10638, "task_index": 7}, {"db_idx": 10639, "episode_idx": 38, "frame_idx": 124, "global_frame_idx": 10639, "task_index": 7}, {"db_idx": 10640, "episode_idx": 38, "frame_idx": 125, "global_frame_idx": 10640, "task_index": 7}, {"db_idx": 10641, "episode_idx": 38, "frame_idx": 126, "global_frame_idx": 10641, "task_index": 7}, {"db_idx": 10642, "episode_idx": 38, "frame_idx": 127, "global_frame_idx": 10642, "task_index": 7}, {"db_idx": 10643, "episode_idx": 38, "frame_idx": 128, "global_frame_idx": 10643, "task_index": 7}, {"db_idx": 10644, "episode_idx": 38, "frame_idx": 129, "global_frame_idx": 10644, "task_index": 7}, {"db_idx": 10645, "episode_idx": 38, "frame_idx": 130, "global_frame_idx": 10645, "task_index": 7}, {"db_idx": 10646, "episode_idx": 38, "frame_idx": 131, "global_frame_idx": 10646, "task_index": 7}, {"db_idx": 10647, "episode_idx": 38, "frame_idx": 132, "global_frame_idx": 10647, "task_index": 7}, {"db_idx": 10648, "episode_idx": 38, "frame_idx": 133, "global_frame_idx": 10648, "task_index": 7}, {"db_idx": 10649, "episode_idx": 38, "frame_idx": 134, "global_frame_idx": 10649, "task_index": 7}, {"db_idx": 10650, "episode_idx": 38, "frame_idx": 135, "global_frame_idx": 10650, "task_index": 7}, 
{"db_idx": 10651, "episode_idx": 38, "frame_idx": 136, "global_frame_idx": 10651, "task_index": 7}, {"db_idx": 10652, "episode_idx": 38, "frame_idx": 137, "global_frame_idx": 10652, "task_index": 7}, {"db_idx": 10653, "episode_idx": 38, "frame_idx": 138, "global_frame_idx": 10653, "task_index": 7}, {"db_idx": 10654, "episode_idx": 38, "frame_idx": 139, "global_frame_idx": 10654, "task_index": 7}, {"db_idx": 10655, "episode_idx": 38, "frame_idx": 140, "global_frame_idx": 10655, "task_index": 7}, {"db_idx": 10656, "episode_idx": 38, "frame_idx": 141, "global_frame_idx": 10656, "task_index": 7}, {"db_idx": 10657, "episode_idx": 38, "frame_idx": 142, "global_frame_idx": 10657, "task_index": 7}, {"db_idx": 10658, "episode_idx": 38, "frame_idx": 143, "global_frame_idx": 10658, "task_index": 7}, {"db_idx": 10659, "episode_idx": 38, "frame_idx": 144, "global_frame_idx": 10659, "task_index": 7}, {"db_idx": 10660, "episode_idx": 38, "frame_idx": 145, "global_frame_idx": 10660, "task_index": 7}, {"db_idx": 10661, "episode_idx": 38, "frame_idx": 146, "global_frame_idx": 10661, "task_index": 7}, {"db_idx": 10662, "episode_idx": 38, "frame_idx": 147, "global_frame_idx": 10662, "task_index": 7}, {"db_idx": 10663, "episode_idx": 38, "frame_idx": 148, "global_frame_idx": 10663, "task_index": 7}, {"db_idx": 10664, "episode_idx": 38, "frame_idx": 149, "global_frame_idx": 10664, "task_index": 7}, {"db_idx": 10665, "episode_idx": 38, "frame_idx": 150, "global_frame_idx": 10665, "task_index": 7}, {"db_idx": 10666, "episode_idx": 38, "frame_idx": 151, "global_frame_idx": 10666, "task_index": 7}, {"db_idx": 10667, "episode_idx": 38, "frame_idx": 152, "global_frame_idx": 10667, "task_index": 7}, {"db_idx": 10668, "episode_idx": 38, "frame_idx": 153, "global_frame_idx": 10668, "task_index": 7}, {"db_idx": 10669, "episode_idx": 38, "frame_idx": 154, "global_frame_idx": 10669, "task_index": 7}, {"db_idx": 10670, "episode_idx": 38, "frame_idx": 155, "global_frame_idx": 10670, "task_index": 7}, 
{"db_idx": 10671, "episode_idx": 38, "frame_idx": 156, "global_frame_idx": 10671, "task_index": 7}, {"db_idx": 10672, "episode_idx": 38, "frame_idx": 157, "global_frame_idx": 10672, "task_index": 7}, {"db_idx": 10673, "episode_idx": 38, "frame_idx": 158, "global_frame_idx": 10673, "task_index": 7}, {"db_idx": 10674, "episode_idx": 38, "frame_idx": 159, "global_frame_idx": 10674, "task_index": 7}, {"db_idx": 10675, "episode_idx": 38, "frame_idx": 160, "global_frame_idx": 10675, "task_index": 7}, {"db_idx": 10676, "episode_idx": 38, "frame_idx": 161, "global_frame_idx": 10676, "task_index": 7}, {"db_idx": 10677, "episode_idx": 38, "frame_idx": 162, "global_frame_idx": 10677, "task_index": 7}, {"db_idx": 10678, "episode_idx": 38, "frame_idx": 163, "global_frame_idx": 10678, "task_index": 7}, {"db_idx": 10679, "episode_idx": 38, "frame_idx": 164, "global_frame_idx": 10679, "task_index": 7}, {"db_idx": 10680, "episode_idx": 38, "frame_idx": 165, "global_frame_idx": 10680, "task_index": 7}, {"db_idx": 10681, "episode_idx": 38, "frame_idx": 166, "global_frame_idx": 10681, "task_index": 7}, {"db_idx": 10682, "episode_idx": 38, "frame_idx": 167, "global_frame_idx": 10682, "task_index": 7}, {"db_idx": 10683, "episode_idx": 38, "frame_idx": 168, "global_frame_idx": 10683, "task_index": 7}, {"db_idx": 10684, "episode_idx": 38, "frame_idx": 169, "global_frame_idx": 10684, "task_index": 7}, {"db_idx": 10685, "episode_idx": 38, "frame_idx": 170, "global_frame_idx": 10685, "task_index": 7}, {"db_idx": 10686, "episode_idx": 38, "frame_idx": 171, "global_frame_idx": 10686, "task_index": 7}, {"db_idx": 10687, "episode_idx": 38, "frame_idx": 172, "global_frame_idx": 10687, "task_index": 7}, {"db_idx": 10688, "episode_idx": 38, "frame_idx": 173, "global_frame_idx": 10688, "task_index": 7}, {"db_idx": 10689, "episode_idx": 38, "frame_idx": 174, "global_frame_idx": 10689, "task_index": 7}, {"db_idx": 10690, "episode_idx": 38, "frame_idx": 175, "global_frame_idx": 10690, "task_index": 7}, 
{"db_idx": 10691, "episode_idx": 38, "frame_idx": 176, "global_frame_idx": 10691, "task_index": 7}, {"db_idx": 10692, "episode_idx": 38, "frame_idx": 177, "global_frame_idx": 10692, "task_index": 7}, {"db_idx": 10693, "episode_idx": 38, "frame_idx": 178, "global_frame_idx": 10693, "task_index": 7}, {"db_idx": 10694, "episode_idx": 38, "frame_idx": 179, "global_frame_idx": 10694, "task_index": 7}, {"db_idx": 10695, "episode_idx": 38, "frame_idx": 180, "global_frame_idx": 10695, "task_index": 7}, {"db_idx": 10696, "episode_idx": 38, "frame_idx": 181, "global_frame_idx": 10696, "task_index": 7}, {"db_idx": 10697, "episode_idx": 38, "frame_idx": 182, "global_frame_idx": 10697, "task_index": 7}, {"db_idx": 10698, "episode_idx": 38, "frame_idx": 183, "global_frame_idx": 10698, "task_index": 7}, {"db_idx": 10699, "episode_idx": 38, "frame_idx": 184, "global_frame_idx": 10699, "task_index": 7}, {"db_idx": 10700, "episode_idx": 38, "frame_idx": 185, "global_frame_idx": 10700, "task_index": 7}, {"db_idx": 10701, "episode_idx": 38, "frame_idx": 186, "global_frame_idx": 10701, "task_index": 7}, {"db_idx": 10702, "episode_idx": 38, "frame_idx": 187, "global_frame_idx": 10702, "task_index": 7}, {"db_idx": 10703, "episode_idx": 38, "frame_idx": 188, "global_frame_idx": 10703, "task_index": 7}, {"db_idx": 10704, "episode_idx": 38, "frame_idx": 189, "global_frame_idx": 10704, "task_index": 7}, {"db_idx": 10705, "episode_idx": 38, "frame_idx": 190, "global_frame_idx": 10705, "task_index": 7}, {"db_idx": 10706, "episode_idx": 38, "frame_idx": 191, "global_frame_idx": 10706, "task_index": 7}, {"db_idx": 10707, "episode_idx": 38, "frame_idx": 192, "global_frame_idx": 10707, "task_index": 7}, {"db_idx": 10708, "episode_idx": 38, "frame_idx": 193, "global_frame_idx": 10708, "task_index": 7}, {"db_idx": 10709, "episode_idx": 38, "frame_idx": 194, "global_frame_idx": 10709, "task_index": 7}, {"db_idx": 10710, "episode_idx": 38, "frame_idx": 195, "global_frame_idx": 10710, "task_index": 7}, 
{"db_idx": 10711, "episode_idx": 38, "frame_idx": 196, "global_frame_idx": 10711, "task_index": 7}, {"db_idx": 10712, "episode_idx": 38, "frame_idx": 197, "global_frame_idx": 10712, "task_index": 7}, {"db_idx": 10713, "episode_idx": 38, "frame_idx": 198, "global_frame_idx": 10713, "task_index": 7}, {"db_idx": 10714, "episode_idx": 38, "frame_idx": 199, "global_frame_idx": 10714, "task_index": 7}, {"db_idx": 10715, "episode_idx": 38, "frame_idx": 200, "global_frame_idx": 10715, "task_index": 7}, {"db_idx": 10716, "episode_idx": 38, "frame_idx": 201, "global_frame_idx": 10716, "task_index": 7}, {"db_idx": 10717, "episode_idx": 38, "frame_idx": 202, "global_frame_idx": 10717, "task_index": 7}, {"db_idx": 10718, "episode_idx": 38, "frame_idx": 203, "global_frame_idx": 10718, "task_index": 7}, {"db_idx": 10719, "episode_idx": 38, "frame_idx": 204, "global_frame_idx": 10719, "task_index": 7}, {"db_idx": 10720, "episode_idx": 38, "frame_idx": 205, "global_frame_idx": 10720, "task_index": 7}, {"db_idx": 10721, "episode_idx": 38, "frame_idx": 206, "global_frame_idx": 10721, "task_index": 7}, {"db_idx": 10722, "episode_idx": 38, "frame_idx": 207, "global_frame_idx": 10722, "task_index": 7}, {"db_idx": 10723, "episode_idx": 38, "frame_idx": 208, "global_frame_idx": 10723, "task_index": 7}, {"db_idx": 10724, "episode_idx": 38, "frame_idx": 209, "global_frame_idx": 10724, "task_index": 7}, {"db_idx": 10725, "episode_idx": 38, "frame_idx": 210, "global_frame_idx": 10725, "task_index": 7}, {"db_idx": 10726, "episode_idx": 38, "frame_idx": 211, "global_frame_idx": 10726, "task_index": 7}, {"db_idx": 10727, "episode_idx": 38, "frame_idx": 212, "global_frame_idx": 10727, "task_index": 7}, {"db_idx": 10728, "episode_idx": 38, "frame_idx": 213, "global_frame_idx": 10728, "task_index": 7}, {"db_idx": 10729, "episode_idx": 38, "frame_idx": 214, "global_frame_idx": 10729, "task_index": 7}, {"db_idx": 10730, "episode_idx": 38, "frame_idx": 215, "global_frame_idx": 10730, "task_index": 7}, 
{"db_idx": 10731, "episode_idx": 38, "frame_idx": 216, "global_frame_idx": 10731, "task_index": 7}, {"db_idx": 10732, "episode_idx": 38, "frame_idx": 217, "global_frame_idx": 10732, "task_index": 7}, {"db_idx": 10733, "episode_idx": 38, "frame_idx": 218, "global_frame_idx": 10733, "task_index": 7}, {"db_idx": 10734, "episode_idx": 38, "frame_idx": 219, "global_frame_idx": 10734, "task_index": 7}, {"db_idx": 10735, "episode_idx": 38, "frame_idx": 220, "global_frame_idx": 10735, "task_index": 7}, {"db_idx": 10736, "episode_idx": 38, "frame_idx": 221, "global_frame_idx": 10736, "task_index": 7}, {"db_idx": 10737, "episode_idx": 38, "frame_idx": 222, "global_frame_idx": 10737, "task_index": 7}, {"db_idx": 10738, "episode_idx": 38, "frame_idx": 223, "global_frame_idx": 10738, "task_index": 7}, {"db_idx": 10739, "episode_idx": 38, "frame_idx": 224, "global_frame_idx": 10739, "task_index": 7}, {"db_idx": 10740, "episode_idx": 38, "frame_idx": 225, "global_frame_idx": 10740, "task_index": 7}, {"db_idx": 10741, "episode_idx": 38, "frame_idx": 226, "global_frame_idx": 10741, "task_index": 7}, {"db_idx": 10742, "episode_idx": 38, "frame_idx": 227, "global_frame_idx": 10742, "task_index": 7}, {"db_idx": 10743, "episode_idx": 38, "frame_idx": 228, "global_frame_idx": 10743, "task_index": 7}, {"db_idx": 10744, "episode_idx": 38, "frame_idx": 229, "global_frame_idx": 10744, "task_index": 7}, {"db_idx": 10745, "episode_idx": 38, "frame_idx": 230, "global_frame_idx": 10745, "task_index": 7}, {"db_idx": 10746, "episode_idx": 38, "frame_idx": 231, "global_frame_idx": 10746, "task_index": 7}, {"db_idx": 10747, "episode_idx": 38, "frame_idx": 232, "global_frame_idx": 10747, "task_index": 7}, {"db_idx": 10748, "episode_idx": 38, "frame_idx": 233, "global_frame_idx": 10748, "task_index": 7}, {"db_idx": 10749, "episode_idx": 38, "frame_idx": 234, "global_frame_idx": 10749, "task_index": 7}, {"db_idx": 10750, "episode_idx": 38, "frame_idx": 235, "global_frame_idx": 10750, "task_index": 7}, 
{"db_idx": 10751, "episode_idx": 38, "frame_idx": 236, "global_frame_idx": 10751, "task_index": 7}, {"db_idx": 10752, "episode_idx": 38, "frame_idx": 237, "global_frame_idx": 10752, "task_index": 7}, {"db_idx": 10753, "episode_idx": 38, "frame_idx": 238, "global_frame_idx": 10753, "task_index": 7}, {"db_idx": 10754, "episode_idx": 38, "frame_idx": 239, "global_frame_idx": 10754, "task_index": 7}, {"db_idx": 10755, "episode_idx": 38, "frame_idx": 240, "global_frame_idx": 10755, "task_index": 7}, {"db_idx": 10756, "episode_idx": 38, "frame_idx": 241, "global_frame_idx": 10756, "task_index": 7}, {"db_idx": 10757, "episode_idx": 38, "frame_idx": 242, "global_frame_idx": 10757, "task_index": 7}, {"db_idx": 10758, "episode_idx": 38, "frame_idx": 243, "global_frame_idx": 10758, "task_index": 7}, {"db_idx": 10759, "episode_idx": 38, "frame_idx": 244, "global_frame_idx": 10759, "task_index": 7}, {"db_idx": 10760, "episode_idx": 38, "frame_idx": 245, "global_frame_idx": 10760, "task_index": 7}, {"db_idx": 10761, "episode_idx": 38, "frame_idx": 246, "global_frame_idx": 10761, "task_index": 7}, {"db_idx": 10762, "episode_idx": 38, "frame_idx": 247, "global_frame_idx": 10762, "task_index": 7}, {"db_idx": 10763, "episode_idx": 38, "frame_idx": 248, "global_frame_idx": 10763, "task_index": 7}, {"db_idx": 10764, "episode_idx": 38, "frame_idx": 249, "global_frame_idx": 10764, "task_index": 7}, {"db_idx": 10765, "episode_idx": 38, "frame_idx": 250, "global_frame_idx": 10765, "task_index": 7}, {"db_idx": 10766, "episode_idx": 38, "frame_idx": 251, "global_frame_idx": 10766, "task_index": 7}, {"db_idx": 10767, "episode_idx": 38, "frame_idx": 252, "global_frame_idx": 10767, "task_index": 7}, {"db_idx": 10768, "episode_idx": 39, "frame_idx": 0, "global_frame_idx": 10768, "task_index": 7}, {"db_idx": 10769, "episode_idx": 39, "frame_idx": 1, "global_frame_idx": 10769, "task_index": 7}, {"db_idx": 10770, "episode_idx": 39, "frame_idx": 2, "global_frame_idx": 10770, "task_index": 7}, 
{"db_idx": 10771, "episode_idx": 39, "frame_idx": 3, "global_frame_idx": 10771, "task_index": 7}, {"db_idx": 10772, "episode_idx": 39, "frame_idx": 4, "global_frame_idx": 10772, "task_index": 7}, {"db_idx": 10773, "episode_idx": 39, "frame_idx": 5, "global_frame_idx": 10773, "task_index": 7}, {"db_idx": 10774, "episode_idx": 39, "frame_idx": 6, "global_frame_idx": 10774, "task_index": 7}, {"db_idx": 10775, "episode_idx": 39, "frame_idx": 7, "global_frame_idx": 10775, "task_index": 7}, {"db_idx": 10776, "episode_idx": 39, "frame_idx": 8, "global_frame_idx": 10776, "task_index": 7}, {"db_idx": 10777, "episode_idx": 39, "frame_idx": 9, "global_frame_idx": 10777, "task_index": 7}, {"db_idx": 10778, "episode_idx": 39, "frame_idx": 10, "global_frame_idx": 10778, "task_index": 7}, {"db_idx": 10779, "episode_idx": 39, "frame_idx": 11, "global_frame_idx": 10779, "task_index": 7}, {"db_idx": 10780, "episode_idx": 39, "frame_idx": 12, "global_frame_idx": 10780, "task_index": 7}, {"db_idx": 10781, "episode_idx": 39, "frame_idx": 13, "global_frame_idx": 10781, "task_index": 7}, {"db_idx": 10782, "episode_idx": 39, "frame_idx": 14, "global_frame_idx": 10782, "task_index": 7}, {"db_idx": 10783, "episode_idx": 39, "frame_idx": 15, "global_frame_idx": 10783, "task_index": 7}, {"db_idx": 10784, "episode_idx": 39, "frame_idx": 16, "global_frame_idx": 10784, "task_index": 7}, {"db_idx": 10785, "episode_idx": 39, "frame_idx": 17, "global_frame_idx": 10785, "task_index": 7}, {"db_idx": 10786, "episode_idx": 39, "frame_idx": 18, "global_frame_idx": 10786, "task_index": 7}, {"db_idx": 10787, "episode_idx": 39, "frame_idx": 19, "global_frame_idx": 10787, "task_index": 7}, {"db_idx": 10788, "episode_idx": 39, "frame_idx": 20, "global_frame_idx": 10788, "task_index": 7}, {"db_idx": 10789, "episode_idx": 39, "frame_idx": 21, "global_frame_idx": 10789, "task_index": 7}, {"db_idx": 10790, "episode_idx": 39, "frame_idx": 22, "global_frame_idx": 10790, "task_index": 7}, {"db_idx": 10791, 
"episode_idx": 39, "frame_idx": 23, "global_frame_idx": 10791, "task_index": 7}, {"db_idx": 10792, "episode_idx": 39, "frame_idx": 24, "global_frame_idx": 10792, "task_index": 7}, {"db_idx": 10793, "episode_idx": 39, "frame_idx": 25, "global_frame_idx": 10793, "task_index": 7}, {"db_idx": 10794, "episode_idx": 39, "frame_idx": 26, "global_frame_idx": 10794, "task_index": 7}, {"db_idx": 10795, "episode_idx": 39, "frame_idx": 27, "global_frame_idx": 10795, "task_index": 7}, {"db_idx": 10796, "episode_idx": 39, "frame_idx": 28, "global_frame_idx": 10796, "task_index": 7}, {"db_idx": 10797, "episode_idx": 39, "frame_idx": 29, "global_frame_idx": 10797, "task_index": 7}, {"db_idx": 10798, "episode_idx": 39, "frame_idx": 30, "global_frame_idx": 10798, "task_index": 7}, {"db_idx": 10799, "episode_idx": 39, "frame_idx": 31, "global_frame_idx": 10799, "task_index": 7}, {"db_idx": 10800, "episode_idx": 39, "frame_idx": 32, "global_frame_idx": 10800, "task_index": 7}, {"db_idx": 10801, "episode_idx": 39, "frame_idx": 33, "global_frame_idx": 10801, "task_index": 7}, {"db_idx": 10802, "episode_idx": 39, "frame_idx": 34, "global_frame_idx": 10802, "task_index": 7}, {"db_idx": 10803, "episode_idx": 39, "frame_idx": 35, "global_frame_idx": 10803, "task_index": 7}, {"db_idx": 10804, "episode_idx": 39, "frame_idx": 36, "global_frame_idx": 10804, "task_index": 7}, {"db_idx": 10805, "episode_idx": 39, "frame_idx": 37, "global_frame_idx": 10805, "task_index": 7}, {"db_idx": 10806, "episode_idx": 39, "frame_idx": 38, "global_frame_idx": 10806, "task_index": 7}, {"db_idx": 10807, "episode_idx": 39, "frame_idx": 39, "global_frame_idx": 10807, "task_index": 7}, {"db_idx": 10808, "episode_idx": 39, "frame_idx": 40, "global_frame_idx": 10808, "task_index": 7}, {"db_idx": 10809, "episode_idx": 39, "frame_idx": 41, "global_frame_idx": 10809, "task_index": 7}, {"db_idx": 10810, "episode_idx": 39, "frame_idx": 42, "global_frame_idx": 10810, "task_index": 7}, {"db_idx": 10811, "episode_idx": 39, 
"frame_idx": 43, "global_frame_idx": 10811, "task_index": 7}, {"db_idx": 10812, "episode_idx": 39, "frame_idx": 44, "global_frame_idx": 10812, "task_index": 7}, {"db_idx": 10813, "episode_idx": 39, "frame_idx": 45, "global_frame_idx": 10813, "task_index": 7}, {"db_idx": 10814, "episode_idx": 39, "frame_idx": 46, "global_frame_idx": 10814, "task_index": 7}, {"db_idx": 10815, "episode_idx": 39, "frame_idx": 47, "global_frame_idx": 10815, "task_index": 7}, {"db_idx": 10816, "episode_idx": 39, "frame_idx": 48, "global_frame_idx": 10816, "task_index": 7}, {"db_idx": 10817, "episode_idx": 39, "frame_idx": 49, "global_frame_idx": 10817, "task_index": 7}, {"db_idx": 10818, "episode_idx": 39, "frame_idx": 50, "global_frame_idx": 10818, "task_index": 7}, {"db_idx": 10819, "episode_idx": 39, "frame_idx": 51, "global_frame_idx": 10819, "task_index": 7}, {"db_idx": 10820, "episode_idx": 39, "frame_idx": 52, "global_frame_idx": 10820, "task_index": 7}, {"db_idx": 10821, "episode_idx": 39, "frame_idx": 53, "global_frame_idx": 10821, "task_index": 7}, {"db_idx": 10822, "episode_idx": 39, "frame_idx": 54, "global_frame_idx": 10822, "task_index": 7}, {"db_idx": 10823, "episode_idx": 39, "frame_idx": 55, "global_frame_idx": 10823, "task_index": 7}, {"db_idx": 10824, "episode_idx": 39, "frame_idx": 56, "global_frame_idx": 10824, "task_index": 7}, {"db_idx": 10825, "episode_idx": 39, "frame_idx": 57, "global_frame_idx": 10825, "task_index": 7}, {"db_idx": 10826, "episode_idx": 39, "frame_idx": 58, "global_frame_idx": 10826, "task_index": 7}, {"db_idx": 10827, "episode_idx": 39, "frame_idx": 59, "global_frame_idx": 10827, "task_index": 7}, {"db_idx": 10828, "episode_idx": 39, "frame_idx": 60, "global_frame_idx": 10828, "task_index": 7}, {"db_idx": 10829, "episode_idx": 39, "frame_idx": 61, "global_frame_idx": 10829, "task_index": 7}, {"db_idx": 10830, "episode_idx": 39, "frame_idx": 62, "global_frame_idx": 10830, "task_index": 7}, {"db_idx": 10831, "episode_idx": 39, "frame_idx": 63, 
"global_frame_idx": 10831, "task_index": 7}, {"db_idx": 10832, "episode_idx": 39, "frame_idx": 64, "global_frame_idx": 10832, "task_index": 7}, {"db_idx": 10833, "episode_idx": 39, "frame_idx": 65, "global_frame_idx": 10833, "task_index": 7}, {"db_idx": 10834, "episode_idx": 39, "frame_idx": 66, "global_frame_idx": 10834, "task_index": 7}, {"db_idx": 10835, "episode_idx": 39, "frame_idx": 67, "global_frame_idx": 10835, "task_index": 7}, {"db_idx": 10836, "episode_idx": 39, "frame_idx": 68, "global_frame_idx": 10836, "task_index": 7}, {"db_idx": 10837, "episode_idx": 39, "frame_idx": 69, "global_frame_idx": 10837, "task_index": 7}, {"db_idx": 10838, "episode_idx": 39, "frame_idx": 70, "global_frame_idx": 10838, "task_index": 7}, {"db_idx": 10839, "episode_idx": 39, "frame_idx": 71, "global_frame_idx": 10839, "task_index": 7}, {"db_idx": 10840, "episode_idx": 39, "frame_idx": 72, "global_frame_idx": 10840, "task_index": 7}, {"db_idx": 10841, "episode_idx": 39, "frame_idx": 73, "global_frame_idx": 10841, "task_index": 7}, {"db_idx": 10842, "episode_idx": 39, "frame_idx": 74, "global_frame_idx": 10842, "task_index": 7}, {"db_idx": 10843, "episode_idx": 39, "frame_idx": 75, "global_frame_idx": 10843, "task_index": 7}, {"db_idx": 10844, "episode_idx": 39, "frame_idx": 76, "global_frame_idx": 10844, "task_index": 7}, {"db_idx": 10845, "episode_idx": 39, "frame_idx": 77, "global_frame_idx": 10845, "task_index": 7}, {"db_idx": 10846, "episode_idx": 39, "frame_idx": 78, "global_frame_idx": 10846, "task_index": 7}, {"db_idx": 10847, "episode_idx": 39, "frame_idx": 79, "global_frame_idx": 10847, "task_index": 7}, {"db_idx": 10848, "episode_idx": 39, "frame_idx": 80, "global_frame_idx": 10848, "task_index": 7}, {"db_idx": 10849, "episode_idx": 39, "frame_idx": 81, "global_frame_idx": 10849, "task_index": 7}, {"db_idx": 10850, "episode_idx": 39, "frame_idx": 82, "global_frame_idx": 10850, "task_index": 7}, {"db_idx": 10851, "episode_idx": 39, "frame_idx": 83, "global_frame_idx": 
10851, "task_index": 7}, {"db_idx": 10852, "episode_idx": 39, "frame_idx": 84, "global_frame_idx": 10852, "task_index": 7}, {"db_idx": 10853, "episode_idx": 39, "frame_idx": 85, "global_frame_idx": 10853, "task_index": 7}, {"db_idx": 10854, "episode_idx": 39, "frame_idx": 86, "global_frame_idx": 10854, "task_index": 7}, {"db_idx": 10855, "episode_idx": 39, "frame_idx": 87, "global_frame_idx": 10855, "task_index": 7}, {"db_idx": 10856, "episode_idx": 39, "frame_idx": 88, "global_frame_idx": 10856, "task_index": 7}, {"db_idx": 10857, "episode_idx": 39, "frame_idx": 89, "global_frame_idx": 10857, "task_index": 7}, {"db_idx": 10858, "episode_idx": 39, "frame_idx": 90, "global_frame_idx": 10858, "task_index": 7}, {"db_idx": 10859, "episode_idx": 39, "frame_idx": 91, "global_frame_idx": 10859, "task_index": 7}, {"db_idx": 10860, "episode_idx": 39, "frame_idx": 92, "global_frame_idx": 10860, "task_index": 7}, {"db_idx": 10861, "episode_idx": 39, "frame_idx": 93, "global_frame_idx": 10861, "task_index": 7}, {"db_idx": 10862, "episode_idx": 39, "frame_idx": 94, "global_frame_idx": 10862, "task_index": 7}, {"db_idx": 10863, "episode_idx": 39, "frame_idx": 95, "global_frame_idx": 10863, "task_index": 7}, {"db_idx": 10864, "episode_idx": 39, "frame_idx": 96, "global_frame_idx": 10864, "task_index": 7}, {"db_idx": 10865, "episode_idx": 39, "frame_idx": 97, "global_frame_idx": 10865, "task_index": 7}, {"db_idx": 10866, "episode_idx": 39, "frame_idx": 98, "global_frame_idx": 10866, "task_index": 7}, {"db_idx": 10867, "episode_idx": 39, "frame_idx": 99, "global_frame_idx": 10867, "task_index": 7}, {"db_idx": 10868, "episode_idx": 39, "frame_idx": 100, "global_frame_idx": 10868, "task_index": 7}, {"db_idx": 10869, "episode_idx": 39, "frame_idx": 101, "global_frame_idx": 10869, "task_index": 7}, {"db_idx": 10870, "episode_idx": 39, "frame_idx": 102, "global_frame_idx": 10870, "task_index": 7}, {"db_idx": 10871, "episode_idx": 39, "frame_idx": 103, "global_frame_idx": 10871, 
"task_index": 7}, {"db_idx": 10872, "episode_idx": 39, "frame_idx": 104, "global_frame_idx": 10872, "task_index": 7}, {"db_idx": 10873, "episode_idx": 39, "frame_idx": 105, "global_frame_idx": 10873, "task_index": 7}, {"db_idx": 10874, "episode_idx": 39, "frame_idx": 106, "global_frame_idx": 10874, "task_index": 7}, {"db_idx": 10875, "episode_idx": 39, "frame_idx": 107, "global_frame_idx": 10875, "task_index": 7}, {"db_idx": 10876, "episode_idx": 39, "frame_idx": 108, "global_frame_idx": 10876, "task_index": 7}, {"db_idx": 10877, "episode_idx": 39, "frame_idx": 109, "global_frame_idx": 10877, "task_index": 7}, {"db_idx": 10878, "episode_idx": 39, "frame_idx": 110, "global_frame_idx": 10878, "task_index": 7}, {"db_idx": 10879, "episode_idx": 39, "frame_idx": 111, "global_frame_idx": 10879, "task_index": 7}, {"db_idx": 10880, "episode_idx": 39, "frame_idx": 112, "global_frame_idx": 10880, "task_index": 7}, {"db_idx": 10881, "episode_idx": 39, "frame_idx": 113, "global_frame_idx": 10881, "task_index": 7}, {"db_idx": 10882, "episode_idx": 39, "frame_idx": 114, "global_frame_idx": 10882, "task_index": 7}, {"db_idx": 10883, "episode_idx": 39, "frame_idx": 115, "global_frame_idx": 10883, "task_index": 7}, {"db_idx": 10884, "episode_idx": 39, "frame_idx": 116, "global_frame_idx": 10884, "task_index": 7}, {"db_idx": 10885, "episode_idx": 39, "frame_idx": 117, "global_frame_idx": 10885, "task_index": 7}, {"db_idx": 10886, "episode_idx": 39, "frame_idx": 118, "global_frame_idx": 10886, "task_index": 7}, {"db_idx": 10887, "episode_idx": 39, "frame_idx": 119, "global_frame_idx": 10887, "task_index": 7}, {"db_idx": 10888, "episode_idx": 39, "frame_idx": 120, "global_frame_idx": 10888, "task_index": 7}, {"db_idx": 10889, "episode_idx": 39, "frame_idx": 121, "global_frame_idx": 10889, "task_index": 7}, {"db_idx": 10890, "episode_idx": 39, "frame_idx": 122, "global_frame_idx": 10890, "task_index": 7}, {"db_idx": 10891, "episode_idx": 39, "frame_idx": 123, "global_frame_idx": 10891, 
"task_index": 7}, {"db_idx": 10892, "episode_idx": 39, "frame_idx": 124, "global_frame_idx": 10892, "task_index": 7}, {"db_idx": 10893, "episode_idx": 39, "frame_idx": 125, "global_frame_idx": 10893, "task_index": 7}, {"db_idx": 10894, "episode_idx": 39, "frame_idx": 126, "global_frame_idx": 10894, "task_index": 7}, {"db_idx": 10895, "episode_idx": 39, "frame_idx": 127, "global_frame_idx": 10895, "task_index": 7}, {"db_idx": 10896, "episode_idx": 39, "frame_idx": 128, "global_frame_idx": 10896, "task_index": 7}, {"db_idx": 10897, "episode_idx": 39, "frame_idx": 129, "global_frame_idx": 10897, "task_index": 7}, {"db_idx": 10898, "episode_idx": 39, "frame_idx": 130, "global_frame_idx": 10898, "task_index": 7}, {"db_idx": 10899, "episode_idx": 39, "frame_idx": 131, "global_frame_idx": 10899, "task_index": 7}, {"db_idx": 10900, "episode_idx": 39, "frame_idx": 132, "global_frame_idx": 10900, "task_index": 7}, {"db_idx": 10901, "episode_idx": 39, "frame_idx": 133, "global_frame_idx": 10901, "task_index": 7}, {"db_idx": 10902, "episode_idx": 39, "frame_idx": 134, "global_frame_idx": 10902, "task_index": 7}, {"db_idx": 10903, "episode_idx": 39, "frame_idx": 135, "global_frame_idx": 10903, "task_index": 7}, {"db_idx": 10904, "episode_idx": 39, "frame_idx": 136, "global_frame_idx": 10904, "task_index": 7}, {"db_idx": 10905, "episode_idx": 39, "frame_idx": 137, "global_frame_idx": 10905, "task_index": 7}, {"db_idx": 10906, "episode_idx": 39, "frame_idx": 138, "global_frame_idx": 10906, "task_index": 7}, {"db_idx": 10907, "episode_idx": 39, "frame_idx": 139, "global_frame_idx": 10907, "task_index": 7}, {"db_idx": 10908, "episode_idx": 39, "frame_idx": 140, "global_frame_idx": 10908, "task_index": 7}, {"db_idx": 10909, "episode_idx": 39, "frame_idx": 141, "global_frame_idx": 10909, "task_index": 7}, {"db_idx": 10910, "episode_idx": 39, "frame_idx": 142, "global_frame_idx": 10910, "task_index": 7}, {"db_idx": 10911, "episode_idx": 39, "frame_idx": 143, "global_frame_idx": 10911, 
"task_index": 7}, {"db_idx": 10912, "episode_idx": 39, "frame_idx": 144, "global_frame_idx": 10912, "task_index": 7}, {"db_idx": 10913, "episode_idx": 39, "frame_idx": 145, "global_frame_idx": 10913, "task_index": 7}, {"db_idx": 10914, "episode_idx": 39, "frame_idx": 146, "global_frame_idx": 10914, "task_index": 7}, {"db_idx": 10915, "episode_idx": 39, "frame_idx": 147, "global_frame_idx": 10915, "task_index": 7}, {"db_idx": 10916, "episode_idx": 39, "frame_idx": 148, "global_frame_idx": 10916, "task_index": 7}, {"db_idx": 10917, "episode_idx": 39, "frame_idx": 149, "global_frame_idx": 10917, "task_index": 7}, {"db_idx": 10918, "episode_idx": 39, "frame_idx": 150, "global_frame_idx": 10918, "task_index": 7}, {"db_idx": 10919, "episode_idx": 39, "frame_idx": 151, "global_frame_idx": 10919, "task_index": 7}, {"db_idx": 10920, "episode_idx": 39, "frame_idx": 152, "global_frame_idx": 10920, "task_index": 7}, {"db_idx": 10921, "episode_idx": 39, "frame_idx": 153, "global_frame_idx": 10921, "task_index": 7}, {"db_idx": 10922, "episode_idx": 39, "frame_idx": 154, "global_frame_idx": 10922, "task_index": 7}, {"db_idx": 10923, "episode_idx": 39, "frame_idx": 155, "global_frame_idx": 10923, "task_index": 7}, {"db_idx": 10924, "episode_idx": 39, "frame_idx": 156, "global_frame_idx": 10924, "task_index": 7}, {"db_idx": 10925, "episode_idx": 39, "frame_idx": 157, "global_frame_idx": 10925, "task_index": 7}, {"db_idx": 10926, "episode_idx": 39, "frame_idx": 158, "global_frame_idx": 10926, "task_index": 7}, {"db_idx": 10927, "episode_idx": 39, "frame_idx": 159, "global_frame_idx": 10927, "task_index": 7}, {"db_idx": 10928, "episode_idx": 39, "frame_idx": 160, "global_frame_idx": 10928, "task_index": 7}, {"db_idx": 10929, "episode_idx": 39, "frame_idx": 161, "global_frame_idx": 10929, "task_index": 7}, {"db_idx": 10930, "episode_idx": 39, "frame_idx": 162, "global_frame_idx": 10930, "task_index": 7}, {"db_idx": 10931, "episode_idx": 39, "frame_idx": 163, "global_frame_idx": 10931, 
"task_index": 7}, {"db_idx": 10932, "episode_idx": 39, "frame_idx": 164, "global_frame_idx": 10932, "task_index": 7}, {"db_idx": 10933, "episode_idx": 39, "frame_idx": 165, "global_frame_idx": 10933, "task_index": 7}, {"db_idx": 10934, "episode_idx": 39, "frame_idx": 166, "global_frame_idx": 10934, "task_index": 7}, {"db_idx": 10935, "episode_idx": 39, "frame_idx": 167, "global_frame_idx": 10935, "task_index": 7}, {"db_idx": 10936, "episode_idx": 39, "frame_idx": 168, "global_frame_idx": 10936, "task_index": 7}, {"db_idx": 10937, "episode_idx": 39, "frame_idx": 169, "global_frame_idx": 10937, "task_index": 7}, {"db_idx": 10938, "episode_idx": 39, "frame_idx": 170, "global_frame_idx": 10938, "task_index": 7}, {"db_idx": 10939, "episode_idx": 39, "frame_idx": 171, "global_frame_idx": 10939, "task_index": 7}, {"db_idx": 10940, "episode_idx": 39, "frame_idx": 172, "global_frame_idx": 10940, "task_index": 7}, {"db_idx": 10941, "episode_idx": 39, "frame_idx": 173, "global_frame_idx": 10941, "task_index": 7}, {"db_idx": 10942, "episode_idx": 39, "frame_idx": 174, "global_frame_idx": 10942, "task_index": 7}, {"db_idx": 10943, "episode_idx": 39, "frame_idx": 175, "global_frame_idx": 10943, "task_index": 7}, {"db_idx": 10944, "episode_idx": 39, "frame_idx": 176, "global_frame_idx": 10944, "task_index": 7}, {"db_idx": 10945, "episode_idx": 39, "frame_idx": 177, "global_frame_idx": 10945, "task_index": 7}, {"db_idx": 10946, "episode_idx": 39, "frame_idx": 178, "global_frame_idx": 10946, "task_index": 7}, {"db_idx": 10947, "episode_idx": 39, "frame_idx": 179, "global_frame_idx": 10947, "task_index": 7}, {"db_idx": 10948, "episode_idx": 39, "frame_idx": 180, "global_frame_idx": 10948, "task_index": 7}, {"db_idx": 10949, "episode_idx": 39, "frame_idx": 181, "global_frame_idx": 10949, "task_index": 7}, {"db_idx": 10950, "episode_idx": 39, "frame_idx": 182, "global_frame_idx": 10950, "task_index": 7}, {"db_idx": 10951, "episode_idx": 39, "frame_idx": 183, "global_frame_idx": 10951, 
"task_index": 7}, {"db_idx": 10952, "episode_idx": 39, "frame_idx": 184, "global_frame_idx": 10952, "task_index": 7}, {"db_idx": 10953, "episode_idx": 39, "frame_idx": 185, "global_frame_idx": 10953, "task_index": 7}, {"db_idx": 10954, "episode_idx": 39, "frame_idx": 186, "global_frame_idx": 10954, "task_index": 7}, {"db_idx": 10955, "episode_idx": 39, "frame_idx": 187, "global_frame_idx": 10955, "task_index": 7}, {"db_idx": 10956, "episode_idx": 39, "frame_idx": 188, "global_frame_idx": 10956, "task_index": 7}, {"db_idx": 10957, "episode_idx": 39, "frame_idx": 189, "global_frame_idx": 10957, "task_index": 7}, {"db_idx": 10958, "episode_idx": 39, "frame_idx": 190, "global_frame_idx": 10958, "task_index": 7}, {"db_idx": 10959, "episode_idx": 39, "frame_idx": 191, "global_frame_idx": 10959, "task_index": 7}, {"db_idx": 10960, "episode_idx": 39, "frame_idx": 192, "global_frame_idx": 10960, "task_index": 7}, {"db_idx": 10961, "episode_idx": 39, "frame_idx": 193, "global_frame_idx": 10961, "task_index": 7}, {"db_idx": 10962, "episode_idx": 39, "frame_idx": 194, "global_frame_idx": 10962, "task_index": 7}, {"db_idx": 10963, "episode_idx": 39, "frame_idx": 195, "global_frame_idx": 10963, "task_index": 7}, {"db_idx": 10964, "episode_idx": 39, "frame_idx": 196, "global_frame_idx": 10964, "task_index": 7}, {"db_idx": 10965, "episode_idx": 39, "frame_idx": 197, "global_frame_idx": 10965, "task_index": 7}, {"db_idx": 10966, "episode_idx": 39, "frame_idx": 198, "global_frame_idx": 10966, "task_index": 7}, {"db_idx": 10967, "episode_idx": 39, "frame_idx": 199, "global_frame_idx": 10967, "task_index": 7}, {"db_idx": 10968, "episode_idx": 39, "frame_idx": 200, "global_frame_idx": 10968, "task_index": 7}, {"db_idx": 10969, "episode_idx": 39, "frame_idx": 201, "global_frame_idx": 10969, "task_index": 7}, {"db_idx": 10970, "episode_idx": 39, "frame_idx": 202, "global_frame_idx": 10970, "task_index": 7}, {"db_idx": 10971, "episode_idx": 39, "frame_idx": 203, "global_frame_idx": 10971, 
"task_index": 7}, {"db_idx": 10972, "episode_idx": 39, "frame_idx": 204, "global_frame_idx": 10972, "task_index": 7}, {"db_idx": 10973, "episode_idx": 39, "frame_idx": 205, "global_frame_idx": 10973, "task_index": 7}, {"db_idx": 10974, "episode_idx": 39, "frame_idx": 206, "global_frame_idx": 10974, "task_index": 7}, {"db_idx": 10975, "episode_idx": 39, "frame_idx": 207, "global_frame_idx": 10975, "task_index": 7}, {"db_idx": 10976, "episode_idx": 39, "frame_idx": 208, "global_frame_idx": 10976, "task_index": 7}, {"db_idx": 10977, "episode_idx": 39, "frame_idx": 209, "global_frame_idx": 10977, "task_index": 7}, {"db_idx": 10978, "episode_idx": 39, "frame_idx": 210, "global_frame_idx": 10978, "task_index": 7}, {"db_idx": 10979, "episode_idx": 39, "frame_idx": 211, "global_frame_idx": 10979, "task_index": 7}, {"db_idx": 10980, "episode_idx": 39, "frame_idx": 212, "global_frame_idx": 10980, "task_index": 7}, {"db_idx": 10981, "episode_idx": 39, "frame_idx": 213, "global_frame_idx": 10981, "task_index": 7}, {"db_idx": 10982, "episode_idx": 39, "frame_idx": 214, "global_frame_idx": 10982, "task_index": 7}, {"db_idx": 10983, "episode_idx": 39, "frame_idx": 215, "global_frame_idx": 10983, "task_index": 7}, {"db_idx": 10984, "episode_idx": 39, "frame_idx": 216, "global_frame_idx": 10984, "task_index": 7}, {"db_idx": 10985, "episode_idx": 39, "frame_idx": 217, "global_frame_idx": 10985, "task_index": 7}, {"db_idx": 10986, "episode_idx": 39, "frame_idx": 218, "global_frame_idx": 10986, "task_index": 7}, {"db_idx": 10987, "episode_idx": 39, "frame_idx": 219, "global_frame_idx": 10987, "task_index": 7}, {"db_idx": 10988, "episode_idx": 39, "frame_idx": 220, "global_frame_idx": 10988, "task_index": 7}, {"db_idx": 10989, "episode_idx": 39, "frame_idx": 221, "global_frame_idx": 10989, "task_index": 7}, {"db_idx": 10990, "episode_idx": 39, "frame_idx": 222, "global_frame_idx": 10990, "task_index": 7}, {"db_idx": 10991, "episode_idx": 39, "frame_idx": 223, "global_frame_idx": 10991, 
"task_index": 7}, {"db_idx": 10992, "episode_idx": 39, "frame_idx": 224, "global_frame_idx": 10992, "task_index": 7}, {"db_idx": 10993, "episode_idx": 39, "frame_idx": 225, "global_frame_idx": 10993, "task_index": 7}, {"db_idx": 10994, "episode_idx": 39, "frame_idx": 226, "global_frame_idx": 10994, "task_index": 7}, {"db_idx": 10995, "episode_idx": 39, "frame_idx": 227, "global_frame_idx": 10995, "task_index": 7}, {"db_idx": 10996, "episode_idx": 39, "frame_idx": 228, "global_frame_idx": 10996, "task_index": 7}, {"db_idx": 10997, "episode_idx": 39, "frame_idx": 229, "global_frame_idx": 10997, "task_index": 7}, {"db_idx": 10998, "episode_idx": 39, "frame_idx": 230, "global_frame_idx": 10998, "task_index": 7}, {"db_idx": 10999, "episode_idx": 39, "frame_idx": 231, "global_frame_idx": 10999, "task_index": 7}, {"db_idx": 11000, "episode_idx": 39, "frame_idx": 232, "global_frame_idx": 11000, "task_index": 7}, {"db_idx": 11001, "episode_idx": 39, "frame_idx": 233, "global_frame_idx": 11001, "task_index": 7}, {"db_idx": 11002, "episode_idx": 39, "frame_idx": 234, "global_frame_idx": 11002, "task_index": 7}, {"db_idx": 11003, "episode_idx": 39, "frame_idx": 235, "global_frame_idx": 11003, "task_index": 7}, {"db_idx": 11004, "episode_idx": 39, "frame_idx": 236, "global_frame_idx": 11004, "task_index": 7}, {"db_idx": 11005, "episode_idx": 39, "frame_idx": 237, "global_frame_idx": 11005, "task_index": 7}, {"db_idx": 11006, "episode_idx": 39, "frame_idx": 238, "global_frame_idx": 11006, "task_index": 7}, {"db_idx": 11007, "episode_idx": 39, "frame_idx": 239, "global_frame_idx": 11007, "task_index": 7}, {"db_idx": 11008, "episode_idx": 39, "frame_idx": 240, "global_frame_idx": 11008, "task_index": 7}, {"db_idx": 11009, "episode_idx": 39, "frame_idx": 241, "global_frame_idx": 11009, "task_index": 7}, {"db_idx": 11010, "episode_idx": 39, "frame_idx": 242, "global_frame_idx": 11010, "task_index": 7}, {"db_idx": 11011, "episode_idx": 39, "frame_idx": 243, "global_frame_idx": 11011, 
"task_index": 7}, {"db_idx": 11012, "episode_idx": 39, "frame_idx": 244, "global_frame_idx": 11012, "task_index": 7}, {"db_idx": 11013, "episode_idx": 39, "frame_idx": 245, "global_frame_idx": 11013, "task_index": 7}, {"db_idx": 11014, "episode_idx": 39, "frame_idx": 246, "global_frame_idx": 11014, "task_index": 7}, {"db_idx": 11015, "episode_idx": 39, "frame_idx": 247, "global_frame_idx": 11015, "task_index": 7}, {"db_idx": 11016, "episode_idx": 39, "frame_idx": 248, "global_frame_idx": 11016, "task_index": 7}, {"db_idx": 11017, "episode_idx": 39, "frame_idx": 249, "global_frame_idx": 11017, "task_index": 7}, {"db_idx": 11018, "episode_idx": 39, "frame_idx": 250, "global_frame_idx": 11018, "task_index": 7}, {"db_idx": 11019, "episode_idx": 39, "frame_idx": 251, "global_frame_idx": 11019, "task_index": 7}, {"db_idx": 11020, "episode_idx": 39, "frame_idx": 252, "global_frame_idx": 11020, "task_index": 7}, {"db_idx": 11021, "episode_idx": 39, "frame_idx": 253, "global_frame_idx": 11021, "task_index": 7}, {"db_idx": 11022, "episode_idx": 39, "frame_idx": 254, "global_frame_idx": 11022, "task_index": 7}, {"db_idx": 11023, "episode_idx": 39, "frame_idx": 255, "global_frame_idx": 11023, "task_index": 7}, {"db_idx": 11024, "episode_idx": 39, "frame_idx": 256, "global_frame_idx": 11024, "task_index": 7}, {"db_idx": 11025, "episode_idx": 39, "frame_idx": 257, "global_frame_idx": 11025, "task_index": 7}, {"db_idx": 11026, "episode_idx": 39, "frame_idx": 258, "global_frame_idx": 11026, "task_index": 7}, {"db_idx": 11027, "episode_idx": 39, "frame_idx": 259, "global_frame_idx": 11027, "task_index": 7}, {"db_idx": 11028, "episode_idx": 39, "frame_idx": 260, "global_frame_idx": 11028, "task_index": 7}, {"db_idx": 11029, "episode_idx": 39, "frame_idx": 261, "global_frame_idx": 11029, "task_index": 7}, {"db_idx": 11030, "episode_idx": 39, "frame_idx": 262, "global_frame_idx": 11030, "task_index": 7}, {"db_idx": 11031, "episode_idx": 39, "frame_idx": 263, "global_frame_idx": 11031, 
"task_index": 7}, {"db_idx": 11032, "episode_idx": 39, "frame_idx": 264, "global_frame_idx": 11032, "task_index": 7}, {"db_idx": 11033, "episode_idx": 39, "frame_idx": 265, "global_frame_idx": 11033, "task_index": 7}, {"db_idx": 11034, "episode_idx": 39, "frame_idx": 266, "global_frame_idx": 11034, "task_index": 7}, {"db_idx": 11035, "episode_idx": 39, "frame_idx": 267, "global_frame_idx": 11035, "task_index": 7}, {"db_idx": 11036, "episode_idx": 39, "frame_idx": 268, "global_frame_idx": 11036, "task_index": 7}, {"db_idx": 11037, "episode_idx": 39, "frame_idx": 269, "global_frame_idx": 11037, "task_index": 7}, {"db_idx": 11038, "episode_idx": 39, "frame_idx": 270, "global_frame_idx": 11038, "task_index": 7}, {"db_idx": 11039, "episode_idx": 39, "frame_idx": 271, "global_frame_idx": 11039, "task_index": 7}, {"db_idx": 11040, "episode_idx": 39, "frame_idx": 272, "global_frame_idx": 11040, "task_index": 7}, {"db_idx": 11041, "episode_idx": 39, "frame_idx": 273, "global_frame_idx": 11041, "task_index": 7}, {"db_idx": 11042, "episode_idx": 39, "frame_idx": 274, "global_frame_idx": 11042, "task_index": 7}, {"db_idx": 11043, "episode_idx": 39, "frame_idx": 275, "global_frame_idx": 11043, "task_index": 7}, {"db_idx": 11044, "episode_idx": 39, "frame_idx": 276, "global_frame_idx": 11044, "task_index": 7}, {"db_idx": 11045, "episode_idx": 39, "frame_idx": 277, "global_frame_idx": 11045, "task_index": 7}, {"db_idx": 11046, "episode_idx": 39, "frame_idx": 278, "global_frame_idx": 11046, "task_index": 7}, {"db_idx": 11047, "episode_idx": 40, "frame_idx": 0, "global_frame_idx": 11047, "task_index": 8}, {"db_idx": 11048, "episode_idx": 40, "frame_idx": 1, "global_frame_idx": 11048, "task_index": 8}, {"db_idx": 11049, "episode_idx": 40, "frame_idx": 2, "global_frame_idx": 11049, "task_index": 8}, {"db_idx": 11050, "episode_idx": 40, "frame_idx": 3, "global_frame_idx": 11050, "task_index": 8}, {"db_idx": 11051, "episode_idx": 40, "frame_idx": 4, "global_frame_idx": 11051, 
"task_index": 8}, {"db_idx": 11052, "episode_idx": 40, "frame_idx": 5, "global_frame_idx": 11052, "task_index": 8}, {"db_idx": 11053, "episode_idx": 40, "frame_idx": 6, "global_frame_idx": 11053, "task_index": 8}, {"db_idx": 11054, "episode_idx": 40, "frame_idx": 7, "global_frame_idx": 11054, "task_index": 8}, {"db_idx": 11055, "episode_idx": 40, "frame_idx": 8, "global_frame_idx": 11055, "task_index": 8}, {"db_idx": 11056, "episode_idx": 40, "frame_idx": 9, "global_frame_idx": 11056, "task_index": 8}, {"db_idx": 11057, "episode_idx": 40, "frame_idx": 10, "global_frame_idx": 11057, "task_index": 8}, {"db_idx": 11058, "episode_idx": 40, "frame_idx": 11, "global_frame_idx": 11058, "task_index": 8}, {"db_idx": 11059, "episode_idx": 40, "frame_idx": 12, "global_frame_idx": 11059, "task_index": 8}, {"db_idx": 11060, "episode_idx": 40, "frame_idx": 13, "global_frame_idx": 11060, "task_index": 8}, {"db_idx": 11061, "episode_idx": 40, "frame_idx": 14, "global_frame_idx": 11061, "task_index": 8}, {"db_idx": 11062, "episode_idx": 40, "frame_idx": 15, "global_frame_idx": 11062, "task_index": 8}, {"db_idx": 11063, "episode_idx": 40, "frame_idx": 16, "global_frame_idx": 11063, "task_index": 8}, {"db_idx": 11064, "episode_idx": 40, "frame_idx": 17, "global_frame_idx": 11064, "task_index": 8}, {"db_idx": 11065, "episode_idx": 40, "frame_idx": 18, "global_frame_idx": 11065, "task_index": 8}, {"db_idx": 11066, "episode_idx": 40, "frame_idx": 19, "global_frame_idx": 11066, "task_index": 8}, {"db_idx": 11067, "episode_idx": 40, "frame_idx": 20, "global_frame_idx": 11067, "task_index": 8}, {"db_idx": 11068, "episode_idx": 40, "frame_idx": 21, "global_frame_idx": 11068, "task_index": 8}, {"db_idx": 11069, "episode_idx": 40, "frame_idx": 22, "global_frame_idx": 11069, "task_index": 8}, {"db_idx": 11070, "episode_idx": 40, "frame_idx": 23, "global_frame_idx": 11070, "task_index": 8}, {"db_idx": 11071, "episode_idx": 40, "frame_idx": 24, "global_frame_idx": 11071, "task_index": 8}, 
{"db_idx": 11072, "episode_idx": 40, "frame_idx": 25, "global_frame_idx": 11072, "task_index": 8}, {"db_idx": 11073, "episode_idx": 40, "frame_idx": 26, "global_frame_idx": 11073, "task_index": 8}, {"db_idx": 11074, "episode_idx": 40, "frame_idx": 27, "global_frame_idx": 11074, "task_index": 8}, {"db_idx": 11075, "episode_idx": 40, "frame_idx": 28, "global_frame_idx": 11075, "task_index": 8}, {"db_idx": 11076, "episode_idx": 40, "frame_idx": 29, "global_frame_idx": 11076, "task_index": 8}, {"db_idx": 11077, "episode_idx": 40, "frame_idx": 30, "global_frame_idx": 11077, "task_index": 8}, {"db_idx": 11078, "episode_idx": 40, "frame_idx": 31, "global_frame_idx": 11078, "task_index": 8}, {"db_idx": 11079, "episode_idx": 40, "frame_idx": 32, "global_frame_idx": 11079, "task_index": 8}, {"db_idx": 11080, "episode_idx": 40, "frame_idx": 33, "global_frame_idx": 11080, "task_index": 8}, {"db_idx": 11081, "episode_idx": 40, "frame_idx": 34, "global_frame_idx": 11081, "task_index": 8}, {"db_idx": 11082, "episode_idx": 40, "frame_idx": 35, "global_frame_idx": 11082, "task_index": 8}, {"db_idx": 11083, "episode_idx": 40, "frame_idx": 36, "global_frame_idx": 11083, "task_index": 8}, {"db_idx": 11084, "episode_idx": 40, "frame_idx": 37, "global_frame_idx": 11084, "task_index": 8}, {"db_idx": 11085, "episode_idx": 40, "frame_idx": 38, "global_frame_idx": 11085, "task_index": 8}, {"db_idx": 11086, "episode_idx": 40, "frame_idx": 39, "global_frame_idx": 11086, "task_index": 8}, {"db_idx": 11087, "episode_idx": 40, "frame_idx": 40, "global_frame_idx": 11087, "task_index": 8}, {"db_idx": 11088, "episode_idx": 40, "frame_idx": 41, "global_frame_idx": 11088, "task_index": 8}, {"db_idx": 11089, "episode_idx": 40, "frame_idx": 42, "global_frame_idx": 11089, "task_index": 8}, {"db_idx": 11090, "episode_idx": 40, "frame_idx": 43, "global_frame_idx": 11090, "task_index": 8}, {"db_idx": 11091, "episode_idx": 40, "frame_idx": 44, "global_frame_idx": 11091, "task_index": 8}, {"db_idx": 11092, 
"episode_idx": 40, "frame_idx": 45, "global_frame_idx": 11092, "task_index": 8}, {"db_idx": 11093, "episode_idx": 40, "frame_idx": 46, "global_frame_idx": 11093, "task_index": 8}, {"db_idx": 11094, "episode_idx": 40, "frame_idx": 47, "global_frame_idx": 11094, "task_index": 8}, {"db_idx": 11095, "episode_idx": 40, "frame_idx": 48, "global_frame_idx": 11095, "task_index": 8}, {"db_idx": 11096, "episode_idx": 40, "frame_idx": 49, "global_frame_idx": 11096, "task_index": 8}, {"db_idx": 11097, "episode_idx": 40, "frame_idx": 50, "global_frame_idx": 11097, "task_index": 8}, {"db_idx": 11098, "episode_idx": 40, "frame_idx": 51, "global_frame_idx": 11098, "task_index": 8}, {"db_idx": 11099, "episode_idx": 40, "frame_idx": 52, "global_frame_idx": 11099, "task_index": 8}, {"db_idx": 11100, "episode_idx": 40, "frame_idx": 53, "global_frame_idx": 11100, "task_index": 8}, {"db_idx": 11101, "episode_idx": 40, "frame_idx": 54, "global_frame_idx": 11101, "task_index": 8}, {"db_idx": 11102, "episode_idx": 40, "frame_idx": 55, "global_frame_idx": 11102, "task_index": 8}, {"db_idx": 11103, "episode_idx": 40, "frame_idx": 56, "global_frame_idx": 11103, "task_index": 8}, {"db_idx": 11104, "episode_idx": 40, "frame_idx": 57, "global_frame_idx": 11104, "task_index": 8}, {"db_idx": 11105, "episode_idx": 40, "frame_idx": 58, "global_frame_idx": 11105, "task_index": 8}, {"db_idx": 11106, "episode_idx": 40, "frame_idx": 59, "global_frame_idx": 11106, "task_index": 8}, {"db_idx": 11107, "episode_idx": 40, "frame_idx": 60, "global_frame_idx": 11107, "task_index": 8}, {"db_idx": 11108, "episode_idx": 40, "frame_idx": 61, "global_frame_idx": 11108, "task_index": 8}, {"db_idx": 11109, "episode_idx": 40, "frame_idx": 62, "global_frame_idx": 11109, "task_index": 8}, {"db_idx": 11110, "episode_idx": 40, "frame_idx": 63, "global_frame_idx": 11110, "task_index": 8}, {"db_idx": 11111, "episode_idx": 40, "frame_idx": 64, "global_frame_idx": 11111, "task_index": 8}, {"db_idx": 11112, "episode_idx": 40, 
"frame_idx": 65, "global_frame_idx": 11112, "task_index": 8}, {"db_idx": 11113, "episode_idx": 40, "frame_idx": 66, "global_frame_idx": 11113, "task_index": 8}, {"db_idx": 11114, "episode_idx": 40, "frame_idx": 67, "global_frame_idx": 11114, "task_index": 8}, {"db_idx": 11115, "episode_idx": 40, "frame_idx": 68, "global_frame_idx": 11115, "task_index": 8}, {"db_idx": 11116, "episode_idx": 40, "frame_idx": 69, "global_frame_idx": 11116, "task_index": 8}, {"db_idx": 11117, "episode_idx": 40, "frame_idx": 70, "global_frame_idx": 11117, "task_index": 8}, {"db_idx": 11118, "episode_idx": 40, "frame_idx": 71, "global_frame_idx": 11118, "task_index": 8}, {"db_idx": 11119, "episode_idx": 40, "frame_idx": 72, "global_frame_idx": 11119, "task_index": 8}, {"db_idx": 11120, "episode_idx": 40, "frame_idx": 73, "global_frame_idx": 11120, "task_index": 8}, {"db_idx": 11121, "episode_idx": 40, "frame_idx": 74, "global_frame_idx": 11121, "task_index": 8}, {"db_idx": 11122, "episode_idx": 40, "frame_idx": 75, "global_frame_idx": 11122, "task_index": 8}, {"db_idx": 11123, "episode_idx": 40, "frame_idx": 76, "global_frame_idx": 11123, "task_index": 8}, {"db_idx": 11124, "episode_idx": 40, "frame_idx": 77, "global_frame_idx": 11124, "task_index": 8}, {"db_idx": 11125, "episode_idx": 40, "frame_idx": 78, "global_frame_idx": 11125, "task_index": 8}, {"db_idx": 11126, "episode_idx": 40, "frame_idx": 79, "global_frame_idx": 11126, "task_index": 8}, {"db_idx": 11127, "episode_idx": 40, "frame_idx": 80, "global_frame_idx": 11127, "task_index": 8}, {"db_idx": 11128, "episode_idx": 40, "frame_idx": 81, "global_frame_idx": 11128, "task_index": 8}, {"db_idx": 11129, "episode_idx": 40, "frame_idx": 82, "global_frame_idx": 11129, "task_index": 8}, {"db_idx": 11130, "episode_idx": 40, "frame_idx": 83, "global_frame_idx": 11130, "task_index": 8}, {"db_idx": 11131, "episode_idx": 40, "frame_idx": 84, "global_frame_idx": 11131, "task_index": 8}, {"db_idx": 11132, "episode_idx": 40, "frame_idx": 85, 
"global_frame_idx": 11132, "task_index": 8}, {"db_idx": 11133, "episode_idx": 40, "frame_idx": 86, "global_frame_idx": 11133, "task_index": 8}, {"db_idx": 11134, "episode_idx": 40, "frame_idx": 87, "global_frame_idx": 11134, "task_index": 8}, {"db_idx": 11135, "episode_idx": 40, "frame_idx": 88, "global_frame_idx": 11135, "task_index": 8}, {"db_idx": 11136, "episode_idx": 40, "frame_idx": 89, "global_frame_idx": 11136, "task_index": 8}, {"db_idx": 11137, "episode_idx": 40, "frame_idx": 90, "global_frame_idx": 11137, "task_index": 8}, {"db_idx": 11138, "episode_idx": 40, "frame_idx": 91, "global_frame_idx": 11138, "task_index": 8}, {"db_idx": 11139, "episode_idx": 40, "frame_idx": 92, "global_frame_idx": 11139, "task_index": 8}, {"db_idx": 11140, "episode_idx": 40, "frame_idx": 93, "global_frame_idx": 11140, "task_index": 8}, {"db_idx": 11141, "episode_idx": 40, "frame_idx": 94, "global_frame_idx": 11141, "task_index": 8}, {"db_idx": 11142, "episode_idx": 40, "frame_idx": 95, "global_frame_idx": 11142, "task_index": 8}, {"db_idx": 11143, "episode_idx": 40, "frame_idx": 96, "global_frame_idx": 11143, "task_index": 8}, {"db_idx": 11144, "episode_idx": 40, "frame_idx": 97, "global_frame_idx": 11144, "task_index": 8}, {"db_idx": 11145, "episode_idx": 40, "frame_idx": 98, "global_frame_idx": 11145, "task_index": 8}, {"db_idx": 11146, "episode_idx": 40, "frame_idx": 99, "global_frame_idx": 11146, "task_index": 8}, {"db_idx": 11147, "episode_idx": 40, "frame_idx": 100, "global_frame_idx": 11147, "task_index": 8}, {"db_idx": 11148, "episode_idx": 40, "frame_idx": 101, "global_frame_idx": 11148, "task_index": 8}, {"db_idx": 11149, "episode_idx": 40, "frame_idx": 102, "global_frame_idx": 11149, "task_index": 8}, {"db_idx": 11150, "episode_idx": 40, "frame_idx": 103, "global_frame_idx": 11150, "task_index": 8}, {"db_idx": 11151, "episode_idx": 40, "frame_idx": 104, "global_frame_idx": 11151, "task_index": 8}, {"db_idx": 11152, "episode_idx": 40, "frame_idx": 105, 
"global_frame_idx": 11152, "task_index": 8}, {"db_idx": 11153, "episode_idx": 40, "frame_idx": 106, "global_frame_idx": 11153, "task_index": 8}, {"db_idx": 11154, "episode_idx": 40, "frame_idx": 107, "global_frame_idx": 11154, "task_index": 8}, {"db_idx": 11155, "episode_idx": 40, "frame_idx": 108, "global_frame_idx": 11155, "task_index": 8}, {"db_idx": 11156, "episode_idx": 40, "frame_idx": 109, "global_frame_idx": 11156, "task_index": 8}, {"db_idx": 11157, "episode_idx": 40, "frame_idx": 110, "global_frame_idx": 11157, "task_index": 8}, {"db_idx": 11158, "episode_idx": 40, "frame_idx": 111, "global_frame_idx": 11158, "task_index": 8}, {"db_idx": 11159, "episode_idx": 40, "frame_idx": 112, "global_frame_idx": 11159, "task_index": 8}, {"db_idx": 11160, "episode_idx": 40, "frame_idx": 113, "global_frame_idx": 11160, "task_index": 8}, {"db_idx": 11161, "episode_idx": 40, "frame_idx": 114, "global_frame_idx": 11161, "task_index": 8}, {"db_idx": 11162, "episode_idx": 40, "frame_idx": 115, "global_frame_idx": 11162, "task_index": 8}, {"db_idx": 11163, "episode_idx": 40, "frame_idx": 116, "global_frame_idx": 11163, "task_index": 8}, {"db_idx": 11164, "episode_idx": 40, "frame_idx": 117, "global_frame_idx": 11164, "task_index": 8}, {"db_idx": 11165, "episode_idx": 40, "frame_idx": 118, "global_frame_idx": 11165, "task_index": 8}, {"db_idx": 11166, "episode_idx": 40, "frame_idx": 119, "global_frame_idx": 11166, "task_index": 8}, {"db_idx": 11167, "episode_idx": 40, "frame_idx": 120, "global_frame_idx": 11167, "task_index": 8}, {"db_idx": 11168, "episode_idx": 40, "frame_idx": 121, "global_frame_idx": 11168, "task_index": 8}, {"db_idx": 11169, "episode_idx": 40, "frame_idx": 122, "global_frame_idx": 11169, "task_index": 8}, {"db_idx": 11170, "episode_idx": 40, "frame_idx": 123, "global_frame_idx": 11170, "task_index": 8}, {"db_idx": 11171, "episode_idx": 40, "frame_idx": 124, "global_frame_idx": 11171, "task_index": 8}, {"db_idx": 11172, "episode_idx": 40, "frame_idx": 125, 
"global_frame_idx": 11172, "task_index": 8}, {"db_idx": 11173, "episode_idx": 40, "frame_idx": 126, "global_frame_idx": 11173, "task_index": 8}, {"db_idx": 11174, "episode_idx": 40, "frame_idx": 127, "global_frame_idx": 11174, "task_index": 8}, {"db_idx": 11175, "episode_idx": 40, "frame_idx": 128, "global_frame_idx": 11175, "task_index": 8}, {"db_idx": 11176, "episode_idx": 40, "frame_idx": 129, "global_frame_idx": 11176, "task_index": 8}, {"db_idx": 11177, "episode_idx": 40, "frame_idx": 130, "global_frame_idx": 11177, "task_index": 8}, {"db_idx": 11178, "episode_idx": 40, "frame_idx": 131, "global_frame_idx": 11178, "task_index": 8}, {"db_idx": 11179, "episode_idx": 40, "frame_idx": 132, "global_frame_idx": 11179, "task_index": 8}, {"db_idx": 11180, "episode_idx": 40, "frame_idx": 133, "global_frame_idx": 11180, "task_index": 8}, {"db_idx": 11181, "episode_idx": 40, "frame_idx": 134, "global_frame_idx": 11181, "task_index": 8}, {"db_idx": 11182, "episode_idx": 40, "frame_idx": 135, "global_frame_idx": 11182, "task_index": 8}, {"db_idx": 11183, "episode_idx": 40, "frame_idx": 136, "global_frame_idx": 11183, "task_index": 8}, {"db_idx": 11184, "episode_idx": 40, "frame_idx": 137, "global_frame_idx": 11184, "task_index": 8}, {"db_idx": 11185, "episode_idx": 40, "frame_idx": 138, "global_frame_idx": 11185, "task_index": 8}, {"db_idx": 11186, "episode_idx": 40, "frame_idx": 139, "global_frame_idx": 11186, "task_index": 8}, {"db_idx": 11187, "episode_idx": 40, "frame_idx": 140, "global_frame_idx": 11187, "task_index": 8}, {"db_idx": 11188, "episode_idx": 40, "frame_idx": 141, "global_frame_idx": 11188, "task_index": 8}, {"db_idx": 11189, "episode_idx": 40, "frame_idx": 142, "global_frame_idx": 11189, "task_index": 8}, {"db_idx": 11190, "episode_idx": 40, "frame_idx": 143, "global_frame_idx": 11190, "task_index": 8}, {"db_idx": 11191, "episode_idx": 40, "frame_idx": 144, "global_frame_idx": 11191, "task_index": 8}, {"db_idx": 11192, "episode_idx": 40, "frame_idx": 145, 
"global_frame_idx": 11192, "task_index": 8}, {"db_idx": 11193, "episode_idx": 40, "frame_idx": 146, "global_frame_idx": 11193, "task_index": 8}, {"db_idx": 11194, "episode_idx": 40, "frame_idx": 147, "global_frame_idx": 11194, "task_index": 8}, {"db_idx": 11195, "episode_idx": 40, "frame_idx": 148, "global_frame_idx": 11195, "task_index": 8}, {"db_idx": 11196, "episode_idx": 40, "frame_idx": 149, "global_frame_idx": 11196, "task_index": 8}, {"db_idx": 11197, "episode_idx": 40, "frame_idx": 150, "global_frame_idx": 11197, "task_index": 8}, {"db_idx": 11198, "episode_idx": 40, "frame_idx": 151, "global_frame_idx": 11198, "task_index": 8}, {"db_idx": 11199, "episode_idx": 40, "frame_idx": 152, "global_frame_idx": 11199, "task_index": 8}, {"db_idx": 11200, "episode_idx": 40, "frame_idx": 153, "global_frame_idx": 11200, "task_index": 8}, {"db_idx": 11201, "episode_idx": 40, "frame_idx": 154, "global_frame_idx": 11201, "task_index": 8}, {"db_idx": 11202, "episode_idx": 40, "frame_idx": 155, "global_frame_idx": 11202, "task_index": 8}, {"db_idx": 11203, "episode_idx": 40, "frame_idx": 156, "global_frame_idx": 11203, "task_index": 8}, {"db_idx": 11204, "episode_idx": 40, "frame_idx": 157, "global_frame_idx": 11204, "task_index": 8}, {"db_idx": 11205, "episode_idx": 40, "frame_idx": 158, "global_frame_idx": 11205, "task_index": 8}, {"db_idx": 11206, "episode_idx": 40, "frame_idx": 159, "global_frame_idx": 11206, "task_index": 8}, {"db_idx": 11207, "episode_idx": 40, "frame_idx": 160, "global_frame_idx": 11207, "task_index": 8}, {"db_idx": 11208, "episode_idx": 40, "frame_idx": 161, "global_frame_idx": 11208, "task_index": 8}, {"db_idx": 11209, "episode_idx": 40, "frame_idx": 162, "global_frame_idx": 11209, "task_index": 8}, {"db_idx": 11210, "episode_idx": 40, "frame_idx": 163, "global_frame_idx": 11210, "task_index": 8}, {"db_idx": 11211, "episode_idx": 40, "frame_idx": 164, "global_frame_idx": 11211, "task_index": 8}, {"db_idx": 11212, "episode_idx": 40, "frame_idx": 165, 
"global_frame_idx": 11212, "task_index": 8}, {"db_idx": 11213, "episode_idx": 40, "frame_idx": 166, "global_frame_idx": 11213, "task_index": 8}, {"db_idx": 11214, "episode_idx": 40, "frame_idx": 167, "global_frame_idx": 11214, "task_index": 8}, {"db_idx": 11215, "episode_idx": 40, "frame_idx": 168, "global_frame_idx": 11215, "task_index": 8}, {"db_idx": 11216, "episode_idx": 40, "frame_idx": 169, "global_frame_idx": 11216, "task_index": 8}, {"db_idx": 11217, "episode_idx": 40, "frame_idx": 170, "global_frame_idx": 11217, "task_index": 8}, {"db_idx": 11218, "episode_idx": 40, "frame_idx": 171, "global_frame_idx": 11218, "task_index": 8}, {"db_idx": 11219, "episode_idx": 40, "frame_idx": 172, "global_frame_idx": 11219, "task_index": 8}, {"db_idx": 11220, "episode_idx": 40, "frame_idx": 173, "global_frame_idx": 11220, "task_index": 8}, {"db_idx": 11221, "episode_idx": 40, "frame_idx": 174, "global_frame_idx": 11221, "task_index": 8}, {"db_idx": 11222, "episode_idx": 40, "frame_idx": 175, "global_frame_idx": 11222, "task_index": 8}, {"db_idx": 11223, "episode_idx": 40, "frame_idx": 176, "global_frame_idx": 11223, "task_index": 8}, {"db_idx": 11224, "episode_idx": 40, "frame_idx": 177, "global_frame_idx": 11224, "task_index": 8}, {"db_idx": 11225, "episode_idx": 40, "frame_idx": 178, "global_frame_idx": 11225, "task_index": 8}, {"db_idx": 11226, "episode_idx": 40, "frame_idx": 179, "global_frame_idx": 11226, "task_index": 8}, {"db_idx": 11227, "episode_idx": 40, "frame_idx": 180, "global_frame_idx": 11227, "task_index": 8}, {"db_idx": 11228, "episode_idx": 40, "frame_idx": 181, "global_frame_idx": 11228, "task_index": 8}, {"db_idx": 11229, "episode_idx": 40, "frame_idx": 182, "global_frame_idx": 11229, "task_index": 8}, {"db_idx": 11230, "episode_idx": 40, "frame_idx": 183, "global_frame_idx": 11230, "task_index": 8}, {"db_idx": 11231, "episode_idx": 40, "frame_idx": 184, "global_frame_idx": 11231, "task_index": 8}, {"db_idx": 11232, "episode_idx": 40, "frame_idx": 185, 
"global_frame_idx": 11232, "task_index": 8}, {"db_idx": 11233, "episode_idx": 40, "frame_idx": 186, "global_frame_idx": 11233, "task_index": 8}, {"db_idx": 11234, "episode_idx": 40, "frame_idx": 187, "global_frame_idx": 11234, "task_index": 8}, {"db_idx": 11235, "episode_idx": 40, "frame_idx": 188, "global_frame_idx": 11235, "task_index": 8}, {"db_idx": 11236, "episode_idx": 40, "frame_idx": 189, "global_frame_idx": 11236, "task_index": 8}, {"db_idx": 11237, "episode_idx": 40, "frame_idx": 190, "global_frame_idx": 11237, "task_index": 8}, {"db_idx": 11238, "episode_idx": 40, "frame_idx": 191, "global_frame_idx": 11238, "task_index": 8}, {"db_idx": 11239, "episode_idx": 40, "frame_idx": 192, "global_frame_idx": 11239, "task_index": 8}, {"db_idx": 11240, "episode_idx": 40, "frame_idx": 193, "global_frame_idx": 11240, "task_index": 8}, {"db_idx": 11241, "episode_idx": 40, "frame_idx": 194, "global_frame_idx": 11241, "task_index": 8}, {"db_idx": 11242, "episode_idx": 40, "frame_idx": 195, "global_frame_idx": 11242, "task_index": 8}, {"db_idx": 11243, "episode_idx": 40, "frame_idx": 196, "global_frame_idx": 11243, "task_index": 8}, {"db_idx": 11244, "episode_idx": 40, "frame_idx": 197, "global_frame_idx": 11244, "task_index": 8}, {"db_idx": 11245, "episode_idx": 40, "frame_idx": 198, "global_frame_idx": 11245, "task_index": 8}, {"db_idx": 11246, "episode_idx": 40, "frame_idx": 199, "global_frame_idx": 11246, "task_index": 8}, {"db_idx": 11247, "episode_idx": 40, "frame_idx": 200, "global_frame_idx": 11247, "task_index": 8}, {"db_idx": 11248, "episode_idx": 40, "frame_idx": 201, "global_frame_idx": 11248, "task_index": 8}, {"db_idx": 11249, "episode_idx": 40, "frame_idx": 202, "global_frame_idx": 11249, "task_index": 8}, {"db_idx": 11250, "episode_idx": 40, "frame_idx": 203, "global_frame_idx": 11250, "task_index": 8}, {"db_idx": 11251, "episode_idx": 40, "frame_idx": 204, "global_frame_idx": 11251, "task_index": 8}, {"db_idx": 11252, "episode_idx": 40, "frame_idx": 205, 
"global_frame_idx": 11252, "task_index": 8}, {"db_idx": 11253, "episode_idx": 40, "frame_idx": 206, "global_frame_idx": 11253, "task_index": 8}, {"db_idx": 11254, "episode_idx": 40, "frame_idx": 207, "global_frame_idx": 11254, "task_index": 8}, {"db_idx": 11255, "episode_idx": 40, "frame_idx": 208, "global_frame_idx": 11255, "task_index": 8}, {"db_idx": 11256, "episode_idx": 40, "frame_idx": 209, "global_frame_idx": 11256, "task_index": 8}, {"db_idx": 11257, "episode_idx": 40, "frame_idx": 210, "global_frame_idx": 11257, "task_index": 8}, {"db_idx": 11258, "episode_idx": 40, "frame_idx": 211, "global_frame_idx": 11258, "task_index": 8}, {"db_idx": 11259, "episode_idx": 40, "frame_idx": 212, "global_frame_idx": 11259, "task_index": 8}, {"db_idx": 11260, "episode_idx": 40, "frame_idx": 213, "global_frame_idx": 11260, "task_index": 8}, {"db_idx": 11261, "episode_idx": 40, "frame_idx": 214, "global_frame_idx": 11261, "task_index": 8}, {"db_idx": 11262, "episode_idx": 40, "frame_idx": 215, "global_frame_idx": 11262, "task_index": 8}, {"db_idx": 11263, "episode_idx": 40, "frame_idx": 216, "global_frame_idx": 11263, "task_index": 8}, {"db_idx": 11264, "episode_idx": 40, "frame_idx": 217, "global_frame_idx": 11264, "task_index": 8}, {"db_idx": 11265, "episode_idx": 40, "frame_idx": 218, "global_frame_idx": 11265, "task_index": 8}, {"db_idx": 11266, "episode_idx": 40, "frame_idx": 219, "global_frame_idx": 11266, "task_index": 8}, {"db_idx": 11267, "episode_idx": 40, "frame_idx": 220, "global_frame_idx": 11267, "task_index": 8}, {"db_idx": 11268, "episode_idx": 40, "frame_idx": 221, "global_frame_idx": 11268, "task_index": 8}, {"db_idx": 11269, "episode_idx": 40, "frame_idx": 222, "global_frame_idx": 11269, "task_index": 8}, {"db_idx": 11270, "episode_idx": 40, "frame_idx": 223, "global_frame_idx": 11270, "task_index": 8}, {"db_idx": 11271, "episode_idx": 40, "frame_idx": 224, "global_frame_idx": 11271, "task_index": 8}, {"db_idx": 11272, "episode_idx": 40, "frame_idx": 225, 
"global_frame_idx": 11272, "task_index": 8}, {"db_idx": 11273, "episode_idx": 40, "frame_idx": 226, "global_frame_idx": 11273, "task_index": 8}, {"db_idx": 11274, "episode_idx": 40, "frame_idx": 227, "global_frame_idx": 11274, "task_index": 8}, {"db_idx": 11275, "episode_idx": 40, "frame_idx": 228, "global_frame_idx": 11275, "task_index": 8}, {"db_idx": 11276, "episode_idx": 40, "frame_idx": 229, "global_frame_idx": 11276, "task_index": 8}, {"db_idx": 11277, "episode_idx": 40, "frame_idx": 230, "global_frame_idx": 11277, "task_index": 8}, {"db_idx": 11278, "episode_idx": 40, "frame_idx": 231, "global_frame_idx": 11278, "task_index": 8}, {"db_idx": 11279, "episode_idx": 40, "frame_idx": 232, "global_frame_idx": 11279, "task_index": 8}, {"db_idx": 11280, "episode_idx": 40, "frame_idx": 233, "global_frame_idx": 11280, "task_index": 8}, {"db_idx": 11281, "episode_idx": 40, "frame_idx": 234, "global_frame_idx": 11281, "task_index": 8}, {"db_idx": 11282, "episode_idx": 40, "frame_idx": 235, "global_frame_idx": 11282, "task_index": 8}, {"db_idx": 11283, "episode_idx": 40, "frame_idx": 236, "global_frame_idx": 11283, "task_index": 8}, {"db_idx": 11284, "episode_idx": 40, "frame_idx": 237, "global_frame_idx": 11284, "task_index": 8}, {"db_idx": 11285, "episode_idx": 40, "frame_idx": 238, "global_frame_idx": 11285, "task_index": 8}, {"db_idx": 11286, "episode_idx": 40, "frame_idx": 239, "global_frame_idx": 11286, "task_index": 8}, {"db_idx": 11287, "episode_idx": 40, "frame_idx": 240, "global_frame_idx": 11287, "task_index": 8}, {"db_idx": 11288, "episode_idx": 40, "frame_idx": 241, "global_frame_idx": 11288, "task_index": 8}, {"db_idx": 11289, "episode_idx": 40, "frame_idx": 242, "global_frame_idx": 11289, "task_index": 8}, {"db_idx": 11290, "episode_idx": 40, "frame_idx": 243, "global_frame_idx": 11290, "task_index": 8}, {"db_idx": 11291, "episode_idx": 40, "frame_idx": 244, "global_frame_idx": 11291, "task_index": 8}, {"db_idx": 11292, "episode_idx": 40, "frame_idx": 245, 
"global_frame_idx": 11292, "task_index": 8}, {"db_idx": 11293, "episode_idx": 41, "frame_idx": 0, "global_frame_idx": 11293, "task_index": 8}, {"db_idx": 11294, "episode_idx": 41, "frame_idx": 1, "global_frame_idx": 11294, "task_index": 8}, {"db_idx": 11295, "episode_idx": 41, "frame_idx": 2, "global_frame_idx": 11295, "task_index": 8}, {"db_idx": 11296, "episode_idx": 41, "frame_idx": 3, "global_frame_idx": 11296, "task_index": 8}, {"db_idx": 11297, "episode_idx": 41, "frame_idx": 4, "global_frame_idx": 11297, "task_index": 8}, {"db_idx": 11298, "episode_idx": 41, "frame_idx": 5, "global_frame_idx": 11298, "task_index": 8}, {"db_idx": 11299, "episode_idx": 41, "frame_idx": 6, "global_frame_idx": 11299, "task_index": 8}, {"db_idx": 11300, "episode_idx": 41, "frame_idx": 7, "global_frame_idx": 11300, "task_index": 8}, {"db_idx": 11301, "episode_idx": 41, "frame_idx": 8, "global_frame_idx": 11301, "task_index": 8}, {"db_idx": 11302, "episode_idx": 41, "frame_idx": 9, "global_frame_idx": 11302, "task_index": 8}, {"db_idx": 11303, "episode_idx": 41, "frame_idx": 10, "global_frame_idx": 11303, "task_index": 8}, {"db_idx": 11304, "episode_idx": 41, "frame_idx": 11, "global_frame_idx": 11304, "task_index": 8}, {"db_idx": 11305, "episode_idx": 41, "frame_idx": 12, "global_frame_idx": 11305, "task_index": 8}, {"db_idx": 11306, "episode_idx": 41, "frame_idx": 13, "global_frame_idx": 11306, "task_index": 8}, {"db_idx": 11307, "episode_idx": 41, "frame_idx": 14, "global_frame_idx": 11307, "task_index": 8}, {"db_idx": 11308, "episode_idx": 41, "frame_idx": 15, "global_frame_idx": 11308, "task_index": 8}, {"db_idx": 11309, "episode_idx": 41, "frame_idx": 16, "global_frame_idx": 11309, "task_index": 8}, {"db_idx": 11310, "episode_idx": 41, "frame_idx": 17, "global_frame_idx": 11310, "task_index": 8}, {"db_idx": 11311, "episode_idx": 41, "frame_idx": 18, "global_frame_idx": 11311, "task_index": 8}, {"db_idx": 11312, "episode_idx": 41, "frame_idx": 19, "global_frame_idx": 11312, 
"task_index": 8}, {"db_idx": 11313, "episode_idx": 41, "frame_idx": 20, "global_frame_idx": 11313, "task_index": 8}, {"db_idx": 11314, "episode_idx": 41, "frame_idx": 21, "global_frame_idx": 11314, "task_index": 8}, {"db_idx": 11315, "episode_idx": 41, "frame_idx": 22, "global_frame_idx": 11315, "task_index": 8}, {"db_idx": 11316, "episode_idx": 41, "frame_idx": 23, "global_frame_idx": 11316, "task_index": 8}, {"db_idx": 11317, "episode_idx": 41, "frame_idx": 24, "global_frame_idx": 11317, "task_index": 8}, {"db_idx": 11318, "episode_idx": 41, "frame_idx": 25, "global_frame_idx": 11318, "task_index": 8}, {"db_idx": 11319, "episode_idx": 41, "frame_idx": 26, "global_frame_idx": 11319, "task_index": 8}, {"db_idx": 11320, "episode_idx": 41, "frame_idx": 27, "global_frame_idx": 11320, "task_index": 8}, {"db_idx": 11321, "episode_idx": 41, "frame_idx": 28, "global_frame_idx": 11321, "task_index": 8}, {"db_idx": 11322, "episode_idx": 41, "frame_idx": 29, "global_frame_idx": 11322, "task_index": 8}, {"db_idx": 11323, "episode_idx": 41, "frame_idx": 30, "global_frame_idx": 11323, "task_index": 8}, {"db_idx": 11324, "episode_idx": 41, "frame_idx": 31, "global_frame_idx": 11324, "task_index": 8}, {"db_idx": 11325, "episode_idx": 41, "frame_idx": 32, "global_frame_idx": 11325, "task_index": 8}, {"db_idx": 11326, "episode_idx": 41, "frame_idx": 33, "global_frame_idx": 11326, "task_index": 8}, {"db_idx": 11327, "episode_idx": 41, "frame_idx": 34, "global_frame_idx": 11327, "task_index": 8}, {"db_idx": 11328, "episode_idx": 41, "frame_idx": 35, "global_frame_idx": 11328, "task_index": 8}, {"db_idx": 11329, "episode_idx": 41, "frame_idx": 36, "global_frame_idx": 11329, "task_index": 8}, {"db_idx": 11330, "episode_idx": 41, "frame_idx": 37, "global_frame_idx": 11330, "task_index": 8}, {"db_idx": 11331, "episode_idx": 41, "frame_idx": 38, "global_frame_idx": 11331, "task_index": 8}, {"db_idx": 11332, "episode_idx": 41, "frame_idx": 39, "global_frame_idx": 11332, "task_index": 8}, 
{"db_idx": 11333, "episode_idx": 41, "frame_idx": 40, "global_frame_idx": 11333, "task_index": 8}, {"db_idx": 11334, "episode_idx": 41, "frame_idx": 41, "global_frame_idx": 11334, "task_index": 8}, {"db_idx": 11335, "episode_idx": 41, "frame_idx": 42, "global_frame_idx": 11335, "task_index": 8}, {"db_idx": 11336, "episode_idx": 41, "frame_idx": 43, "global_frame_idx": 11336, "task_index": 8}, {"db_idx": 11337, "episode_idx": 41, "frame_idx": 44, "global_frame_idx": 11337, "task_index": 8}, {"db_idx": 11338, "episode_idx": 41, "frame_idx": 45, "global_frame_idx": 11338, "task_index": 8}, {"db_idx": 11339, "episode_idx": 41, "frame_idx": 46, "global_frame_idx": 11339, "task_index": 8}, {"db_idx": 11340, "episode_idx": 41, "frame_idx": 47, "global_frame_idx": 11340, "task_index": 8}, {"db_idx": 11341, "episode_idx": 41, "frame_idx": 48, "global_frame_idx": 11341, "task_index": 8}, {"db_idx": 11342, "episode_idx": 41, "frame_idx": 49, "global_frame_idx": 11342, "task_index": 8}, {"db_idx": 11343, "episode_idx": 41, "frame_idx": 50, "global_frame_idx": 11343, "task_index": 8}, {"db_idx": 11344, "episode_idx": 41, "frame_idx": 51, "global_frame_idx": 11344, "task_index": 8}, {"db_idx": 11345, "episode_idx": 41, "frame_idx": 52, "global_frame_idx": 11345, "task_index": 8}, {"db_idx": 11346, "episode_idx": 41, "frame_idx": 53, "global_frame_idx": 11346, "task_index": 8}, {"db_idx": 11347, "episode_idx": 41, "frame_idx": 54, "global_frame_idx": 11347, "task_index": 8}, {"db_idx": 11348, "episode_idx": 41, "frame_idx": 55, "global_frame_idx": 11348, "task_index": 8}, {"db_idx": 11349, "episode_idx": 41, "frame_idx": 56, "global_frame_idx": 11349, "task_index": 8}, {"db_idx": 11350, "episode_idx": 41, "frame_idx": 57, "global_frame_idx": 11350, "task_index": 8}, {"db_idx": 11351, "episode_idx": 41, "frame_idx": 58, "global_frame_idx": 11351, "task_index": 8}, {"db_idx": 11352, "episode_idx": 41, "frame_idx": 59, "global_frame_idx": 11352, "task_index": 8}, {"db_idx": 11353, 
"episode_idx": 41, "frame_idx": 60, "global_frame_idx": 11353, "task_index": 8}, {"db_idx": 11354, "episode_idx": 41, "frame_idx": 61, "global_frame_idx": 11354, "task_index": 8}, {"db_idx": 11355, "episode_idx": 41, "frame_idx": 62, "global_frame_idx": 11355, "task_index": 8}, {"db_idx": 11356, "episode_idx": 41, "frame_idx": 63, "global_frame_idx": 11356, "task_index": 8}, {"db_idx": 11357, "episode_idx": 41, "frame_idx": 64, "global_frame_idx": 11357, "task_index": 8}, {"db_idx": 11358, "episode_idx": 41, "frame_idx": 65, "global_frame_idx": 11358, "task_index": 8}, {"db_idx": 11359, "episode_idx": 41, "frame_idx": 66, "global_frame_idx": 11359, "task_index": 8}, {"db_idx": 11360, "episode_idx": 41, "frame_idx": 67, "global_frame_idx": 11360, "task_index": 8}, {"db_idx": 11361, "episode_idx": 41, "frame_idx": 68, "global_frame_idx": 11361, "task_index": 8}, {"db_idx": 11362, "episode_idx": 41, "frame_idx": 69, "global_frame_idx": 11362, "task_index": 8}, {"db_idx": 11363, "episode_idx": 41, "frame_idx": 70, "global_frame_idx": 11363, "task_index": 8}, {"db_idx": 11364, "episode_idx": 41, "frame_idx": 71, "global_frame_idx": 11364, "task_index": 8}, {"db_idx": 11365, "episode_idx": 41, "frame_idx": 72, "global_frame_idx": 11365, "task_index": 8}, {"db_idx": 11366, "episode_idx": 41, "frame_idx": 73, "global_frame_idx": 11366, "task_index": 8}, {"db_idx": 11367, "episode_idx": 41, "frame_idx": 74, "global_frame_idx": 11367, "task_index": 8}, {"db_idx": 11368, "episode_idx": 41, "frame_idx": 75, "global_frame_idx": 11368, "task_index": 8}, {"db_idx": 11369, "episode_idx": 41, "frame_idx": 76, "global_frame_idx": 11369, "task_index": 8}, {"db_idx": 11370, "episode_idx": 41, "frame_idx": 77, "global_frame_idx": 11370, "task_index": 8}, {"db_idx": 11371, "episode_idx": 41, "frame_idx": 78, "global_frame_idx": 11371, "task_index": 8}, {"db_idx": 11372, "episode_idx": 41, "frame_idx": 79, "global_frame_idx": 11372, "task_index": 8}, {"db_idx": 11373, "episode_idx": 41, 
"frame_idx": 80, "global_frame_idx": 11373, "task_index": 8}, {"db_idx": 11374, "episode_idx": 41, "frame_idx": 81, "global_frame_idx": 11374, "task_index": 8}, {"db_idx": 11375, "episode_idx": 41, "frame_idx": 82, "global_frame_idx": 11375, "task_index": 8}, {"db_idx": 11376, "episode_idx": 41, "frame_idx": 83, "global_frame_idx": 11376, "task_index": 8}, {"db_idx": 11377, "episode_idx": 41, "frame_idx": 84, "global_frame_idx": 11377, "task_index": 8}, {"db_idx": 11378, "episode_idx": 41, "frame_idx": 85, "global_frame_idx": 11378, "task_index": 8}, {"db_idx": 11379, "episode_idx": 41, "frame_idx": 86, "global_frame_idx": 11379, "task_index": 8}, {"db_idx": 11380, "episode_idx": 41, "frame_idx": 87, "global_frame_idx": 11380, "task_index": 8}, {"db_idx": 11381, "episode_idx": 41, "frame_idx": 88, "global_frame_idx": 11381, "task_index": 8}, {"db_idx": 11382, "episode_idx": 41, "frame_idx": 89, "global_frame_idx": 11382, "task_index": 8}, {"db_idx": 11383, "episode_idx": 41, "frame_idx": 90, "global_frame_idx": 11383, "task_index": 8}, {"db_idx": 11384, "episode_idx": 41, "frame_idx": 91, "global_frame_idx": 11384, "task_index": 8}, {"db_idx": 11385, "episode_idx": 41, "frame_idx": 92, "global_frame_idx": 11385, "task_index": 8}, {"db_idx": 11386, "episode_idx": 41, "frame_idx": 93, "global_frame_idx": 11386, "task_index": 8}, {"db_idx": 11387, "episode_idx": 41, "frame_idx": 94, "global_frame_idx": 11387, "task_index": 8}, {"db_idx": 11388, "episode_idx": 41, "frame_idx": 95, "global_frame_idx": 11388, "task_index": 8}, {"db_idx": 11389, "episode_idx": 41, "frame_idx": 96, "global_frame_idx": 11389, "task_index": 8}, {"db_idx": 11390, "episode_idx": 41, "frame_idx": 97, "global_frame_idx": 11390, "task_index": 8}, {"db_idx": 11391, "episode_idx": 41, "frame_idx": 98, "global_frame_idx": 11391, "task_index": 8}, {"db_idx": 11392, "episode_idx": 41, "frame_idx": 99, "global_frame_idx": 11392, "task_index": 8}, {"db_idx": 11393, "episode_idx": 41, "frame_idx": 100, 
"global_frame_idx": 11393, "task_index": 8}, {"db_idx": 11394, "episode_idx": 41, "frame_idx": 101, "global_frame_idx": 11394, "task_index": 8}, {"db_idx": 11395, "episode_idx": 41, "frame_idx": 102, "global_frame_idx": 11395, "task_index": 8}, {"db_idx": 11396, "episode_idx": 41, "frame_idx": 103, "global_frame_idx": 11396, "task_index": 8}, {"db_idx": 11397, "episode_idx": 41, "frame_idx": 104, "global_frame_idx": 11397, "task_index": 8}, {"db_idx": 11398, "episode_idx": 41, "frame_idx": 105, "global_frame_idx": 11398, "task_index": 8}, {"db_idx": 11399, "episode_idx": 41, "frame_idx": 106, "global_frame_idx": 11399, "task_index": 8}, {"db_idx": 11400, "episode_idx": 41, "frame_idx": 107, "global_frame_idx": 11400, "task_index": 8}, {"db_idx": 11401, "episode_idx": 41, "frame_idx": 108, "global_frame_idx": 11401, "task_index": 8}, {"db_idx": 11402, "episode_idx": 41, "frame_idx": 109, "global_frame_idx": 11402, "task_index": 8}, {"db_idx": 11403, "episode_idx": 41, "frame_idx": 110, "global_frame_idx": 11403, "task_index": 8}, {"db_idx": 11404, "episode_idx": 41, "frame_idx": 111, "global_frame_idx": 11404, "task_index": 8}, {"db_idx": 11405, "episode_idx": 41, "frame_idx": 112, "global_frame_idx": 11405, "task_index": 8}, {"db_idx": 11406, "episode_idx": 41, "frame_idx": 113, "global_frame_idx": 11406, "task_index": 8}, {"db_idx": 11407, "episode_idx": 41, "frame_idx": 114, "global_frame_idx": 11407, "task_index": 8}, {"db_idx": 11408, "episode_idx": 41, "frame_idx": 115, "global_frame_idx": 11408, "task_index": 8}, {"db_idx": 11409, "episode_idx": 41, "frame_idx": 116, "global_frame_idx": 11409, "task_index": 8}, {"db_idx": 11410, "episode_idx": 41, "frame_idx": 117, "global_frame_idx": 11410, "task_index": 8}, {"db_idx": 11411, "episode_idx": 41, "frame_idx": 118, "global_frame_idx": 11411, "task_index": 8}, {"db_idx": 11412, "episode_idx": 41, "frame_idx": 119, "global_frame_idx": 11412, "task_index": 8}, {"db_idx": 11413, "episode_idx": 41, "frame_idx": 120, 
"global_frame_idx": 11413, "task_index": 8}, {"db_idx": 11414, "episode_idx": 41, "frame_idx": 121, "global_frame_idx": 11414, "task_index": 8}, {"db_idx": 11415, "episode_idx": 41, "frame_idx": 122, "global_frame_idx": 11415, "task_index": 8}, {"db_idx": 11416, "episode_idx": 41, "frame_idx": 123, "global_frame_idx": 11416, "task_index": 8}, {"db_idx": 11417, "episode_idx": 41, "frame_idx": 124, "global_frame_idx": 11417, "task_index": 8}, {"db_idx": 11418, "episode_idx": 41, "frame_idx": 125, "global_frame_idx": 11418, "task_index": 8}, {"db_idx": 11419, "episode_idx": 41, "frame_idx": 126, "global_frame_idx": 11419, "task_index": 8}, {"db_idx": 11420, "episode_idx": 41, "frame_idx": 127, "global_frame_idx": 11420, "task_index": 8}, {"db_idx": 11421, "episode_idx": 41, "frame_idx": 128, "global_frame_idx": 11421, "task_index": 8}, {"db_idx": 11422, "episode_idx": 41, "frame_idx": 129, "global_frame_idx": 11422, "task_index": 8}, {"db_idx": 11423, "episode_idx": 41, "frame_idx": 130, "global_frame_idx": 11423, "task_index": 8}, {"db_idx": 11424, "episode_idx": 41, "frame_idx": 131, "global_frame_idx": 11424, "task_index": 8}, {"db_idx": 11425, "episode_idx": 41, "frame_idx": 132, "global_frame_idx": 11425, "task_index": 8}, {"db_idx": 11426, "episode_idx": 41, "frame_idx": 133, "global_frame_idx": 11426, "task_index": 8}, {"db_idx": 11427, "episode_idx": 41, "frame_idx": 134, "global_frame_idx": 11427, "task_index": 8}, {"db_idx": 11428, "episode_idx": 41, "frame_idx": 135, "global_frame_idx": 11428, "task_index": 8}, {"db_idx": 11429, "episode_idx": 41, "frame_idx": 136, "global_frame_idx": 11429, "task_index": 8}, {"db_idx": 11430, "episode_idx": 41, "frame_idx": 137, "global_frame_idx": 11430, "task_index": 8}, {"db_idx": 11431, "episode_idx": 41, "frame_idx": 138, "global_frame_idx": 11431, "task_index": 8}, {"db_idx": 11432, "episode_idx": 41, "frame_idx": 139, "global_frame_idx": 11432, "task_index": 8}, {"db_idx": 11433, "episode_idx": 41, "frame_idx": 140, 
"global_frame_idx": 11433, "task_index": 8}, {"db_idx": 11434, "episode_idx": 41, "frame_idx": 141, "global_frame_idx": 11434, "task_index": 8}, {"db_idx": 11435, "episode_idx": 41, "frame_idx": 142, "global_frame_idx": 11435, "task_index": 8}, {"db_idx": 11436, "episode_idx": 41, "frame_idx": 143, "global_frame_idx": 11436, "task_index": 8}, {"db_idx": 11437, "episode_idx": 41, "frame_idx": 144, "global_frame_idx": 11437, "task_index": 8}, {"db_idx": 11438, "episode_idx": 41, "frame_idx": 145, "global_frame_idx": 11438, "task_index": 8}, {"db_idx": 11439, "episode_idx": 41, "frame_idx": 146, "global_frame_idx": 11439, "task_index": 8}, {"db_idx": 11440, "episode_idx": 41, "frame_idx": 147, "global_frame_idx": 11440, "task_index": 8}, {"db_idx": 11441, "episode_idx": 41, "frame_idx": 148, "global_frame_idx": 11441, "task_index": 8}, {"db_idx": 11442, "episode_idx": 41, "frame_idx": 149, "global_frame_idx": 11442, "task_index": 8}, {"db_idx": 11443, "episode_idx": 41, "frame_idx": 150, "global_frame_idx": 11443, "task_index": 8}, {"db_idx": 11444, "episode_idx": 41, "frame_idx": 151, "global_frame_idx": 11444, "task_index": 8}, {"db_idx": 11445, "episode_idx": 41, "frame_idx": 152, "global_frame_idx": 11445, "task_index": 8}, {"db_idx": 11446, "episode_idx": 41, "frame_idx": 153, "global_frame_idx": 11446, "task_index": 8}, {"db_idx": 11447, "episode_idx": 41, "frame_idx": 154, "global_frame_idx": 11447, "task_index": 8}, {"db_idx": 11448, "episode_idx": 41, "frame_idx": 155, "global_frame_idx": 11448, "task_index": 8}, {"db_idx": 11449, "episode_idx": 41, "frame_idx": 156, "global_frame_idx": 11449, "task_index": 8}, {"db_idx": 11450, "episode_idx": 41, "frame_idx": 157, "global_frame_idx": 11450, "task_index": 8}, {"db_idx": 11451, "episode_idx": 41, "frame_idx": 158, "global_frame_idx": 11451, "task_index": 8}, {"db_idx": 11452, "episode_idx": 41, "frame_idx": 159, "global_frame_idx": 11452, "task_index": 8}, {"db_idx": 11453, "episode_idx": 41, "frame_idx": 160, 
"global_frame_idx": 11453, "task_index": 8}, {"db_idx": 11454, "episode_idx": 41, "frame_idx": 161, "global_frame_idx": 11454, "task_index": 8}, {"db_idx": 11455, "episode_idx": 41, "frame_idx": 162, "global_frame_idx": 11455, "task_index": 8}, {"db_idx": 11456, "episode_idx": 41, "frame_idx": 163, "global_frame_idx": 11456, "task_index": 8}, {"db_idx": 11457, "episode_idx": 41, "frame_idx": 164, "global_frame_idx": 11457, "task_index": 8}, {"db_idx": 11458, "episode_idx": 41, "frame_idx": 165, "global_frame_idx": 11458, "task_index": 8}, {"db_idx": 11459, "episode_idx": 41, "frame_idx": 166, "global_frame_idx": 11459, "task_index": 8}, {"db_idx": 11460, "episode_idx": 41, "frame_idx": 167, "global_frame_idx": 11460, "task_index": 8}, {"db_idx": 11461, "episode_idx": 41, "frame_idx": 168, "global_frame_idx": 11461, "task_index": 8}, {"db_idx": 11462, "episode_idx": 41, "frame_idx": 169, "global_frame_idx": 11462, "task_index": 8}, {"db_idx": 11463, "episode_idx": 41, "frame_idx": 170, "global_frame_idx": 11463, "task_index": 8}, {"db_idx": 11464, "episode_idx": 41, "frame_idx": 171, "global_frame_idx": 11464, "task_index": 8}, {"db_idx": 11465, "episode_idx": 41, "frame_idx": 172, "global_frame_idx": 11465, "task_index": 8}, {"db_idx": 11466, "episode_idx": 41, "frame_idx": 173, "global_frame_idx": 11466, "task_index": 8}, {"db_idx": 11467, "episode_idx": 41, "frame_idx": 174, "global_frame_idx": 11467, "task_index": 8}, {"db_idx": 11468, "episode_idx": 41, "frame_idx": 175, "global_frame_idx": 11468, "task_index": 8}, {"db_idx": 11469, "episode_idx": 41, "frame_idx": 176, "global_frame_idx": 11469, "task_index": 8}, {"db_idx": 11470, "episode_idx": 41, "frame_idx": 177, "global_frame_idx": 11470, "task_index": 8}, {"db_idx": 11471, "episode_idx": 41, "frame_idx": 178, "global_frame_idx": 11471, "task_index": 8}, {"db_idx": 11472, "episode_idx": 41, "frame_idx": 179, "global_frame_idx": 11472, "task_index": 8}, {"db_idx": 11473, "episode_idx": 41, "frame_idx": 180, 
"global_frame_idx": 11473, "task_index": 8}, {"db_idx": 11474, "episode_idx": 41, "frame_idx": 181, "global_frame_idx": 11474, "task_index": 8}, {"db_idx": 11475, "episode_idx": 41, "frame_idx": 182, "global_frame_idx": 11475, "task_index": 8}, {"db_idx": 11476, "episode_idx": 41, "frame_idx": 183, "global_frame_idx": 11476, "task_index": 8}, {"db_idx": 11477, "episode_idx": 41, "frame_idx": 184, "global_frame_idx": 11477, "task_index": 8}, {"db_idx": 11478, "episode_idx": 41, "frame_idx": 185, "global_frame_idx": 11478, "task_index": 8}, {"db_idx": 11479, "episode_idx": 41, "frame_idx": 186, "global_frame_idx": 11479, "task_index": 8}, {"db_idx": 11480, "episode_idx": 41, "frame_idx": 187, "global_frame_idx": 11480, "task_index": 8}, {"db_idx": 11481, "episode_idx": 41, "frame_idx": 188, "global_frame_idx": 11481, "task_index": 8}, {"db_idx": 11482, "episode_idx": 41, "frame_idx": 189, "global_frame_idx": 11482, "task_index": 8}, {"db_idx": 11483, "episode_idx": 41, "frame_idx": 190, "global_frame_idx": 11483, "task_index": 8}, {"db_idx": 11484, "episode_idx": 41, "frame_idx": 191, "global_frame_idx": 11484, "task_index": 8}, {"db_idx": 11485, "episode_idx": 41, "frame_idx": 192, "global_frame_idx": 11485, "task_index": 8}, {"db_idx": 11486, "episode_idx": 41, "frame_idx": 193, "global_frame_idx": 11486, "task_index": 8}, {"db_idx": 11487, "episode_idx": 41, "frame_idx": 194, "global_frame_idx": 11487, "task_index": 8}, {"db_idx": 11488, "episode_idx": 41, "frame_idx": 195, "global_frame_idx": 11488, "task_index": 8}, {"db_idx": 11489, "episode_idx": 41, "frame_idx": 196, "global_frame_idx": 11489, "task_index": 8}, {"db_idx": 11490, "episode_idx": 41, "frame_idx": 197, "global_frame_idx": 11490, "task_index": 8}, {"db_idx": 11491, "episode_idx": 41, "frame_idx": 198, "global_frame_idx": 11491, "task_index": 8}, {"db_idx": 11492, "episode_idx": 41, "frame_idx": 199, "global_frame_idx": 11492, "task_index": 8}, {"db_idx": 11493, "episode_idx": 41, "frame_idx": 200, 
"global_frame_idx": 11493, "task_index": 8}, {"db_idx": 11494, "episode_idx": 41, "frame_idx": 201, "global_frame_idx": 11494, "task_index": 8}, {"db_idx": 11495, "episode_idx": 41, "frame_idx": 202, "global_frame_idx": 11495, "task_index": 8}, {"db_idx": 11496, "episode_idx": 41, "frame_idx": 203, "global_frame_idx": 11496, "task_index": 8}, {"db_idx": 11497, "episode_idx": 41, "frame_idx": 204, "global_frame_idx": 11497, "task_index": 8}, {"db_idx": 11498, "episode_idx": 41, "frame_idx": 205, "global_frame_idx": 11498, "task_index": 8}, {"db_idx": 11499, "episode_idx": 41, "frame_idx": 206, "global_frame_idx": 11499, "task_index": 8}, {"db_idx": 11500, "episode_idx": 41, "frame_idx": 207, "global_frame_idx": 11500, "task_index": 8}, {"db_idx": 11501, "episode_idx": 41, "frame_idx": 208, "global_frame_idx": 11501, "task_index": 8}, {"db_idx": 11502, "episode_idx": 41, "frame_idx": 209, "global_frame_idx": 11502, "task_index": 8}, {"db_idx": 11503, "episode_idx": 41, "frame_idx": 210, "global_frame_idx": 11503, "task_index": 8}, {"db_idx": 11504, "episode_idx": 41, "frame_idx": 211, "global_frame_idx": 11504, "task_index": 8}, {"db_idx": 11505, "episode_idx": 41, "frame_idx": 212, "global_frame_idx": 11505, "task_index": 8}, {"db_idx": 11506, "episode_idx": 41, "frame_idx": 213, "global_frame_idx": 11506, "task_index": 8}, {"db_idx": 11507, "episode_idx": 41, "frame_idx": 214, "global_frame_idx": 11507, "task_index": 8}, {"db_idx": 11508, "episode_idx": 41, "frame_idx": 215, "global_frame_idx": 11508, "task_index": 8}, {"db_idx": 11509, "episode_idx": 41, "frame_idx": 216, "global_frame_idx": 11509, "task_index": 8}, {"db_idx": 11510, "episode_idx": 41, "frame_idx": 217, "global_frame_idx": 11510, "task_index": 8}, {"db_idx": 11511, "episode_idx": 41, "frame_idx": 218, "global_frame_idx": 11511, "task_index": 8}, {"db_idx": 11512, "episode_idx": 41, "frame_idx": 219, "global_frame_idx": 11512, "task_index": 8}, {"db_idx": 11513, "episode_idx": 41, "frame_idx": 220, 
"global_frame_idx": 11513, "task_index": 8}, {"db_idx": 11514, "episode_idx": 41, "frame_idx": 221, "global_frame_idx": 11514, "task_index": 8}, {"db_idx": 11515, "episode_idx": 41, "frame_idx": 222, "global_frame_idx": 11515, "task_index": 8}, {"db_idx": 11516, "episode_idx": 41, "frame_idx": 223, "global_frame_idx": 11516, "task_index": 8}, {"db_idx": 11517, "episode_idx": 41, "frame_idx": 224, "global_frame_idx": 11517, "task_index": 8}, {"db_idx": 11518, "episode_idx": 41, "frame_idx": 225, "global_frame_idx": 11518, "task_index": 8}, {"db_idx": 11519, "episode_idx": 41, "frame_idx": 226, "global_frame_idx": 11519, "task_index": 8}, {"db_idx": 11520, "episode_idx": 41, "frame_idx": 227, "global_frame_idx": 11520, "task_index": 8}, {"db_idx": 11521, "episode_idx": 41, "frame_idx": 228, "global_frame_idx": 11521, "task_index": 8}, {"db_idx": 11522, "episode_idx": 41, "frame_idx": 229, "global_frame_idx": 11522, "task_index": 8}, {"db_idx": 11523, "episode_idx": 41, "frame_idx": 230, "global_frame_idx": 11523, "task_index": 8}, {"db_idx": 11524, "episode_idx": 41, "frame_idx": 231, "global_frame_idx": 11524, "task_index": 8}, {"db_idx": 11525, "episode_idx": 41, "frame_idx": 232, "global_frame_idx": 11525, "task_index": 8}, {"db_idx": 11526, "episode_idx": 41, "frame_idx": 233, "global_frame_idx": 11526, "task_index": 8}, {"db_idx": 11527, "episode_idx": 41, "frame_idx": 234, "global_frame_idx": 11527, "task_index": 8}, {"db_idx": 11528, "episode_idx": 41, "frame_idx": 235, "global_frame_idx": 11528, "task_index": 8}, {"db_idx": 11529, "episode_idx": 41, "frame_idx": 236, "global_frame_idx": 11529, "task_index": 8}, {"db_idx": 11530, "episode_idx": 41, "frame_idx": 237, "global_frame_idx": 11530, "task_index": 8}, {"db_idx": 11531, "episode_idx": 41, "frame_idx": 238, "global_frame_idx": 11531, "task_index": 8}, {"db_idx": 11532, "episode_idx": 41, "frame_idx": 239, "global_frame_idx": 11532, "task_index": 8}, {"db_idx": 11533, "episode_idx": 41, "frame_idx": 240, 
"global_frame_idx": 11533, "task_index": 8}, {"db_idx": 11534, "episode_idx": 41, "frame_idx": 241, "global_frame_idx": 11534, "task_index": 8}, {"db_idx": 11535, "episode_idx": 41, "frame_idx": 242, "global_frame_idx": 11535, "task_index": 8}, {"db_idx": 11536, "episode_idx": 41, "frame_idx": 243, "global_frame_idx": 11536, "task_index": 8}, {"db_idx": 11537, "episode_idx": 41, "frame_idx": 244, "global_frame_idx": 11537, "task_index": 8}, {"db_idx": 11538, "episode_idx": 41, "frame_idx": 245, "global_frame_idx": 11538, "task_index": 8}, {"db_idx": 11539, "episode_idx": 41, "frame_idx": 246, "global_frame_idx": 11539, "task_index": 8}, {"db_idx": 11540, "episode_idx": 42, "frame_idx": 0, "global_frame_idx": 11540, "task_index": 8}, {"db_idx": 11541, "episode_idx": 42, "frame_idx": 1, "global_frame_idx": 11541, "task_index": 8}, {"db_idx": 11542, "episode_idx": 42, "frame_idx": 2, "global_frame_idx": 11542, "task_index": 8}, {"db_idx": 11543, "episode_idx": 42, "frame_idx": 3, "global_frame_idx": 11543, "task_index": 8}, {"db_idx": 11544, "episode_idx": 42, "frame_idx": 4, "global_frame_idx": 11544, "task_index": 8}, {"db_idx": 11545, "episode_idx": 42, "frame_idx": 5, "global_frame_idx": 11545, "task_index": 8}, {"db_idx": 11546, "episode_idx": 42, "frame_idx": 6, "global_frame_idx": 11546, "task_index": 8}, {"db_idx": 11547, "episode_idx": 42, "frame_idx": 7, "global_frame_idx": 11547, "task_index": 8}, {"db_idx": 11548, "episode_idx": 42, "frame_idx": 8, "global_frame_idx": 11548, "task_index": 8}, {"db_idx": 11549, "episode_idx": 42, "frame_idx": 9, "global_frame_idx": 11549, "task_index": 8}, {"db_idx": 11550, "episode_idx": 42, "frame_idx": 10, "global_frame_idx": 11550, "task_index": 8}, {"db_idx": 11551, "episode_idx": 42, "frame_idx": 11, "global_frame_idx": 11551, "task_index": 8}, {"db_idx": 11552, "episode_idx": 42, "frame_idx": 12, "global_frame_idx": 11552, "task_index": 8}, {"db_idx": 11553, "episode_idx": 42, "frame_idx": 13, "global_frame_idx": 
11553, "task_index": 8}, {"db_idx": 11554, "episode_idx": 42, "frame_idx": 14, "global_frame_idx": 11554, "task_index": 8}, {"db_idx": 11555, "episode_idx": 42, "frame_idx": 15, "global_frame_idx": 11555, "task_index": 8}, {"db_idx": 11556, "episode_idx": 42, "frame_idx": 16, "global_frame_idx": 11556, "task_index": 8}, {"db_idx": 11557, "episode_idx": 42, "frame_idx": 17, "global_frame_idx": 11557, "task_index": 8}, {"db_idx": 11558, "episode_idx": 42, "frame_idx": 18, "global_frame_idx": 11558, "task_index": 8}, {"db_idx": 11559, "episode_idx": 42, "frame_idx": 19, "global_frame_idx": 11559, "task_index": 8}, {"db_idx": 11560, "episode_idx": 42, "frame_idx": 20, "global_frame_idx": 11560, "task_index": 8}, {"db_idx": 11561, "episode_idx": 42, "frame_idx": 21, "global_frame_idx": 11561, "task_index": 8}, {"db_idx": 11562, "episode_idx": 42, "frame_idx": 22, "global_frame_idx": 11562, "task_index": 8}, {"db_idx": 11563, "episode_idx": 42, "frame_idx": 23, "global_frame_idx": 11563, "task_index": 8}, {"db_idx": 11564, "episode_idx": 42, "frame_idx": 24, "global_frame_idx": 11564, "task_index": 8}, {"db_idx": 11565, "episode_idx": 42, "frame_idx": 25, "global_frame_idx": 11565, "task_index": 8}, {"db_idx": 11566, "episode_idx": 42, "frame_idx": 26, "global_frame_idx": 11566, "task_index": 8}, {"db_idx": 11567, "episode_idx": 42, "frame_idx": 27, "global_frame_idx": 11567, "task_index": 8}, {"db_idx": 11568, "episode_idx": 42, "frame_idx": 28, "global_frame_idx": 11568, "task_index": 8}, {"db_idx": 11569, "episode_idx": 42, "frame_idx": 29, "global_frame_idx": 11569, "task_index": 8}, {"db_idx": 11570, "episode_idx": 42, "frame_idx": 30, "global_frame_idx": 11570, "task_index": 8}, {"db_idx": 11571, "episode_idx": 42, "frame_idx": 31, "global_frame_idx": 11571, "task_index": 8}, {"db_idx": 11572, "episode_idx": 42, "frame_idx": 32, "global_frame_idx": 11572, "task_index": 8}, {"db_idx": 11573, "episode_idx": 42, "frame_idx": 33, "global_frame_idx": 11573, 
"task_index": 8}, {"db_idx": 11574, "episode_idx": 42, "frame_idx": 34, "global_frame_idx": 11574, "task_index": 8}, {"db_idx": 11575, "episode_idx": 42, "frame_idx": 35, "global_frame_idx": 11575, "task_index": 8}, {"db_idx": 11576, "episode_idx": 42, "frame_idx": 36, "global_frame_idx": 11576, "task_index": 8}, {"db_idx": 11577, "episode_idx": 42, "frame_idx": 37, "global_frame_idx": 11577, "task_index": 8}, {"db_idx": 11578, "episode_idx": 42, "frame_idx": 38, "global_frame_idx": 11578, "task_index": 8}, {"db_idx": 11579, "episode_idx": 42, "frame_idx": 39, "global_frame_idx": 11579, "task_index": 8}, {"db_idx": 11580, "episode_idx": 42, "frame_idx": 40, "global_frame_idx": 11580, "task_index": 8}, {"db_idx": 11581, "episode_idx": 42, "frame_idx": 41, "global_frame_idx": 11581, "task_index": 8}, {"db_idx": 11582, "episode_idx": 42, "frame_idx": 42, "global_frame_idx": 11582, "task_index": 8}, {"db_idx": 11583, "episode_idx": 42, "frame_idx": 43, "global_frame_idx": 11583, "task_index": 8}, {"db_idx": 11584, "episode_idx": 42, "frame_idx": 44, "global_frame_idx": 11584, "task_index": 8}, {"db_idx": 11585, "episode_idx": 42, "frame_idx": 45, "global_frame_idx": 11585, "task_index": 8}, {"db_idx": 11586, "episode_idx": 42, "frame_idx": 46, "global_frame_idx": 11586, "task_index": 8}, {"db_idx": 11587, "episode_idx": 42, "frame_idx": 47, "global_frame_idx": 11587, "task_index": 8}, {"db_idx": 11588, "episode_idx": 42, "frame_idx": 48, "global_frame_idx": 11588, "task_index": 8}, {"db_idx": 11589, "episode_idx": 42, "frame_idx": 49, "global_frame_idx": 11589, "task_index": 8}, {"db_idx": 11590, "episode_idx": 42, "frame_idx": 50, "global_frame_idx": 11590, "task_index": 8}, {"db_idx": 11591, "episode_idx": 42, "frame_idx": 51, "global_frame_idx": 11591, "task_index": 8}, {"db_idx": 11592, "episode_idx": 42, "frame_idx": 52, "global_frame_idx": 11592, "task_index": 8}, {"db_idx": 11593, "episode_idx": 42, "frame_idx": 53, "global_frame_idx": 11593, "task_index": 8}, 
{"db_idx": 11594, "episode_idx": 42, "frame_idx": 54, "global_frame_idx": 11594, "task_index": 8}, {"db_idx": 11595, "episode_idx": 42, "frame_idx": 55, "global_frame_idx": 11595, "task_index": 8}, {"db_idx": 11596, "episode_idx": 42, "frame_idx": 56, "global_frame_idx": 11596, "task_index": 8}, {"db_idx": 11597, "episode_idx": 42, "frame_idx": 57, "global_frame_idx": 11597, "task_index": 8}, {"db_idx": 11598, "episode_idx": 42, "frame_idx": 58, "global_frame_idx": 11598, "task_index": 8}, {"db_idx": 11599, "episode_idx": 42, "frame_idx": 59, "global_frame_idx": 11599, "task_index": 8}, {"db_idx": 11600, "episode_idx": 42, "frame_idx": 60, "global_frame_idx": 11600, "task_index": 8}, {"db_idx": 11601, "episode_idx": 42, "frame_idx": 61, "global_frame_idx": 11601, "task_index": 8}, {"db_idx": 11602, "episode_idx": 42, "frame_idx": 62, "global_frame_idx": 11602, "task_index": 8}, {"db_idx": 11603, "episode_idx": 42, "frame_idx": 63, "global_frame_idx": 11603, "task_index": 8}, {"db_idx": 11604, "episode_idx": 42, "frame_idx": 64, "global_frame_idx": 11604, "task_index": 8}, {"db_idx": 11605, "episode_idx": 42, "frame_idx": 65, "global_frame_idx": 11605, "task_index": 8}, {"db_idx": 11606, "episode_idx": 42, "frame_idx": 66, "global_frame_idx": 11606, "task_index": 8}, {"db_idx": 11607, "episode_idx": 42, "frame_idx": 67, "global_frame_idx": 11607, "task_index": 8}, {"db_idx": 11608, "episode_idx": 42, "frame_idx": 68, "global_frame_idx": 11608, "task_index": 8}, {"db_idx": 11609, "episode_idx": 42, "frame_idx": 69, "global_frame_idx": 11609, "task_index": 8}, {"db_idx": 11610, "episode_idx": 42, "frame_idx": 70, "global_frame_idx": 11610, "task_index": 8}, {"db_idx": 11611, "episode_idx": 42, "frame_idx": 71, "global_frame_idx": 11611, "task_index": 8}, {"db_idx": 11612, "episode_idx": 42, "frame_idx": 72, "global_frame_idx": 11612, "task_index": 8}, {"db_idx": 11613, "episode_idx": 42, "frame_idx": 73, "global_frame_idx": 11613, "task_index": 8}, {"db_idx": 11614, 
"episode_idx": 42, "frame_idx": 74, "global_frame_idx": 11614, "task_index": 8}, {"db_idx": 11615, "episode_idx": 42, "frame_idx": 75, "global_frame_idx": 11615, "task_index": 8}, {"db_idx": 11616, "episode_idx": 42, "frame_idx": 76, "global_frame_idx": 11616, "task_index": 8}, {"db_idx": 11617, "episode_idx": 42, "frame_idx": 77, "global_frame_idx": 11617, "task_index": 8}, {"db_idx": 11618, "episode_idx": 42, "frame_idx": 78, "global_frame_idx": 11618, "task_index": 8}, {"db_idx": 11619, "episode_idx": 42, "frame_idx": 79, "global_frame_idx": 11619, "task_index": 8}, {"db_idx": 11620, "episode_idx": 42, "frame_idx": 80, "global_frame_idx": 11620, "task_index": 8}, {"db_idx": 11621, "episode_idx": 42, "frame_idx": 81, "global_frame_idx": 11621, "task_index": 8}, {"db_idx": 11622, "episode_idx": 42, "frame_idx": 82, "global_frame_idx": 11622, "task_index": 8}, {"db_idx": 11623, "episode_idx": 42, "frame_idx": 83, "global_frame_idx": 11623, "task_index": 8}, {"db_idx": 11624, "episode_idx": 42, "frame_idx": 84, "global_frame_idx": 11624, "task_index": 8}, {"db_idx": 11625, "episode_idx": 42, "frame_idx": 85, "global_frame_idx": 11625, "task_index": 8}, {"db_idx": 11626, "episode_idx": 42, "frame_idx": 86, "global_frame_idx": 11626, "task_index": 8}, {"db_idx": 11627, "episode_idx": 42, "frame_idx": 87, "global_frame_idx": 11627, "task_index": 8}, {"db_idx": 11628, "episode_idx": 42, "frame_idx": 88, "global_frame_idx": 11628, "task_index": 8}, {"db_idx": 11629, "episode_idx": 42, "frame_idx": 89, "global_frame_idx": 11629, "task_index": 8}, {"db_idx": 11630, "episode_idx": 42, "frame_idx": 90, "global_frame_idx": 11630, "task_index": 8}, {"db_idx": 11631, "episode_idx": 42, "frame_idx": 91, "global_frame_idx": 11631, "task_index": 8}, {"db_idx": 11632, "episode_idx": 42, "frame_idx": 92, "global_frame_idx": 11632, "task_index": 8}, {"db_idx": 11633, "episode_idx": 42, "frame_idx": 93, "global_frame_idx": 11633, "task_index": 8}, {"db_idx": 11634, "episode_idx": 42, 
"frame_idx": 94, "global_frame_idx": 11634, "task_index": 8}, {"db_idx": 11635, "episode_idx": 42, "frame_idx": 95, "global_frame_idx": 11635, "task_index": 8}, {"db_idx": 11636, "episode_idx": 42, "frame_idx": 96, "global_frame_idx": 11636, "task_index": 8}, {"db_idx": 11637, "episode_idx": 42, "frame_idx": 97, "global_frame_idx": 11637, "task_index": 8}, {"db_idx": 11638, "episode_idx": 42, "frame_idx": 98, "global_frame_idx": 11638, "task_index": 8}, {"db_idx": 11639, "episode_idx": 42, "frame_idx": 99, "global_frame_idx": 11639, "task_index": 8}, {"db_idx": 11640, "episode_idx": 42, "frame_idx": 100, "global_frame_idx": 11640, "task_index": 8}, {"db_idx": 11641, "episode_idx": 42, "frame_idx": 101, "global_frame_idx": 11641, "task_index": 8}, {"db_idx": 11642, "episode_idx": 42, "frame_idx": 102, "global_frame_idx": 11642, "task_index": 8}, {"db_idx": 11643, "episode_idx": 42, "frame_idx": 103, "global_frame_idx": 11643, "task_index": 8}, {"db_idx": 11644, "episode_idx": 42, "frame_idx": 104, "global_frame_idx": 11644, "task_index": 8}, {"db_idx": 11645, "episode_idx": 42, "frame_idx": 105, "global_frame_idx": 11645, "task_index": 8}, {"db_idx": 11646, "episode_idx": 42, "frame_idx": 106, "global_frame_idx": 11646, "task_index": 8}, {"db_idx": 11647, "episode_idx": 42, "frame_idx": 107, "global_frame_idx": 11647, "task_index": 8}, {"db_idx": 11648, "episode_idx": 42, "frame_idx": 108, "global_frame_idx": 11648, "task_index": 8}, {"db_idx": 11649, "episode_idx": 42, "frame_idx": 109, "global_frame_idx": 11649, "task_index": 8}, {"db_idx": 11650, "episode_idx": 42, "frame_idx": 110, "global_frame_idx": 11650, "task_index": 8}, {"db_idx": 11651, "episode_idx": 42, "frame_idx": 111, "global_frame_idx": 11651, "task_index": 8}, {"db_idx": 11652, "episode_idx": 42, "frame_idx": 112, "global_frame_idx": 11652, "task_index": 8}, {"db_idx": 11653, "episode_idx": 42, "frame_idx": 113, "global_frame_idx": 11653, "task_index": 8}, {"db_idx": 11654, "episode_idx": 42, 
"frame_idx": 114, "global_frame_idx": 11654, "task_index": 8}, {"db_idx": 11655, "episode_idx": 42, "frame_idx": 115, "global_frame_idx": 11655, "task_index": 8}, {"db_idx": 11656, "episode_idx": 42, "frame_idx": 116, "global_frame_idx": 11656, "task_index": 8}, {"db_idx": 11657, "episode_idx": 42, "frame_idx": 117, "global_frame_idx": 11657, "task_index": 8}, {"db_idx": 11658, "episode_idx": 42, "frame_idx": 118, "global_frame_idx": 11658, "task_index": 8}, {"db_idx": 11659, "episode_idx": 42, "frame_idx": 119, "global_frame_idx": 11659, "task_index": 8}, {"db_idx": 11660, "episode_idx": 42, "frame_idx": 120, "global_frame_idx": 11660, "task_index": 8}, {"db_idx": 11661, "episode_idx": 42, "frame_idx": 121, "global_frame_idx": 11661, "task_index": 8}, {"db_idx": 11662, "episode_idx": 42, "frame_idx": 122, "global_frame_idx": 11662, "task_index": 8}, {"db_idx": 11663, "episode_idx": 42, "frame_idx": 123, "global_frame_idx": 11663, "task_index": 8}, {"db_idx": 11664, "episode_idx": 42, "frame_idx": 124, "global_frame_idx": 11664, "task_index": 8}, {"db_idx": 11665, "episode_idx": 42, "frame_idx": 125, "global_frame_idx": 11665, "task_index": 8}, {"db_idx": 11666, "episode_idx": 42, "frame_idx": 126, "global_frame_idx": 11666, "task_index": 8}, {"db_idx": 11667, "episode_idx": 42, "frame_idx": 127, "global_frame_idx": 11667, "task_index": 8}, {"db_idx": 11668, "episode_idx": 42, "frame_idx": 128, "global_frame_idx": 11668, "task_index": 8}, {"db_idx": 11669, "episode_idx": 42, "frame_idx": 129, "global_frame_idx": 11669, "task_index": 8}, {"db_idx": 11670, "episode_idx": 42, "frame_idx": 130, "global_frame_idx": 11670, "task_index": 8}, {"db_idx": 11671, "episode_idx": 42, "frame_idx": 131, "global_frame_idx": 11671, "task_index": 8}, {"db_idx": 11672, "episode_idx": 42, "frame_idx": 132, "global_frame_idx": 11672, "task_index": 8}, {"db_idx": 11673, "episode_idx": 42, "frame_idx": 133, "global_frame_idx": 11673, "task_index": 8}, {"db_idx": 11674, "episode_idx": 42, 
"frame_idx": 134, "global_frame_idx": 11674, "task_index": 8}, {"db_idx": 11675, "episode_idx": 42, "frame_idx": 135, "global_frame_idx": 11675, "task_index": 8}, {"db_idx": 11676, "episode_idx": 42, "frame_idx": 136, "global_frame_idx": 11676, "task_index": 8}, {"db_idx": 11677, "episode_idx": 42, "frame_idx": 137, "global_frame_idx": 11677, "task_index": 8}, {"db_idx": 11678, "episode_idx": 42, "frame_idx": 138, "global_frame_idx": 11678, "task_index": 8}, {"db_idx": 11679, "episode_idx": 42, "frame_idx": 139, "global_frame_idx": 11679, "task_index": 8}, {"db_idx": 11680, "episode_idx": 42, "frame_idx": 140, "global_frame_idx": 11680, "task_index": 8}, {"db_idx": 11681, "episode_idx": 42, "frame_idx": 141, "global_frame_idx": 11681, "task_index": 8}, {"db_idx": 11682, "episode_idx": 42, "frame_idx": 142, "global_frame_idx": 11682, "task_index": 8}, {"db_idx": 11683, "episode_idx": 42, "frame_idx": 143, "global_frame_idx": 11683, "task_index": 8}, {"db_idx": 11684, "episode_idx": 42, "frame_idx": 144, "global_frame_idx": 11684, "task_index": 8}, {"db_idx": 11685, "episode_idx": 42, "frame_idx": 145, "global_frame_idx": 11685, "task_index": 8}, {"db_idx": 11686, "episode_idx": 42, "frame_idx": 146, "global_frame_idx": 11686, "task_index": 8}, {"db_idx": 11687, "episode_idx": 42, "frame_idx": 147, "global_frame_idx": 11687, "task_index": 8}, {"db_idx": 11688, "episode_idx": 42, "frame_idx": 148, "global_frame_idx": 11688, "task_index": 8}, {"db_idx": 11689, "episode_idx": 42, "frame_idx": 149, "global_frame_idx": 11689, "task_index": 8}, {"db_idx": 11690, "episode_idx": 42, "frame_idx": 150, "global_frame_idx": 11690, "task_index": 8}, {"db_idx": 11691, "episode_idx": 42, "frame_idx": 151, "global_frame_idx": 11691, "task_index": 8}, {"db_idx": 11692, "episode_idx": 42, "frame_idx": 152, "global_frame_idx": 11692, "task_index": 8}, {"db_idx": 11693, "episode_idx": 42, "frame_idx": 153, "global_frame_idx": 11693, "task_index": 8}, {"db_idx": 11694, "episode_idx": 42, 
"frame_idx": 154, "global_frame_idx": 11694, "task_index": 8}, {"db_idx": 11695, "episode_idx": 42, "frame_idx": 155, "global_frame_idx": 11695, "task_index": 8}, {"db_idx": 11696, "episode_idx": 42, "frame_idx": 156, "global_frame_idx": 11696, "task_index": 8}, {"db_idx": 11697, "episode_idx": 42, "frame_idx": 157, "global_frame_idx": 11697, "task_index": 8}, {"db_idx": 11698, "episode_idx": 42, "frame_idx": 158, "global_frame_idx": 11698, "task_index": 8}, {"db_idx": 11699, "episode_idx": 42, "frame_idx": 159, "global_frame_idx": 11699, "task_index": 8}, {"db_idx": 11700, "episode_idx": 42, "frame_idx": 160, "global_frame_idx": 11700, "task_index": 8}, {"db_idx": 11701, "episode_idx": 42, "frame_idx": 161, "global_frame_idx": 11701, "task_index": 8}, {"db_idx": 11702, "episode_idx": 42, "frame_idx": 162, "global_frame_idx": 11702, "task_index": 8}, {"db_idx": 11703, "episode_idx": 42, "frame_idx": 163, "global_frame_idx": 11703, "task_index": 8}, {"db_idx": 11704, "episode_idx": 42, "frame_idx": 164, "global_frame_idx": 11704, "task_index": 8}, {"db_idx": 11705, "episode_idx": 42, "frame_idx": 165, "global_frame_idx": 11705, "task_index": 8}, {"db_idx": 11706, "episode_idx": 42, "frame_idx": 166, "global_frame_idx": 11706, "task_index": 8}, {"db_idx": 11707, "episode_idx": 42, "frame_idx": 167, "global_frame_idx": 11707, "task_index": 8}, {"db_idx": 11708, "episode_idx": 42, "frame_idx": 168, "global_frame_idx": 11708, "task_index": 8}, {"db_idx": 11709, "episode_idx": 42, "frame_idx": 169, "global_frame_idx": 11709, "task_index": 8}, {"db_idx": 11710, "episode_idx": 42, "frame_idx": 170, "global_frame_idx": 11710, "task_index": 8}, {"db_idx": 11711, "episode_idx": 42, "frame_idx": 171, "global_frame_idx": 11711, "task_index": 8}, {"db_idx": 11712, "episode_idx": 42, "frame_idx": 172, "global_frame_idx": 11712, "task_index": 8}, {"db_idx": 11713, "episode_idx": 42, "frame_idx": 173, "global_frame_idx": 11713, "task_index": 8}, {"db_idx": 11714, "episode_idx": 42, 
"frame_idx": 174, "global_frame_idx": 11714, "task_index": 8}, {"db_idx": 11715, "episode_idx": 42, "frame_idx": 175, "global_frame_idx": 11715, "task_index": 8}, {"db_idx": 11716, "episode_idx": 42, "frame_idx": 176, "global_frame_idx": 11716, "task_index": 8}, {"db_idx": 11717, "episode_idx": 42, "frame_idx": 177, "global_frame_idx": 11717, "task_index": 8}, {"db_idx": 11718, "episode_idx": 42, "frame_idx": 178, "global_frame_idx": 11718, "task_index": 8}, {"db_idx": 11719, "episode_idx": 42, "frame_idx": 179, "global_frame_idx": 11719, "task_index": 8}, {"db_idx": 11720, "episode_idx": 42, "frame_idx": 180, "global_frame_idx": 11720, "task_index": 8}, {"db_idx": 11721, "episode_idx": 42, "frame_idx": 181, "global_frame_idx": 11721, "task_index": 8}, {"db_idx": 11722, "episode_idx": 42, "frame_idx": 182, "global_frame_idx": 11722, "task_index": 8}, {"db_idx": 11723, "episode_idx": 42, "frame_idx": 183, "global_frame_idx": 11723, "task_index": 8}, {"db_idx": 11724, "episode_idx": 42, "frame_idx": 184, "global_frame_idx": 11724, "task_index": 8}, {"db_idx": 11725, "episode_idx": 42, "frame_idx": 185, "global_frame_idx": 11725, "task_index": 8}, {"db_idx": 11726, "episode_idx": 42, "frame_idx": 186, "global_frame_idx": 11726, "task_index": 8}, {"db_idx": 11727, "episode_idx": 42, "frame_idx": 187, "global_frame_idx": 11727, "task_index": 8}, {"db_idx": 11728, "episode_idx": 42, "frame_idx": 188, "global_frame_idx": 11728, "task_index": 8}, {"db_idx": 11729, "episode_idx": 42, "frame_idx": 189, "global_frame_idx": 11729, "task_index": 8}, {"db_idx": 11730, "episode_idx": 42, "frame_idx": 190, "global_frame_idx": 11730, "task_index": 8}, {"db_idx": 11731, "episode_idx": 42, "frame_idx": 191, "global_frame_idx": 11731, "task_index": 8}, {"db_idx": 11732, "episode_idx": 42, "frame_idx": 192, "global_frame_idx": 11732, "task_index": 8}, {"db_idx": 11733, "episode_idx": 42, "frame_idx": 193, "global_frame_idx": 11733, "task_index": 8}, {"db_idx": 11734, "episode_idx": 42, 
"frame_idx": 194, "global_frame_idx": 11734, "task_index": 8}, {"db_idx": 11735, "episode_idx": 42, "frame_idx": 195, "global_frame_idx": 11735, "task_index": 8}, {"db_idx": 11736, "episode_idx": 42, "frame_idx": 196, "global_frame_idx": 11736, "task_index": 8}, {"db_idx": 11737, "episode_idx": 42, "frame_idx": 197, "global_frame_idx": 11737, "task_index": 8}, {"db_idx": 11738, "episode_idx": 42, "frame_idx": 198, "global_frame_idx": 11738, "task_index": 8}, {"db_idx": 11739, "episode_idx": 42, "frame_idx": 199, "global_frame_idx": 11739, "task_index": 8}, {"db_idx": 11740, "episode_idx": 42, "frame_idx": 200, "global_frame_idx": 11740, "task_index": 8}, {"db_idx": 11741, "episode_idx": 42, "frame_idx": 201, "global_frame_idx": 11741, "task_index": 8}, {"db_idx": 11742, "episode_idx": 42, "frame_idx": 202, "global_frame_idx": 11742, "task_index": 8}, {"db_idx": 11743, "episode_idx": 42, "frame_idx": 203, "global_frame_idx": 11743, "task_index": 8}, {"db_idx": 11744, "episode_idx": 42, "frame_idx": 204, "global_frame_idx": 11744, "task_index": 8}, {"db_idx": 11745, "episode_idx": 42, "frame_idx": 205, "global_frame_idx": 11745, "task_index": 8}, {"db_idx": 11746, "episode_idx": 42, "frame_idx": 206, "global_frame_idx": 11746, "task_index": 8}, {"db_idx": 11747, "episode_idx": 42, "frame_idx": 207, "global_frame_idx": 11747, "task_index": 8}, {"db_idx": 11748, "episode_idx": 42, "frame_idx": 208, "global_frame_idx": 11748, "task_index": 8}, {"db_idx": 11749, "episode_idx": 42, "frame_idx": 209, "global_frame_idx": 11749, "task_index": 8}, {"db_idx": 11750, "episode_idx": 42, "frame_idx": 210, "global_frame_idx": 11750, "task_index": 8}, {"db_idx": 11751, "episode_idx": 42, "frame_idx": 211, "global_frame_idx": 11751, "task_index": 8}, {"db_idx": 11752, "episode_idx": 42, "frame_idx": 212, "global_frame_idx": 11752, "task_index": 8}, {"db_idx": 11753, "episode_idx": 42, "frame_idx": 213, "global_frame_idx": 11753, "task_index": 8}, {"db_idx": 11754, "episode_idx": 42, 
"frame_idx": 214, "global_frame_idx": 11754, "task_index": 8}, {"db_idx": 11755, "episode_idx": 42, "frame_idx": 215, "global_frame_idx": 11755, "task_index": 8}, {"db_idx": 11756, "episode_idx": 42, "frame_idx": 216, "global_frame_idx": 11756, "task_index": 8}, {"db_idx": 11757, "episode_idx": 42, "frame_idx": 217, "global_frame_idx": 11757, "task_index": 8}, {"db_idx": 11758, "episode_idx": 42, "frame_idx": 218, "global_frame_idx": 11758, "task_index": 8}, {"db_idx": 11759, "episode_idx": 42, "frame_idx": 219, "global_frame_idx": 11759, "task_index": 8}, {"db_idx": 11760, "episode_idx": 42, "frame_idx": 220, "global_frame_idx": 11760, "task_index": 8}, {"db_idx": 11761, "episode_idx": 42, "frame_idx": 221, "global_frame_idx": 11761, "task_index": 8}, {"db_idx": 11762, "episode_idx": 42, "frame_idx": 222, "global_frame_idx": 11762, "task_index": 8}, {"db_idx": 11763, "episode_idx": 42, "frame_idx": 223, "global_frame_idx": 11763, "task_index": 8}, {"db_idx": 11764, "episode_idx": 42, "frame_idx": 224, "global_frame_idx": 11764, "task_index": 8}, {"db_idx": 11765, "episode_idx": 42, "frame_idx": 225, "global_frame_idx": 11765, "task_index": 8}, {"db_idx": 11766, "episode_idx": 42, "frame_idx": 226, "global_frame_idx": 11766, "task_index": 8}, {"db_idx": 11767, "episode_idx": 42, "frame_idx": 227, "global_frame_idx": 11767, "task_index": 8}, {"db_idx": 11768, "episode_idx": 42, "frame_idx": 228, "global_frame_idx": 11768, "task_index": 8}, {"db_idx": 11769, "episode_idx": 42, "frame_idx": 229, "global_frame_idx": 11769, "task_index": 8}, {"db_idx": 11770, "episode_idx": 42, "frame_idx": 230, "global_frame_idx": 11770, "task_index": 8}, {"db_idx": 11771, "episode_idx": 42, "frame_idx": 231, "global_frame_idx": 11771, "task_index": 8}, {"db_idx": 11772, "episode_idx": 42, "frame_idx": 232, "global_frame_idx": 11772, "task_index": 8}, {"db_idx": 11773, "episode_idx": 42, "frame_idx": 233, "global_frame_idx": 11773, "task_index": 8}, {"db_idx": 11774, "episode_idx": 42, 
"frame_idx": 234, "global_frame_idx": 11774, "task_index": 8}, {"db_idx": 11775, "episode_idx": 42, "frame_idx": 235, "global_frame_idx": 11775, "task_index": 8}, {"db_idx": 11776, "episode_idx": 42, "frame_idx": 236, "global_frame_idx": 11776, "task_index": 8}, {"db_idx": 11777, "episode_idx": 42, "frame_idx": 237, "global_frame_idx": 11777, "task_index": 8}, {"db_idx": 11778, "episode_idx": 42, "frame_idx": 238, "global_frame_idx": 11778, "task_index": 8}, {"db_idx": 11779, "episode_idx": 42, "frame_idx": 239, "global_frame_idx": 11779, "task_index": 8}, {"db_idx": 11780, "episode_idx": 42, "frame_idx": 240, "global_frame_idx": 11780, "task_index": 8}, {"db_idx": 11781, "episode_idx": 42, "frame_idx": 241, "global_frame_idx": 11781, "task_index": 8}, {"db_idx": 11782, "episode_idx": 42, "frame_idx": 242, "global_frame_idx": 11782, "task_index": 8}, {"db_idx": 11783, "episode_idx": 42, "frame_idx": 243, "global_frame_idx": 11783, "task_index": 8}, {"db_idx": 11784, "episode_idx": 42, "frame_idx": 244, "global_frame_idx": 11784, "task_index": 8}, {"db_idx": 11785, "episode_idx": 42, "frame_idx": 245, "global_frame_idx": 11785, "task_index": 8}, {"db_idx": 11786, "episode_idx": 42, "frame_idx": 246, "global_frame_idx": 11786, "task_index": 8}, {"db_idx": 11787, "episode_idx": 42, "frame_idx": 247, "global_frame_idx": 11787, "task_index": 8}, {"db_idx": 11788, "episode_idx": 42, "frame_idx": 248, "global_frame_idx": 11788, "task_index": 8}, {"db_idx": 11789, "episode_idx": 42, "frame_idx": 249, "global_frame_idx": 11789, "task_index": 8}, {"db_idx": 11790, "episode_idx": 42, "frame_idx": 250, "global_frame_idx": 11790, "task_index": 8}, {"db_idx": 11791, "episode_idx": 42, "frame_idx": 251, "global_frame_idx": 11791, "task_index": 8}, {"db_idx": 11792, "episode_idx": 42, "frame_idx": 252, "global_frame_idx": 11792, "task_index": 8}, {"db_idx": 11793, "episode_idx": 42, "frame_idx": 253, "global_frame_idx": 11793, "task_index": 8}, {"db_idx": 11794, "episode_idx": 43, 
"frame_idx": 0, "global_frame_idx": 11794, "task_index": 8}, {"db_idx": 11795, "episode_idx": 43, "frame_idx": 1, "global_frame_idx": 11795, "task_index": 8}, {"db_idx": 11796, "episode_idx": 43, "frame_idx": 2, "global_frame_idx": 11796, "task_index": 8}, {"db_idx": 11797, "episode_idx": 43, "frame_idx": 3, "global_frame_idx": 11797, "task_index": 8}, {"db_idx": 11798, "episode_idx": 43, "frame_idx": 4, "global_frame_idx": 11798, "task_index": 8}, {"db_idx": 11799, "episode_idx": 43, "frame_idx": 5, "global_frame_idx": 11799, "task_index": 8}, {"db_idx": 11800, "episode_idx": 43, "frame_idx": 6, "global_frame_idx": 11800, "task_index": 8}, {"db_idx": 11801, "episode_idx": 43, "frame_idx": 7, "global_frame_idx": 11801, "task_index": 8}, {"db_idx": 11802, "episode_idx": 43, "frame_idx": 8, "global_frame_idx": 11802, "task_index": 8}, {"db_idx": 11803, "episode_idx": 43, "frame_idx": 9, "global_frame_idx": 11803, "task_index": 8}, {"db_idx": 11804, "episode_idx": 43, "frame_idx": 10, "global_frame_idx": 11804, "task_index": 8}, {"db_idx": 11805, "episode_idx": 43, "frame_idx": 11, "global_frame_idx": 11805, "task_index": 8}, {"db_idx": 11806, "episode_idx": 43, "frame_idx": 12, "global_frame_idx": 11806, "task_index": 8}, {"db_idx": 11807, "episode_idx": 43, "frame_idx": 13, "global_frame_idx": 11807, "task_index": 8}, {"db_idx": 11808, "episode_idx": 43, "frame_idx": 14, "global_frame_idx": 11808, "task_index": 8}, {"db_idx": 11809, "episode_idx": 43, "frame_idx": 15, "global_frame_idx": 11809, "task_index": 8}, {"db_idx": 11810, "episode_idx": 43, "frame_idx": 16, "global_frame_idx": 11810, "task_index": 8}, {"db_idx": 11811, "episode_idx": 43, "frame_idx": 17, "global_frame_idx": 11811, "task_index": 8}, {"db_idx": 11812, "episode_idx": 43, "frame_idx": 18, "global_frame_idx": 11812, "task_index": 8}, {"db_idx": 11813, "episode_idx": 43, "frame_idx": 19, "global_frame_idx": 11813, "task_index": 8}, {"db_idx": 11814, "episode_idx": 43, "frame_idx": 20, 
"global_frame_idx": 11814, "task_index": 8}, {"db_idx": 11815, "episode_idx": 43, "frame_idx": 21, "global_frame_idx": 11815, "task_index": 8}, {"db_idx": 11816, "episode_idx": 43, "frame_idx": 22, "global_frame_idx": 11816, "task_index": 8}, {"db_idx": 11817, "episode_idx": 43, "frame_idx": 23, "global_frame_idx": 11817, "task_index": 8}, {"db_idx": 11818, "episode_idx": 43, "frame_idx": 24, "global_frame_idx": 11818, "task_index": 8}, {"db_idx": 11819, "episode_idx": 43, "frame_idx": 25, "global_frame_idx": 11819, "task_index": 8}, {"db_idx": 11820, "episode_idx": 43, "frame_idx": 26, "global_frame_idx": 11820, "task_index": 8}, {"db_idx": 11821, "episode_idx": 43, "frame_idx": 27, "global_frame_idx": 11821, "task_index": 8}, {"db_idx": 11822, "episode_idx": 43, "frame_idx": 28, "global_frame_idx": 11822, "task_index": 8}, {"db_idx": 11823, "episode_idx": 43, "frame_idx": 29, "global_frame_idx": 11823, "task_index": 8}, {"db_idx": 11824, "episode_idx": 43, "frame_idx": 30, "global_frame_idx": 11824, "task_index": 8}, {"db_idx": 11825, "episode_idx": 43, "frame_idx": 31, "global_frame_idx": 11825, "task_index": 8}, {"db_idx": 11826, "episode_idx": 43, "frame_idx": 32, "global_frame_idx": 11826, "task_index": 8}, {"db_idx": 11827, "episode_idx": 43, "frame_idx": 33, "global_frame_idx": 11827, "task_index": 8}, {"db_idx": 11828, "episode_idx": 43, "frame_idx": 34, "global_frame_idx": 11828, "task_index": 8}, {"db_idx": 11829, "episode_idx": 43, "frame_idx": 35, "global_frame_idx": 11829, "task_index": 8}, {"db_idx": 11830, "episode_idx": 43, "frame_idx": 36, "global_frame_idx": 11830, "task_index": 8}, {"db_idx": 11831, "episode_idx": 43, "frame_idx": 37, "global_frame_idx": 11831, "task_index": 8}, {"db_idx": 11832, "episode_idx": 43, "frame_idx": 38, "global_frame_idx": 11832, "task_index": 8}, {"db_idx": 11833, "episode_idx": 43, "frame_idx": 39, "global_frame_idx": 11833, "task_index": 8}, {"db_idx": 11834, "episode_idx": 43, "frame_idx": 40, "global_frame_idx": 
11834, "task_index": 8}, {"db_idx": 11835, "episode_idx": 43, "frame_idx": 41, "global_frame_idx": 11835, "task_index": 8}, {"db_idx": 11836, "episode_idx": 43, "frame_idx": 42, "global_frame_idx": 11836, "task_index": 8}, {"db_idx": 11837, "episode_idx": 43, "frame_idx": 43, "global_frame_idx": 11837, "task_index": 8}, {"db_idx": 11838, "episode_idx": 43, "frame_idx": 44, "global_frame_idx": 11838, "task_index": 8}, {"db_idx": 11839, "episode_idx": 43, "frame_idx": 45, "global_frame_idx": 11839, "task_index": 8}, {"db_idx": 11840, "episode_idx": 43, "frame_idx": 46, "global_frame_idx": 11840, "task_index": 8}, {"db_idx": 11841, "episode_idx": 43, "frame_idx": 47, "global_frame_idx": 11841, "task_index": 8}, {"db_idx": 11842, "episode_idx": 43, "frame_idx": 48, "global_frame_idx": 11842, "task_index": 8}, {"db_idx": 11843, "episode_idx": 43, "frame_idx": 49, "global_frame_idx": 11843, "task_index": 8}, {"db_idx": 11844, "episode_idx": 43, "frame_idx": 50, "global_frame_idx": 11844, "task_index": 8}, {"db_idx": 11845, "episode_idx": 43, "frame_idx": 51, "global_frame_idx": 11845, "task_index": 8}, {"db_idx": 11846, "episode_idx": 43, "frame_idx": 52, "global_frame_idx": 11846, "task_index": 8}, {"db_idx": 11847, "episode_idx": 43, "frame_idx": 53, "global_frame_idx": 11847, "task_index": 8}, {"db_idx": 11848, "episode_idx": 43, "frame_idx": 54, "global_frame_idx": 11848, "task_index": 8}, {"db_idx": 11849, "episode_idx": 43, "frame_idx": 55, "global_frame_idx": 11849, "task_index": 8}, {"db_idx": 11850, "episode_idx": 43, "frame_idx": 56, "global_frame_idx": 11850, "task_index": 8}, {"db_idx": 11851, "episode_idx": 43, "frame_idx": 57, "global_frame_idx": 11851, "task_index": 8}, {"db_idx": 11852, "episode_idx": 43, "frame_idx": 58, "global_frame_idx": 11852, "task_index": 8}, {"db_idx": 11853, "episode_idx": 43, "frame_idx": 59, "global_frame_idx": 11853, "task_index": 8}, {"db_idx": 11854, "episode_idx": 43, "frame_idx": 60, "global_frame_idx": 11854, 
"task_index": 8}, {"db_idx": 11855, "episode_idx": 43, "frame_idx": 61, "global_frame_idx": 11855, "task_index": 8}, {"db_idx": 11856, "episode_idx": 43, "frame_idx": 62, "global_frame_idx": 11856, "task_index": 8}, {"db_idx": 11857, "episode_idx": 43, "frame_idx": 63, "global_frame_idx": 11857, "task_index": 8}, {"db_idx": 11858, "episode_idx": 43, "frame_idx": 64, "global_frame_idx": 11858, "task_index": 8}, {"db_idx": 11859, "episode_idx": 43, "frame_idx": 65, "global_frame_idx": 11859, "task_index": 8}, {"db_idx": 11860, "episode_idx": 43, "frame_idx": 66, "global_frame_idx": 11860, "task_index": 8}, {"db_idx": 11861, "episode_idx": 43, "frame_idx": 67, "global_frame_idx": 11861, "task_index": 8}, {"db_idx": 11862, "episode_idx": 43, "frame_idx": 68, "global_frame_idx": 11862, "task_index": 8}, {"db_idx": 11863, "episode_idx": 43, "frame_idx": 69, "global_frame_idx": 11863, "task_index": 8}, {"db_idx": 11864, "episode_idx": 43, "frame_idx": 70, "global_frame_idx": 11864, "task_index": 8}, {"db_idx": 11865, "episode_idx": 43, "frame_idx": 71, "global_frame_idx": 11865, "task_index": 8}, {"db_idx": 11866, "episode_idx": 43, "frame_idx": 72, "global_frame_idx": 11866, "task_index": 8}, {"db_idx": 11867, "episode_idx": 43, "frame_idx": 73, "global_frame_idx": 11867, "task_index": 8}, {"db_idx": 11868, "episode_idx": 43, "frame_idx": 74, "global_frame_idx": 11868, "task_index": 8}, {"db_idx": 11869, "episode_idx": 43, "frame_idx": 75, "global_frame_idx": 11869, "task_index": 8}, {"db_idx": 11870, "episode_idx": 43, "frame_idx": 76, "global_frame_idx": 11870, "task_index": 8}, {"db_idx": 11871, "episode_idx": 43, "frame_idx": 77, "global_frame_idx": 11871, "task_index": 8}, {"db_idx": 11872, "episode_idx": 43, "frame_idx": 78, "global_frame_idx": 11872, "task_index": 8}, {"db_idx": 11873, "episode_idx": 43, "frame_idx": 79, "global_frame_idx": 11873, "task_index": 8}, {"db_idx": 11874, "episode_idx": 43, "frame_idx": 80, "global_frame_idx": 11874, "task_index": 8}, 
{"db_idx": 11875, "episode_idx": 43, "frame_idx": 81, "global_frame_idx": 11875, "task_index": 8}, {"db_idx": 11876, "episode_idx": 43, "frame_idx": 82, "global_frame_idx": 11876, "task_index": 8}, {"db_idx": 11877, "episode_idx": 43, "frame_idx": 83, "global_frame_idx": 11877, "task_index": 8}, {"db_idx": 11878, "episode_idx": 43, "frame_idx": 84, "global_frame_idx": 11878, "task_index": 8}, {"db_idx": 11879, "episode_idx": 43, "frame_idx": 85, "global_frame_idx": 11879, "task_index": 8}, {"db_idx": 11880, "episode_idx": 43, "frame_idx": 86, "global_frame_idx": 11880, "task_index": 8}, {"db_idx": 11881, "episode_idx": 43, "frame_idx": 87, "global_frame_idx": 11881, "task_index": 8}, {"db_idx": 11882, "episode_idx": 43, "frame_idx": 88, "global_frame_idx": 11882, "task_index": 8}, {"db_idx": 11883, "episode_idx": 43, "frame_idx": 89, "global_frame_idx": 11883, "task_index": 8}, {"db_idx": 11884, "episode_idx": 43, "frame_idx": 90, "global_frame_idx": 11884, "task_index": 8}, {"db_idx": 11885, "episode_idx": 43, "frame_idx": 91, "global_frame_idx": 11885, "task_index": 8}, {"db_idx": 11886, "episode_idx": 43, "frame_idx": 92, "global_frame_idx": 11886, "task_index": 8}, {"db_idx": 11887, "episode_idx": 43, "frame_idx": 93, "global_frame_idx": 11887, "task_index": 8}, {"db_idx": 11888, "episode_idx": 43, "frame_idx": 94, "global_frame_idx": 11888, "task_index": 8}, {"db_idx": 11889, "episode_idx": 43, "frame_idx": 95, "global_frame_idx": 11889, "task_index": 8}, {"db_idx": 11890, "episode_idx": 43, "frame_idx": 96, "global_frame_idx": 11890, "task_index": 8}, {"db_idx": 11891, "episode_idx": 43, "frame_idx": 97, "global_frame_idx": 11891, "task_index": 8}, {"db_idx": 11892, "episode_idx": 43, "frame_idx": 98, "global_frame_idx": 11892, "task_index": 8}, {"db_idx": 11893, "episode_idx": 43, "frame_idx": 99, "global_frame_idx": 11893, "task_index": 8}, {"db_idx": 11894, "episode_idx": 43, "frame_idx": 100, "global_frame_idx": 11894, "task_index": 8}, {"db_idx": 11895, 
"episode_idx": 43, "frame_idx": 101, "global_frame_idx": 11895, "task_index": 8}, {"db_idx": 11896, "episode_idx": 43, "frame_idx": 102, "global_frame_idx": 11896, "task_index": 8}, {"db_idx": 11897, "episode_idx": 43, "frame_idx": 103, "global_frame_idx": 11897, "task_index": 8}, {"db_idx": 11898, "episode_idx": 43, "frame_idx": 104, "global_frame_idx": 11898, "task_index": 8}, {"db_idx": 11899, "episode_idx": 43, "frame_idx": 105, "global_frame_idx": 11899, "task_index": 8}, {"db_idx": 11900, "episode_idx": 43, "frame_idx": 106, "global_frame_idx": 11900, "task_index": 8}, {"db_idx": 11901, "episode_idx": 43, "frame_idx": 107, "global_frame_idx": 11901, "task_index": 8}, {"db_idx": 11902, "episode_idx": 43, "frame_idx": 108, "global_frame_idx": 11902, "task_index": 8}, {"db_idx": 11903, "episode_idx": 43, "frame_idx": 109, "global_frame_idx": 11903, "task_index": 8}, {"db_idx": 11904, "episode_idx": 43, "frame_idx": 110, "global_frame_idx": 11904, "task_index": 8}, {"db_idx": 11905, "episode_idx": 43, "frame_idx": 111, "global_frame_idx": 11905, "task_index": 8}, {"db_idx": 11906, "episode_idx": 43, "frame_idx": 112, "global_frame_idx": 11906, "task_index": 8}, {"db_idx": 11907, "episode_idx": 43, "frame_idx": 113, "global_frame_idx": 11907, "task_index": 8}, {"db_idx": 11908, "episode_idx": 43, "frame_idx": 114, "global_frame_idx": 11908, "task_index": 8}, {"db_idx": 11909, "episode_idx": 43, "frame_idx": 115, "global_frame_idx": 11909, "task_index": 8}, {"db_idx": 11910, "episode_idx": 43, "frame_idx": 116, "global_frame_idx": 11910, "task_index": 8}, {"db_idx": 11911, "episode_idx": 43, "frame_idx": 117, "global_frame_idx": 11911, "task_index": 8}, {"db_idx": 11912, "episode_idx": 43, "frame_idx": 118, "global_frame_idx": 11912, "task_index": 8}, {"db_idx": 11913, "episode_idx": 43, "frame_idx": 119, "global_frame_idx": 11913, "task_index": 8}, {"db_idx": 11914, "episode_idx": 43, "frame_idx": 120, "global_frame_idx": 11914, "task_index": 8}, {"db_idx": 11915, 
"episode_idx": 43, "frame_idx": 121, "global_frame_idx": 11915, "task_index": 8}, {"db_idx": 11916, "episode_idx": 43, "frame_idx": 122, "global_frame_idx": 11916, "task_index": 8}, {"db_idx": 11917, "episode_idx": 43, "frame_idx": 123, "global_frame_idx": 11917, "task_index": 8}, {"db_idx": 11918, "episode_idx": 43, "frame_idx": 124, "global_frame_idx": 11918, "task_index": 8}, {"db_idx": 11919, "episode_idx": 43, "frame_idx": 125, "global_frame_idx": 11919, "task_index": 8}, {"db_idx": 11920, "episode_idx": 43, "frame_idx": 126, "global_frame_idx": 11920, "task_index": 8}, {"db_idx": 11921, "episode_idx": 43, "frame_idx": 127, "global_frame_idx": 11921, "task_index": 8}, {"db_idx": 11922, "episode_idx": 43, "frame_idx": 128, "global_frame_idx": 11922, "task_index": 8}, {"db_idx": 11923, "episode_idx": 43, "frame_idx": 129, "global_frame_idx": 11923, "task_index": 8}, {"db_idx": 11924, "episode_idx": 43, "frame_idx": 130, "global_frame_idx": 11924, "task_index": 8}, {"db_idx": 11925, "episode_idx": 43, "frame_idx": 131, "global_frame_idx": 11925, "task_index": 8}, {"db_idx": 11926, "episode_idx": 43, "frame_idx": 132, "global_frame_idx": 11926, "task_index": 8}, {"db_idx": 11927, "episode_idx": 43, "frame_idx": 133, "global_frame_idx": 11927, "task_index": 8}, {"db_idx": 11928, "episode_idx": 43, "frame_idx": 134, "global_frame_idx": 11928, "task_index": 8}, {"db_idx": 11929, "episode_idx": 43, "frame_idx": 135, "global_frame_idx": 11929, "task_index": 8}, {"db_idx": 11930, "episode_idx": 43, "frame_idx": 136, "global_frame_idx": 11930, "task_index": 8}, {"db_idx": 11931, "episode_idx": 43, "frame_idx": 137, "global_frame_idx": 11931, "task_index": 8}, {"db_idx": 11932, "episode_idx": 43, "frame_idx": 138, "global_frame_idx": 11932, "task_index": 8}, {"db_idx": 11933, "episode_idx": 43, "frame_idx": 139, "global_frame_idx": 11933, "task_index": 8}, {"db_idx": 11934, "episode_idx": 43, "frame_idx": 140, "global_frame_idx": 11934, "task_index": 8}, {"db_idx": 11935, 
"episode_idx": 43, "frame_idx": 141, "global_frame_idx": 11935, "task_index": 8}, {"db_idx": 11936, "episode_idx": 43, "frame_idx": 142, "global_frame_idx": 11936, "task_index": 8}, {"db_idx": 11937, "episode_idx": 43, "frame_idx": 143, "global_frame_idx": 11937, "task_index": 8}, {"db_idx": 11938, "episode_idx": 43, "frame_idx": 144, "global_frame_idx": 11938, "task_index": 8}, {"db_idx": 11939, "episode_idx": 43, "frame_idx": 145, "global_frame_idx": 11939, "task_index": 8}, {"db_idx": 11940, "episode_idx": 43, "frame_idx": 146, "global_frame_idx": 11940, "task_index": 8}, {"db_idx": 11941, "episode_idx": 43, "frame_idx": 147, "global_frame_idx": 11941, "task_index": 8}, {"db_idx": 11942, "episode_idx": 43, "frame_idx": 148, "global_frame_idx": 11942, "task_index": 8}, {"db_idx": 11943, "episode_idx": 43, "frame_idx": 149, "global_frame_idx": 11943, "task_index": 8}, {"db_idx": 11944, "episode_idx": 43, "frame_idx": 150, "global_frame_idx": 11944, "task_index": 8}, {"db_idx": 11945, "episode_idx": 43, "frame_idx": 151, "global_frame_idx": 11945, "task_index": 8}, {"db_idx": 11946, "episode_idx": 43, "frame_idx": 152, "global_frame_idx": 11946, "task_index": 8}, {"db_idx": 11947, "episode_idx": 43, "frame_idx": 153, "global_frame_idx": 11947, "task_index": 8}, {"db_idx": 11948, "episode_idx": 43, "frame_idx": 154, "global_frame_idx": 11948, "task_index": 8}, {"db_idx": 11949, "episode_idx": 43, "frame_idx": 155, "global_frame_idx": 11949, "task_index": 8}, {"db_idx": 11950, "episode_idx": 43, "frame_idx": 156, "global_frame_idx": 11950, "task_index": 8}, {"db_idx": 11951, "episode_idx": 43, "frame_idx": 157, "global_frame_idx": 11951, "task_index": 8}, {"db_idx": 11952, "episode_idx": 43, "frame_idx": 158, "global_frame_idx": 11952, "task_index": 8}, {"db_idx": 11953, "episode_idx": 43, "frame_idx": 159, "global_frame_idx": 11953, "task_index": 8}, {"db_idx": 11954, "episode_idx": 43, "frame_idx": 160, "global_frame_idx": 11954, "task_index": 8}, {"db_idx": 11955, 
"episode_idx": 43, "frame_idx": 161, "global_frame_idx": 11955, "task_index": 8}, {"db_idx": 11956, "episode_idx": 43, "frame_idx": 162, "global_frame_idx": 11956, "task_index": 8}, {"db_idx": 11957, "episode_idx": 43, "frame_idx": 163, "global_frame_idx": 11957, "task_index": 8}, {"db_idx": 11958, "episode_idx": 43, "frame_idx": 164, "global_frame_idx": 11958, "task_index": 8}, {"db_idx": 11959, "episode_idx": 43, "frame_idx": 165, "global_frame_idx": 11959, "task_index": 8}, {"db_idx": 11960, "episode_idx": 43, "frame_idx": 166, "global_frame_idx": 11960, "task_index": 8}, {"db_idx": 11961, "episode_idx": 43, "frame_idx": 167, "global_frame_idx": 11961, "task_index": 8}, {"db_idx": 11962, "episode_idx": 43, "frame_idx": 168, "global_frame_idx": 11962, "task_index": 8}, {"db_idx": 11963, "episode_idx": 43, "frame_idx": 169, "global_frame_idx": 11963, "task_index": 8}, {"db_idx": 11964, "episode_idx": 43, "frame_idx": 170, "global_frame_idx": 11964, "task_index": 8}, {"db_idx": 11965, "episode_idx": 43, "frame_idx": 171, "global_frame_idx": 11965, "task_index": 8}, {"db_idx": 11966, "episode_idx": 43, "frame_idx": 172, "global_frame_idx": 11966, "task_index": 8}, {"db_idx": 11967, "episode_idx": 43, "frame_idx": 173, "global_frame_idx": 11967, "task_index": 8}, {"db_idx": 11968, "episode_idx": 43, "frame_idx": 174, "global_frame_idx": 11968, "task_index": 8}, {"db_idx": 11969, "episode_idx": 43, "frame_idx": 175, "global_frame_idx": 11969, "task_index": 8}, {"db_idx": 11970, "episode_idx": 43, "frame_idx": 176, "global_frame_idx": 11970, "task_index": 8}, {"db_idx": 11971, "episode_idx": 43, "frame_idx": 177, "global_frame_idx": 11971, "task_index": 8}, {"db_idx": 11972, "episode_idx": 43, "frame_idx": 178, "global_frame_idx": 11972, "task_index": 8}, {"db_idx": 11973, "episode_idx": 43, "frame_idx": 179, "global_frame_idx": 11973, "task_index": 8}, {"db_idx": 11974, "episode_idx": 43, "frame_idx": 180, "global_frame_idx": 11974, "task_index": 8}, {"db_idx": 11975, 
"episode_idx": 43, "frame_idx": 181, "global_frame_idx": 11975, "task_index": 8}, {"db_idx": 11976, "episode_idx": 43, "frame_idx": 182, "global_frame_idx": 11976, "task_index": 8}, {"db_idx": 11977, "episode_idx": 43, "frame_idx": 183, "global_frame_idx": 11977, "task_index": 8}, {"db_idx": 11978, "episode_idx": 43, "frame_idx": 184, "global_frame_idx": 11978, "task_index": 8}, {"db_idx": 11979, "episode_idx": 43, "frame_idx": 185, "global_frame_idx": 11979, "task_index": 8}, {"db_idx": 11980, "episode_idx": 43, "frame_idx": 186, "global_frame_idx": 11980, "task_index": 8}, {"db_idx": 11981, "episode_idx": 43, "frame_idx": 187, "global_frame_idx": 11981, "task_index": 8}, {"db_idx": 11982, "episode_idx": 43, "frame_idx": 188, "global_frame_idx": 11982, "task_index": 8}, {"db_idx": 11983, "episode_idx": 43, "frame_idx": 189, "global_frame_idx": 11983, "task_index": 8}, {"db_idx": 11984, "episode_idx": 43, "frame_idx": 190, "global_frame_idx": 11984, "task_index": 8}, {"db_idx": 11985, "episode_idx": 43, "frame_idx": 191, "global_frame_idx": 11985, "task_index": 8}, {"db_idx": 11986, "episode_idx": 43, "frame_idx": 192, "global_frame_idx": 11986, "task_index": 8}, {"db_idx": 11987, "episode_idx": 43, "frame_idx": 193, "global_frame_idx": 11987, "task_index": 8}, {"db_idx": 11988, "episode_idx": 43, "frame_idx": 194, "global_frame_idx": 11988, "task_index": 8}, {"db_idx": 11989, "episode_idx": 43, "frame_idx": 195, "global_frame_idx": 11989, "task_index": 8}, {"db_idx": 11990, "episode_idx": 43, "frame_idx": 196, "global_frame_idx": 11990, "task_index": 8}, {"db_idx": 11991, "episode_idx": 43, "frame_idx": 197, "global_frame_idx": 11991, "task_index": 8}, {"db_idx": 11992, "episode_idx": 43, "frame_idx": 198, "global_frame_idx": 11992, "task_index": 8}, {"db_idx": 11993, "episode_idx": 43, "frame_idx": 199, "global_frame_idx": 11993, "task_index": 8}, {"db_idx": 11994, "episode_idx": 43, "frame_idx": 200, "global_frame_idx": 11994, "task_index": 8}, {"db_idx": 11995, 
"episode_idx": 43, "frame_idx": 201, "global_frame_idx": 11995, "task_index": 8}, {"db_idx": 11996, "episode_idx": 43, "frame_idx": 202, "global_frame_idx": 11996, "task_index": 8}, {"db_idx": 11997, "episode_idx": 43, "frame_idx": 203, "global_frame_idx": 11997, "task_index": 8}, {"db_idx": 11998, "episode_idx": 43, "frame_idx": 204, "global_frame_idx": 11998, "task_index": 8}, {"db_idx": 11999, "episode_idx": 43, "frame_idx": 205, "global_frame_idx": 11999, "task_index": 8}, {"db_idx": 12000, "episode_idx": 43, "frame_idx": 206, "global_frame_idx": 12000, "task_index": 8}, {"db_idx": 12001, "episode_idx": 43, "frame_idx": 207, "global_frame_idx": 12001, "task_index": 8}, {"db_idx": 12002, "episode_idx": 43, "frame_idx": 208, "global_frame_idx": 12002, "task_index": 8}, {"db_idx": 12003, "episode_idx": 43, "frame_idx": 209, "global_frame_idx": 12003, "task_index": 8}, {"db_idx": 12004, "episode_idx": 43, "frame_idx": 210, "global_frame_idx": 12004, "task_index": 8}, {"db_idx": 12005, "episode_idx": 43, "frame_idx": 211, "global_frame_idx": 12005, "task_index": 8}, {"db_idx": 12006, "episode_idx": 43, "frame_idx": 212, "global_frame_idx": 12006, "task_index": 8}, {"db_idx": 12007, "episode_idx": 43, "frame_idx": 213, "global_frame_idx": 12007, "task_index": 8}, {"db_idx": 12008, "episode_idx": 43, "frame_idx": 214, "global_frame_idx": 12008, "task_index": 8}, {"db_idx": 12009, "episode_idx": 43, "frame_idx": 215, "global_frame_idx": 12009, "task_index": 8}, {"db_idx": 12010, "episode_idx": 43, "frame_idx": 216, "global_frame_idx": 12010, "task_index": 8}, {"db_idx": 12011, "episode_idx": 43, "frame_idx": 217, "global_frame_idx": 12011, "task_index": 8}, {"db_idx": 12012, "episode_idx": 43, "frame_idx": 218, "global_frame_idx": 12012, "task_index": 8}, {"db_idx": 12013, "episode_idx": 43, "frame_idx": 219, "global_frame_idx": 12013, "task_index": 8}, {"db_idx": 12014, "episode_idx": 43, "frame_idx": 220, "global_frame_idx": 12014, "task_index": 8}, {"db_idx": 12015, 
"episode_idx": 43, "frame_idx": 221, "global_frame_idx": 12015, "task_index": 8}, {"db_idx": 12016, "episode_idx": 43, "frame_idx": 222, "global_frame_idx": 12016, "task_index": 8}, {"db_idx": 12017, "episode_idx": 43, "frame_idx": 223, "global_frame_idx": 12017, "task_index": 8}, {"db_idx": 12018, "episode_idx": 43, "frame_idx": 224, "global_frame_idx": 12018, "task_index": 8}, {"db_idx": 12019, "episode_idx": 43, "frame_idx": 225, "global_frame_idx": 12019, "task_index": 8}, {"db_idx": 12020, "episode_idx": 43, "frame_idx": 226, "global_frame_idx": 12020, "task_index": 8}, {"db_idx": 12021, "episode_idx": 43, "frame_idx": 227, "global_frame_idx": 12021, "task_index": 8}, {"db_idx": 12022, "episode_idx": 43, "frame_idx": 228, "global_frame_idx": 12022, "task_index": 8}, {"db_idx": 12023, "episode_idx": 43, "frame_idx": 229, "global_frame_idx": 12023, "task_index": 8}, {"db_idx": 12024, "episode_idx": 43, "frame_idx": 230, "global_frame_idx": 12024, "task_index": 8}, {"db_idx": 12025, "episode_idx": 43, "frame_idx": 231, "global_frame_idx": 12025, "task_index": 8}, {"db_idx": 12026, "episode_idx": 43, "frame_idx": 232, "global_frame_idx": 12026, "task_index": 8}, {"db_idx": 12027, "episode_idx": 43, "frame_idx": 233, "global_frame_idx": 12027, "task_index": 8}, {"db_idx": 12028, "episode_idx": 43, "frame_idx": 234, "global_frame_idx": 12028, "task_index": 8}, {"db_idx": 12029, "episode_idx": 43, "frame_idx": 235, "global_frame_idx": 12029, "task_index": 8}, {"db_idx": 12030, "episode_idx": 43, "frame_idx": 236, "global_frame_idx": 12030, "task_index": 8}, {"db_idx": 12031, "episode_idx": 43, "frame_idx": 237, "global_frame_idx": 12031, "task_index": 8}, {"db_idx": 12032, "episode_idx": 43, "frame_idx": 238, "global_frame_idx": 12032, "task_index": 8}, {"db_idx": 12033, "episode_idx": 43, "frame_idx": 239, "global_frame_idx": 12033, "task_index": 8}, {"db_idx": 12034, "episode_idx": 43, "frame_idx": 240, "global_frame_idx": 12034, "task_index": 8}, {"db_idx": 12035, 
"episode_idx": 43, "frame_idx": 241, "global_frame_idx": 12035, "task_index": 8}, {"db_idx": 12036, "episode_idx": 43, "frame_idx": 242, "global_frame_idx": 12036, "task_index": 8}, {"db_idx": 12037, "episode_idx": 43, "frame_idx": 243, "global_frame_idx": 12037, "task_index": 8}, {"db_idx": 12038, "episode_idx": 43, "frame_idx": 244, "global_frame_idx": 12038, "task_index": 8}, {"db_idx": 12039, "episode_idx": 43, "frame_idx": 245, "global_frame_idx": 12039, "task_index": 8}, {"db_idx": 12040, "episode_idx": 43, "frame_idx": 246, "global_frame_idx": 12040, "task_index": 8}, {"db_idx": 12041, "episode_idx": 43, "frame_idx": 247, "global_frame_idx": 12041, "task_index": 8}, {"db_idx": 12042, "episode_idx": 43, "frame_idx": 248, "global_frame_idx": 12042, "task_index": 8}, {"db_idx": 12043, "episode_idx": 43, "frame_idx": 249, "global_frame_idx": 12043, "task_index": 8}, {"db_idx": 12044, "episode_idx": 43, "frame_idx": 250, "global_frame_idx": 12044, "task_index": 8}, {"db_idx": 12045, "episode_idx": 43, "frame_idx": 251, "global_frame_idx": 12045, "task_index": 8}, {"db_idx": 12046, "episode_idx": 44, "frame_idx": 0, "global_frame_idx": 12046, "task_index": 8}, {"db_idx": 12047, "episode_idx": 44, "frame_idx": 1, "global_frame_idx": 12047, "task_index": 8}, {"db_idx": 12048, "episode_idx": 44, "frame_idx": 2, "global_frame_idx": 12048, "task_index": 8}, {"db_idx": 12049, "episode_idx": 44, "frame_idx": 3, "global_frame_idx": 12049, "task_index": 8}, {"db_idx": 12050, "episode_idx": 44, "frame_idx": 4, "global_frame_idx": 12050, "task_index": 8}, {"db_idx": 12051, "episode_idx": 44, "frame_idx": 5, "global_frame_idx": 12051, "task_index": 8}, {"db_idx": 12052, "episode_idx": 44, "frame_idx": 6, "global_frame_idx": 12052, "task_index": 8}, {"db_idx": 12053, "episode_idx": 44, "frame_idx": 7, "global_frame_idx": 12053, "task_index": 8}, {"db_idx": 12054, "episode_idx": 44, "frame_idx": 8, "global_frame_idx": 12054, "task_index": 8}, {"db_idx": 12055, "episode_idx": 
44, "frame_idx": 9, "global_frame_idx": 12055, "task_index": 8}, {"db_idx": 12056, "episode_idx": 44, "frame_idx": 10, "global_frame_idx": 12056, "task_index": 8}, {"db_idx": 12057, "episode_idx": 44, "frame_idx": 11, "global_frame_idx": 12057, "task_index": 8}, {"db_idx": 12058, "episode_idx": 44, "frame_idx": 12, "global_frame_idx": 12058, "task_index": 8}, {"db_idx": 12059, "episode_idx": 44, "frame_idx": 13, "global_frame_idx": 12059, "task_index": 8}, {"db_idx": 12060, "episode_idx": 44, "frame_idx": 14, "global_frame_idx": 12060, "task_index": 8}, {"db_idx": 12061, "episode_idx": 44, "frame_idx": 15, "global_frame_idx": 12061, "task_index": 8}, {"db_idx": 12062, "episode_idx": 44, "frame_idx": 16, "global_frame_idx": 12062, "task_index": 8}, {"db_idx": 12063, "episode_idx": 44, "frame_idx": 17, "global_frame_idx": 12063, "task_index": 8}, {"db_idx": 12064, "episode_idx": 44, "frame_idx": 18, "global_frame_idx": 12064, "task_index": 8}, {"db_idx": 12065, "episode_idx": 44, "frame_idx": 19, "global_frame_idx": 12065, "task_index": 8}, {"db_idx": 12066, "episode_idx": 44, "frame_idx": 20, "global_frame_idx": 12066, "task_index": 8}, {"db_idx": 12067, "episode_idx": 44, "frame_idx": 21, "global_frame_idx": 12067, "task_index": 8}, {"db_idx": 12068, "episode_idx": 44, "frame_idx": 22, "global_frame_idx": 12068, "task_index": 8}, {"db_idx": 12069, "episode_idx": 44, "frame_idx": 23, "global_frame_idx": 12069, "task_index": 8}, {"db_idx": 12070, "episode_idx": 44, "frame_idx": 24, "global_frame_idx": 12070, "task_index": 8}, {"db_idx": 12071, "episode_idx": 44, "frame_idx": 25, "global_frame_idx": 12071, "task_index": 8}, {"db_idx": 12072, "episode_idx": 44, "frame_idx": 26, "global_frame_idx": 12072, "task_index": 8}, {"db_idx": 12073, "episode_idx": 44, "frame_idx": 27, "global_frame_idx": 12073, "task_index": 8}, {"db_idx": 12074, "episode_idx": 44, "frame_idx": 28, "global_frame_idx": 12074, "task_index": 8}, {"db_idx": 12075, "episode_idx": 44, "frame_idx": 29, 
"global_frame_idx": 12075, "task_index": 8}, {"db_idx": 12076, "episode_idx": 44, "frame_idx": 30, "global_frame_idx": 12076, "task_index": 8}, {"db_idx": 12077, "episode_idx": 44, "frame_idx": 31, "global_frame_idx": 12077, "task_index": 8}, {"db_idx": 12078, "episode_idx": 44, "frame_idx": 32, "global_frame_idx": 12078, "task_index": 8}, {"db_idx": 12079, "episode_idx": 44, "frame_idx": 33, "global_frame_idx": 12079, "task_index": 8}, {"db_idx": 12080, "episode_idx": 44, "frame_idx": 34, "global_frame_idx": 12080, "task_index": 8}, {"db_idx": 12081, "episode_idx": 44, "frame_idx": 35, "global_frame_idx": 12081, "task_index": 8}, {"db_idx": 12082, "episode_idx": 44, "frame_idx": 36, "global_frame_idx": 12082, "task_index": 8}, {"db_idx": 12083, "episode_idx": 44, "frame_idx": 37, "global_frame_idx": 12083, "task_index": 8}, {"db_idx": 12084, "episode_idx": 44, "frame_idx": 38, "global_frame_idx": 12084, "task_index": 8}, {"db_idx": 12085, "episode_idx": 44, "frame_idx": 39, "global_frame_idx": 12085, "task_index": 8}, {"db_idx": 12086, "episode_idx": 44, "frame_idx": 40, "global_frame_idx": 12086, "task_index": 8}, {"db_idx": 12087, "episode_idx": 44, "frame_idx": 41, "global_frame_idx": 12087, "task_index": 8}, {"db_idx": 12088, "episode_idx": 44, "frame_idx": 42, "global_frame_idx": 12088, "task_index": 8}, {"db_idx": 12089, "episode_idx": 44, "frame_idx": 43, "global_frame_idx": 12089, "task_index": 8}, {"db_idx": 12090, "episode_idx": 44, "frame_idx": 44, "global_frame_idx": 12090, "task_index": 8}, {"db_idx": 12091, "episode_idx": 44, "frame_idx": 45, "global_frame_idx": 12091, "task_index": 8}, {"db_idx": 12092, "episode_idx": 44, "frame_idx": 46, "global_frame_idx": 12092, "task_index": 8}, {"db_idx": 12093, "episode_idx": 44, "frame_idx": 47, "global_frame_idx": 12093, "task_index": 8}, {"db_idx": 12094, "episode_idx": 44, "frame_idx": 48, "global_frame_idx": 12094, "task_index": 8}, {"db_idx": 12095, "episode_idx": 44, "frame_idx": 49, "global_frame_idx": 
12095, "task_index": 8}, {"db_idx": 12096, "episode_idx": 44, "frame_idx": 50, "global_frame_idx": 12096, "task_index": 8}, {"db_idx": 12097, "episode_idx": 44, "frame_idx": 51, "global_frame_idx": 12097, "task_index": 8}, {"db_idx": 12098, "episode_idx": 44, "frame_idx": 52, "global_frame_idx": 12098, "task_index": 8}, {"db_idx": 12099, "episode_idx": 44, "frame_idx": 53, "global_frame_idx": 12099, "task_index": 8}, {"db_idx": 12100, "episode_idx": 44, "frame_idx": 54, "global_frame_idx": 12100, "task_index": 8}, {"db_idx": 12101, "episode_idx": 44, "frame_idx": 55, "global_frame_idx": 12101, "task_index": 8}, {"db_idx": 12102, "episode_idx": 44, "frame_idx": 56, "global_frame_idx": 12102, "task_index": 8}, {"db_idx": 12103, "episode_idx": 44, "frame_idx": 57, "global_frame_idx": 12103, "task_index": 8}, {"db_idx": 12104, "episode_idx": 44, "frame_idx": 58, "global_frame_idx": 12104, "task_index": 8}, {"db_idx": 12105, "episode_idx": 44, "frame_idx": 59, "global_frame_idx": 12105, "task_index": 8}, {"db_idx": 12106, "episode_idx": 44, "frame_idx": 60, "global_frame_idx": 12106, "task_index": 8}, {"db_idx": 12107, "episode_idx": 44, "frame_idx": 61, "global_frame_idx": 12107, "task_index": 8}, {"db_idx": 12108, "episode_idx": 44, "frame_idx": 62, "global_frame_idx": 12108, "task_index": 8}, {"db_idx": 12109, "episode_idx": 44, "frame_idx": 63, "global_frame_idx": 12109, "task_index": 8}, {"db_idx": 12110, "episode_idx": 44, "frame_idx": 64, "global_frame_idx": 12110, "task_index": 8}, {"db_idx": 12111, "episode_idx": 44, "frame_idx": 65, "global_frame_idx": 12111, "task_index": 8}, {"db_idx": 12112, "episode_idx": 44, "frame_idx": 66, "global_frame_idx": 12112, "task_index": 8}, {"db_idx": 12113, "episode_idx": 44, "frame_idx": 67, "global_frame_idx": 12113, "task_index": 8}, {"db_idx": 12114, "episode_idx": 44, "frame_idx": 68, "global_frame_idx": 12114, "task_index": 8}, {"db_idx": 12115, "episode_idx": 44, "frame_idx": 69, "global_frame_idx": 12115, 
"task_index": 8}, {"db_idx": 12116, "episode_idx": 44, "frame_idx": 70, "global_frame_idx": 12116, "task_index": 8}, {"db_idx": 12117, "episode_idx": 44, "frame_idx": 71, "global_frame_idx": 12117, "task_index": 8}, {"db_idx": 12118, "episode_idx": 44, "frame_idx": 72, "global_frame_idx": 12118, "task_index": 8}, {"db_idx": 12119, "episode_idx": 44, "frame_idx": 73, "global_frame_idx": 12119, "task_index": 8}, {"db_idx": 12120, "episode_idx": 44, "frame_idx": 74, "global_frame_idx": 12120, "task_index": 8}, {"db_idx": 12121, "episode_idx": 44, "frame_idx": 75, "global_frame_idx": 12121, "task_index": 8}, {"db_idx": 12122, "episode_idx": 44, "frame_idx": 76, "global_frame_idx": 12122, "task_index": 8}, {"db_idx": 12123, "episode_idx": 44, "frame_idx": 77, "global_frame_idx": 12123, "task_index": 8}, {"db_idx": 12124, "episode_idx": 44, "frame_idx": 78, "global_frame_idx": 12124, "task_index": 8}, {"db_idx": 12125, "episode_idx": 44, "frame_idx": 79, "global_frame_idx": 12125, "task_index": 8}, {"db_idx": 12126, "episode_idx": 44, "frame_idx": 80, "global_frame_idx": 12126, "task_index": 8}, {"db_idx": 12127, "episode_idx": 44, "frame_idx": 81, "global_frame_idx": 12127, "task_index": 8}, {"db_idx": 12128, "episode_idx": 44, "frame_idx": 82, "global_frame_idx": 12128, "task_index": 8}, {"db_idx": 12129, "episode_idx": 44, "frame_idx": 83, "global_frame_idx": 12129, "task_index": 8}, {"db_idx": 12130, "episode_idx": 44, "frame_idx": 84, "global_frame_idx": 12130, "task_index": 8}, {"db_idx": 12131, "episode_idx": 44, "frame_idx": 85, "global_frame_idx": 12131, "task_index": 8}, {"db_idx": 12132, "episode_idx": 44, "frame_idx": 86, "global_frame_idx": 12132, "task_index": 8}, {"db_idx": 12133, "episode_idx": 44, "frame_idx": 87, "global_frame_idx": 12133, "task_index": 8}, {"db_idx": 12134, "episode_idx": 44, "frame_idx": 88, "global_frame_idx": 12134, "task_index": 8}, {"db_idx": 12135, "episode_idx": 44, "frame_idx": 89, "global_frame_idx": 12135, "task_index": 8}, 
{"db_idx": 12136, "episode_idx": 44, "frame_idx": 90, "global_frame_idx": 12136, "task_index": 8}, {"db_idx": 12137, "episode_idx": 44, "frame_idx": 91, "global_frame_idx": 12137, "task_index": 8}, {"db_idx": 12138, "episode_idx": 44, "frame_idx": 92, "global_frame_idx": 12138, "task_index": 8}, {"db_idx": 12139, "episode_idx": 44, "frame_idx": 93, "global_frame_idx": 12139, "task_index": 8}, {"db_idx": 12140, "episode_idx": 44, "frame_idx": 94, "global_frame_idx": 12140, "task_index": 8}, {"db_idx": 12141, "episode_idx": 44, "frame_idx": 95, "global_frame_idx": 12141, "task_index": 8}, {"db_idx": 12142, "episode_idx": 44, "frame_idx": 96, "global_frame_idx": 12142, "task_index": 8}, {"db_idx": 12143, "episode_idx": 44, "frame_idx": 97, "global_frame_idx": 12143, "task_index": 8}, {"db_idx": 12144, "episode_idx": 44, "frame_idx": 98, "global_frame_idx": 12144, "task_index": 8}, {"db_idx": 12145, "episode_idx": 44, "frame_idx": 99, "global_frame_idx": 12145, "task_index": 8}, {"db_idx": 12146, "episode_idx": 44, "frame_idx": 100, "global_frame_idx": 12146, "task_index": 8}, {"db_idx": 12147, "episode_idx": 44, "frame_idx": 101, "global_frame_idx": 12147, "task_index": 8}, {"db_idx": 12148, "episode_idx": 44, "frame_idx": 102, "global_frame_idx": 12148, "task_index": 8}, {"db_idx": 12149, "episode_idx": 44, "frame_idx": 103, "global_frame_idx": 12149, "task_index": 8}, {"db_idx": 12150, "episode_idx": 44, "frame_idx": 104, "global_frame_idx": 12150, "task_index": 8}, {"db_idx": 12151, "episode_idx": 44, "frame_idx": 105, "global_frame_idx": 12151, "task_index": 8}, {"db_idx": 12152, "episode_idx": 44, "frame_idx": 106, "global_frame_idx": 12152, "task_index": 8}, {"db_idx": 12153, "episode_idx": 44, "frame_idx": 107, "global_frame_idx": 12153, "task_index": 8}, {"db_idx": 12154, "episode_idx": 44, "frame_idx": 108, "global_frame_idx": 12154, "task_index": 8}, {"db_idx": 12155, "episode_idx": 44, "frame_idx": 109, "global_frame_idx": 12155, "task_index": 8}, 
{"db_idx": 12156, "episode_idx": 44, "frame_idx": 110, "global_frame_idx": 12156, "task_index": 8}, {"db_idx": 12157, "episode_idx": 44, "frame_idx": 111, "global_frame_idx": 12157, "task_index": 8}, {"db_idx": 12158, "episode_idx": 44, "frame_idx": 112, "global_frame_idx": 12158, "task_index": 8}, {"db_idx": 12159, "episode_idx": 44, "frame_idx": 113, "global_frame_idx": 12159, "task_index": 8}, {"db_idx": 12160, "episode_idx": 44, "frame_idx": 114, "global_frame_idx": 12160, "task_index": 8}, {"db_idx": 12161, "episode_idx": 44, "frame_idx": 115, "global_frame_idx": 12161, "task_index": 8}, {"db_idx": 12162, "episode_idx": 44, "frame_idx": 116, "global_frame_idx": 12162, "task_index": 8}, {"db_idx": 12163, "episode_idx": 44, "frame_idx": 117, "global_frame_idx": 12163, "task_index": 8}, {"db_idx": 12164, "episode_idx": 44, "frame_idx": 118, "global_frame_idx": 12164, "task_index": 8}, {"db_idx": 12165, "episode_idx": 44, "frame_idx": 119, "global_frame_idx": 12165, "task_index": 8}, {"db_idx": 12166, "episode_idx": 44, "frame_idx": 120, "global_frame_idx": 12166, "task_index": 8}, {"db_idx": 12167, "episode_idx": 44, "frame_idx": 121, "global_frame_idx": 12167, "task_index": 8}, {"db_idx": 12168, "episode_idx": 44, "frame_idx": 122, "global_frame_idx": 12168, "task_index": 8}, {"db_idx": 12169, "episode_idx": 44, "frame_idx": 123, "global_frame_idx": 12169, "task_index": 8}, {"db_idx": 12170, "episode_idx": 44, "frame_idx": 124, "global_frame_idx": 12170, "task_index": 8}, {"db_idx": 12171, "episode_idx": 44, "frame_idx": 125, "global_frame_idx": 12171, "task_index": 8}, {"db_idx": 12172, "episode_idx": 44, "frame_idx": 126, "global_frame_idx": 12172, "task_index": 8}, {"db_idx": 12173, "episode_idx": 44, "frame_idx": 127, "global_frame_idx": 12173, "task_index": 8}, {"db_idx": 12174, "episode_idx": 44, "frame_idx": 128, "global_frame_idx": 12174, "task_index": 8}, {"db_idx": 12175, "episode_idx": 44, "frame_idx": 129, "global_frame_idx": 12175, "task_index": 8}, 
{"db_idx": 12176, "episode_idx": 44, "frame_idx": 130, "global_frame_idx": 12176, "task_index": 8}, {"db_idx": 12177, "episode_idx": 44, "frame_idx": 131, "global_frame_idx": 12177, "task_index": 8}, {"db_idx": 12178, "episode_idx": 44, "frame_idx": 132, "global_frame_idx": 12178, "task_index": 8}, {"db_idx": 12179, "episode_idx": 44, "frame_idx": 133, "global_frame_idx": 12179, "task_index": 8}, {"db_idx": 12180, "episode_idx": 44, "frame_idx": 134, "global_frame_idx": 12180, "task_index": 8}, {"db_idx": 12181, "episode_idx": 44, "frame_idx": 135, "global_frame_idx": 12181, "task_index": 8}, {"db_idx": 12182, "episode_idx": 44, "frame_idx": 136, "global_frame_idx": 12182, "task_index": 8}, {"db_idx": 12183, "episode_idx": 44, "frame_idx": 137, "global_frame_idx": 12183, "task_index": 8}, {"db_idx": 12184, "episode_idx": 44, "frame_idx": 138, "global_frame_idx": 12184, "task_index": 8}, {"db_idx": 12185, "episode_idx": 44, "frame_idx": 139, "global_frame_idx": 12185, "task_index": 8}, {"db_idx": 12186, "episode_idx": 44, "frame_idx": 140, "global_frame_idx": 12186, "task_index": 8}, {"db_idx": 12187, "episode_idx": 44, "frame_idx": 141, "global_frame_idx": 12187, "task_index": 8}, {"db_idx": 12188, "episode_idx": 44, "frame_idx": 142, "global_frame_idx": 12188, "task_index": 8}, {"db_idx": 12189, "episode_idx": 44, "frame_idx": 143, "global_frame_idx": 12189, "task_index": 8}, {"db_idx": 12190, "episode_idx": 44, "frame_idx": 144, "global_frame_idx": 12190, "task_index": 8}, {"db_idx": 12191, "episode_idx": 44, "frame_idx": 145, "global_frame_idx": 12191, "task_index": 8}, {"db_idx": 12192, "episode_idx": 44, "frame_idx": 146, "global_frame_idx": 12192, "task_index": 8}, {"db_idx": 12193, "episode_idx": 44, "frame_idx": 147, "global_frame_idx": 12193, "task_index": 8}, {"db_idx": 12194, "episode_idx": 44, "frame_idx": 148, "global_frame_idx": 12194, "task_index": 8}, {"db_idx": 12195, "episode_idx": 44, "frame_idx": 149, "global_frame_idx": 12195, "task_index": 8}, 
{"db_idx": 12196, "episode_idx": 44, "frame_idx": 150, "global_frame_idx": 12196, "task_index": 8}, {"db_idx": 12197, "episode_idx": 44, "frame_idx": 151, "global_frame_idx": 12197, "task_index": 8}, {"db_idx": 12198, "episode_idx": 44, "frame_idx": 152, "global_frame_idx": 12198, "task_index": 8}, {"db_idx": 12199, "episode_idx": 44, "frame_idx": 153, "global_frame_idx": 12199, "task_index": 8}, {"db_idx": 12200, "episode_idx": 44, "frame_idx": 154, "global_frame_idx": 12200, "task_index": 8}, {"db_idx": 12201, "episode_idx": 44, "frame_idx": 155, "global_frame_idx": 12201, "task_index": 8}, {"db_idx": 12202, "episode_idx": 44, "frame_idx": 156, "global_frame_idx": 12202, "task_index": 8}, {"db_idx": 12203, "episode_idx": 44, "frame_idx": 157, "global_frame_idx": 12203, "task_index": 8}, {"db_idx": 12204, "episode_idx": 44, "frame_idx": 158, "global_frame_idx": 12204, "task_index": 8}, {"db_idx": 12205, "episode_idx": 44, "frame_idx": 159, "global_frame_idx": 12205, "task_index": 8}, {"db_idx": 12206, "episode_idx": 44, "frame_idx": 160, "global_frame_idx": 12206, "task_index": 8}, {"db_idx": 12207, "episode_idx": 44, "frame_idx": 161, "global_frame_idx": 12207, "task_index": 8}, {"db_idx": 12208, "episode_idx": 44, "frame_idx": 162, "global_frame_idx": 12208, "task_index": 8}, {"db_idx": 12209, "episode_idx": 44, "frame_idx": 163, "global_frame_idx": 12209, "task_index": 8}, {"db_idx": 12210, "episode_idx": 44, "frame_idx": 164, "global_frame_idx": 12210, "task_index": 8}, {"db_idx": 12211, "episode_idx": 44, "frame_idx": 165, "global_frame_idx": 12211, "task_index": 8}, {"db_idx": 12212, "episode_idx": 44, "frame_idx": 166, "global_frame_idx": 12212, "task_index": 8}, {"db_idx": 12213, "episode_idx": 44, "frame_idx": 167, "global_frame_idx": 12213, "task_index": 8}, {"db_idx": 12214, "episode_idx": 44, "frame_idx": 168, "global_frame_idx": 12214, "task_index": 8}, {"db_idx": 12215, "episode_idx": 44, "frame_idx": 169, "global_frame_idx": 12215, "task_index": 8}, 
{"db_idx": 12216, "episode_idx": 44, "frame_idx": 170, "global_frame_idx": 12216, "task_index": 8}, {"db_idx": 12217, "episode_idx": 44, "frame_idx": 171, "global_frame_idx": 12217, "task_index": 8}, {"db_idx": 12218, "episode_idx": 44, "frame_idx": 172, "global_frame_idx": 12218, "task_index": 8}, {"db_idx": 12219, "episode_idx": 44, "frame_idx": 173, "global_frame_idx": 12219, "task_index": 8}, {"db_idx": 12220, "episode_idx": 44, "frame_idx": 174, "global_frame_idx": 12220, "task_index": 8}, {"db_idx": 12221, "episode_idx": 44, "frame_idx": 175, "global_frame_idx": 12221, "task_index": 8}, {"db_idx": 12222, "episode_idx": 44, "frame_idx": 176, "global_frame_idx": 12222, "task_index": 8}, {"db_idx": 12223, "episode_idx": 44, "frame_idx": 177, "global_frame_idx": 12223, "task_index": 8}, {"db_idx": 12224, "episode_idx": 44, "frame_idx": 178, "global_frame_idx": 12224, "task_index": 8}, {"db_idx": 12225, "episode_idx": 44, "frame_idx": 179, "global_frame_idx": 12225, "task_index": 8}, {"db_idx": 12226, "episode_idx": 44, "frame_idx": 180, "global_frame_idx": 12226, "task_index": 8}, {"db_idx": 12227, "episode_idx": 44, "frame_idx": 181, "global_frame_idx": 12227, "task_index": 8}, {"db_idx": 12228, "episode_idx": 44, "frame_idx": 182, "global_frame_idx": 12228, "task_index": 8}, {"db_idx": 12229, "episode_idx": 44, "frame_idx": 183, "global_frame_idx": 12229, "task_index": 8}, {"db_idx": 12230, "episode_idx": 44, "frame_idx": 184, "global_frame_idx": 12230, "task_index": 8}, {"db_idx": 12231, "episode_idx": 44, "frame_idx": 185, "global_frame_idx": 12231, "task_index": 8}, {"db_idx": 12232, "episode_idx": 44, "frame_idx": 186, "global_frame_idx": 12232, "task_index": 8}, {"db_idx": 12233, "episode_idx": 44, "frame_idx": 187, "global_frame_idx": 12233, "task_index": 8}, {"db_idx": 12234, "episode_idx": 44, "frame_idx": 188, "global_frame_idx": 12234, "task_index": 8}, {"db_idx": 12235, "episode_idx": 44, "frame_idx": 189, "global_frame_idx": 12235, "task_index": 8}, 
{"db_idx": 12236, "episode_idx": 44, "frame_idx": 190, "global_frame_idx": 12236, "task_index": 8}, {"db_idx": 12237, "episode_idx": 44, "frame_idx": 191, "global_frame_idx": 12237, "task_index": 8}, {"db_idx": 12238, "episode_idx": 44, "frame_idx": 192, "global_frame_idx": 12238, "task_index": 8}, {"db_idx": 12239, "episode_idx": 44, "frame_idx": 193, "global_frame_idx": 12239, "task_index": 8}, {"db_idx": 12240, "episode_idx": 44, "frame_idx": 194, "global_frame_idx": 12240, "task_index": 8}, {"db_idx": 12241, "episode_idx": 44, "frame_idx": 195, "global_frame_idx": 12241, "task_index": 8}, {"db_idx": 12242, "episode_idx": 44, "frame_idx": 196, "global_frame_idx": 12242, "task_index": 8}, {"db_idx": 12243, "episode_idx": 44, "frame_idx": 197, "global_frame_idx": 12243, "task_index": 8}, {"db_idx": 12244, "episode_idx": 44, "frame_idx": 198, "global_frame_idx": 12244, "task_index": 8}, {"db_idx": 12245, "episode_idx": 44, "frame_idx": 199, "global_frame_idx": 12245, "task_index": 8}, {"db_idx": 12246, "episode_idx": 44, "frame_idx": 200, "global_frame_idx": 12246, "task_index": 8}, {"db_idx": 12247, "episode_idx": 44, "frame_idx": 201, "global_frame_idx": 12247, "task_index": 8}, {"db_idx": 12248, "episode_idx": 44, "frame_idx": 202, "global_frame_idx": 12248, "task_index": 8}, {"db_idx": 12249, "episode_idx": 44, "frame_idx": 203, "global_frame_idx": 12249, "task_index": 8}, {"db_idx": 12250, "episode_idx": 44, "frame_idx": 204, "global_frame_idx": 12250, "task_index": 8}, {"db_idx": 12251, "episode_idx": 44, "frame_idx": 205, "global_frame_idx": 12251, "task_index": 8}, {"db_idx": 12252, "episode_idx": 44, "frame_idx": 206, "global_frame_idx": 12252, "task_index": 8}, {"db_idx": 12253, "episode_idx": 44, "frame_idx": 207, "global_frame_idx": 12253, "task_index": 8}, {"db_idx": 12254, "episode_idx": 44, "frame_idx": 208, "global_frame_idx": 12254, "task_index": 8}, {"db_idx": 12255, "episode_idx": 44, "frame_idx": 209, "global_frame_idx": 12255, "task_index": 8}, 
{"db_idx": 12256, "episode_idx": 44, "frame_idx": 210, "global_frame_idx": 12256, "task_index": 8}, {"db_idx": 12257, "episode_idx": 44, "frame_idx": 211, "global_frame_idx": 12257, "task_index": 8}, {"db_idx": 12258, "episode_idx": 44, "frame_idx": 212, "global_frame_idx": 12258, "task_index": 8}, {"db_idx": 12259, "episode_idx": 44, "frame_idx": 213, "global_frame_idx": 12259, "task_index": 8}, {"db_idx": 12260, "episode_idx": 44, "frame_idx": 214, "global_frame_idx": 12260, "task_index": 8}, {"db_idx": 12261, "episode_idx": 44, "frame_idx": 215, "global_frame_idx": 12261, "task_index": 8}, {"db_idx": 12262, "episode_idx": 44, "frame_idx": 216, "global_frame_idx": 12262, "task_index": 8}, {"db_idx": 12263, "episode_idx": 44, "frame_idx": 217, "global_frame_idx": 12263, "task_index": 8}, {"db_idx": 12264, "episode_idx": 44, "frame_idx": 218, "global_frame_idx": 12264, "task_index": 8}, {"db_idx": 12265, "episode_idx": 44, "frame_idx": 219, "global_frame_idx": 12265, "task_index": 8}, {"db_idx": 12266, "episode_idx": 44, "frame_idx": 220, "global_frame_idx": 12266, "task_index": 8}, {"db_idx": 12267, "episode_idx": 44, "frame_idx": 221, "global_frame_idx": 12267, "task_index": 8}, {"db_idx": 12268, "episode_idx": 44, "frame_idx": 222, "global_frame_idx": 12268, "task_index": 8}, {"db_idx": 12269, "episode_idx": 44, "frame_idx": 223, "global_frame_idx": 12269, "task_index": 8}, {"db_idx": 12270, "episode_idx": 44, "frame_idx": 224, "global_frame_idx": 12270, "task_index": 8}, {"db_idx": 12271, "episode_idx": 44, "frame_idx": 225, "global_frame_idx": 12271, "task_index": 8}, {"db_idx": 12272, "episode_idx": 44, "frame_idx": 226, "global_frame_idx": 12272, "task_index": 8}, {"db_idx": 12273, "episode_idx": 44, "frame_idx": 227, "global_frame_idx": 12273, "task_index": 8}, {"db_idx": 12274, "episode_idx": 44, "frame_idx": 228, "global_frame_idx": 12274, "task_index": 8}, {"db_idx": 12275, "episode_idx": 44, "frame_idx": 229, "global_frame_idx": 12275, "task_index": 8}, 
{"db_idx": 12276, "episode_idx": 44, "frame_idx": 230, "global_frame_idx": 12276, "task_index": 8}, {"db_idx": 12277, "episode_idx": 44, "frame_idx": 231, "global_frame_idx": 12277, "task_index": 8}, {"db_idx": 12278, "episode_idx": 44, "frame_idx": 232, "global_frame_idx": 12278, "task_index": 8}, {"db_idx": 12279, "episode_idx": 44, "frame_idx": 233, "global_frame_idx": 12279, "task_index": 8}, {"db_idx": 12280, "episode_idx": 44, "frame_idx": 234, "global_frame_idx": 12280, "task_index": 8}, {"db_idx": 12281, "episode_idx": 44, "frame_idx": 235, "global_frame_idx": 12281, "task_index": 8}, {"db_idx": 12282, "episode_idx": 44, "frame_idx": 236, "global_frame_idx": 12282, "task_index": 8}, {"db_idx": 12283, "episode_idx": 44, "frame_idx": 237, "global_frame_idx": 12283, "task_index": 8}, {"db_idx": 12284, "episode_idx": 44, "frame_idx": 238, "global_frame_idx": 12284, "task_index": 8}, {"db_idx": 12285, "episode_idx": 44, "frame_idx": 239, "global_frame_idx": 12285, "task_index": 8}, {"db_idx": 12286, "episode_idx": 44, "frame_idx": 240, "global_frame_idx": 12286, "task_index": 8}, {"db_idx": 12287, "episode_idx": 44, "frame_idx": 241, "global_frame_idx": 12287, "task_index": 8}, {"db_idx": 12288, "episode_idx": 44, "frame_idx": 242, "global_frame_idx": 12288, "task_index": 8}, {"db_idx": 12289, "episode_idx": 44, "frame_idx": 243, "global_frame_idx": 12289, "task_index": 8}, {"db_idx": 12290, "episode_idx": 44, "frame_idx": 244, "global_frame_idx": 12290, "task_index": 8}, {"db_idx": 12291, "episode_idx": 44, "frame_idx": 245, "global_frame_idx": 12291, "task_index": 8}, {"db_idx": 12292, "episode_idx": 44, "frame_idx": 246, "global_frame_idx": 12292, "task_index": 8}, {"db_idx": 12293, "episode_idx": 44, "frame_idx": 247, "global_frame_idx": 12293, "task_index": 8}, {"db_idx": 12294, "episode_idx": 44, "frame_idx": 248, "global_frame_idx": 12294, "task_index": 8}, {"db_idx": 12295, "episode_idx": 44, "frame_idx": 249, "global_frame_idx": 12295, "task_index": 8}, 
{"db_idx": 12296, "episode_idx": 44, "frame_idx": 250, "global_frame_idx": 12296, "task_index": 8}, {"db_idx": 12297, "episode_idx": 44, "frame_idx": 251, "global_frame_idx": 12297, "task_index": 8}, {"db_idx": 12298, "episode_idx": 44, "frame_idx": 252, "global_frame_idx": 12298, "task_index": 8}, {"db_idx": 12299, "episode_idx": 44, "frame_idx": 253, "global_frame_idx": 12299, "task_index": 8}, {"db_idx": 12300, "episode_idx": 44, "frame_idx": 254, "global_frame_idx": 12300, "task_index": 8}, {"db_idx": 12301, "episode_idx": 45, "frame_idx": 0, "global_frame_idx": 12301, "task_index": 9}, {"db_idx": 12302, "episode_idx": 45, "frame_idx": 1, "global_frame_idx": 12302, "task_index": 9}, {"db_idx": 12303, "episode_idx": 45, "frame_idx": 2, "global_frame_idx": 12303, "task_index": 9}, {"db_idx": 12304, "episode_idx": 45, "frame_idx": 3, "global_frame_idx": 12304, "task_index": 9}, {"db_idx": 12305, "episode_idx": 45, "frame_idx": 4, "global_frame_idx": 12305, "task_index": 9}, {"db_idx": 12306, "episode_idx": 45, "frame_idx": 5, "global_frame_idx": 12306, "task_index": 9}, {"db_idx": 12307, "episode_idx": 45, "frame_idx": 6, "global_frame_idx": 12307, "task_index": 9}, {"db_idx": 12308, "episode_idx": 45, "frame_idx": 7, "global_frame_idx": 12308, "task_index": 9}, {"db_idx": 12309, "episode_idx": 45, "frame_idx": 8, "global_frame_idx": 12309, "task_index": 9}, {"db_idx": 12310, "episode_idx": 45, "frame_idx": 9, "global_frame_idx": 12310, "task_index": 9}, {"db_idx": 12311, "episode_idx": 45, "frame_idx": 10, "global_frame_idx": 12311, "task_index": 9}, {"db_idx": 12312, "episode_idx": 45, "frame_idx": 11, "global_frame_idx": 12312, "task_index": 9}, {"db_idx": 12313, "episode_idx": 45, "frame_idx": 12, "global_frame_idx": 12313, "task_index": 9}, {"db_idx": 12314, "episode_idx": 45, "frame_idx": 13, "global_frame_idx": 12314, "task_index": 9}, {"db_idx": 12315, "episode_idx": 45, "frame_idx": 14, "global_frame_idx": 12315, "task_index": 9}, {"db_idx": 12316, 
"episode_idx": 45, "frame_idx": 15, "global_frame_idx": 12316, "task_index": 9}, {"db_idx": 12317, "episode_idx": 45, "frame_idx": 16, "global_frame_idx": 12317, "task_index": 9}, {"db_idx": 12318, "episode_idx": 45, "frame_idx": 17, "global_frame_idx": 12318, "task_index": 9}, {"db_idx": 12319, "episode_idx": 45, "frame_idx": 18, "global_frame_idx": 12319, "task_index": 9}, {"db_idx": 12320, "episode_idx": 45, "frame_idx": 19, "global_frame_idx": 12320, "task_index": 9}, {"db_idx": 12321, "episode_idx": 45, "frame_idx": 20, "global_frame_idx": 12321, "task_index": 9}, {"db_idx": 12322, "episode_idx": 45, "frame_idx": 21, "global_frame_idx": 12322, "task_index": 9}, {"db_idx": 12323, "episode_idx": 45, "frame_idx": 22, "global_frame_idx": 12323, "task_index": 9}, {"db_idx": 12324, "episode_idx": 45, "frame_idx": 23, "global_frame_idx": 12324, "task_index": 9}, {"db_idx": 12325, "episode_idx": 45, "frame_idx": 24, "global_frame_idx": 12325, "task_index": 9}, {"db_idx": 12326, "episode_idx": 45, "frame_idx": 25, "global_frame_idx": 12326, "task_index": 9}, {"db_idx": 12327, "episode_idx": 45, "frame_idx": 26, "global_frame_idx": 12327, "task_index": 9}, {"db_idx": 12328, "episode_idx": 45, "frame_idx": 27, "global_frame_idx": 12328, "task_index": 9}, {"db_idx": 12329, "episode_idx": 45, "frame_idx": 28, "global_frame_idx": 12329, "task_index": 9}, {"db_idx": 12330, "episode_idx": 45, "frame_idx": 29, "global_frame_idx": 12330, "task_index": 9}, {"db_idx": 12331, "episode_idx": 45, "frame_idx": 30, "global_frame_idx": 12331, "task_index": 9}, {"db_idx": 12332, "episode_idx": 45, "frame_idx": 31, "global_frame_idx": 12332, "task_index": 9}, {"db_idx": 12333, "episode_idx": 45, "frame_idx": 32, "global_frame_idx": 12333, "task_index": 9}, {"db_idx": 12334, "episode_idx": 45, "frame_idx": 33, "global_frame_idx": 12334, "task_index": 9}, {"db_idx": 12335, "episode_idx": 45, "frame_idx": 34, "global_frame_idx": 12335, "task_index": 9}, {"db_idx": 12336, "episode_idx": 45, 
"frame_idx": 35, "global_frame_idx": 12336, "task_index": 9}, {"db_idx": 12337, "episode_idx": 45, "frame_idx": 36, "global_frame_idx": 12337, "task_index": 9}, {"db_idx": 12338, "episode_idx": 45, "frame_idx": 37, "global_frame_idx": 12338, "task_index": 9}, {"db_idx": 12339, "episode_idx": 45, "frame_idx": 38, "global_frame_idx": 12339, "task_index": 9}, {"db_idx": 12340, "episode_idx": 45, "frame_idx": 39, "global_frame_idx": 12340, "task_index": 9}, {"db_idx": 12341, "episode_idx": 45, "frame_idx": 40, "global_frame_idx": 12341, "task_index": 9}, {"db_idx": 12342, "episode_idx": 45, "frame_idx": 41, "global_frame_idx": 12342, "task_index": 9}, {"db_idx": 12343, "episode_idx": 45, "frame_idx": 42, "global_frame_idx": 12343, "task_index": 9}, {"db_idx": 12344, "episode_idx": 45, "frame_idx": 43, "global_frame_idx": 12344, "task_index": 9}, {"db_idx": 12345, "episode_idx": 45, "frame_idx": 44, "global_frame_idx": 12345, "task_index": 9}, {"db_idx": 12346, "episode_idx": 45, "frame_idx": 45, "global_frame_idx": 12346, "task_index": 9}, {"db_idx": 12347, "episode_idx": 45, "frame_idx": 46, "global_frame_idx": 12347, "task_index": 9}, {"db_idx": 12348, "episode_idx": 45, "frame_idx": 47, "global_frame_idx": 12348, "task_index": 9}, {"db_idx": 12349, "episode_idx": 45, "frame_idx": 48, "global_frame_idx": 12349, "task_index": 9}, {"db_idx": 12350, "episode_idx": 45, "frame_idx": 49, "global_frame_idx": 12350, "task_index": 9}, {"db_idx": 12351, "episode_idx": 45, "frame_idx": 50, "global_frame_idx": 12351, "task_index": 9}, {"db_idx": 12352, "episode_idx": 45, "frame_idx": 51, "global_frame_idx": 12352, "task_index": 9}, {"db_idx": 12353, "episode_idx": 45, "frame_idx": 52, "global_frame_idx": 12353, "task_index": 9}, {"db_idx": 12354, "episode_idx": 45, "frame_idx": 53, "global_frame_idx": 12354, "task_index": 9}, {"db_idx": 12355, "episode_idx": 45, "frame_idx": 54, "global_frame_idx": 12355, "task_index": 9}, {"db_idx": 12356, "episode_idx": 45, "frame_idx": 55, 
"global_frame_idx": 12356, "task_index": 9}, {"db_idx": 12357, "episode_idx": 45, "frame_idx": 56, "global_frame_idx": 12357, "task_index": 9}, {"db_idx": 12358, "episode_idx": 45, "frame_idx": 57, "global_frame_idx": 12358, "task_index": 9}, {"db_idx": 12359, "episode_idx": 45, "frame_idx": 58, "global_frame_idx": 12359, "task_index": 9}, {"db_idx": 12360, "episode_idx": 45, "frame_idx": 59, "global_frame_idx": 12360, "task_index": 9}, {"db_idx": 12361, "episode_idx": 45, "frame_idx": 60, "global_frame_idx": 12361, "task_index": 9}, {"db_idx": 12362, "episode_idx": 45, "frame_idx": 61, "global_frame_idx": 12362, "task_index": 9}, {"db_idx": 12363, "episode_idx": 45, "frame_idx": 62, "global_frame_idx": 12363, "task_index": 9}, {"db_idx": 12364, "episode_idx": 45, "frame_idx": 63, "global_frame_idx": 12364, "task_index": 9}, {"db_idx": 12365, "episode_idx": 45, "frame_idx": 64, "global_frame_idx": 12365, "task_index": 9}, {"db_idx": 12366, "episode_idx": 45, "frame_idx": 65, "global_frame_idx": 12366, "task_index": 9}, {"db_idx": 12367, "episode_idx": 45, "frame_idx": 66, "global_frame_idx": 12367, "task_index": 9}, {"db_idx": 12368, "episode_idx": 45, "frame_idx": 67, "global_frame_idx": 12368, "task_index": 9}, {"db_idx": 12369, "episode_idx": 45, "frame_idx": 68, "global_frame_idx": 12369, "task_index": 9}, {"db_idx": 12370, "episode_idx": 45, "frame_idx": 69, "global_frame_idx": 12370, "task_index": 9}, {"db_idx": 12371, "episode_idx": 45, "frame_idx": 70, "global_frame_idx": 12371, "task_index": 9}, {"db_idx": 12372, "episode_idx": 45, "frame_idx": 71, "global_frame_idx": 12372, "task_index": 9}, {"db_idx": 12373, "episode_idx": 45, "frame_idx": 72, "global_frame_idx": 12373, "task_index": 9}, {"db_idx": 12374, "episode_idx": 45, "frame_idx": 73, "global_frame_idx": 12374, "task_index": 9}, {"db_idx": 12375, "episode_idx": 45, "frame_idx": 74, "global_frame_idx": 12375, "task_index": 9}, {"db_idx": 12376, "episode_idx": 45, "frame_idx": 75, "global_frame_idx": 
12376, "task_index": 9}, {"db_idx": 12377, "episode_idx": 45, "frame_idx": 76, "global_frame_idx": 12377, "task_index": 9}, {"db_idx": 12378, "episode_idx": 45, "frame_idx": 77, "global_frame_idx": 12378, "task_index": 9}, {"db_idx": 12379, "episode_idx": 45, "frame_idx": 78, "global_frame_idx": 12379, "task_index": 9}, {"db_idx": 12380, "episode_idx": 45, "frame_idx": 79, "global_frame_idx": 12380, "task_index": 9}, {"db_idx": 12381, "episode_idx": 45, "frame_idx": 80, "global_frame_idx": 12381, "task_index": 9}, {"db_idx": 12382, "episode_idx": 45, "frame_idx": 81, "global_frame_idx": 12382, "task_index": 9}, {"db_idx": 12383, "episode_idx": 45, "frame_idx": 82, "global_frame_idx": 12383, "task_index": 9}, {"db_idx": 12384, "episode_idx": 45, "frame_idx": 83, "global_frame_idx": 12384, "task_index": 9}, {"db_idx": 12385, "episode_idx": 45, "frame_idx": 84, "global_frame_idx": 12385, "task_index": 9}, {"db_idx": 12386, "episode_idx": 45, "frame_idx": 85, "global_frame_idx": 12386, "task_index": 9}, {"db_idx": 12387, "episode_idx": 45, "frame_idx": 86, "global_frame_idx": 12387, "task_index": 9}, {"db_idx": 12388, "episode_idx": 45, "frame_idx": 87, "global_frame_idx": 12388, "task_index": 9}, {"db_idx": 12389, "episode_idx": 45, "frame_idx": 88, "global_frame_idx": 12389, "task_index": 9}, {"db_idx": 12390, "episode_idx": 45, "frame_idx": 89, "global_frame_idx": 12390, "task_index": 9}, {"db_idx": 12391, "episode_idx": 45, "frame_idx": 90, "global_frame_idx": 12391, "task_index": 9}, {"db_idx": 12392, "episode_idx": 45, "frame_idx": 91, "global_frame_idx": 12392, "task_index": 9}, {"db_idx": 12393, "episode_idx": 45, "frame_idx": 92, "global_frame_idx": 12393, "task_index": 9}, {"db_idx": 12394, "episode_idx": 45, "frame_idx": 93, "global_frame_idx": 12394, "task_index": 9}, {"db_idx": 12395, "episode_idx": 45, "frame_idx": 94, "global_frame_idx": 12395, "task_index": 9}, {"db_idx": 12396, "episode_idx": 45, "frame_idx": 95, "global_frame_idx": 12396, 
"task_index": 9}, {"db_idx": 12397, "episode_idx": 45, "frame_idx": 96, "global_frame_idx": 12397, "task_index": 9}, {"db_idx": 12398, "episode_idx": 45, "frame_idx": 97, "global_frame_idx": 12398, "task_index": 9}, {"db_idx": 12399, "episode_idx": 45, "frame_idx": 98, "global_frame_idx": 12399, "task_index": 9}, {"db_idx": 12400, "episode_idx": 45, "frame_idx": 99, "global_frame_idx": 12400, "task_index": 9}, {"db_idx": 12401, "episode_idx": 45, "frame_idx": 100, "global_frame_idx": 12401, "task_index": 9}, {"db_idx": 12402, "episode_idx": 45, "frame_idx": 101, "global_frame_idx": 12402, "task_index": 9}, {"db_idx": 12403, "episode_idx": 45, "frame_idx": 102, "global_frame_idx": 12403, "task_index": 9}, {"db_idx": 12404, "episode_idx": 45, "frame_idx": 103, "global_frame_idx": 12404, "task_index": 9}, {"db_idx": 12405, "episode_idx": 45, "frame_idx": 104, "global_frame_idx": 12405, "task_index": 9}, {"db_idx": 12406, "episode_idx": 45, "frame_idx": 105, "global_frame_idx": 12406, "task_index": 9}, {"db_idx": 12407, "episode_idx": 45, "frame_idx": 106, "global_frame_idx": 12407, "task_index": 9}, {"db_idx": 12408, "episode_idx": 45, "frame_idx": 107, "global_frame_idx": 12408, "task_index": 9}, {"db_idx": 12409, "episode_idx": 45, "frame_idx": 108, "global_frame_idx": 12409, "task_index": 9}, {"db_idx": 12410, "episode_idx": 45, "frame_idx": 109, "global_frame_idx": 12410, "task_index": 9}, {"db_idx": 12411, "episode_idx": 45, "frame_idx": 110, "global_frame_idx": 12411, "task_index": 9}, {"db_idx": 12412, "episode_idx": 45, "frame_idx": 111, "global_frame_idx": 12412, "task_index": 9}, {"db_idx": 12413, "episode_idx": 45, "frame_idx": 112, "global_frame_idx": 12413, "task_index": 9}, {"db_idx": 12414, "episode_idx": 45, "frame_idx": 113, "global_frame_idx": 12414, "task_index": 9}, {"db_idx": 12415, "episode_idx": 45, "frame_idx": 114, "global_frame_idx": 12415, "task_index": 9}, {"db_idx": 12416, "episode_idx": 45, "frame_idx": 115, "global_frame_idx": 12416, 
"task_index": 9}, {"db_idx": 12417, "episode_idx": 45, "frame_idx": 116, "global_frame_idx": 12417, "task_index": 9}, {"db_idx": 12418, "episode_idx": 45, "frame_idx": 117, "global_frame_idx": 12418, "task_index": 9}, {"db_idx": 12419, "episode_idx": 45, "frame_idx": 118, "global_frame_idx": 12419, "task_index": 9}, {"db_idx": 12420, "episode_idx": 45, "frame_idx": 119, "global_frame_idx": 12420, "task_index": 9}, {"db_idx": 12421, "episode_idx": 45, "frame_idx": 120, "global_frame_idx": 12421, "task_index": 9}, {"db_idx": 12422, "episode_idx": 45, "frame_idx": 121, "global_frame_idx": 12422, "task_index": 9}, {"db_idx": 12423, "episode_idx": 45, "frame_idx": 122, "global_frame_idx": 12423, "task_index": 9}, {"db_idx": 12424, "episode_idx": 45, "frame_idx": 123, "global_frame_idx": 12424, "task_index": 9}, {"db_idx": 12425, "episode_idx": 45, "frame_idx": 124, "global_frame_idx": 12425, "task_index": 9}, {"db_idx": 12426, "episode_idx": 45, "frame_idx": 125, "global_frame_idx": 12426, "task_index": 9}, {"db_idx": 12427, "episode_idx": 45, "frame_idx": 126, "global_frame_idx": 12427, "task_index": 9}, {"db_idx": 12428, "episode_idx": 45, "frame_idx": 127, "global_frame_idx": 12428, "task_index": 9}, {"db_idx": 12429, "episode_idx": 45, "frame_idx": 128, "global_frame_idx": 12429, "task_index": 9}, {"db_idx": 12430, "episode_idx": 45, "frame_idx": 129, "global_frame_idx": 12430, "task_index": 9}, {"db_idx": 12431, "episode_idx": 45, "frame_idx": 130, "global_frame_idx": 12431, "task_index": 9}, {"db_idx": 12432, "episode_idx": 45, "frame_idx": 131, "global_frame_idx": 12432, "task_index": 9}, {"db_idx": 12433, "episode_idx": 45, "frame_idx": 132, "global_frame_idx": 12433, "task_index": 9}, {"db_idx": 12434, "episode_idx": 45, "frame_idx": 133, "global_frame_idx": 12434, "task_index": 9}, {"db_idx": 12435, "episode_idx": 45, "frame_idx": 134, "global_frame_idx": 12435, "task_index": 9}, {"db_idx": 12436, "episode_idx": 45, "frame_idx": 135, "global_frame_idx": 12436, 
"task_index": 9}, {"db_idx": 12437, "episode_idx": 45, "frame_idx": 136, "global_frame_idx": 12437, "task_index": 9}, {"db_idx": 12438, "episode_idx": 45, "frame_idx": 137, "global_frame_idx": 12438, "task_index": 9}, {"db_idx": 12439, "episode_idx": 45, "frame_idx": 138, "global_frame_idx": 12439, "task_index": 9}, {"db_idx": 12440, "episode_idx": 45, "frame_idx": 139, "global_frame_idx": 12440, "task_index": 9}, {"db_idx": 12441, "episode_idx": 45, "frame_idx": 140, "global_frame_idx": 12441, "task_index": 9}, {"db_idx": 12442, "episode_idx": 45, "frame_idx": 141, "global_frame_idx": 12442, "task_index": 9}, {"db_idx": 12443, "episode_idx": 45, "frame_idx": 142, "global_frame_idx": 12443, "task_index": 9}, {"db_idx": 12444, "episode_idx": 45, "frame_idx": 143, "global_frame_idx": 12444, "task_index": 9}, {"db_idx": 12445, "episode_idx": 45, "frame_idx": 144, "global_frame_idx": 12445, "task_index": 9}, {"db_idx": 12446, "episode_idx": 45, "frame_idx": 145, "global_frame_idx": 12446, "task_index": 9}, {"db_idx": 12447, "episode_idx": 45, "frame_idx": 146, "global_frame_idx": 12447, "task_index": 9}, {"db_idx": 12448, "episode_idx": 45, "frame_idx": 147, "global_frame_idx": 12448, "task_index": 9}, {"db_idx": 12449, "episode_idx": 45, "frame_idx": 148, "global_frame_idx": 12449, "task_index": 9}, {"db_idx": 12450, "episode_idx": 45, "frame_idx": 149, "global_frame_idx": 12450, "task_index": 9}, {"db_idx": 12451, "episode_idx": 45, "frame_idx": 150, "global_frame_idx": 12451, "task_index": 9}, {"db_idx": 12452, "episode_idx": 45, "frame_idx": 151, "global_frame_idx": 12452, "task_index": 9}, {"db_idx": 12453, "episode_idx": 45, "frame_idx": 152, "global_frame_idx": 12453, "task_index": 9}, {"db_idx": 12454, "episode_idx": 45, "frame_idx": 153, "global_frame_idx": 12454, "task_index": 9}, {"db_idx": 12455, "episode_idx": 45, "frame_idx": 154, "global_frame_idx": 12455, "task_index": 9}, {"db_idx": 12456, "episode_idx": 45, "frame_idx": 155, "global_frame_idx": 12456, 
"task_index": 9}, {"db_idx": 12457, "episode_idx": 45, "frame_idx": 156, "global_frame_idx": 12457, "task_index": 9}, {"db_idx": 12458, "episode_idx": 45, "frame_idx": 157, "global_frame_idx": 12458, "task_index": 9}, {"db_idx": 12459, "episode_idx": 45, "frame_idx": 158, "global_frame_idx": 12459, "task_index": 9}, {"db_idx": 12460, "episode_idx": 45, "frame_idx": 159, "global_frame_idx": 12460, "task_index": 9}, {"db_idx": 12461, "episode_idx": 45, "frame_idx": 160, "global_frame_idx": 12461, "task_index": 9}, {"db_idx": 12462, "episode_idx": 45, "frame_idx": 161, "global_frame_idx": 12462, "task_index": 9}, {"db_idx": 12463, "episode_idx": 45, "frame_idx": 162, "global_frame_idx": 12463, "task_index": 9}, {"db_idx": 12464, "episode_idx": 45, "frame_idx": 163, "global_frame_idx": 12464, "task_index": 9}, {"db_idx": 12465, "episode_idx": 45, "frame_idx": 164, "global_frame_idx": 12465, "task_index": 9}, {"db_idx": 12466, "episode_idx": 45, "frame_idx": 165, "global_frame_idx": 12466, "task_index": 9}, {"db_idx": 12467, "episode_idx": 45, "frame_idx": 166, "global_frame_idx": 12467, "task_index": 9}, {"db_idx": 12468, "episode_idx": 45, "frame_idx": 167, "global_frame_idx": 12468, "task_index": 9}, {"db_idx": 12469, "episode_idx": 45, "frame_idx": 168, "global_frame_idx": 12469, "task_index": 9}, {"db_idx": 12470, "episode_idx": 45, "frame_idx": 169, "global_frame_idx": 12470, "task_index": 9}, {"db_idx": 12471, "episode_idx": 45, "frame_idx": 170, "global_frame_idx": 12471, "task_index": 9}, {"db_idx": 12472, "episode_idx": 45, "frame_idx": 171, "global_frame_idx": 12472, "task_index": 9}, {"db_idx": 12473, "episode_idx": 45, "frame_idx": 172, "global_frame_idx": 12473, "task_index": 9}, {"db_idx": 12474, "episode_idx": 45, "frame_idx": 173, "global_frame_idx": 12474, "task_index": 9}, {"db_idx": 12475, "episode_idx": 45, "frame_idx": 174, "global_frame_idx": 12475, "task_index": 9}, {"db_idx": 12476, "episode_idx": 45, "frame_idx": 175, "global_frame_idx": 12476, 
"task_index": 9}, {"db_idx": 12477, "episode_idx": 45, "frame_idx": 176, "global_frame_idx": 12477, "task_index": 9}, {"db_idx": 12478, "episode_idx": 45, "frame_idx": 177, "global_frame_idx": 12478, "task_index": 9}, {"db_idx": 12479, "episode_idx": 45, "frame_idx": 178, "global_frame_idx": 12479, "task_index": 9}, {"db_idx": 12480, "episode_idx": 45, "frame_idx": 179, "global_frame_idx": 12480, "task_index": 9}, {"db_idx": 12481, "episode_idx": 45, "frame_idx": 180, "global_frame_idx": 12481, "task_index": 9}, {"db_idx": 12482, "episode_idx": 45, "frame_idx": 181, "global_frame_idx": 12482, "task_index": 9}, {"db_idx": 12483, "episode_idx": 45, "frame_idx": 182, "global_frame_idx": 12483, "task_index": 9}, {"db_idx": 12484, "episode_idx": 45, "frame_idx": 183, "global_frame_idx": 12484, "task_index": 9}, {"db_idx": 12485, "episode_idx": 45, "frame_idx": 184, "global_frame_idx": 12485, "task_index": 9}, {"db_idx": 12486, "episode_idx": 45, "frame_idx": 185, "global_frame_idx": 12486, "task_index": 9}, {"db_idx": 12487, "episode_idx": 45, "frame_idx": 186, "global_frame_idx": 12487, "task_index": 9}, {"db_idx": 12488, "episode_idx": 45, "frame_idx": 187, "global_frame_idx": 12488, "task_index": 9}, {"db_idx": 12489, "episode_idx": 45, "frame_idx": 188, "global_frame_idx": 12489, "task_index": 9}, {"db_idx": 12490, "episode_idx": 45, "frame_idx": 189, "global_frame_idx": 12490, "task_index": 9}, {"db_idx": 12491, "episode_idx": 45, "frame_idx": 190, "global_frame_idx": 12491, "task_index": 9}, {"db_idx": 12492, "episode_idx": 45, "frame_idx": 191, "global_frame_idx": 12492, "task_index": 9}, {"db_idx": 12493, "episode_idx": 45, "frame_idx": 192, "global_frame_idx": 12493, "task_index": 9}, {"db_idx": 12494, "episode_idx": 45, "frame_idx": 193, "global_frame_idx": 12494, "task_index": 9}, {"db_idx": 12495, "episode_idx": 45, "frame_idx": 194, "global_frame_idx": 12495, "task_index": 9}, {"db_idx": 12496, "episode_idx": 45, "frame_idx": 195, "global_frame_idx": 12496, 
"task_index": 9}, {"db_idx": 12497, "episode_idx": 45, "frame_idx": 196, "global_frame_idx": 12497, "task_index": 9}, {"db_idx": 12498, "episode_idx": 45, "frame_idx": 197, "global_frame_idx": 12498, "task_index": 9}, {"db_idx": 12499, "episode_idx": 45, "frame_idx": 198, "global_frame_idx": 12499, "task_index": 9}, {"db_idx": 12500, "episode_idx": 45, "frame_idx": 199, "global_frame_idx": 12500, "task_index": 9}, {"db_idx": 12501, "episode_idx": 45, "frame_idx": 200, "global_frame_idx": 12501, "task_index": 9}, {"db_idx": 12502, "episode_idx": 45, "frame_idx": 201, "global_frame_idx": 12502, "task_index": 9}, {"db_idx": 12503, "episode_idx": 45, "frame_idx": 202, "global_frame_idx": 12503, "task_index": 9}, {"db_idx": 12504, "episode_idx": 45, "frame_idx": 203, "global_frame_idx": 12504, "task_index": 9}, {"db_idx": 12505, "episode_idx": 45, "frame_idx": 204, "global_frame_idx": 12505, "task_index": 9}, {"db_idx": 12506, "episode_idx": 45, "frame_idx": 205, "global_frame_idx": 12506, "task_index": 9}, {"db_idx": 12507, "episode_idx": 45, "frame_idx": 206, "global_frame_idx": 12507, "task_index": 9}, {"db_idx": 12508, "episode_idx": 45, "frame_idx": 207, "global_frame_idx": 12508, "task_index": 9}, {"db_idx": 12509, "episode_idx": 45, "frame_idx": 208, "global_frame_idx": 12509, "task_index": 9}, {"db_idx": 12510, "episode_idx": 45, "frame_idx": 209, "global_frame_idx": 12510, "task_index": 9}, {"db_idx": 12511, "episode_idx": 45, "frame_idx": 210, "global_frame_idx": 12511, "task_index": 9}, {"db_idx": 12512, "episode_idx": 45, "frame_idx": 211, "global_frame_idx": 12512, "task_index": 9}, {"db_idx": 12513, "episode_idx": 45, "frame_idx": 212, "global_frame_idx": 12513, "task_index": 9}, {"db_idx": 12514, "episode_idx": 45, "frame_idx": 213, "global_frame_idx": 12514, "task_index": 9}, {"db_idx": 12515, "episode_idx": 45, "frame_idx": 214, "global_frame_idx": 12515, "task_index": 9}, {"db_idx": 12516, "episode_idx": 45, "frame_idx": 215, "global_frame_idx": 12516, 
"task_index": 9}, {"db_idx": 12517, "episode_idx": 45, "frame_idx": 216, "global_frame_idx": 12517, "task_index": 9}, {"db_idx": 12518, "episode_idx": 45, "frame_idx": 217, "global_frame_idx": 12518, "task_index": 9}, {"db_idx": 12519, "episode_idx": 45, "frame_idx": 218, "global_frame_idx": 12519, "task_index": 9}, {"db_idx": 12520, "episode_idx": 45, "frame_idx": 219, "global_frame_idx": 12520, "task_index": 9}, {"db_idx": 12521, "episode_idx": 45, "frame_idx": 220, "global_frame_idx": 12521, "task_index": 9}, {"db_idx": 12522, "episode_idx": 45, "frame_idx": 221, "global_frame_idx": 12522, "task_index": 9}, {"db_idx": 12523, "episode_idx": 45, "frame_idx": 222, "global_frame_idx": 12523, "task_index": 9}, {"db_idx": 12524, "episode_idx": 45, "frame_idx": 223, "global_frame_idx": 12524, "task_index": 9}, {"db_idx": 12525, "episode_idx": 45, "frame_idx": 224, "global_frame_idx": 12525, "task_index": 9}, {"db_idx": 12526, "episode_idx": 45, "frame_idx": 225, "global_frame_idx": 12526, "task_index": 9}, {"db_idx": 12527, "episode_idx": 45, "frame_idx": 226, "global_frame_idx": 12527, "task_index": 9}, {"db_idx": 12528, "episode_idx": 45, "frame_idx": 227, "global_frame_idx": 12528, "task_index": 9}, {"db_idx": 12529, "episode_idx": 45, "frame_idx": 228, "global_frame_idx": 12529, "task_index": 9}, {"db_idx": 12530, "episode_idx": 45, "frame_idx": 229, "global_frame_idx": 12530, "task_index": 9}, {"db_idx": 12531, "episode_idx": 45, "frame_idx": 230, "global_frame_idx": 12531, "task_index": 9}, {"db_idx": 12532, "episode_idx": 45, "frame_idx": 231, "global_frame_idx": 12532, "task_index": 9}, {"db_idx": 12533, "episode_idx": 45, "frame_idx": 232, "global_frame_idx": 12533, "task_index": 9}, {"db_idx": 12534, "episode_idx": 45, "frame_idx": 233, "global_frame_idx": 12534, "task_index": 9}, {"db_idx": 12535, "episode_idx": 45, "frame_idx": 234, "global_frame_idx": 12535, "task_index": 9}, {"db_idx": 12536, "episode_idx": 45, "frame_idx": 235, "global_frame_idx": 12536, 
"task_index": 9}, {"db_idx": 12537, "episode_idx": 45, "frame_idx": 236, "global_frame_idx": 12537, "task_index": 9}, {"db_idx": 12538, "episode_idx": 45, "frame_idx": 237, "global_frame_idx": 12538, "task_index": 9}, {"db_idx": 12539, "episode_idx": 45, "frame_idx": 238, "global_frame_idx": 12539, "task_index": 9}, {"db_idx": 12540, "episode_idx": 45, "frame_idx": 239, "global_frame_idx": 12540, "task_index": 9}, {"db_idx": 12541, "episode_idx": 45, "frame_idx": 240, "global_frame_idx": 12541, "task_index": 9}, {"db_idx": 12542, "episode_idx": 45, "frame_idx": 241, "global_frame_idx": 12542, "task_index": 9}, {"db_idx": 12543, "episode_idx": 45, "frame_idx": 242, "global_frame_idx": 12543, "task_index": 9}, {"db_idx": 12544, "episode_idx": 45, "frame_idx": 243, "global_frame_idx": 12544, "task_index": 9}, {"db_idx": 12545, "episode_idx": 45, "frame_idx": 244, "global_frame_idx": 12545, "task_index": 9}, {"db_idx": 12546, "episode_idx": 45, "frame_idx": 245, "global_frame_idx": 12546, "task_index": 9}, {"db_idx": 12547, "episode_idx": 45, "frame_idx": 246, "global_frame_idx": 12547, "task_index": 9}, {"db_idx": 12548, "episode_idx": 45, "frame_idx": 247, "global_frame_idx": 12548, "task_index": 9}, {"db_idx": 12549, "episode_idx": 45, "frame_idx": 248, "global_frame_idx": 12549, "task_index": 9}, {"db_idx": 12550, "episode_idx": 45, "frame_idx": 249, "global_frame_idx": 12550, "task_index": 9}, {"db_idx": 12551, "episode_idx": 45, "frame_idx": 250, "global_frame_idx": 12551, "task_index": 9}, {"db_idx": 12552, "episode_idx": 45, "frame_idx": 251, "global_frame_idx": 12552, "task_index": 9}, {"db_idx": 12553, "episode_idx": 45, "frame_idx": 252, "global_frame_idx": 12553, "task_index": 9}, {"db_idx": 12554, "episode_idx": 45, "frame_idx": 253, "global_frame_idx": 12554, "task_index": 9}, {"db_idx": 12555, "episode_idx": 45, "frame_idx": 254, "global_frame_idx": 12555, "task_index": 9}, {"db_idx": 12556, "episode_idx": 45, "frame_idx": 255, "global_frame_idx": 12556, 
"task_index": 9}, {"db_idx": 12557, "episode_idx": 45, "frame_idx": 256, "global_frame_idx": 12557, "task_index": 9}, {"db_idx": 12558, "episode_idx": 45, "frame_idx": 257, "global_frame_idx": 12558, "task_index": 9}, {"db_idx": 12559, "episode_idx": 45, "frame_idx": 258, "global_frame_idx": 12559, "task_index": 9}, {"db_idx": 12560, "episode_idx": 45, "frame_idx": 259, "global_frame_idx": 12560, "task_index": 9}, {"db_idx": 12561, "episode_idx": 45, "frame_idx": 260, "global_frame_idx": 12561, "task_index": 9}, {"db_idx": 12562, "episode_idx": 45, "frame_idx": 261, "global_frame_idx": 12562, "task_index": 9}, {"db_idx": 12563, "episode_idx": 45, "frame_idx": 262, "global_frame_idx": 12563, "task_index": 9}, {"db_idx": 12564, "episode_idx": 45, "frame_idx": 263, "global_frame_idx": 12564, "task_index": 9}, {"db_idx": 12565, "episode_idx": 45, "frame_idx": 264, "global_frame_idx": 12565, "task_index": 9}, {"db_idx": 12566, "episode_idx": 45, "frame_idx": 265, "global_frame_idx": 12566, "task_index": 9}, {"db_idx": 12567, "episode_idx": 45, "frame_idx": 266, "global_frame_idx": 12567, "task_index": 9}, {"db_idx": 12568, "episode_idx": 45, "frame_idx": 267, "global_frame_idx": 12568, "task_index": 9}, {"db_idx": 12569, "episode_idx": 45, "frame_idx": 268, "global_frame_idx": 12569, "task_index": 9}, {"db_idx": 12570, "episode_idx": 45, "frame_idx": 269, "global_frame_idx": 12570, "task_index": 9}, {"db_idx": 12571, "episode_idx": 45, "frame_idx": 270, "global_frame_idx": 12571, "task_index": 9}, {"db_idx": 12572, "episode_idx": 45, "frame_idx": 271, "global_frame_idx": 12572, "task_index": 9}, {"db_idx": 12573, "episode_idx": 45, "frame_idx": 272, "global_frame_idx": 12573, "task_index": 9}, {"db_idx": 12574, "episode_idx": 45, "frame_idx": 273, "global_frame_idx": 12574, "task_index": 9}, {"db_idx": 12575, "episode_idx": 45, "frame_idx": 274, "global_frame_idx": 12575, "task_index": 9}, {"db_idx": 12576, "episode_idx": 45, "frame_idx": 275, "global_frame_idx": 12576, 
"task_index": 9}, {"db_idx": 12577, "episode_idx": 45, "frame_idx": 276, "global_frame_idx": 12577, "task_index": 9}, {"db_idx": 12578, "episode_idx": 45, "frame_idx": 277, "global_frame_idx": 12578, "task_index": 9}, {"db_idx": 12579, "episode_idx": 45, "frame_idx": 278, "global_frame_idx": 12579, "task_index": 9}, {"db_idx": 12580, "episode_idx": 45, "frame_idx": 279, "global_frame_idx": 12580, "task_index": 9}, {"db_idx": 12581, "episode_idx": 45, "frame_idx": 280, "global_frame_idx": 12581, "task_index": 9}, {"db_idx": 12582, "episode_idx": 45, "frame_idx": 281, "global_frame_idx": 12582, "task_index": 9}, {"db_idx": 12583, "episode_idx": 45, "frame_idx": 282, "global_frame_idx": 12583, "task_index": 9}, {"db_idx": 12584, "episode_idx": 45, "frame_idx": 283, "global_frame_idx": 12584, "task_index": 9}, {"db_idx": 12585, "episode_idx": 45, "frame_idx": 284, "global_frame_idx": 12585, "task_index": 9}, {"db_idx": 12586, "episode_idx": 46, "frame_idx": 0, "global_frame_idx": 12586, "task_index": 9}, {"db_idx": 12587, "episode_idx": 46, "frame_idx": 1, "global_frame_idx": 12587, "task_index": 9}, {"db_idx": 12588, "episode_idx": 46, "frame_idx": 2, "global_frame_idx": 12588, "task_index": 9}, {"db_idx": 12589, "episode_idx": 46, "frame_idx": 3, "global_frame_idx": 12589, "task_index": 9}, {"db_idx": 12590, "episode_idx": 46, "frame_idx": 4, "global_frame_idx": 12590, "task_index": 9}, {"db_idx": 12591, "episode_idx": 46, "frame_idx": 5, "global_frame_idx": 12591, "task_index": 9}, {"db_idx": 12592, "episode_idx": 46, "frame_idx": 6, "global_frame_idx": 12592, "task_index": 9}, {"db_idx": 12593, "episode_idx": 46, "frame_idx": 7, "global_frame_idx": 12593, "task_index": 9}, {"db_idx": 12594, "episode_idx": 46, "frame_idx": 8, "global_frame_idx": 12594, "task_index": 9}, {"db_idx": 12595, "episode_idx": 46, "frame_idx": 9, "global_frame_idx": 12595, "task_index": 9}, {"db_idx": 12596, "episode_idx": 46, "frame_idx": 10, "global_frame_idx": 12596, "task_index": 9}, 
{"db_idx": 12597, "episode_idx": 46, "frame_idx": 11, "global_frame_idx": 12597, "task_index": 9}, {"db_idx": 12598, "episode_idx": 46, "frame_idx": 12, "global_frame_idx": 12598, "task_index": 9}, {"db_idx": 12599, "episode_idx": 46, "frame_idx": 13, "global_frame_idx": 12599, "task_index": 9}, {"db_idx": 12600, "episode_idx": 46, "frame_idx": 14, "global_frame_idx": 12600, "task_index": 9}, {"db_idx": 12601, "episode_idx": 46, "frame_idx": 15, "global_frame_idx": 12601, "task_index": 9}, {"db_idx": 12602, "episode_idx": 46, "frame_idx": 16, "global_frame_idx": 12602, "task_index": 9}, {"db_idx": 12603, "episode_idx": 46, "frame_idx": 17, "global_frame_idx": 12603, "task_index": 9}, {"db_idx": 12604, "episode_idx": 46, "frame_idx": 18, "global_frame_idx": 12604, "task_index": 9}, {"db_idx": 12605, "episode_idx": 46, "frame_idx": 19, "global_frame_idx": 12605, "task_index": 9}, {"db_idx": 12606, "episode_idx": 46, "frame_idx": 20, "global_frame_idx": 12606, "task_index": 9}, {"db_idx": 12607, "episode_idx": 46, "frame_idx": 21, "global_frame_idx": 12607, "task_index": 9}, {"db_idx": 12608, "episode_idx": 46, "frame_idx": 22, "global_frame_idx": 12608, "task_index": 9}, {"db_idx": 12609, "episode_idx": 46, "frame_idx": 23, "global_frame_idx": 12609, "task_index": 9}, {"db_idx": 12610, "episode_idx": 46, "frame_idx": 24, "global_frame_idx": 12610, "task_index": 9}, {"db_idx": 12611, "episode_idx": 46, "frame_idx": 25, "global_frame_idx": 12611, "task_index": 9}, {"db_idx": 12612, "episode_idx": 46, "frame_idx": 26, "global_frame_idx": 12612, "task_index": 9}, {"db_idx": 12613, "episode_idx": 46, "frame_idx": 27, "global_frame_idx": 12613, "task_index": 9}, {"db_idx": 12614, "episode_idx": 46, "frame_idx": 28, "global_frame_idx": 12614, "task_index": 9}, {"db_idx": 12615, "episode_idx": 46, "frame_idx": 29, "global_frame_idx": 12615, "task_index": 9}, {"db_idx": 12616, "episode_idx": 46, "frame_idx": 30, "global_frame_idx": 12616, "task_index": 9}, {"db_idx": 12617, 
"episode_idx": 46, "frame_idx": 31, "global_frame_idx": 12617, "task_index": 9}, {"db_idx": 12618, "episode_idx": 46, "frame_idx": 32, "global_frame_idx": 12618, "task_index": 9}, {"db_idx": 12619, "episode_idx": 46, "frame_idx": 33, "global_frame_idx": 12619, "task_index": 9}, {"db_idx": 12620, "episode_idx": 46, "frame_idx": 34, "global_frame_idx": 12620, "task_index": 9}, {"db_idx": 12621, "episode_idx": 46, "frame_idx": 35, "global_frame_idx": 12621, "task_index": 9}, {"db_idx": 12622, "episode_idx": 46, "frame_idx": 36, "global_frame_idx": 12622, "task_index": 9}, {"db_idx": 12623, "episode_idx": 46, "frame_idx": 37, "global_frame_idx": 12623, "task_index": 9}, {"db_idx": 12624, "episode_idx": 46, "frame_idx": 38, "global_frame_idx": 12624, "task_index": 9}, {"db_idx": 12625, "episode_idx": 46, "frame_idx": 39, "global_frame_idx": 12625, "task_index": 9}, {"db_idx": 12626, "episode_idx": 46, "frame_idx": 40, "global_frame_idx": 12626, "task_index": 9}, {"db_idx": 12627, "episode_idx": 46, "frame_idx": 41, "global_frame_idx": 12627, "task_index": 9}, {"db_idx": 12628, "episode_idx": 46, "frame_idx": 42, "global_frame_idx": 12628, "task_index": 9}, {"db_idx": 12629, "episode_idx": 46, "frame_idx": 43, "global_frame_idx": 12629, "task_index": 9}, {"db_idx": 12630, "episode_idx": 46, "frame_idx": 44, "global_frame_idx": 12630, "task_index": 9}, {"db_idx": 12631, "episode_idx": 46, "frame_idx": 45, "global_frame_idx": 12631, "task_index": 9}, {"db_idx": 12632, "episode_idx": 46, "frame_idx": 46, "global_frame_idx": 12632, "task_index": 9}, {"db_idx": 12633, "episode_idx": 46, "frame_idx": 47, "global_frame_idx": 12633, "task_index": 9}, {"db_idx": 12634, "episode_idx": 46, "frame_idx": 48, "global_frame_idx": 12634, "task_index": 9}, {"db_idx": 12635, "episode_idx": 46, "frame_idx": 49, "global_frame_idx": 12635, "task_index": 9}, {"db_idx": 12636, "episode_idx": 46, "frame_idx": 50, "global_frame_idx": 12636, "task_index": 9}, {"db_idx": 12637, "episode_idx": 46, 
"frame_idx": 51, "global_frame_idx": 12637, "task_index": 9}, {"db_idx": 12638, "episode_idx": 46, "frame_idx": 52, "global_frame_idx": 12638, "task_index": 9}, {"db_idx": 12639, "episode_idx": 46, "frame_idx": 53, "global_frame_idx": 12639, "task_index": 9}, {"db_idx": 12640, "episode_idx": 46, "frame_idx": 54, "global_frame_idx": 12640, "task_index": 9}, {"db_idx": 12641, "episode_idx": 46, "frame_idx": 55, "global_frame_idx": 12641, "task_index": 9}, {"db_idx": 12642, "episode_idx": 46, "frame_idx": 56, "global_frame_idx": 12642, "task_index": 9}, {"db_idx": 12643, "episode_idx": 46, "frame_idx": 57, "global_frame_idx": 12643, "task_index": 9}, {"db_idx": 12644, "episode_idx": 46, "frame_idx": 58, "global_frame_idx": 12644, "task_index": 9}, {"db_idx": 12645, "episode_idx": 46, "frame_idx": 59, "global_frame_idx": 12645, "task_index": 9}, {"db_idx": 12646, "episode_idx": 46, "frame_idx": 60, "global_frame_idx": 12646, "task_index": 9}, {"db_idx": 12647, "episode_idx": 46, "frame_idx": 61, "global_frame_idx": 12647, "task_index": 9}, {"db_idx": 12648, "episode_idx": 46, "frame_idx": 62, "global_frame_idx": 12648, "task_index": 9}, {"db_idx": 12649, "episode_idx": 46, "frame_idx": 63, "global_frame_idx": 12649, "task_index": 9}, {"db_idx": 12650, "episode_idx": 46, "frame_idx": 64, "global_frame_idx": 12650, "task_index": 9}, {"db_idx": 12651, "episode_idx": 46, "frame_idx": 65, "global_frame_idx": 12651, "task_index": 9}, {"db_idx": 12652, "episode_idx": 46, "frame_idx": 66, "global_frame_idx": 12652, "task_index": 9}, {"db_idx": 12653, "episode_idx": 46, "frame_idx": 67, "global_frame_idx": 12653, "task_index": 9}, {"db_idx": 12654, "episode_idx": 46, "frame_idx": 68, "global_frame_idx": 12654, "task_index": 9}, {"db_idx": 12655, "episode_idx": 46, "frame_idx": 69, "global_frame_idx": 12655, "task_index": 9}, {"db_idx": 12656, "episode_idx": 46, "frame_idx": 70, "global_frame_idx": 12656, "task_index": 9}, {"db_idx": 12657, "episode_idx": 46, "frame_idx": 71, 
"global_frame_idx": 12657, "task_index": 9}, {"db_idx": 12658, "episode_idx": 46, "frame_idx": 72, "global_frame_idx": 12658, "task_index": 9}, {"db_idx": 12659, "episode_idx": 46, "frame_idx": 73, "global_frame_idx": 12659, "task_index": 9}, {"db_idx": 12660, "episode_idx": 46, "frame_idx": 74, "global_frame_idx": 12660, "task_index": 9}, {"db_idx": 12661, "episode_idx": 46, "frame_idx": 75, "global_frame_idx": 12661, "task_index": 9}, {"db_idx": 12662, "episode_idx": 46, "frame_idx": 76, "global_frame_idx": 12662, "task_index": 9}, {"db_idx": 12663, "episode_idx": 46, "frame_idx": 77, "global_frame_idx": 12663, "task_index": 9}, {"db_idx": 12664, "episode_idx": 46, "frame_idx": 78, "global_frame_idx": 12664, "task_index": 9}, {"db_idx": 12665, "episode_idx": 46, "frame_idx": 79, "global_frame_idx": 12665, "task_index": 9}, {"db_idx": 12666, "episode_idx": 46, "frame_idx": 80, "global_frame_idx": 12666, "task_index": 9}, {"db_idx": 12667, "episode_idx": 46, "frame_idx": 81, "global_frame_idx": 12667, "task_index": 9}, {"db_idx": 12668, "episode_idx": 46, "frame_idx": 82, "global_frame_idx": 12668, "task_index": 9}, {"db_idx": 12669, "episode_idx": 46, "frame_idx": 83, "global_frame_idx": 12669, "task_index": 9}, {"db_idx": 12670, "episode_idx": 46, "frame_idx": 84, "global_frame_idx": 12670, "task_index": 9}, {"db_idx": 12671, "episode_idx": 46, "frame_idx": 85, "global_frame_idx": 12671, "task_index": 9}, {"db_idx": 12672, "episode_idx": 46, "frame_idx": 86, "global_frame_idx": 12672, "task_index": 9}, {"db_idx": 12673, "episode_idx": 46, "frame_idx": 87, "global_frame_idx": 12673, "task_index": 9}, {"db_idx": 12674, "episode_idx": 46, "frame_idx": 88, "global_frame_idx": 12674, "task_index": 9}, {"db_idx": 12675, "episode_idx": 46, "frame_idx": 89, "global_frame_idx": 12675, "task_index": 9}, {"db_idx": 12676, "episode_idx": 46, "frame_idx": 90, "global_frame_idx": 12676, "task_index": 9}, {"db_idx": 12677, "episode_idx": 46, "frame_idx": 91, "global_frame_idx": 
12677, "task_index": 9}, {"db_idx": 12678, "episode_idx": 46, "frame_idx": 92, "global_frame_idx": 12678, "task_index": 9}, {"db_idx": 12679, "episode_idx": 46, "frame_idx": 93, "global_frame_idx": 12679, "task_index": 9}, {"db_idx": 12680, "episode_idx": 46, "frame_idx": 94, "global_frame_idx": 12680, "task_index": 9}, {"db_idx": 12681, "episode_idx": 46, "frame_idx": 95, "global_frame_idx": 12681, "task_index": 9}, {"db_idx": 12682, "episode_idx": 46, "frame_idx": 96, "global_frame_idx": 12682, "task_index": 9}, {"db_idx": 12683, "episode_idx": 46, "frame_idx": 97, "global_frame_idx": 12683, "task_index": 9}, {"db_idx": 12684, "episode_idx": 46, "frame_idx": 98, "global_frame_idx": 12684, "task_index": 9}, {"db_idx": 12685, "episode_idx": 46, "frame_idx": 99, "global_frame_idx": 12685, "task_index": 9}, {"db_idx": 12686, "episode_idx": 46, "frame_idx": 100, "global_frame_idx": 12686, "task_index": 9}, {"db_idx": 12687, "episode_idx": 46, "frame_idx": 101, "global_frame_idx": 12687, "task_index": 9}, {"db_idx": 12688, "episode_idx": 46, "frame_idx": 102, "global_frame_idx": 12688, "task_index": 9}, {"db_idx": 12689, "episode_idx": 46, "frame_idx": 103, "global_frame_idx": 12689, "task_index": 9}, {"db_idx": 12690, "episode_idx": 46, "frame_idx": 104, "global_frame_idx": 12690, "task_index": 9}, {"db_idx": 12691, "episode_idx": 46, "frame_idx": 105, "global_frame_idx": 12691, "task_index": 9}, {"db_idx": 12692, "episode_idx": 46, "frame_idx": 106, "global_frame_idx": 12692, "task_index": 9}, {"db_idx": 12693, "episode_idx": 46, "frame_idx": 107, "global_frame_idx": 12693, "task_index": 9}, {"db_idx": 12694, "episode_idx": 46, "frame_idx": 108, "global_frame_idx": 12694, "task_index": 9}, {"db_idx": 12695, "episode_idx": 46, "frame_idx": 109, "global_frame_idx": 12695, "task_index": 9}, {"db_idx": 12696, "episode_idx": 46, "frame_idx": 110, "global_frame_idx": 12696, "task_index": 9}, {"db_idx": 12697, "episode_idx": 46, "frame_idx": 111, "global_frame_idx": 12697, 
"task_index": 9}, {"db_idx": 12698, "episode_idx": 46, "frame_idx": 112, "global_frame_idx": 12698, "task_index": 9}, {"db_idx": 12699, "episode_idx": 46, "frame_idx": 113, "global_frame_idx": 12699, "task_index": 9}, {"db_idx": 12700, "episode_idx": 46, "frame_idx": 114, "global_frame_idx": 12700, "task_index": 9}, {"db_idx": 12701, "episode_idx": 46, "frame_idx": 115, "global_frame_idx": 12701, "task_index": 9}, {"db_idx": 12702, "episode_idx": 46, "frame_idx": 116, "global_frame_idx": 12702, "task_index": 9}, {"db_idx": 12703, "episode_idx": 46, "frame_idx": 117, "global_frame_idx": 12703, "task_index": 9}, {"db_idx": 12704, "episode_idx": 46, "frame_idx": 118, "global_frame_idx": 12704, "task_index": 9}, {"db_idx": 12705, "episode_idx": 46, "frame_idx": 119, "global_frame_idx": 12705, "task_index": 9}, {"db_idx": 12706, "episode_idx": 46, "frame_idx": 120, "global_frame_idx": 12706, "task_index": 9}, {"db_idx": 12707, "episode_idx": 46, "frame_idx": 121, "global_frame_idx": 12707, "task_index": 9}, {"db_idx": 12708, "episode_idx": 46, "frame_idx": 122, "global_frame_idx": 12708, "task_index": 9}, {"db_idx": 12709, "episode_idx": 46, "frame_idx": 123, "global_frame_idx": 12709, "task_index": 9}, {"db_idx": 12710, "episode_idx": 46, "frame_idx": 124, "global_frame_idx": 12710, "task_index": 9}, {"db_idx": 12711, "episode_idx": 46, "frame_idx": 125, "global_frame_idx": 12711, "task_index": 9}, {"db_idx": 12712, "episode_idx": 46, "frame_idx": 126, "global_frame_idx": 12712, "task_index": 9}, {"db_idx": 12713, "episode_idx": 46, "frame_idx": 127, "global_frame_idx": 12713, "task_index": 9}, {"db_idx": 12714, "episode_idx": 46, "frame_idx": 128, "global_frame_idx": 12714, "task_index": 9}, {"db_idx": 12715, "episode_idx": 46, "frame_idx": 129, "global_frame_idx": 12715, "task_index": 9}, {"db_idx": 12716, "episode_idx": 46, "frame_idx": 130, "global_frame_idx": 12716, "task_index": 9}, {"db_idx": 12717, "episode_idx": 46, "frame_idx": 131, "global_frame_idx": 12717, 
"task_index": 9}, {"db_idx": 12718, "episode_idx": 46, "frame_idx": 132, "global_frame_idx": 12718, "task_index": 9}, {"db_idx": 12719, "episode_idx": 46, "frame_idx": 133, "global_frame_idx": 12719, "task_index": 9}, {"db_idx": 12720, "episode_idx": 46, "frame_idx": 134, "global_frame_idx": 12720, "task_index": 9}, {"db_idx": 12721, "episode_idx": 46, "frame_idx": 135, "global_frame_idx": 12721, "task_index": 9}, {"db_idx": 12722, "episode_idx": 46, "frame_idx": 136, "global_frame_idx": 12722, "task_index": 9}, {"db_idx": 12723, "episode_idx": 46, "frame_idx": 137, "global_frame_idx": 12723, "task_index": 9}, {"db_idx": 12724, "episode_idx": 46, "frame_idx": 138, "global_frame_idx": 12724, "task_index": 9}, {"db_idx": 12725, "episode_idx": 46, "frame_idx": 139, "global_frame_idx": 12725, "task_index": 9}, {"db_idx": 12726, "episode_idx": 46, "frame_idx": 140, "global_frame_idx": 12726, "task_index": 9}, {"db_idx": 12727, "episode_idx": 46, "frame_idx": 141, "global_frame_idx": 12727, "task_index": 9}, {"db_idx": 12728, "episode_idx": 46, "frame_idx": 142, "global_frame_idx": 12728, "task_index": 9}, {"db_idx": 12729, "episode_idx": 46, "frame_idx": 143, "global_frame_idx": 12729, "task_index": 9}, {"db_idx": 12730, "episode_idx": 46, "frame_idx": 144, "global_frame_idx": 12730, "task_index": 9}, {"db_idx": 12731, "episode_idx": 46, "frame_idx": 145, "global_frame_idx": 12731, "task_index": 9}, {"db_idx": 12732, "episode_idx": 46, "frame_idx": 146, "global_frame_idx": 12732, "task_index": 9}, {"db_idx": 12733, "episode_idx": 46, "frame_idx": 147, "global_frame_idx": 12733, "task_index": 9}, {"db_idx": 12734, "episode_idx": 46, "frame_idx": 148, "global_frame_idx": 12734, "task_index": 9}, {"db_idx": 12735, "episode_idx": 46, "frame_idx": 149, "global_frame_idx": 12735, "task_index": 9}, {"db_idx": 12736, "episode_idx": 46, "frame_idx": 150, "global_frame_idx": 12736, "task_index": 9}, {"db_idx": 12737, "episode_idx": 46, "frame_idx": 151, "global_frame_idx": 12737, 
"task_index": 9}, {"db_idx": 12738, "episode_idx": 46, "frame_idx": 152, "global_frame_idx": 12738, "task_index": 9}, {"db_idx": 12739, "episode_idx": 46, "frame_idx": 153, "global_frame_idx": 12739, "task_index": 9}, {"db_idx": 12740, "episode_idx": 46, "frame_idx": 154, "global_frame_idx": 12740, "task_index": 9}, {"db_idx": 12741, "episode_idx": 46, "frame_idx": 155, "global_frame_idx": 12741, "task_index": 9}, {"db_idx": 12742, "episode_idx": 46, "frame_idx": 156, "global_frame_idx": 12742, "task_index": 9}, {"db_idx": 12743, "episode_idx": 46, "frame_idx": 157, "global_frame_idx": 12743, "task_index": 9}, {"db_idx": 12744, "episode_idx": 46, "frame_idx": 158, "global_frame_idx": 12744, "task_index": 9}, {"db_idx": 12745, "episode_idx": 46, "frame_idx": 159, "global_frame_idx": 12745, "task_index": 9}, {"db_idx": 12746, "episode_idx": 46, "frame_idx": 160, "global_frame_idx": 12746, "task_index": 9}, {"db_idx": 12747, "episode_idx": 46, "frame_idx": 161, "global_frame_idx": 12747, "task_index": 9}, {"db_idx": 12748, "episode_idx": 46, "frame_idx": 162, "global_frame_idx": 12748, "task_index": 9}, {"db_idx": 12749, "episode_idx": 46, "frame_idx": 163, "global_frame_idx": 12749, "task_index": 9}, {"db_idx": 12750, "episode_idx": 46, "frame_idx": 164, "global_frame_idx": 12750, "task_index": 9}, {"db_idx": 12751, "episode_idx": 46, "frame_idx": 165, "global_frame_idx": 12751, "task_index": 9}, {"db_idx": 12752, "episode_idx": 46, "frame_idx": 166, "global_frame_idx": 12752, "task_index": 9}, {"db_idx": 12753, "episode_idx": 46, "frame_idx": 167, "global_frame_idx": 12753, "task_index": 9}, {"db_idx": 12754, "episode_idx": 46, "frame_idx": 168, "global_frame_idx": 12754, "task_index": 9}, {"db_idx": 12755, "episode_idx": 46, "frame_idx": 169, "global_frame_idx": 12755, "task_index": 9}, {"db_idx": 12756, "episode_idx": 46, "frame_idx": 170, "global_frame_idx": 12756, "task_index": 9}, {"db_idx": 12757, "episode_idx": 46, "frame_idx": 171, "global_frame_idx": 12757, 
"task_index": 9}, {"db_idx": 12758, "episode_idx": 46, "frame_idx": 172, "global_frame_idx": 12758, "task_index": 9}, {"db_idx": 12759, "episode_idx": 46, "frame_idx": 173, "global_frame_idx": 12759, "task_index": 9}, {"db_idx": 12760, "episode_idx": 46, "frame_idx": 174, "global_frame_idx": 12760, "task_index": 9}, {"db_idx": 12761, "episode_idx": 46, "frame_idx": 175, "global_frame_idx": 12761, "task_index": 9}, {"db_idx": 12762, "episode_idx": 46, "frame_idx": 176, "global_frame_idx": 12762, "task_index": 9}, {"db_idx": 12763, "episode_idx": 46, "frame_idx": 177, "global_frame_idx": 12763, "task_index": 9}, {"db_idx": 12764, "episode_idx": 46, "frame_idx": 178, "global_frame_idx": 12764, "task_index": 9}, {"db_idx": 12765, "episode_idx": 46, "frame_idx": 179, "global_frame_idx": 12765, "task_index": 9}, {"db_idx": 12766, "episode_idx": 46, "frame_idx": 180, "global_frame_idx": 12766, "task_index": 9}, {"db_idx": 12767, "episode_idx": 46, "frame_idx": 181, "global_frame_idx": 12767, "task_index": 9}, {"db_idx": 12768, "episode_idx": 46, "frame_idx": 182, "global_frame_idx": 12768, "task_index": 9}, {"db_idx": 12769, "episode_idx": 46, "frame_idx": 183, "global_frame_idx": 12769, "task_index": 9}, {"db_idx": 12770, "episode_idx": 46, "frame_idx": 184, "global_frame_idx": 12770, "task_index": 9}, {"db_idx": 12771, "episode_idx": 46, "frame_idx": 185, "global_frame_idx": 12771, "task_index": 9}, {"db_idx": 12772, "episode_idx": 46, "frame_idx": 186, "global_frame_idx": 12772, "task_index": 9}, {"db_idx": 12773, "episode_idx": 46, "frame_idx": 187, "global_frame_idx": 12773, "task_index": 9}, {"db_idx": 12774, "episode_idx": 46, "frame_idx": 188, "global_frame_idx": 12774, "task_index": 9}, {"db_idx": 12775, "episode_idx": 46, "frame_idx": 189, "global_frame_idx": 12775, "task_index": 9}, {"db_idx": 12776, "episode_idx": 46, "frame_idx": 190, "global_frame_idx": 12776, "task_index": 9}, {"db_idx": 12777, "episode_idx": 46, "frame_idx": 191, "global_frame_idx": 12777, 
"task_index": 9}, {"db_idx": 12778, "episode_idx": 46, "frame_idx": 192, "global_frame_idx": 12778, "task_index": 9}, {"db_idx": 12779, "episode_idx": 46, "frame_idx": 193, "global_frame_idx": 12779, "task_index": 9}, {"db_idx": 12780, "episode_idx": 46, "frame_idx": 194, "global_frame_idx": 12780, "task_index": 9}, {"db_idx": 12781, "episode_idx": 46, "frame_idx": 195, "global_frame_idx": 12781, "task_index": 9}, {"db_idx": 12782, "episode_idx": 46, "frame_idx": 196, "global_frame_idx": 12782, "task_index": 9}, {"db_idx": 12783, "episode_idx": 46, "frame_idx": 197, "global_frame_idx": 12783, "task_index": 9}, {"db_idx": 12784, "episode_idx": 46, "frame_idx": 198, "global_frame_idx": 12784, "task_index": 9}, {"db_idx": 12785, "episode_idx": 46, "frame_idx": 199, "global_frame_idx": 12785, "task_index": 9}, {"db_idx": 12786, "episode_idx": 46, "frame_idx": 200, "global_frame_idx": 12786, "task_index": 9}, {"db_idx": 12787, "episode_idx": 46, "frame_idx": 201, "global_frame_idx": 12787, "task_index": 9}, {"db_idx": 12788, "episode_idx": 46, "frame_idx": 202, "global_frame_idx": 12788, "task_index": 9}, {"db_idx": 12789, "episode_idx": 46, "frame_idx": 203, "global_frame_idx": 12789, "task_index": 9}, {"db_idx": 12790, "episode_idx": 46, "frame_idx": 204, "global_frame_idx": 12790, "task_index": 9}, {"db_idx": 12791, "episode_idx": 46, "frame_idx": 205, "global_frame_idx": 12791, "task_index": 9}, {"db_idx": 12792, "episode_idx": 46, "frame_idx": 206, "global_frame_idx": 12792, "task_index": 9}, {"db_idx": 12793, "episode_idx": 46, "frame_idx": 207, "global_frame_idx": 12793, "task_index": 9}, {"db_idx": 12794, "episode_idx": 46, "frame_idx": 208, "global_frame_idx": 12794, "task_index": 9}, {"db_idx": 12795, "episode_idx": 46, "frame_idx": 209, "global_frame_idx": 12795, "task_index": 9}, {"db_idx": 12796, "episode_idx": 46, "frame_idx": 210, "global_frame_idx": 12796, "task_index": 9}, {"db_idx": 12797, "episode_idx": 46, "frame_idx": 211, "global_frame_idx": 12797, 
"task_index": 9}, {"db_idx": 12798, "episode_idx": 46, "frame_idx": 212, "global_frame_idx": 12798, "task_index": 9}, {"db_idx": 12799, "episode_idx": 46, "frame_idx": 213, "global_frame_idx": 12799, "task_index": 9}, {"db_idx": 12800, "episode_idx": 46, "frame_idx": 214, "global_frame_idx": 12800, "task_index": 9}, {"db_idx": 12801, "episode_idx": 46, "frame_idx": 215, "global_frame_idx": 12801, "task_index": 9}, {"db_idx": 12802, "episode_idx": 46, "frame_idx": 216, "global_frame_idx": 12802, "task_index": 9}, {"db_idx": 12803, "episode_idx": 46, "frame_idx": 217, "global_frame_idx": 12803, "task_index": 9}, {"db_idx": 12804, "episode_idx": 46, "frame_idx": 218, "global_frame_idx": 12804, "task_index": 9}, {"db_idx": 12805, "episode_idx": 46, "frame_idx": 219, "global_frame_idx": 12805, "task_index": 9}, {"db_idx": 12806, "episode_idx": 46, "frame_idx": 220, "global_frame_idx": 12806, "task_index": 9}, {"db_idx": 12807, "episode_idx": 46, "frame_idx": 221, "global_frame_idx": 12807, "task_index": 9}, {"db_idx": 12808, "episode_idx": 46, "frame_idx": 222, "global_frame_idx": 12808, "task_index": 9}, {"db_idx": 12809, "episode_idx": 46, "frame_idx": 223, "global_frame_idx": 12809, "task_index": 9}, {"db_idx": 12810, "episode_idx": 46, "frame_idx": 224, "global_frame_idx": 12810, "task_index": 9}, {"db_idx": 12811, "episode_idx": 46, "frame_idx": 225, "global_frame_idx": 12811, "task_index": 9}, {"db_idx": 12812, "episode_idx": 46, "frame_idx": 226, "global_frame_idx": 12812, "task_index": 9}, {"db_idx": 12813, "episode_idx": 46, "frame_idx": 227, "global_frame_idx": 12813, "task_index": 9}, {"db_idx": 12814, "episode_idx": 46, "frame_idx": 228, "global_frame_idx": 12814, "task_index": 9}, {"db_idx": 12815, "episode_idx": 46, "frame_idx": 229, "global_frame_idx": 12815, "task_index": 9}, {"db_idx": 12816, "episode_idx": 46, "frame_idx": 230, "global_frame_idx": 12816, "task_index": 9}, {"db_idx": 12817, "episode_idx": 46, "frame_idx": 231, "global_frame_idx": 12817, 
"task_index": 9}, {"db_idx": 12818, "episode_idx": 46, "frame_idx": 232, "global_frame_idx": 12818, "task_index": 9}, {"db_idx": 12819, "episode_idx": 46, "frame_idx": 233, "global_frame_idx": 12819, "task_index": 9}, {"db_idx": 12820, "episode_idx": 46, "frame_idx": 234, "global_frame_idx": 12820, "task_index": 9}, {"db_idx": 12821, "episode_idx": 46, "frame_idx": 235, "global_frame_idx": 12821, "task_index": 9}, {"db_idx": 12822, "episode_idx": 46, "frame_idx": 236, "global_frame_idx": 12822, "task_index": 9}, {"db_idx": 12823, "episode_idx": 46, "frame_idx": 237, "global_frame_idx": 12823, "task_index": 9}, {"db_idx": 12824, "episode_idx": 46, "frame_idx": 238, "global_frame_idx": 12824, "task_index": 9}, {"db_idx": 12825, "episode_idx": 46, "frame_idx": 239, "global_frame_idx": 12825, "task_index": 9}, {"db_idx": 12826, "episode_idx": 46, "frame_idx": 240, "global_frame_idx": 12826, "task_index": 9}, {"db_idx": 12827, "episode_idx": 46, "frame_idx": 241, "global_frame_idx": 12827, "task_index": 9}, {"db_idx": 12828, "episode_idx": 46, "frame_idx": 242, "global_frame_idx": 12828, "task_index": 9}, {"db_idx": 12829, "episode_idx": 46, "frame_idx": 243, "global_frame_idx": 12829, "task_index": 9}, {"db_idx": 12830, "episode_idx": 46, "frame_idx": 244, "global_frame_idx": 12830, "task_index": 9}, {"db_idx": 12831, "episode_idx": 46, "frame_idx": 245, "global_frame_idx": 12831, "task_index": 9}, {"db_idx": 12832, "episode_idx": 46, "frame_idx": 246, "global_frame_idx": 12832, "task_index": 9}, {"db_idx": 12833, "episode_idx": 46, "frame_idx": 247, "global_frame_idx": 12833, "task_index": 9}, {"db_idx": 12834, "episode_idx": 46, "frame_idx": 248, "global_frame_idx": 12834, "task_index": 9}, {"db_idx": 12835, "episode_idx": 46, "frame_idx": 249, "global_frame_idx": 12835, "task_index": 9}, {"db_idx": 12836, "episode_idx": 46, "frame_idx": 250, "global_frame_idx": 12836, "task_index": 9}, {"db_idx": 12837, "episode_idx": 46, "frame_idx": 251, "global_frame_idx": 12837, 
"task_index": 9}, {"db_idx": 12838, "episode_idx": 46, "frame_idx": 252, "global_frame_idx": 12838, "task_index": 9}, {"db_idx": 12839, "episode_idx": 46, "frame_idx": 253, "global_frame_idx": 12839, "task_index": 9}, {"db_idx": 12840, "episode_idx": 46, "frame_idx": 254, "global_frame_idx": 12840, "task_index": 9}, {"db_idx": 12841, "episode_idx": 46, "frame_idx": 255, "global_frame_idx": 12841, "task_index": 9}, {"db_idx": 12842, "episode_idx": 46, "frame_idx": 256, "global_frame_idx": 12842, "task_index": 9}, {"db_idx": 12843, "episode_idx": 46, "frame_idx": 257, "global_frame_idx": 12843, "task_index": 9}, {"db_idx": 12844, "episode_idx": 46, "frame_idx": 258, "global_frame_idx": 12844, "task_index": 9}, {"db_idx": 12845, "episode_idx": 47, "frame_idx": 0, "global_frame_idx": 12845, "task_index": 9}, {"db_idx": 12846, "episode_idx": 47, "frame_idx": 1, "global_frame_idx": 12846, "task_index": 9}, {"db_idx": 12847, "episode_idx": 47, "frame_idx": 2, "global_frame_idx": 12847, "task_index": 9}, {"db_idx": 12848, "episode_idx": 47, "frame_idx": 3, "global_frame_idx": 12848, "task_index": 9}, {"db_idx": 12849, "episode_idx": 47, "frame_idx": 4, "global_frame_idx": 12849, "task_index": 9}, {"db_idx": 12850, "episode_idx": 47, "frame_idx": 5, "global_frame_idx": 12850, "task_index": 9}, {"db_idx": 12851, "episode_idx": 47, "frame_idx": 6, "global_frame_idx": 12851, "task_index": 9}, {"db_idx": 12852, "episode_idx": 47, "frame_idx": 7, "global_frame_idx": 12852, "task_index": 9}, {"db_idx": 12853, "episode_idx": 47, "frame_idx": 8, "global_frame_idx": 12853, "task_index": 9}, {"db_idx": 12854, "episode_idx": 47, "frame_idx": 9, "global_frame_idx": 12854, "task_index": 9}, {"db_idx": 12855, "episode_idx": 47, "frame_idx": 10, "global_frame_idx": 12855, "task_index": 9}, {"db_idx": 12856, "episode_idx": 47, "frame_idx": 11, "global_frame_idx": 12856, "task_index": 9}, {"db_idx": 12857, "episode_idx": 47, "frame_idx": 12, "global_frame_idx": 12857, "task_index": 9}, 
{"db_idx": 12858, "episode_idx": 47, "frame_idx": 13, "global_frame_idx": 12858, "task_index": 9}, {"db_idx": 12859, "episode_idx": 47, "frame_idx": 14, "global_frame_idx": 12859, "task_index": 9}, {"db_idx": 12860, "episode_idx": 47, "frame_idx": 15, "global_frame_idx": 12860, "task_index": 9}, {"db_idx": 12861, "episode_idx": 47, "frame_idx": 16, "global_frame_idx": 12861, "task_index": 9}, {"db_idx": 12862, "episode_idx": 47, "frame_idx": 17, "global_frame_idx": 12862, "task_index": 9}, {"db_idx": 12863, "episode_idx": 47, "frame_idx": 18, "global_frame_idx": 12863, "task_index": 9}, {"db_idx": 12864, "episode_idx": 47, "frame_idx": 19, "global_frame_idx": 12864, "task_index": 9}, {"db_idx": 12865, "episode_idx": 47, "frame_idx": 20, "global_frame_idx": 12865, "task_index": 9}, {"db_idx": 12866, "episode_idx": 47, "frame_idx": 21, "global_frame_idx": 12866, "task_index": 9}, {"db_idx": 12867, "episode_idx": 47, "frame_idx": 22, "global_frame_idx": 12867, "task_index": 9}, {"db_idx": 12868, "episode_idx": 47, "frame_idx": 23, "global_frame_idx": 12868, "task_index": 9}, {"db_idx": 12869, "episode_idx": 47, "frame_idx": 24, "global_frame_idx": 12869, "task_index": 9}, {"db_idx": 12870, "episode_idx": 47, "frame_idx": 25, "global_frame_idx": 12870, "task_index": 9}, {"db_idx": 12871, "episode_idx": 47, "frame_idx": 26, "global_frame_idx": 12871, "task_index": 9}, {"db_idx": 12872, "episode_idx": 47, "frame_idx": 27, "global_frame_idx": 12872, "task_index": 9}, {"db_idx": 12873, "episode_idx": 47, "frame_idx": 28, "global_frame_idx": 12873, "task_index": 9}, {"db_idx": 12874, "episode_idx": 47, "frame_idx": 29, "global_frame_idx": 12874, "task_index": 9}, {"db_idx": 12875, "episode_idx": 47, "frame_idx": 30, "global_frame_idx": 12875, "task_index": 9}, {"db_idx": 12876, "episode_idx": 47, "frame_idx": 31, "global_frame_idx": 12876, "task_index": 9}, {"db_idx": 12877, "episode_idx": 47, "frame_idx": 32, "global_frame_idx": 12877, "task_index": 9}, {"db_idx": 12878, 
"episode_idx": 47, "frame_idx": 33, "global_frame_idx": 12878, "task_index": 9}, {"db_idx": 12879, "episode_idx": 47, "frame_idx": 34, "global_frame_idx": 12879, "task_index": 9}, {"db_idx": 12880, "episode_idx": 47, "frame_idx": 35, "global_frame_idx": 12880, "task_index": 9}, {"db_idx": 12881, "episode_idx": 47, "frame_idx": 36, "global_frame_idx": 12881, "task_index": 9}, {"db_idx": 12882, "episode_idx": 47, "frame_idx": 37, "global_frame_idx": 12882, "task_index": 9}, {"db_idx": 12883, "episode_idx": 47, "frame_idx": 38, "global_frame_idx": 12883, "task_index": 9}, {"db_idx": 12884, "episode_idx": 47, "frame_idx": 39, "global_frame_idx": 12884, "task_index": 9}, {"db_idx": 12885, "episode_idx": 47, "frame_idx": 40, "global_frame_idx": 12885, "task_index": 9}, {"db_idx": 12886, "episode_idx": 47, "frame_idx": 41, "global_frame_idx": 12886, "task_index": 9}, {"db_idx": 12887, "episode_idx": 47, "frame_idx": 42, "global_frame_idx": 12887, "task_index": 9}, {"db_idx": 12888, "episode_idx": 47, "frame_idx": 43, "global_frame_idx": 12888, "task_index": 9}, {"db_idx": 12889, "episode_idx": 47, "frame_idx": 44, "global_frame_idx": 12889, "task_index": 9}, {"db_idx": 12890, "episode_idx": 47, "frame_idx": 45, "global_frame_idx": 12890, "task_index": 9}, {"db_idx": 12891, "episode_idx": 47, "frame_idx": 46, "global_frame_idx": 12891, "task_index": 9}, {"db_idx": 12892, "episode_idx": 47, "frame_idx": 47, "global_frame_idx": 12892, "task_index": 9}, {"db_idx": 12893, "episode_idx": 47, "frame_idx": 48, "global_frame_idx": 12893, "task_index": 9}, {"db_idx": 12894, "episode_idx": 47, "frame_idx": 49, "global_frame_idx": 12894, "task_index": 9}, {"db_idx": 12895, "episode_idx": 47, "frame_idx": 50, "global_frame_idx": 12895, "task_index": 9}, {"db_idx": 12896, "episode_idx": 47, "frame_idx": 51, "global_frame_idx": 12896, "task_index": 9}, {"db_idx": 12897, "episode_idx": 47, "frame_idx": 52, "global_frame_idx": 12897, "task_index": 9}, {"db_idx": 12898, "episode_idx": 47, 
"frame_idx": 53, "global_frame_idx": 12898, "task_index": 9}, {"db_idx": 12899, "episode_idx": 47, "frame_idx": 54, "global_frame_idx": 12899, "task_index": 9}, {"db_idx": 12900, "episode_idx": 47, "frame_idx": 55, "global_frame_idx": 12900, "task_index": 9}, {"db_idx": 12901, "episode_idx": 47, "frame_idx": 56, "global_frame_idx": 12901, "task_index": 9}, {"db_idx": 12902, "episode_idx": 47, "frame_idx": 57, "global_frame_idx": 12902, "task_index": 9}, {"db_idx": 12903, "episode_idx": 47, "frame_idx": 58, "global_frame_idx": 12903, "task_index": 9}, {"db_idx": 12904, "episode_idx": 47, "frame_idx": 59, "global_frame_idx": 12904, "task_index": 9}, {"db_idx": 12905, "episode_idx": 47, "frame_idx": 60, "global_frame_idx": 12905, "task_index": 9}, {"db_idx": 12906, "episode_idx": 47, "frame_idx": 61, "global_frame_idx": 12906, "task_index": 9}, {"db_idx": 12907, "episode_idx": 47, "frame_idx": 62, "global_frame_idx": 12907, "task_index": 9}, {"db_idx": 12908, "episode_idx": 47, "frame_idx": 63, "global_frame_idx": 12908, "task_index": 9}, {"db_idx": 12909, "episode_idx": 47, "frame_idx": 64, "global_frame_idx": 12909, "task_index": 9}, {"db_idx": 12910, "episode_idx": 47, "frame_idx": 65, "global_frame_idx": 12910, "task_index": 9}, {"db_idx": 12911, "episode_idx": 47, "frame_idx": 66, "global_frame_idx": 12911, "task_index": 9}, {"db_idx": 12912, "episode_idx": 47, "frame_idx": 67, "global_frame_idx": 12912, "task_index": 9}, {"db_idx": 12913, "episode_idx": 47, "frame_idx": 68, "global_frame_idx": 12913, "task_index": 9}, {"db_idx": 12914, "episode_idx": 47, "frame_idx": 69, "global_frame_idx": 12914, "task_index": 9}, {"db_idx": 12915, "episode_idx": 47, "frame_idx": 70, "global_frame_idx": 12915, "task_index": 9}, {"db_idx": 12916, "episode_idx": 47, "frame_idx": 71, "global_frame_idx": 12916, "task_index": 9}, {"db_idx": 12917, "episode_idx": 47, "frame_idx": 72, "global_frame_idx": 12917, "task_index": 9}, {"db_idx": 12918, "episode_idx": 47, "frame_idx": 73, 
"global_frame_idx": 12918, "task_index": 9}, {"db_idx": 12919, "episode_idx": 47, "frame_idx": 74, "global_frame_idx": 12919, "task_index": 9}, {"db_idx": 12920, "episode_idx": 47, "frame_idx": 75, "global_frame_idx": 12920, "task_index": 9}, {"db_idx": 12921, "episode_idx": 47, "frame_idx": 76, "global_frame_idx": 12921, "task_index": 9}, {"db_idx": 12922, "episode_idx": 47, "frame_idx": 77, "global_frame_idx": 12922, "task_index": 9}, {"db_idx": 12923, "episode_idx": 47, "frame_idx": 78, "global_frame_idx": 12923, "task_index": 9}, {"db_idx": 12924, "episode_idx": 47, "frame_idx": 79, "global_frame_idx": 12924, "task_index": 9}, {"db_idx": 12925, "episode_idx": 47, "frame_idx": 80, "global_frame_idx": 12925, "task_index": 9}, {"db_idx": 12926, "episode_idx": 47, "frame_idx": 81, "global_frame_idx": 12926, "task_index": 9}, {"db_idx": 12927, "episode_idx": 47, "frame_idx": 82, "global_frame_idx": 12927, "task_index": 9}, {"db_idx": 12928, "episode_idx": 47, "frame_idx": 83, "global_frame_idx": 12928, "task_index": 9}, {"db_idx": 12929, "episode_idx": 47, "frame_idx": 84, "global_frame_idx": 12929, "task_index": 9}, {"db_idx": 12930, "episode_idx": 47, "frame_idx": 85, "global_frame_idx": 12930, "task_index": 9}, {"db_idx": 12931, "episode_idx": 47, "frame_idx": 86, "global_frame_idx": 12931, "task_index": 9}, {"db_idx": 12932, "episode_idx": 47, "frame_idx": 87, "global_frame_idx": 12932, "task_index": 9}, {"db_idx": 12933, "episode_idx": 47, "frame_idx": 88, "global_frame_idx": 12933, "task_index": 9}, {"db_idx": 12934, "episode_idx": 47, "frame_idx": 89, "global_frame_idx": 12934, "task_index": 9}, {"db_idx": 12935, "episode_idx": 47, "frame_idx": 90, "global_frame_idx": 12935, "task_index": 9}, {"db_idx": 12936, "episode_idx": 47, "frame_idx": 91, "global_frame_idx": 12936, "task_index": 9}, {"db_idx": 12937, "episode_idx": 47, "frame_idx": 92, "global_frame_idx": 12937, "task_index": 9}, {"db_idx": 12938, "episode_idx": 47, "frame_idx": 93, "global_frame_idx": 
12938, "task_index": 9}, {"db_idx": 12939, "episode_idx": 47, "frame_idx": 94, "global_frame_idx": 12939, "task_index": 9}, {"db_idx": 12940, "episode_idx": 47, "frame_idx": 95, "global_frame_idx": 12940, "task_index": 9}, {"db_idx": 12941, "episode_idx": 47, "frame_idx": 96, "global_frame_idx": 12941, "task_index": 9}, {"db_idx": 12942, "episode_idx": 47, "frame_idx": 97, "global_frame_idx": 12942, "task_index": 9}, {"db_idx": 12943, "episode_idx": 47, "frame_idx": 98, "global_frame_idx": 12943, "task_index": 9}, {"db_idx": 12944, "episode_idx": 47, "frame_idx": 99, "global_frame_idx": 12944, "task_index": 9}, {"db_idx": 12945, "episode_idx": 47, "frame_idx": 100, "global_frame_idx": 12945, "task_index": 9}, {"db_idx": 12946, "episode_idx": 47, "frame_idx": 101, "global_frame_idx": 12946, "task_index": 9}, {"db_idx": 12947, "episode_idx": 47, "frame_idx": 102, "global_frame_idx": 12947, "task_index": 9}, {"db_idx": 12948, "episode_idx": 47, "frame_idx": 103, "global_frame_idx": 12948, "task_index": 9}, {"db_idx": 12949, "episode_idx": 47, "frame_idx": 104, "global_frame_idx": 12949, "task_index": 9}, {"db_idx": 12950, "episode_idx": 47, "frame_idx": 105, "global_frame_idx": 12950, "task_index": 9}, {"db_idx": 12951, "episode_idx": 47, "frame_idx": 106, "global_frame_idx": 12951, "task_index": 9}, {"db_idx": 12952, "episode_idx": 47, "frame_idx": 107, "global_frame_idx": 12952, "task_index": 9}, {"db_idx": 12953, "episode_idx": 47, "frame_idx": 108, "global_frame_idx": 12953, "task_index": 9}, {"db_idx": 12954, "episode_idx": 47, "frame_idx": 109, "global_frame_idx": 12954, "task_index": 9}, {"db_idx": 12955, "episode_idx": 47, "frame_idx": 110, "global_frame_idx": 12955, "task_index": 9}, {"db_idx": 12956, "episode_idx": 47, "frame_idx": 111, "global_frame_idx": 12956, "task_index": 9}, {"db_idx": 12957, "episode_idx": 47, "frame_idx": 112, "global_frame_idx": 12957, "task_index": 9}, {"db_idx": 12958, "episode_idx": 47, "frame_idx": 113, "global_frame_idx": 
12958, "task_index": 9}, {"db_idx": 12959, "episode_idx": 47, "frame_idx": 114, "global_frame_idx": 12959, "task_index": 9}, {"db_idx": 12960, "episode_idx": 47, "frame_idx": 115, "global_frame_idx": 12960, "task_index": 9}, {"db_idx": 12961, "episode_idx": 47, "frame_idx": 116, "global_frame_idx": 12961, "task_index": 9}, {"db_idx": 12962, "episode_idx": 47, "frame_idx": 117, "global_frame_idx": 12962, "task_index": 9}, {"db_idx": 12963, "episode_idx": 47, "frame_idx": 118, "global_frame_idx": 12963, "task_index": 9}, {"db_idx": 12964, "episode_idx": 47, "frame_idx": 119, "global_frame_idx": 12964, "task_index": 9}, {"db_idx": 12965, "episode_idx": 47, "frame_idx": 120, "global_frame_idx": 12965, "task_index": 9}, {"db_idx": 12966, "episode_idx": 47, "frame_idx": 121, "global_frame_idx": 12966, "task_index": 9}, {"db_idx": 12967, "episode_idx": 47, "frame_idx": 122, "global_frame_idx": 12967, "task_index": 9}, {"db_idx": 12968, "episode_idx": 47, "frame_idx": 123, "global_frame_idx": 12968, "task_index": 9}, {"db_idx": 12969, "episode_idx": 47, "frame_idx": 124, "global_frame_idx": 12969, "task_index": 9}, {"db_idx": 12970, "episode_idx": 47, "frame_idx": 125, "global_frame_idx": 12970, "task_index": 9}, {"db_idx": 12971, "episode_idx": 47, "frame_idx": 126, "global_frame_idx": 12971, "task_index": 9}, {"db_idx": 12972, "episode_idx": 47, "frame_idx": 127, "global_frame_idx": 12972, "task_index": 9}, {"db_idx": 12973, "episode_idx": 47, "frame_idx": 128, "global_frame_idx": 12973, "task_index": 9}, {"db_idx": 12974, "episode_idx": 47, "frame_idx": 129, "global_frame_idx": 12974, "task_index": 9}, {"db_idx": 12975, "episode_idx": 47, "frame_idx": 130, "global_frame_idx": 12975, "task_index": 9}, {"db_idx": 12976, "episode_idx": 47, "frame_idx": 131, "global_frame_idx": 12976, "task_index": 9}, {"db_idx": 12977, "episode_idx": 47, "frame_idx": 132, "global_frame_idx": 12977, "task_index": 9}, {"db_idx": 12978, "episode_idx": 47, "frame_idx": 133, "global_frame_idx": 
12978, "task_index": 9}, {"db_idx": 12979, "episode_idx": 47, "frame_idx": 134, "global_frame_idx": 12979, "task_index": 9}, {"db_idx": 12980, "episode_idx": 47, "frame_idx": 135, "global_frame_idx": 12980, "task_index": 9}, {"db_idx": 12981, "episode_idx": 47, "frame_idx": 136, "global_frame_idx": 12981, "task_index": 9}, {"db_idx": 12982, "episode_idx": 47, "frame_idx": 137, "global_frame_idx": 12982, "task_index": 9}, {"db_idx": 12983, "episode_idx": 47, "frame_idx": 138, "global_frame_idx": 12983, "task_index": 9}, {"db_idx": 12984, "episode_idx": 47, "frame_idx": 139, "global_frame_idx": 12984, "task_index": 9}, {"db_idx": 12985, "episode_idx": 47, "frame_idx": 140, "global_frame_idx": 12985, "task_index": 9}, {"db_idx": 12986, "episode_idx": 47, "frame_idx": 141, "global_frame_idx": 12986, "task_index": 9}, {"db_idx": 12987, "episode_idx": 47, "frame_idx": 142, "global_frame_idx": 12987, "task_index": 9}, {"db_idx": 12988, "episode_idx": 47, "frame_idx": 143, "global_frame_idx": 12988, "task_index": 9}, {"db_idx": 12989, "episode_idx": 47, "frame_idx": 144, "global_frame_idx": 12989, "task_index": 9}, {"db_idx": 12990, "episode_idx": 47, "frame_idx": 145, "global_frame_idx": 12990, "task_index": 9}, {"db_idx": 12991, "episode_idx": 47, "frame_idx": 146, "global_frame_idx": 12991, "task_index": 9}, {"db_idx": 12992, "episode_idx": 47, "frame_idx": 147, "global_frame_idx": 12992, "task_index": 9}, {"db_idx": 12993, "episode_idx": 47, "frame_idx": 148, "global_frame_idx": 12993, "task_index": 9}, {"db_idx": 12994, "episode_idx": 47, "frame_idx": 149, "global_frame_idx": 12994, "task_index": 9}, {"db_idx": 12995, "episode_idx": 47, "frame_idx": 150, "global_frame_idx": 12995, "task_index": 9}, {"db_idx": 12996, "episode_idx": 47, "frame_idx": 151, "global_frame_idx": 12996, "task_index": 9}, {"db_idx": 12997, "episode_idx": 47, "frame_idx": 152, "global_frame_idx": 12997, "task_index": 9}, {"db_idx": 12998, "episode_idx": 47, "frame_idx": 153, "global_frame_idx": 
12998, "task_index": 9}, {"db_idx": 12999, "episode_idx": 47, "frame_idx": 154, "global_frame_idx": 12999, "task_index": 9}, {"db_idx": 13000, "episode_idx": 47, "frame_idx": 155, "global_frame_idx": 13000, "task_index": 9}, {"db_idx": 13001, "episode_idx": 47, "frame_idx": 156, "global_frame_idx": 13001, "task_index": 9}, {"db_idx": 13002, "episode_idx": 47, "frame_idx": 157, "global_frame_idx": 13002, "task_index": 9}, {"db_idx": 13003, "episode_idx": 47, "frame_idx": 158, "global_frame_idx": 13003, "task_index": 9}, {"db_idx": 13004, "episode_idx": 47, "frame_idx": 159, "global_frame_idx": 13004, "task_index": 9}, {"db_idx": 13005, "episode_idx": 47, "frame_idx": 160, "global_frame_idx": 13005, "task_index": 9}, {"db_idx": 13006, "episode_idx": 47, "frame_idx": 161, "global_frame_idx": 13006, "task_index": 9}, {"db_idx": 13007, "episode_idx": 47, "frame_idx": 162, "global_frame_idx": 13007, "task_index": 9}, {"db_idx": 13008, "episode_idx": 47, "frame_idx": 163, "global_frame_idx": 13008, "task_index": 9}, {"db_idx": 13009, "episode_idx": 47, "frame_idx": 164, "global_frame_idx": 13009, "task_index": 9}, {"db_idx": 13010, "episode_idx": 47, "frame_idx": 165, "global_frame_idx": 13010, "task_index": 9}, {"db_idx": 13011, "episode_idx": 47, "frame_idx": 166, "global_frame_idx": 13011, "task_index": 9}, {"db_idx": 13012, "episode_idx": 47, "frame_idx": 167, "global_frame_idx": 13012, "task_index": 9}, {"db_idx": 13013, "episode_idx": 47, "frame_idx": 168, "global_frame_idx": 13013, "task_index": 9}, {"db_idx": 13014, "episode_idx": 47, "frame_idx": 169, "global_frame_idx": 13014, "task_index": 9}, {"db_idx": 13015, "episode_idx": 47, "frame_idx": 170, "global_frame_idx": 13015, "task_index": 9}, {"db_idx": 13016, "episode_idx": 47, "frame_idx": 171, "global_frame_idx": 13016, "task_index": 9}, {"db_idx": 13017, "episode_idx": 47, "frame_idx": 172, "global_frame_idx": 13017, "task_index": 9}, {"db_idx": 13018, "episode_idx": 47, "frame_idx": 173, "global_frame_idx": 
13018, "task_index": 9}, {"db_idx": 13019, "episode_idx": 47, "frame_idx": 174, "global_frame_idx": 13019, "task_index": 9}, {"db_idx": 13020, "episode_idx": 47, "frame_idx": 175, "global_frame_idx": 13020, "task_index": 9}, {"db_idx": 13021, "episode_idx": 47, "frame_idx": 176, "global_frame_idx": 13021, "task_index": 9}, {"db_idx": 13022, "episode_idx": 47, "frame_idx": 177, "global_frame_idx": 13022, "task_index": 9}, {"db_idx": 13023, "episode_idx": 47, "frame_idx": 178, "global_frame_idx": 13023, "task_index": 9}, {"db_idx": 13024, "episode_idx": 47, "frame_idx": 179, "global_frame_idx": 13024, "task_index": 9}, {"db_idx": 13025, "episode_idx": 47, "frame_idx": 180, "global_frame_idx": 13025, "task_index": 9}, {"db_idx": 13026, "episode_idx": 47, "frame_idx": 181, "global_frame_idx": 13026, "task_index": 9}, {"db_idx": 13027, "episode_idx": 47, "frame_idx": 182, "global_frame_idx": 13027, "task_index": 9}, {"db_idx": 13028, "episode_idx": 47, "frame_idx": 183, "global_frame_idx": 13028, "task_index": 9}, {"db_idx": 13029, "episode_idx": 47, "frame_idx": 184, "global_frame_idx": 13029, "task_index": 9}, {"db_idx": 13030, "episode_idx": 47, "frame_idx": 185, "global_frame_idx": 13030, "task_index": 9}, {"db_idx": 13031, "episode_idx": 47, "frame_idx": 186, "global_frame_idx": 13031, "task_index": 9}, {"db_idx": 13032, "episode_idx": 47, "frame_idx": 187, "global_frame_idx": 13032, "task_index": 9}, {"db_idx": 13033, "episode_idx": 47, "frame_idx": 188, "global_frame_idx": 13033, "task_index": 9}, {"db_idx": 13034, "episode_idx": 47, "frame_idx": 189, "global_frame_idx": 13034, "task_index": 9}, {"db_idx": 13035, "episode_idx": 47, "frame_idx": 190, "global_frame_idx": 13035, "task_index": 9}, {"db_idx": 13036, "episode_idx": 47, "frame_idx": 191, "global_frame_idx": 13036, "task_index": 9}, {"db_idx": 13037, "episode_idx": 47, "frame_idx": 192, "global_frame_idx": 13037, "task_index": 9}, {"db_idx": 13038, "episode_idx": 47, "frame_idx": 193, "global_frame_idx": 
13038, "task_index": 9}, {"db_idx": 13039, "episode_idx": 47, "frame_idx": 194, "global_frame_idx": 13039, "task_index": 9}, {"db_idx": 13040, "episode_idx": 47, "frame_idx": 195, "global_frame_idx": 13040, "task_index": 9}, {"db_idx": 13041, "episode_idx": 47, "frame_idx": 196, "global_frame_idx": 13041, "task_index": 9}, {"db_idx": 13042, "episode_idx": 47, "frame_idx": 197, "global_frame_idx": 13042, "task_index": 9}, {"db_idx": 13043, "episode_idx": 47, "frame_idx": 198, "global_frame_idx": 13043, "task_index": 9}, {"db_idx": 13044, "episode_idx": 47, "frame_idx": 199, "global_frame_idx": 13044, "task_index": 9}, {"db_idx": 13045, "episode_idx": 47, "frame_idx": 200, "global_frame_idx": 13045, "task_index": 9}, {"db_idx": 13046, "episode_idx": 47, "frame_idx": 201, "global_frame_idx": 13046, "task_index": 9}, {"db_idx": 13047, "episode_idx": 47, "frame_idx": 202, "global_frame_idx": 13047, "task_index": 9}, {"db_idx": 13048, "episode_idx": 47, "frame_idx": 203, "global_frame_idx": 13048, "task_index": 9}, {"db_idx": 13049, "episode_idx": 47, "frame_idx": 204, "global_frame_idx": 13049, "task_index": 9}, {"db_idx": 13050, "episode_idx": 47, "frame_idx": 205, "global_frame_idx": 13050, "task_index": 9}, {"db_idx": 13051, "episode_idx": 47, "frame_idx": 206, "global_frame_idx": 13051, "task_index": 9}, {"db_idx": 13052, "episode_idx": 47, "frame_idx": 207, "global_frame_idx": 13052, "task_index": 9}, {"db_idx": 13053, "episode_idx": 47, "frame_idx": 208, "global_frame_idx": 13053, "task_index": 9}, {"db_idx": 13054, "episode_idx": 47, "frame_idx": 209, "global_frame_idx": 13054, "task_index": 9}, {"db_idx": 13055, "episode_idx": 47, "frame_idx": 210, "global_frame_idx": 13055, "task_index": 9}, {"db_idx": 13056, "episode_idx": 47, "frame_idx": 211, "global_frame_idx": 13056, "task_index": 9}, {"db_idx": 13057, "episode_idx": 47, "frame_idx": 212, "global_frame_idx": 13057, "task_index": 9}, {"db_idx": 13058, "episode_idx": 47, "frame_idx": 213, "global_frame_idx": 
13058, "task_index": 9}, {"db_idx": 13059, "episode_idx": 47, "frame_idx": 214, "global_frame_idx": 13059, "task_index": 9}, {"db_idx": 13060, "episode_idx": 47, "frame_idx": 215, "global_frame_idx": 13060, "task_index": 9}, {"db_idx": 13061, "episode_idx": 47, "frame_idx": 216, "global_frame_idx": 13061, "task_index": 9}, {"db_idx": 13062, "episode_idx": 47, "frame_idx": 217, "global_frame_idx": 13062, "task_index": 9}, {"db_idx": 13063, "episode_idx": 47, "frame_idx": 218, "global_frame_idx": 13063, "task_index": 9}, {"db_idx": 13064, "episode_idx": 47, "frame_idx": 219, "global_frame_idx": 13064, "task_index": 9}, {"db_idx": 13065, "episode_idx": 47, "frame_idx": 220, "global_frame_idx": 13065, "task_index": 9}, {"db_idx": 13066, "episode_idx": 47, "frame_idx": 221, "global_frame_idx": 13066, "task_index": 9}, {"db_idx": 13067, "episode_idx": 47, "frame_idx": 222, "global_frame_idx": 13067, "task_index": 9}, {"db_idx": 13068, "episode_idx": 47, "frame_idx": 223, "global_frame_idx": 13068, "task_index": 9}, {"db_idx": 13069, "episode_idx": 47, "frame_idx": 224, "global_frame_idx": 13069, "task_index": 9}, {"db_idx": 13070, "episode_idx": 47, "frame_idx": 225, "global_frame_idx": 13070, "task_index": 9}, {"db_idx": 13071, "episode_idx": 47, "frame_idx": 226, "global_frame_idx": 13071, "task_index": 9}, {"db_idx": 13072, "episode_idx": 47, "frame_idx": 227, "global_frame_idx": 13072, "task_index": 9}, {"db_idx": 13073, "episode_idx": 47, "frame_idx": 228, "global_frame_idx": 13073, "task_index": 9}, {"db_idx": 13074, "episode_idx": 47, "frame_idx": 229, "global_frame_idx": 13074, "task_index": 9}, {"db_idx": 13075, "episode_idx": 47, "frame_idx": 230, "global_frame_idx": 13075, "task_index": 9}, {"db_idx": 13076, "episode_idx": 47, "frame_idx": 231, "global_frame_idx": 13076, "task_index": 9}, {"db_idx": 13077, "episode_idx": 47, "frame_idx": 232, "global_frame_idx": 13077, "task_index": 9}, {"db_idx": 13078, "episode_idx": 47, "frame_idx": 233, "global_frame_idx": 
13078, "task_index": 9}, {"db_idx": 13079, "episode_idx": 47, "frame_idx": 234, "global_frame_idx": 13079, "task_index": 9}, {"db_idx": 13080, "episode_idx": 47, "frame_idx": 235, "global_frame_idx": 13080, "task_index": 9}, {"db_idx": 13081, "episode_idx": 47, "frame_idx": 236, "global_frame_idx": 13081, "task_index": 9}, {"db_idx": 13082, "episode_idx": 47, "frame_idx": 237, "global_frame_idx": 13082, "task_index": 9}, {"db_idx": 13083, "episode_idx": 47, "frame_idx": 238, "global_frame_idx": 13083, "task_index": 9}, {"db_idx": 13084, "episode_idx": 47, "frame_idx": 239, "global_frame_idx": 13084, "task_index": 9}, {"db_idx": 13085, "episode_idx": 47, "frame_idx": 240, "global_frame_idx": 13085, "task_index": 9}, {"db_idx": 13086, "episode_idx": 47, "frame_idx": 241, "global_frame_idx": 13086, "task_index": 9}, {"db_idx": 13087, "episode_idx": 47, "frame_idx": 242, "global_frame_idx": 13087, "task_index": 9}, {"db_idx": 13088, "episode_idx": 47, "frame_idx": 243, "global_frame_idx": 13088, "task_index": 9}, {"db_idx": 13089, "episode_idx": 47, "frame_idx": 244, "global_frame_idx": 13089, "task_index": 9}, {"db_idx": 13090, "episode_idx": 48, "frame_idx": 0, "global_frame_idx": 13090, "task_index": 9}, {"db_idx": 13091, "episode_idx": 48, "frame_idx": 1, "global_frame_idx": 13091, "task_index": 9}, {"db_idx": 13092, "episode_idx": 48, "frame_idx": 2, "global_frame_idx": 13092, "task_index": 9}, {"db_idx": 13093, "episode_idx": 48, "frame_idx": 3, "global_frame_idx": 13093, "task_index": 9}, {"db_idx": 13094, "episode_idx": 48, "frame_idx": 4, "global_frame_idx": 13094, "task_index": 9}, {"db_idx": 13095, "episode_idx": 48, "frame_idx": 5, "global_frame_idx": 13095, "task_index": 9}, {"db_idx": 13096, "episode_idx": 48, "frame_idx": 6, "global_frame_idx": 13096, "task_index": 9}, {"db_idx": 13097, "episode_idx": 48, "frame_idx": 7, "global_frame_idx": 13097, "task_index": 9}, {"db_idx": 13098, "episode_idx": 48, "frame_idx": 8, "global_frame_idx": 13098, 
"task_index": 9}, {"db_idx": 13099, "episode_idx": 48, "frame_idx": 9, "global_frame_idx": 13099, "task_index": 9}, {"db_idx": 13100, "episode_idx": 48, "frame_idx": 10, "global_frame_idx": 13100, "task_index": 9}, {"db_idx": 13101, "episode_idx": 48, "frame_idx": 11, "global_frame_idx": 13101, "task_index": 9}, {"db_idx": 13102, "episode_idx": 48, "frame_idx": 12, "global_frame_idx": 13102, "task_index": 9}, {"db_idx": 13103, "episode_idx": 48, "frame_idx": 13, "global_frame_idx": 13103, "task_index": 9}, {"db_idx": 13104, "episode_idx": 48, "frame_idx": 14, "global_frame_idx": 13104, "task_index": 9}, {"db_idx": 13105, "episode_idx": 48, "frame_idx": 15, "global_frame_idx": 13105, "task_index": 9}, {"db_idx": 13106, "episode_idx": 48, "frame_idx": 16, "global_frame_idx": 13106, "task_index": 9}, {"db_idx": 13107, "episode_idx": 48, "frame_idx": 17, "global_frame_idx": 13107, "task_index": 9}, {"db_idx": 13108, "episode_idx": 48, "frame_idx": 18, "global_frame_idx": 13108, "task_index": 9}, {"db_idx": 13109, "episode_idx": 48, "frame_idx": 19, "global_frame_idx": 13109, "task_index": 9}, {"db_idx": 13110, "episode_idx": 48, "frame_idx": 20, "global_frame_idx": 13110, "task_index": 9}, {"db_idx": 13111, "episode_idx": 48, "frame_idx": 21, "global_frame_idx": 13111, "task_index": 9}, {"db_idx": 13112, "episode_idx": 48, "frame_idx": 22, "global_frame_idx": 13112, "task_index": 9}, {"db_idx": 13113, "episode_idx": 48, "frame_idx": 23, "global_frame_idx": 13113, "task_index": 9}, {"db_idx": 13114, "episode_idx": 48, "frame_idx": 24, "global_frame_idx": 13114, "task_index": 9}, {"db_idx": 13115, "episode_idx": 48, "frame_idx": 25, "global_frame_idx": 13115, "task_index": 9}, {"db_idx": 13116, "episode_idx": 48, "frame_idx": 26, "global_frame_idx": 13116, "task_index": 9}, {"db_idx": 13117, "episode_idx": 48, "frame_idx": 27, "global_frame_idx": 13117, "task_index": 9}, {"db_idx": 13118, "episode_idx": 48, "frame_idx": 28, "global_frame_idx": 13118, "task_index": 9}, 
{"db_idx": 13119, "episode_idx": 48, "frame_idx": 29, "global_frame_idx": 13119, "task_index": 9}, {"db_idx": 13120, "episode_idx": 48, "frame_idx": 30, "global_frame_idx": 13120, "task_index": 9}, {"db_idx": 13121, "episode_idx": 48, "frame_idx": 31, "global_frame_idx": 13121, "task_index": 9}, {"db_idx": 13122, "episode_idx": 48, "frame_idx": 32, "global_frame_idx": 13122, "task_index": 9}, {"db_idx": 13123, "episode_idx": 48, "frame_idx": 33, "global_frame_idx": 13123, "task_index": 9}, {"db_idx": 13124, "episode_idx": 48, "frame_idx": 34, "global_frame_idx": 13124, "task_index": 9}, {"db_idx": 13125, "episode_idx": 48, "frame_idx": 35, "global_frame_idx": 13125, "task_index": 9}, {"db_idx": 13126, "episode_idx": 48, "frame_idx": 36, "global_frame_idx": 13126, "task_index": 9}, {"db_idx": 13127, "episode_idx": 48, "frame_idx": 37, "global_frame_idx": 13127, "task_index": 9}, {"db_idx": 13128, "episode_idx": 48, "frame_idx": 38, "global_frame_idx": 13128, "task_index": 9}, {"db_idx": 13129, "episode_idx": 48, "frame_idx": 39, "global_frame_idx": 13129, "task_index": 9}, {"db_idx": 13130, "episode_idx": 48, "frame_idx": 40, "global_frame_idx": 13130, "task_index": 9}, {"db_idx": 13131, "episode_idx": 48, "frame_idx": 41, "global_frame_idx": 13131, "task_index": 9}, {"db_idx": 13132, "episode_idx": 48, "frame_idx": 42, "global_frame_idx": 13132, "task_index": 9}, {"db_idx": 13133, "episode_idx": 48, "frame_idx": 43, "global_frame_idx": 13133, "task_index": 9}, {"db_idx": 13134, "episode_idx": 48, "frame_idx": 44, "global_frame_idx": 13134, "task_index": 9}, {"db_idx": 13135, "episode_idx": 48, "frame_idx": 45, "global_frame_idx": 13135, "task_index": 9}, {"db_idx": 13136, "episode_idx": 48, "frame_idx": 46, "global_frame_idx": 13136, "task_index": 9}, {"db_idx": 13137, "episode_idx": 48, "frame_idx": 47, "global_frame_idx": 13137, "task_index": 9}, {"db_idx": 13138, "episode_idx": 48, "frame_idx": 48, "global_frame_idx": 13138, "task_index": 9}, {"db_idx": 13139, 
"episode_idx": 48, "frame_idx": 49, "global_frame_idx": 13139, "task_index": 9}, {"db_idx": 13140, "episode_idx": 48, "frame_idx": 50, "global_frame_idx": 13140, "task_index": 9}, {"db_idx": 13141, "episode_idx": 48, "frame_idx": 51, "global_frame_idx": 13141, "task_index": 9}, {"db_idx": 13142, "episode_idx": 48, "frame_idx": 52, "global_frame_idx": 13142, "task_index": 9}, {"db_idx": 13143, "episode_idx": 48, "frame_idx": 53, "global_frame_idx": 13143, "task_index": 9}, {"db_idx": 13144, "episode_idx": 48, "frame_idx": 54, "global_frame_idx": 13144, "task_index": 9}, {"db_idx": 13145, "episode_idx": 48, "frame_idx": 55, "global_frame_idx": 13145, "task_index": 9}, {"db_idx": 13146, "episode_idx": 48, "frame_idx": 56, "global_frame_idx": 13146, "task_index": 9}, {"db_idx": 13147, "episode_idx": 48, "frame_idx": 57, "global_frame_idx": 13147, "task_index": 9}, {"db_idx": 13148, "episode_idx": 48, "frame_idx": 58, "global_frame_idx": 13148, "task_index": 9}, {"db_idx": 13149, "episode_idx": 48, "frame_idx": 59, "global_frame_idx": 13149, "task_index": 9}, {"db_idx": 13150, "episode_idx": 48, "frame_idx": 60, "global_frame_idx": 13150, "task_index": 9}, {"db_idx": 13151, "episode_idx": 48, "frame_idx": 61, "global_frame_idx": 13151, "task_index": 9}, {"db_idx": 13152, "episode_idx": 48, "frame_idx": 62, "global_frame_idx": 13152, "task_index": 9}, {"db_idx": 13153, "episode_idx": 48, "frame_idx": 63, "global_frame_idx": 13153, "task_index": 9}, {"db_idx": 13154, "episode_idx": 48, "frame_idx": 64, "global_frame_idx": 13154, "task_index": 9}, {"db_idx": 13155, "episode_idx": 48, "frame_idx": 65, "global_frame_idx": 13155, "task_index": 9}, {"db_idx": 13156, "episode_idx": 48, "frame_idx": 66, "global_frame_idx": 13156, "task_index": 9}, {"db_idx": 13157, "episode_idx": 48, "frame_idx": 67, "global_frame_idx": 13157, "task_index": 9}, {"db_idx": 13158, "episode_idx": 48, "frame_idx": 68, "global_frame_idx": 13158, "task_index": 9}, {"db_idx": 13159, "episode_idx": 48, 
"frame_idx": 69, "global_frame_idx": 13159, "task_index": 9}, {"db_idx": 13160, "episode_idx": 48, "frame_idx": 70, "global_frame_idx": 13160, "task_index": 9}, {"db_idx": 13161, "episode_idx": 48, "frame_idx": 71, "global_frame_idx": 13161, "task_index": 9}, {"db_idx": 13162, "episode_idx": 48, "frame_idx": 72, "global_frame_idx": 13162, "task_index": 9}, {"db_idx": 13163, "episode_idx": 48, "frame_idx": 73, "global_frame_idx": 13163, "task_index": 9}, {"db_idx": 13164, "episode_idx": 48, "frame_idx": 74, "global_frame_idx": 13164, "task_index": 9}, {"db_idx": 13165, "episode_idx": 48, "frame_idx": 75, "global_frame_idx": 13165, "task_index": 9}, {"db_idx": 13166, "episode_idx": 48, "frame_idx": 76, "global_frame_idx": 13166, "task_index": 9}, {"db_idx": 13167, "episode_idx": 48, "frame_idx": 77, "global_frame_idx": 13167, "task_index": 9}, {"db_idx": 13168, "episode_idx": 48, "frame_idx": 78, "global_frame_idx": 13168, "task_index": 9}, {"db_idx": 13169, "episode_idx": 48, "frame_idx": 79, "global_frame_idx": 13169, "task_index": 9}, {"db_idx": 13170, "episode_idx": 48, "frame_idx": 80, "global_frame_idx": 13170, "task_index": 9}, {"db_idx": 13171, "episode_idx": 48, "frame_idx": 81, "global_frame_idx": 13171, "task_index": 9}, {"db_idx": 13172, "episode_idx": 48, "frame_idx": 82, "global_frame_idx": 13172, "task_index": 9}, {"db_idx": 13173, "episode_idx": 48, "frame_idx": 83, "global_frame_idx": 13173, "task_index": 9}, {"db_idx": 13174, "episode_idx": 48, "frame_idx": 84, "global_frame_idx": 13174, "task_index": 9}, {"db_idx": 13175, "episode_idx": 48, "frame_idx": 85, "global_frame_idx": 13175, "task_index": 9}, {"db_idx": 13176, "episode_idx": 48, "frame_idx": 86, "global_frame_idx": 13176, "task_index": 9}, {"db_idx": 13177, "episode_idx": 48, "frame_idx": 87, "global_frame_idx": 13177, "task_index": 9}, {"db_idx": 13178, "episode_idx": 48, "frame_idx": 88, "global_frame_idx": 13178, "task_index": 9}, {"db_idx": 13179, "episode_idx": 48, "frame_idx": 89, 
"global_frame_idx": 13179, "task_index": 9}, {"db_idx": 13180, "episode_idx": 48, "frame_idx": 90, "global_frame_idx": 13180, "task_index": 9}, {"db_idx": 13181, "episode_idx": 48, "frame_idx": 91, "global_frame_idx": 13181, "task_index": 9}, {"db_idx": 13182, "episode_idx": 48, "frame_idx": 92, "global_frame_idx": 13182, "task_index": 9}, {"db_idx": 13183, "episode_idx": 48, "frame_idx": 93, "global_frame_idx": 13183, "task_index": 9}, {"db_idx": 13184, "episode_idx": 48, "frame_idx": 94, "global_frame_idx": 13184, "task_index": 9}, {"db_idx": 13185, "episode_idx": 48, "frame_idx": 95, "global_frame_idx": 13185, "task_index": 9}, {"db_idx": 13186, "episode_idx": 48, "frame_idx": 96, "global_frame_idx": 13186, "task_index": 9}, {"db_idx": 13187, "episode_idx": 48, "frame_idx": 97, "global_frame_idx": 13187, "task_index": 9}, {"db_idx": 13188, "episode_idx": 48, "frame_idx": 98, "global_frame_idx": 13188, "task_index": 9}, {"db_idx": 13189, "episode_idx": 48, "frame_idx": 99, "global_frame_idx": 13189, "task_index": 9}, {"db_idx": 13190, "episode_idx": 48, "frame_idx": 100, "global_frame_idx": 13190, "task_index": 9}, {"db_idx": 13191, "episode_idx": 48, "frame_idx": 101, "global_frame_idx": 13191, "task_index": 9}, {"db_idx": 13192, "episode_idx": 48, "frame_idx": 102, "global_frame_idx": 13192, "task_index": 9}, {"db_idx": 13193, "episode_idx": 48, "frame_idx": 103, "global_frame_idx": 13193, "task_index": 9}, {"db_idx": 13194, "episode_idx": 48, "frame_idx": 104, "global_frame_idx": 13194, "task_index": 9}, {"db_idx": 13195, "episode_idx": 48, "frame_idx": 105, "global_frame_idx": 13195, "task_index": 9}, {"db_idx": 13196, "episode_idx": 48, "frame_idx": 106, "global_frame_idx": 13196, "task_index": 9}, {"db_idx": 13197, "episode_idx": 48, "frame_idx": 107, "global_frame_idx": 13197, "task_index": 9}, {"db_idx": 13198, "episode_idx": 48, "frame_idx": 108, "global_frame_idx": 13198, "task_index": 9}, {"db_idx": 13199, "episode_idx": 48, "frame_idx": 109, 
"global_frame_idx": 13199, "task_index": 9}, {"db_idx": 13200, "episode_idx": 48, "frame_idx": 110, "global_frame_idx": 13200, "task_index": 9}, {"db_idx": 13201, "episode_idx": 48, "frame_idx": 111, "global_frame_idx": 13201, "task_index": 9}, {"db_idx": 13202, "episode_idx": 48, "frame_idx": 112, "global_frame_idx": 13202, "task_index": 9}, {"db_idx": 13203, "episode_idx": 48, "frame_idx": 113, "global_frame_idx": 13203, "task_index": 9}, {"db_idx": 13204, "episode_idx": 48, "frame_idx": 114, "global_frame_idx": 13204, "task_index": 9}, {"db_idx": 13205, "episode_idx": 48, "frame_idx": 115, "global_frame_idx": 13205, "task_index": 9}, {"db_idx": 13206, "episode_idx": 48, "frame_idx": 116, "global_frame_idx": 13206, "task_index": 9}, {"db_idx": 13207, "episode_idx": 48, "frame_idx": 117, "global_frame_idx": 13207, "task_index": 9}, {"db_idx": 13208, "episode_idx": 48, "frame_idx": 118, "global_frame_idx": 13208, "task_index": 9}, {"db_idx": 13209, "episode_idx": 48, "frame_idx": 119, "global_frame_idx": 13209, "task_index": 9}, {"db_idx": 13210, "episode_idx": 48, "frame_idx": 120, "global_frame_idx": 13210, "task_index": 9}, {"db_idx": 13211, "episode_idx": 48, "frame_idx": 121, "global_frame_idx": 13211, "task_index": 9}, {"db_idx": 13212, "episode_idx": 48, "frame_idx": 122, "global_frame_idx": 13212, "task_index": 9}, {"db_idx": 13213, "episode_idx": 48, "frame_idx": 123, "global_frame_idx": 13213, "task_index": 9}, {"db_idx": 13214, "episode_idx": 48, "frame_idx": 124, "global_frame_idx": 13214, "task_index": 9}, {"db_idx": 13215, "episode_idx": 48, "frame_idx": 125, "global_frame_idx": 13215, "task_index": 9}, {"db_idx": 13216, "episode_idx": 48, "frame_idx": 126, "global_frame_idx": 13216, "task_index": 9}, {"db_idx": 13217, "episode_idx": 48, "frame_idx": 127, "global_frame_idx": 13217, "task_index": 9}, {"db_idx": 13218, "episode_idx": 48, "frame_idx": 128, "global_frame_idx": 13218, "task_index": 9}, {"db_idx": 13219, "episode_idx": 48, "frame_idx": 129, 
"global_frame_idx": 13219, "task_index": 9}, {"db_idx": 13220, "episode_idx": 48, "frame_idx": 130, "global_frame_idx": 13220, "task_index": 9}, {"db_idx": 13221, "episode_idx": 48, "frame_idx": 131, "global_frame_idx": 13221, "task_index": 9}, {"db_idx": 13222, "episode_idx": 48, "frame_idx": 132, "global_frame_idx": 13222, "task_index": 9}, {"db_idx": 13223, "episode_idx": 48, "frame_idx": 133, "global_frame_idx": 13223, "task_index": 9}, {"db_idx": 13224, "episode_idx": 48, "frame_idx": 134, "global_frame_idx": 13224, "task_index": 9}, {"db_idx": 13225, "episode_idx": 48, "frame_idx": 135, "global_frame_idx": 13225, "task_index": 9}, {"db_idx": 13226, "episode_idx": 48, "frame_idx": 136, "global_frame_idx": 13226, "task_index": 9}, {"db_idx": 13227, "episode_idx": 48, "frame_idx": 137, "global_frame_idx": 13227, "task_index": 9}, {"db_idx": 13228, "episode_idx": 48, "frame_idx": 138, "global_frame_idx": 13228, "task_index": 9}, {"db_idx": 13229, "episode_idx": 48, "frame_idx": 139, "global_frame_idx": 13229, "task_index": 9}, {"db_idx": 13230, "episode_idx": 48, "frame_idx": 140, "global_frame_idx": 13230, "task_index": 9}, {"db_idx": 13231, "episode_idx": 48, "frame_idx": 141, "global_frame_idx": 13231, "task_index": 9}, {"db_idx": 13232, "episode_idx": 48, "frame_idx": 142, "global_frame_idx": 13232, "task_index": 9}, {"db_idx": 13233, "episode_idx": 48, "frame_idx": 143, "global_frame_idx": 13233, "task_index": 9}, {"db_idx": 13234, "episode_idx": 48, "frame_idx": 144, "global_frame_idx": 13234, "task_index": 9}, {"db_idx": 13235, "episode_idx": 48, "frame_idx": 145, "global_frame_idx": 13235, "task_index": 9}, {"db_idx": 13236, "episode_idx": 48, "frame_idx": 146, "global_frame_idx": 13236, "task_index": 9}, {"db_idx": 13237, "episode_idx": 48, "frame_idx": 147, "global_frame_idx": 13237, "task_index": 9}, {"db_idx": 13238, "episode_idx": 48, "frame_idx": 148, "global_frame_idx": 13238, "task_index": 9}, {"db_idx": 13239, "episode_idx": 48, "frame_idx": 149, 
"global_frame_idx": 13239, "task_index": 9}, {"db_idx": 13240, "episode_idx": 48, "frame_idx": 150, "global_frame_idx": 13240, "task_index": 9}, {"db_idx": 13241, "episode_idx": 48, "frame_idx": 151, "global_frame_idx": 13241, "task_index": 9}, {"db_idx": 13242, "episode_idx": 48, "frame_idx": 152, "global_frame_idx": 13242, "task_index": 9}, {"db_idx": 13243, "episode_idx": 48, "frame_idx": 153, "global_frame_idx": 13243, "task_index": 9}, {"db_idx": 13244, "episode_idx": 48, "frame_idx": 154, "global_frame_idx": 13244, "task_index": 9}, {"db_idx": 13245, "episode_idx": 48, "frame_idx": 155, "global_frame_idx": 13245, "task_index": 9}, {"db_idx": 13246, "episode_idx": 48, "frame_idx": 156, "global_frame_idx": 13246, "task_index": 9}, {"db_idx": 13247, "episode_idx": 48, "frame_idx": 157, "global_frame_idx": 13247, "task_index": 9}, {"db_idx": 13248, "episode_idx": 48, "frame_idx": 158, "global_frame_idx": 13248, "task_index": 9}, {"db_idx": 13249, "episode_idx": 48, "frame_idx": 159, "global_frame_idx": 13249, "task_index": 9}, {"db_idx": 13250, "episode_idx": 48, "frame_idx": 160, "global_frame_idx": 13250, "task_index": 9}, {"db_idx": 13251, "episode_idx": 48, "frame_idx": 161, "global_frame_idx": 13251, "task_index": 9}, {"db_idx": 13252, "episode_idx": 48, "frame_idx": 162, "global_frame_idx": 13252, "task_index": 9}, {"db_idx": 13253, "episode_idx": 48, "frame_idx": 163, "global_frame_idx": 13253, "task_index": 9}, {"db_idx": 13254, "episode_idx": 48, "frame_idx": 164, "global_frame_idx": 13254, "task_index": 9}, {"db_idx": 13255, "episode_idx": 48, "frame_idx": 165, "global_frame_idx": 13255, "task_index": 9}, {"db_idx": 13256, "episode_idx": 48, "frame_idx": 166, "global_frame_idx": 13256, "task_index": 9}, {"db_idx": 13257, "episode_idx": 48, "frame_idx": 167, "global_frame_idx": 13257, "task_index": 9}, {"db_idx": 13258, "episode_idx": 48, "frame_idx": 168, "global_frame_idx": 13258, "task_index": 9}, {"db_idx": 13259, "episode_idx": 48, "frame_idx": 169, 
"global_frame_idx": 13259, "task_index": 9}, {"db_idx": 13260, "episode_idx": 48, "frame_idx": 170, "global_frame_idx": 13260, "task_index": 9}, {"db_idx": 13261, "episode_idx": 48, "frame_idx": 171, "global_frame_idx": 13261, "task_index": 9}, {"db_idx": 13262, "episode_idx": 48, "frame_idx": 172, "global_frame_idx": 13262, "task_index": 9}, {"db_idx": 13263, "episode_idx": 48, "frame_idx": 173, "global_frame_idx": 13263, "task_index": 9}, {"db_idx": 13264, "episode_idx": 48, "frame_idx": 174, "global_frame_idx": 13264, "task_index": 9}, {"db_idx": 13265, "episode_idx": 48, "frame_idx": 175, "global_frame_idx": 13265, "task_index": 9}, {"db_idx": 13266, "episode_idx": 48, "frame_idx": 176, "global_frame_idx": 13266, "task_index": 9}, {"db_idx": 13267, "episode_idx": 48, "frame_idx": 177, "global_frame_idx": 13267, "task_index": 9}, {"db_idx": 13268, "episode_idx": 48, "frame_idx": 178, "global_frame_idx": 13268, "task_index": 9}, {"db_idx": 13269, "episode_idx": 48, "frame_idx": 179, "global_frame_idx": 13269, "task_index": 9}, {"db_idx": 13270, "episode_idx": 48, "frame_idx": 180, "global_frame_idx": 13270, "task_index": 9}, {"db_idx": 13271, "episode_idx": 48, "frame_idx": 181, "global_frame_idx": 13271, "task_index": 9}, {"db_idx": 13272, "episode_idx": 48, "frame_idx": 182, "global_frame_idx": 13272, "task_index": 9}, {"db_idx": 13273, "episode_idx": 48, "frame_idx": 183, "global_frame_idx": 13273, "task_index": 9}, {"db_idx": 13274, "episode_idx": 48, "frame_idx": 184, "global_frame_idx": 13274, "task_index": 9}, {"db_idx": 13275, "episode_idx": 48, "frame_idx": 185, "global_frame_idx": 13275, "task_index": 9}, {"db_idx": 13276, "episode_idx": 48, "frame_idx": 186, "global_frame_idx": 13276, "task_index": 9}, {"db_idx": 13277, "episode_idx": 48, "frame_idx": 187, "global_frame_idx": 13277, "task_index": 9}, {"db_idx": 13278, "episode_idx": 48, "frame_idx": 188, "global_frame_idx": 13278, "task_index": 9}, {"db_idx": 13279, "episode_idx": 48, "frame_idx": 189, 
"global_frame_idx": 13279, "task_index": 9}, {"db_idx": 13280, "episode_idx": 48, "frame_idx": 190, "global_frame_idx": 13280, "task_index": 9}, {"db_idx": 13281, "episode_idx": 48, "frame_idx": 191, "global_frame_idx": 13281, "task_index": 9}, {"db_idx": 13282, "episode_idx": 48, "frame_idx": 192, "global_frame_idx": 13282, "task_index": 9}, {"db_idx": 13283, "episode_idx": 48, "frame_idx": 193, "global_frame_idx": 13283, "task_index": 9}, {"db_idx": 13284, "episode_idx": 48, "frame_idx": 194, "global_frame_idx": 13284, "task_index": 9}, {"db_idx": 13285, "episode_idx": 48, "frame_idx": 195, "global_frame_idx": 13285, "task_index": 9}, {"db_idx": 13286, "episode_idx": 48, "frame_idx": 196, "global_frame_idx": 13286, "task_index": 9}, {"db_idx": 13287, "episode_idx": 48, "frame_idx": 197, "global_frame_idx": 13287, "task_index": 9}, {"db_idx": 13288, "episode_idx": 48, "frame_idx": 198, "global_frame_idx": 13288, "task_index": 9}, {"db_idx": 13289, "episode_idx": 48, "frame_idx": 199, "global_frame_idx": 13289, "task_index": 9}, {"db_idx": 13290, "episode_idx": 48, "frame_idx": 200, "global_frame_idx": 13290, "task_index": 9}, {"db_idx": 13291, "episode_idx": 48, "frame_idx": 201, "global_frame_idx": 13291, "task_index": 9}, {"db_idx": 13292, "episode_idx": 48, "frame_idx": 202, "global_frame_idx": 13292, "task_index": 9}, {"db_idx": 13293, "episode_idx": 48, "frame_idx": 203, "global_frame_idx": 13293, "task_index": 9}, {"db_idx": 13294, "episode_idx": 48, "frame_idx": 204, "global_frame_idx": 13294, "task_index": 9}, {"db_idx": 13295, "episode_idx": 48, "frame_idx": 205, "global_frame_idx": 13295, "task_index": 9}, {"db_idx": 13296, "episode_idx": 48, "frame_idx": 206, "global_frame_idx": 13296, "task_index": 9}, {"db_idx": 13297, "episode_idx": 48, "frame_idx": 207, "global_frame_idx": 13297, "task_index": 9}, {"db_idx": 13298, "episode_idx": 48, "frame_idx": 208, "global_frame_idx": 13298, "task_index": 9}, {"db_idx": 13299, "episode_idx": 48, "frame_idx": 209, 
"global_frame_idx": 13299, "task_index": 9}, {"db_idx": 13300, "episode_idx": 48, "frame_idx": 210, "global_frame_idx": 13300, "task_index": 9}, {"db_idx": 13301, "episode_idx": 48, "frame_idx": 211, "global_frame_idx": 13301, "task_index": 9}, {"db_idx": 13302, "episode_idx": 48, "frame_idx": 212, "global_frame_idx": 13302, "task_index": 9}, {"db_idx": 13303, "episode_idx": 48, "frame_idx": 213, "global_frame_idx": 13303, "task_index": 9}, {"db_idx": 13304, "episode_idx": 48, "frame_idx": 214, "global_frame_idx": 13304, "task_index": 9}, {"db_idx": 13305, "episode_idx": 48, "frame_idx": 215, "global_frame_idx": 13305, "task_index": 9}, {"db_idx": 13306, "episode_idx": 48, "frame_idx": 216, "global_frame_idx": 13306, "task_index": 9}, {"db_idx": 13307, "episode_idx": 48, "frame_idx": 217, "global_frame_idx": 13307, "task_index": 9}, {"db_idx": 13308, "episode_idx": 48, "frame_idx": 218, "global_frame_idx": 13308, "task_index": 9}, {"db_idx": 13309, "episode_idx": 48, "frame_idx": 219, "global_frame_idx": 13309, "task_index": 9}, {"db_idx": 13310, "episode_idx": 48, "frame_idx": 220, "global_frame_idx": 13310, "task_index": 9}, {"db_idx": 13311, "episode_idx": 48, "frame_idx": 221, "global_frame_idx": 13311, "task_index": 9}, {"db_idx": 13312, "episode_idx": 48, "frame_idx": 222, "global_frame_idx": 13312, "task_index": 9}, {"db_idx": 13313, "episode_idx": 48, "frame_idx": 223, "global_frame_idx": 13313, "task_index": 9}, {"db_idx": 13314, "episode_idx": 48, "frame_idx": 224, "global_frame_idx": 13314, "task_index": 9}, {"db_idx": 13315, "episode_idx": 48, "frame_idx": 225, "global_frame_idx": 13315, "task_index": 9}, {"db_idx": 13316, "episode_idx": 48, "frame_idx": 226, "global_frame_idx": 13316, "task_index": 9}, {"db_idx": 13317, "episode_idx": 48, "frame_idx": 227, "global_frame_idx": 13317, "task_index": 9}, {"db_idx": 13318, "episode_idx": 48, "frame_idx": 228, "global_frame_idx": 13318, "task_index": 9}, {"db_idx": 13319, "episode_idx": 48, "frame_idx": 229, 
"global_frame_idx": 13319, "task_index": 9}, {"db_idx": 13320, "episode_idx": 48, "frame_idx": 230, "global_frame_idx": 13320, "task_index": 9}, {"db_idx": 13321, "episode_idx": 48, "frame_idx": 231, "global_frame_idx": 13321, "task_index": 9}, {"db_idx": 13322, "episode_idx": 48, "frame_idx": 232, "global_frame_idx": 13322, "task_index": 9}, {"db_idx": 13323, "episode_idx": 48, "frame_idx": 233, "global_frame_idx": 13323, "task_index": 9}, {"db_idx": 13324, "episode_idx": 48, "frame_idx": 234, "global_frame_idx": 13324, "task_index": 9}, {"db_idx": 13325, "episode_idx": 48, "frame_idx": 235, "global_frame_idx": 13325, "task_index": 9}, {"db_idx": 13326, "episode_idx": 48, "frame_idx": 236, "global_frame_idx": 13326, "task_index": 9}, {"db_idx": 13327, "episode_idx": 48, "frame_idx": 237, "global_frame_idx": 13327, "task_index": 9}, {"db_idx": 13328, "episode_idx": 48, "frame_idx": 238, "global_frame_idx": 13328, "task_index": 9}, {"db_idx": 13329, "episode_idx": 48, "frame_idx": 239, "global_frame_idx": 13329, "task_index": 9}, {"db_idx": 13330, "episode_idx": 48, "frame_idx": 240, "global_frame_idx": 13330, "task_index": 9}, {"db_idx": 13331, "episode_idx": 48, "frame_idx": 241, "global_frame_idx": 13331, "task_index": 9}, {"db_idx": 13332, "episode_idx": 48, "frame_idx": 242, "global_frame_idx": 13332, "task_index": 9}, {"db_idx": 13333, "episode_idx": 48, "frame_idx": 243, "global_frame_idx": 13333, "task_index": 9}, {"db_idx": 13334, "episode_idx": 48, "frame_idx": 244, "global_frame_idx": 13334, "task_index": 9}, {"db_idx": 13335, "episode_idx": 48, "frame_idx": 245, "global_frame_idx": 13335, "task_index": 9}, {"db_idx": 13336, "episode_idx": 48, "frame_idx": 246, "global_frame_idx": 13336, "task_index": 9}, {"db_idx": 13337, "episode_idx": 48, "frame_idx": 247, "global_frame_idx": 13337, "task_index": 9}, {"db_idx": 13338, "episode_idx": 48, "frame_idx": 248, "global_frame_idx": 13338, "task_index": 9}, {"db_idx": 13339, "episode_idx": 48, "frame_idx": 249, 
"global_frame_idx": 13339, "task_index": 9}, {"db_idx": 13340, "episode_idx": 48, "frame_idx": 250, "global_frame_idx": 13340, "task_index": 9}, {"db_idx": 13341, "episode_idx": 48, "frame_idx": 251, "global_frame_idx": 13341, "task_index": 9}, {"db_idx": 13342, "episode_idx": 48, "frame_idx": 252, "global_frame_idx": 13342, "task_index": 9}, {"db_idx": 13343, "episode_idx": 48, "frame_idx": 253, "global_frame_idx": 13343, "task_index": 9}, {"db_idx": 13344, "episode_idx": 48, "frame_idx": 254, "global_frame_idx": 13344, "task_index": 9}, {"db_idx": 13345, "episode_idx": 48, "frame_idx": 255, "global_frame_idx": 13345, "task_index": 9}, {"db_idx": 13346, "episode_idx": 48, "frame_idx": 256, "global_frame_idx": 13346, "task_index": 9}, {"db_idx": 13347, "episode_idx": 48, "frame_idx": 257, "global_frame_idx": 13347, "task_index": 9}, {"db_idx": 13348, "episode_idx": 48, "frame_idx": 258, "global_frame_idx": 13348, "task_index": 9}, {"db_idx": 13349, "episode_idx": 48, "frame_idx": 259, "global_frame_idx": 13349, "task_index": 9}, {"db_idx": 13350, "episode_idx": 48, "frame_idx": 260, "global_frame_idx": 13350, "task_index": 9}, {"db_idx": 13351, "episode_idx": 48, "frame_idx": 261, "global_frame_idx": 13351, "task_index": 9}, {"db_idx": 13352, "episode_idx": 48, "frame_idx": 262, "global_frame_idx": 13352, "task_index": 9}, {"db_idx": 13353, "episode_idx": 48, "frame_idx": 263, "global_frame_idx": 13353, "task_index": 9}, {"db_idx": 13354, "episode_idx": 48, "frame_idx": 264, "global_frame_idx": 13354, "task_index": 9}, {"db_idx": 13355, "episode_idx": 48, "frame_idx": 265, "global_frame_idx": 13355, "task_index": 9}, {"db_idx": 13356, "episode_idx": 48, "frame_idx": 266, "global_frame_idx": 13356, "task_index": 9}, {"db_idx": 13357, "episode_idx": 48, "frame_idx": 267, "global_frame_idx": 13357, "task_index": 9}, {"db_idx": 13358, "episode_idx": 48, "frame_idx": 268, "global_frame_idx": 13358, "task_index": 9}, {"db_idx": 13359, "episode_idx": 48, "frame_idx": 269, 
"global_frame_idx": 13359, "task_index": 9}, {"db_idx": 13360, "episode_idx": 48, "frame_idx": 270, "global_frame_idx": 13360, "task_index": 9}, {"db_idx": 13361, "episode_idx": 48, "frame_idx": 271, "global_frame_idx": 13361, "task_index": 9}, {"db_idx": 13362, "episode_idx": 48, "frame_idx": 272, "global_frame_idx": 13362, "task_index": 9}, {"db_idx": 13363, "episode_idx": 48, "frame_idx": 273, "global_frame_idx": 13363, "task_index": 9}, {"db_idx": 13364, "episode_idx": 48, "frame_idx": 274, "global_frame_idx": 13364, "task_index": 9}, {"db_idx": 13365, "episode_idx": 48, "frame_idx": 275, "global_frame_idx": 13365, "task_index": 9}, {"db_idx": 13366, "episode_idx": 48, "frame_idx": 276, "global_frame_idx": 13366, "task_index": 9}, {"db_idx": 13367, "episode_idx": 48, "frame_idx": 277, "global_frame_idx": 13367, "task_index": 9}, {"db_idx": 13368, "episode_idx": 48, "frame_idx": 278, "global_frame_idx": 13368, "task_index": 9}, {"db_idx": 13369, "episode_idx": 48, "frame_idx": 279, "global_frame_idx": 13369, "task_index": 9}, {"db_idx": 13370, "episode_idx": 48, "frame_idx": 280, "global_frame_idx": 13370, "task_index": 9}, {"db_idx": 13371, "episode_idx": 48, "frame_idx": 281, "global_frame_idx": 13371, "task_index": 9}, {"db_idx": 13372, "episode_idx": 48, "frame_idx": 282, "global_frame_idx": 13372, "task_index": 9}, {"db_idx": 13373, "episode_idx": 48, "frame_idx": 283, "global_frame_idx": 13373, "task_index": 9}, {"db_idx": 13374, "episode_idx": 48, "frame_idx": 284, "global_frame_idx": 13374, "task_index": 9}, {"db_idx": 13375, "episode_idx": 48, "frame_idx": 285, "global_frame_idx": 13375, "task_index": 9}, {"db_idx": 13376, "episode_idx": 48, "frame_idx": 286, "global_frame_idx": 13376, "task_index": 9}, {"db_idx": 13377, "episode_idx": 48, "frame_idx": 287, "global_frame_idx": 13377, "task_index": 9}, {"db_idx": 13378, "episode_idx": 48, "frame_idx": 288, "global_frame_idx": 13378, "task_index": 9}, {"db_idx": 13379, "episode_idx": 48, "frame_idx": 289, 
"global_frame_idx": 13379, "task_index": 9}, {"db_idx": 13380, "episode_idx": 48, "frame_idx": 290, "global_frame_idx": 13380, "task_index": 9}, {"db_idx": 13381, "episode_idx": 48, "frame_idx": 291, "global_frame_idx": 13381, "task_index": 9}, {"db_idx": 13382, "episode_idx": 48, "frame_idx": 292, "global_frame_idx": 13382, "task_index": 9}, {"db_idx": 13383, "episode_idx": 48, "frame_idx": 293, "global_frame_idx": 13383, "task_index": 9}, {"db_idx": 13384, "episode_idx": 48, "frame_idx": 294, "global_frame_idx": 13384, "task_index": 9}, {"db_idx": 13385, "episode_idx": 48, "frame_idx": 295, "global_frame_idx": 13385, "task_index": 9}, {"db_idx": 13386, "episode_idx": 49, "frame_idx": 0, "global_frame_idx": 13386, "task_index": 9}, {"db_idx": 13387, "episode_idx": 49, "frame_idx": 1, "global_frame_idx": 13387, "task_index": 9}, {"db_idx": 13388, "episode_idx": 49, "frame_idx": 2, "global_frame_idx": 13388, "task_index": 9}, {"db_idx": 13389, "episode_idx": 49, "frame_idx": 3, "global_frame_idx": 13389, "task_index": 9}, {"db_idx": 13390, "episode_idx": 49, "frame_idx": 4, "global_frame_idx": 13390, "task_index": 9}, {"db_idx": 13391, "episode_idx": 49, "frame_idx": 5, "global_frame_idx": 13391, "task_index": 9}, {"db_idx": 13392, "episode_idx": 49, "frame_idx": 6, "global_frame_idx": 13392, "task_index": 9}, {"db_idx": 13393, "episode_idx": 49, "frame_idx": 7, "global_frame_idx": 13393, "task_index": 9}, {"db_idx": 13394, "episode_idx": 49, "frame_idx": 8, "global_frame_idx": 13394, "task_index": 9}, {"db_idx": 13395, "episode_idx": 49, "frame_idx": 9, "global_frame_idx": 13395, "task_index": 9}, {"db_idx": 13396, "episode_idx": 49, "frame_idx": 10, "global_frame_idx": 13396, "task_index": 9}, {"db_idx": 13397, "episode_idx": 49, "frame_idx": 11, "global_frame_idx": 13397, "task_index": 9}, {"db_idx": 13398, "episode_idx": 49, "frame_idx": 12, "global_frame_idx": 13398, "task_index": 9}, {"db_idx": 13399, "episode_idx": 49, "frame_idx": 13, "global_frame_idx": 
13399, "task_index": 9}, {"db_idx": 13400, "episode_idx": 49, "frame_idx": 14, "global_frame_idx": 13400, "task_index": 9}, {"db_idx": 13401, "episode_idx": 49, "frame_idx": 15, "global_frame_idx": 13401, "task_index": 9}, {"db_idx": 13402, "episode_idx": 49, "frame_idx": 16, "global_frame_idx": 13402, "task_index": 9}, {"db_idx": 13403, "episode_idx": 49, "frame_idx": 17, "global_frame_idx": 13403, "task_index": 9}, {"db_idx": 13404, "episode_idx": 49, "frame_idx": 18, "global_frame_idx": 13404, "task_index": 9}, {"db_idx": 13405, "episode_idx": 49, "frame_idx": 19, "global_frame_idx": 13405, "task_index": 9}, {"db_idx": 13406, "episode_idx": 49, "frame_idx": 20, "global_frame_idx": 13406, "task_index": 9}, {"db_idx": 13407, "episode_idx": 49, "frame_idx": 21, "global_frame_idx": 13407, "task_index": 9}, {"db_idx": 13408, "episode_idx": 49, "frame_idx": 22, "global_frame_idx": 13408, "task_index": 9}, {"db_idx": 13409, "episode_idx": 49, "frame_idx": 23, "global_frame_idx": 13409, "task_index": 9}, {"db_idx": 13410, "episode_idx": 49, "frame_idx": 24, "global_frame_idx": 13410, "task_index": 9}, {"db_idx": 13411, "episode_idx": 49, "frame_idx": 25, "global_frame_idx": 13411, "task_index": 9}, {"db_idx": 13412, "episode_idx": 49, "frame_idx": 26, "global_frame_idx": 13412, "task_index": 9}, {"db_idx": 13413, "episode_idx": 49, "frame_idx": 27, "global_frame_idx": 13413, "task_index": 9}, {"db_idx": 13414, "episode_idx": 49, "frame_idx": 28, "global_frame_idx": 13414, "task_index": 9}, {"db_idx": 13415, "episode_idx": 49, "frame_idx": 29, "global_frame_idx": 13415, "task_index": 9}, {"db_idx": 13416, "episode_idx": 49, "frame_idx": 30, "global_frame_idx": 13416, "task_index": 9}, {"db_idx": 13417, "episode_idx": 49, "frame_idx": 31, "global_frame_idx": 13417, "task_index": 9}, {"db_idx": 13418, "episode_idx": 49, "frame_idx": 32, "global_frame_idx": 13418, "task_index": 9}, {"db_idx": 13419, "episode_idx": 49, "frame_idx": 33, "global_frame_idx": 13419, 
"task_index": 9}, {"db_idx": 13420, "episode_idx": 49, "frame_idx": 34, "global_frame_idx": 13420, "task_index": 9}, {"db_idx": 13421, "episode_idx": 49, "frame_idx": 35, "global_frame_idx": 13421, "task_index": 9}, {"db_idx": 13422, "episode_idx": 49, "frame_idx": 36, "global_frame_idx": 13422, "task_index": 9}, {"db_idx": 13423, "episode_idx": 49, "frame_idx": 37, "global_frame_idx": 13423, "task_index": 9}, {"db_idx": 13424, "episode_idx": 49, "frame_idx": 38, "global_frame_idx": 13424, "task_index": 9}, {"db_idx": 13425, "episode_idx": 49, "frame_idx": 39, "global_frame_idx": 13425, "task_index": 9}, {"db_idx": 13426, "episode_idx": 49, "frame_idx": 40, "global_frame_idx": 13426, "task_index": 9}, {"db_idx": 13427, "episode_idx": 49, "frame_idx": 41, "global_frame_idx": 13427, "task_index": 9}, {"db_idx": 13428, "episode_idx": 49, "frame_idx": 42, "global_frame_idx": 13428, "task_index": 9}, {"db_idx": 13429, "episode_idx": 49, "frame_idx": 43, "global_frame_idx": 13429, "task_index": 9}, {"db_idx": 13430, "episode_idx": 49, "frame_idx": 44, "global_frame_idx": 13430, "task_index": 9}, {"db_idx": 13431, "episode_idx": 49, "frame_idx": 45, "global_frame_idx": 13431, "task_index": 9}, {"db_idx": 13432, "episode_idx": 49, "frame_idx": 46, "global_frame_idx": 13432, "task_index": 9}, {"db_idx": 13433, "episode_idx": 49, "frame_idx": 47, "global_frame_idx": 13433, "task_index": 9}, {"db_idx": 13434, "episode_idx": 49, "frame_idx": 48, "global_frame_idx": 13434, "task_index": 9}, {"db_idx": 13435, "episode_idx": 49, "frame_idx": 49, "global_frame_idx": 13435, "task_index": 9}, {"db_idx": 13436, "episode_idx": 49, "frame_idx": 50, "global_frame_idx": 13436, "task_index": 9}, {"db_idx": 13437, "episode_idx": 49, "frame_idx": 51, "global_frame_idx": 13437, "task_index": 9}, {"db_idx": 13438, "episode_idx": 49, "frame_idx": 52, "global_frame_idx": 13438, "task_index": 9}, {"db_idx": 13439, "episode_idx": 49, "frame_idx": 53, "global_frame_idx": 13439, "task_index": 9}, 
{"db_idx": 13440, "episode_idx": 49, "frame_idx": 54, "global_frame_idx": 13440, "task_index": 9}, {"db_idx": 13441, "episode_idx": 49, "frame_idx": 55, "global_frame_idx": 13441, "task_index": 9}, {"db_idx": 13442, "episode_idx": 49, "frame_idx": 56, "global_frame_idx": 13442, "task_index": 9}, {"db_idx": 13443, "episode_idx": 49, "frame_idx": 57, "global_frame_idx": 13443, "task_index": 9}, {"db_idx": 13444, "episode_idx": 49, "frame_idx": 58, "global_frame_idx": 13444, "task_index": 9}, {"db_idx": 13445, "episode_idx": 49, "frame_idx": 59, "global_frame_idx": 13445, "task_index": 9}, {"db_idx": 13446, "episode_idx": 49, "frame_idx": 60, "global_frame_idx": 13446, "task_index": 9}, {"db_idx": 13447, "episode_idx": 49, "frame_idx": 61, "global_frame_idx": 13447, "task_index": 9}, {"db_idx": 13448, "episode_idx": 49, "frame_idx": 62, "global_frame_idx": 13448, "task_index": 9}, {"db_idx": 13449, "episode_idx": 49, "frame_idx": 63, "global_frame_idx": 13449, "task_index": 9}, {"db_idx": 13450, "episode_idx": 49, "frame_idx": 64, "global_frame_idx": 13450, "task_index": 9}, {"db_idx": 13451, "episode_idx": 49, "frame_idx": 65, "global_frame_idx": 13451, "task_index": 9}, {"db_idx": 13452, "episode_idx": 49, "frame_idx": 66, "global_frame_idx": 13452, "task_index": 9}, {"db_idx": 13453, "episode_idx": 49, "frame_idx": 67, "global_frame_idx": 13453, "task_index": 9}, {"db_idx": 13454, "episode_idx": 49, "frame_idx": 68, "global_frame_idx": 13454, "task_index": 9}, {"db_idx": 13455, "episode_idx": 49, "frame_idx": 69, "global_frame_idx": 13455, "task_index": 9}, {"db_idx": 13456, "episode_idx": 49, "frame_idx": 70, "global_frame_idx": 13456, "task_index": 9}, {"db_idx": 13457, "episode_idx": 49, "frame_idx": 71, "global_frame_idx": 13457, "task_index": 9}, {"db_idx": 13458, "episode_idx": 49, "frame_idx": 72, "global_frame_idx": 13458, "task_index": 9}, {"db_idx": 13459, "episode_idx": 49, "frame_idx": 73, "global_frame_idx": 13459, "task_index": 9}, {"db_idx": 13460, 
"episode_idx": 49, "frame_idx": 74, "global_frame_idx": 13460, "task_index": 9}, {"db_idx": 13461, "episode_idx": 49, "frame_idx": 75, "global_frame_idx": 13461, "task_index": 9}, {"db_idx": 13462, "episode_idx": 49, "frame_idx": 76, "global_frame_idx": 13462, "task_index": 9}, {"db_idx": 13463, "episode_idx": 49, "frame_idx": 77, "global_frame_idx": 13463, "task_index": 9}, {"db_idx": 13464, "episode_idx": 49, "frame_idx": 78, "global_frame_idx": 13464, "task_index": 9}, {"db_idx": 13465, "episode_idx": 49, "frame_idx": 79, "global_frame_idx": 13465, "task_index": 9}, {"db_idx": 13466, "episode_idx": 49, "frame_idx": 80, "global_frame_idx": 13466, "task_index": 9}, {"db_idx": 13467, "episode_idx": 49, "frame_idx": 81, "global_frame_idx": 13467, "task_index": 9}, {"db_idx": 13468, "episode_idx": 49, "frame_idx": 82, "global_frame_idx": 13468, "task_index": 9}, {"db_idx": 13469, "episode_idx": 49, "frame_idx": 83, "global_frame_idx": 13469, "task_index": 9}, {"db_idx": 13470, "episode_idx": 49, "frame_idx": 84, "global_frame_idx": 13470, "task_index": 9}, {"db_idx": 13471, "episode_idx": 49, "frame_idx": 85, "global_frame_idx": 13471, "task_index": 9}, {"db_idx": 13472, "episode_idx": 49, "frame_idx": 86, "global_frame_idx": 13472, "task_index": 9}, {"db_idx": 13473, "episode_idx": 49, "frame_idx": 87, "global_frame_idx": 13473, "task_index": 9}, {"db_idx": 13474, "episode_idx": 49, "frame_idx": 88, "global_frame_idx": 13474, "task_index": 9}, {"db_idx": 13475, "episode_idx": 49, "frame_idx": 89, "global_frame_idx": 13475, "task_index": 9}, {"db_idx": 13476, "episode_idx": 49, "frame_idx": 90, "global_frame_idx": 13476, "task_index": 9}, {"db_idx": 13477, "episode_idx": 49, "frame_idx": 91, "global_frame_idx": 13477, "task_index": 9}, {"db_idx": 13478, "episode_idx": 49, "frame_idx": 92, "global_frame_idx": 13478, "task_index": 9}, {"db_idx": 13479, "episode_idx": 49, "frame_idx": 93, "global_frame_idx": 13479, "task_index": 9}, {"db_idx": 13480, "episode_idx": 49, 
"frame_idx": 94, "global_frame_idx": 13480, "task_index": 9}, {"db_idx": 13481, "episode_idx": 49, "frame_idx": 95, "global_frame_idx": 13481, "task_index": 9}, {"db_idx": 13482, "episode_idx": 49, "frame_idx": 96, "global_frame_idx": 13482, "task_index": 9}, {"db_idx": 13483, "episode_idx": 49, "frame_idx": 97, "global_frame_idx": 13483, "task_index": 9}, {"db_idx": 13484, "episode_idx": 49, "frame_idx": 98, "global_frame_idx": 13484, "task_index": 9}, {"db_idx": 13485, "episode_idx": 49, "frame_idx": 99, "global_frame_idx": 13485, "task_index": 9}, {"db_idx": 13486, "episode_idx": 49, "frame_idx": 100, "global_frame_idx": 13486, "task_index": 9}, {"db_idx": 13487, "episode_idx": 49, "frame_idx": 101, "global_frame_idx": 13487, "task_index": 9}, {"db_idx": 13488, "episode_idx": 49, "frame_idx": 102, "global_frame_idx": 13488, "task_index": 9}, {"db_idx": 13489, "episode_idx": 49, "frame_idx": 103, "global_frame_idx": 13489, "task_index": 9}, {"db_idx": 13490, "episode_idx": 49, "frame_idx": 104, "global_frame_idx": 13490, "task_index": 9}, {"db_idx": 13491, "episode_idx": 49, "frame_idx": 105, "global_frame_idx": 13491, "task_index": 9}, {"db_idx": 13492, "episode_idx": 49, "frame_idx": 106, "global_frame_idx": 13492, "task_index": 9}, {"db_idx": 13493, "episode_idx": 49, "frame_idx": 107, "global_frame_idx": 13493, "task_index": 9}, {"db_idx": 13494, "episode_idx": 49, "frame_idx": 108, "global_frame_idx": 13494, "task_index": 9}, {"db_idx": 13495, "episode_idx": 49, "frame_idx": 109, "global_frame_idx": 13495, "task_index": 9}, {"db_idx": 13496, "episode_idx": 49, "frame_idx": 110, "global_frame_idx": 13496, "task_index": 9}, {"db_idx": 13497, "episode_idx": 49, "frame_idx": 111, "global_frame_idx": 13497, "task_index": 9}, {"db_idx": 13498, "episode_idx": 49, "frame_idx": 112, "global_frame_idx": 13498, "task_index": 9}, {"db_idx": 13499, "episode_idx": 49, "frame_idx": 113, "global_frame_idx": 13499, "task_index": 9}, {"db_idx": 13500, "episode_idx": 49, 
"frame_idx": 114, "global_frame_idx": 13500, "task_index": 9}, {"db_idx": 13501, "episode_idx": 49, "frame_idx": 115, "global_frame_idx": 13501, "task_index": 9}, {"db_idx": 13502, "episode_idx": 49, "frame_idx": 116, "global_frame_idx": 13502, "task_index": 9}, {"db_idx": 13503, "episode_idx": 49, "frame_idx": 117, "global_frame_idx": 13503, "task_index": 9}, {"db_idx": 13504, "episode_idx": 49, "frame_idx": 118, "global_frame_idx": 13504, "task_index": 9}, {"db_idx": 13505, "episode_idx": 49, "frame_idx": 119, "global_frame_idx": 13505, "task_index": 9}, {"db_idx": 13506, "episode_idx": 49, "frame_idx": 120, "global_frame_idx": 13506, "task_index": 9}, {"db_idx": 13507, "episode_idx": 49, "frame_idx": 121, "global_frame_idx": 13507, "task_index": 9}, {"db_idx": 13508, "episode_idx": 49, "frame_idx": 122, "global_frame_idx": 13508, "task_index": 9}, {"db_idx": 13509, "episode_idx": 49, "frame_idx": 123, "global_frame_idx": 13509, "task_index": 9}, {"db_idx": 13510, "episode_idx": 49, "frame_idx": 124, "global_frame_idx": 13510, "task_index": 9}, {"db_idx": 13511, "episode_idx": 49, "frame_idx": 125, "global_frame_idx": 13511, "task_index": 9}, {"db_idx": 13512, "episode_idx": 49, "frame_idx": 126, "global_frame_idx": 13512, "task_index": 9}, {"db_idx": 13513, "episode_idx": 49, "frame_idx": 127, "global_frame_idx": 13513, "task_index": 9}, {"db_idx": 13514, "episode_idx": 49, "frame_idx": 128, "global_frame_idx": 13514, "task_index": 9}, {"db_idx": 13515, "episode_idx": 49, "frame_idx": 129, "global_frame_idx": 13515, "task_index": 9}, {"db_idx": 13516, "episode_idx": 49, "frame_idx": 130, "global_frame_idx": 13516, "task_index": 9}, {"db_idx": 13517, "episode_idx": 49, "frame_idx": 131, "global_frame_idx": 13517, "task_index": 9}, {"db_idx": 13518, "episode_idx": 49, "frame_idx": 132, "global_frame_idx": 13518, "task_index": 9}, {"db_idx": 13519, "episode_idx": 49, "frame_idx": 133, "global_frame_idx": 13519, "task_index": 9}, {"db_idx": 13520, "episode_idx": 49, 
"frame_idx": 134, "global_frame_idx": 13520, "task_index": 9}, {"db_idx": 13521, "episode_idx": 49, "frame_idx": 135, "global_frame_idx": 13521, "task_index": 9}, {"db_idx": 13522, "episode_idx": 49, "frame_idx": 136, "global_frame_idx": 13522, "task_index": 9}, {"db_idx": 13523, "episode_idx": 49, "frame_idx": 137, "global_frame_idx": 13523, "task_index": 9}, {"db_idx": 13524, "episode_idx": 49, "frame_idx": 138, "global_frame_idx": 13524, "task_index": 9}, {"db_idx": 13525, "episode_idx": 49, "frame_idx": 139, "global_frame_idx": 13525, "task_index": 9}, {"db_idx": 13526, "episode_idx": 49, "frame_idx": 140, "global_frame_idx": 13526, "task_index": 9}, {"db_idx": 13527, "episode_idx": 49, "frame_idx": 141, "global_frame_idx": 13527, "task_index": 9}, {"db_idx": 13528, "episode_idx": 49, "frame_idx": 142, "global_frame_idx": 13528, "task_index": 9}, {"db_idx": 13529, "episode_idx": 49, "frame_idx": 143, "global_frame_idx": 13529, "task_index": 9}, {"db_idx": 13530, "episode_idx": 49, "frame_idx": 144, "global_frame_idx": 13530, "task_index": 9}, {"db_idx": 13531, "episode_idx": 49, "frame_idx": 145, "global_frame_idx": 13531, "task_index": 9}, {"db_idx": 13532, "episode_idx": 49, "frame_idx": 146, "global_frame_idx": 13532, "task_index": 9}, {"db_idx": 13533, "episode_idx": 49, "frame_idx": 147, "global_frame_idx": 13533, "task_index": 9}, {"db_idx": 13534, "episode_idx": 49, "frame_idx": 148, "global_frame_idx": 13534, "task_index": 9}, {"db_idx": 13535, "episode_idx": 49, "frame_idx": 149, "global_frame_idx": 13535, "task_index": 9}, {"db_idx": 13536, "episode_idx": 49, "frame_idx": 150, "global_frame_idx": 13536, "task_index": 9}, {"db_idx": 13537, "episode_idx": 49, "frame_idx": 151, "global_frame_idx": 13537, "task_index": 9}, {"db_idx": 13538, "episode_idx": 49, "frame_idx": 152, "global_frame_idx": 13538, "task_index": 9}, {"db_idx": 13539, "episode_idx": 49, "frame_idx": 153, "global_frame_idx": 13539, "task_index": 9}, {"db_idx": 13540, "episode_idx": 49, 
"frame_idx": 154, "global_frame_idx": 13540, "task_index": 9}, {"db_idx": 13541, "episode_idx": 49, "frame_idx": 155, "global_frame_idx": 13541, "task_index": 9}, {"db_idx": 13542, "episode_idx": 49, "frame_idx": 156, "global_frame_idx": 13542, "task_index": 9}, {"db_idx": 13543, "episode_idx": 49, "frame_idx": 157, "global_frame_idx": 13543, "task_index": 9}, {"db_idx": 13544, "episode_idx": 49, "frame_idx": 158, "global_frame_idx": 13544, "task_index": 9}, {"db_idx": 13545, "episode_idx": 49, "frame_idx": 159, "global_frame_idx": 13545, "task_index": 9}, {"db_idx": 13546, "episode_idx": 49, "frame_idx": 160, "global_frame_idx": 13546, "task_index": 9}, {"db_idx": 13547, "episode_idx": 49, "frame_idx": 161, "global_frame_idx": 13547, "task_index": 9}, {"db_idx": 13548, "episode_idx": 49, "frame_idx": 162, "global_frame_idx": 13548, "task_index": 9}, {"db_idx": 13549, "episode_idx": 49, "frame_idx": 163, "global_frame_idx": 13549, "task_index": 9}, {"db_idx": 13550, "episode_idx": 49, "frame_idx": 164, "global_frame_idx": 13550, "task_index": 9}, {"db_idx": 13551, "episode_idx": 49, "frame_idx": 165, "global_frame_idx": 13551, "task_index": 9}, {"db_idx": 13552, "episode_idx": 49, "frame_idx": 166, "global_frame_idx": 13552, "task_index": 9}, {"db_idx": 13553, "episode_idx": 49, "frame_idx": 167, "global_frame_idx": 13553, "task_index": 9}, {"db_idx": 13554, "episode_idx": 49, "frame_idx": 168, "global_frame_idx": 13554, "task_index": 9}, {"db_idx": 13555, "episode_idx": 49, "frame_idx": 169, "global_frame_idx": 13555, "task_index": 9}, {"db_idx": 13556, "episode_idx": 49, "frame_idx": 170, "global_frame_idx": 13556, "task_index": 9}, {"db_idx": 13557, "episode_idx": 49, "frame_idx": 171, "global_frame_idx": 13557, "task_index": 9}, {"db_idx": 13558, "episode_idx": 49, "frame_idx": 172, "global_frame_idx": 13558, "task_index": 9}, {"db_idx": 13559, "episode_idx": 49, "frame_idx": 173, "global_frame_idx": 13559, "task_index": 9}, {"db_idx": 13560, "episode_idx": 49, 
"frame_idx": 174, "global_frame_idx": 13560, "task_index": 9}, {"db_idx": 13561, "episode_idx": 49, "frame_idx": 175, "global_frame_idx": 13561, "task_index": 9}, {"db_idx": 13562, "episode_idx": 49, "frame_idx": 176, "global_frame_idx": 13562, "task_index": 9}, {"db_idx": 13563, "episode_idx": 49, "frame_idx": 177, "global_frame_idx": 13563, "task_index": 9}, {"db_idx": 13564, "episode_idx": 49, "frame_idx": 178, "global_frame_idx": 13564, "task_index": 9}, {"db_idx": 13565, "episode_idx": 49, "frame_idx": 179, "global_frame_idx": 13565, "task_index": 9}, {"db_idx": 13566, "episode_idx": 49, "frame_idx": 180, "global_frame_idx": 13566, "task_index": 9}, {"db_idx": 13567, "episode_idx": 49, "frame_idx": 181, "global_frame_idx": 13567, "task_index": 9}, {"db_idx": 13568, "episode_idx": 49, "frame_idx": 182, "global_frame_idx": 13568, "task_index": 9}, {"db_idx": 13569, "episode_idx": 49, "frame_idx": 183, "global_frame_idx": 13569, "task_index": 9}, {"db_idx": 13570, "episode_idx": 49, "frame_idx": 184, "global_frame_idx": 13570, "task_index": 9}, {"db_idx": 13571, "episode_idx": 49, "frame_idx": 185, "global_frame_idx": 13571, "task_index": 9}, {"db_idx": 13572, "episode_idx": 49, "frame_idx": 186, "global_frame_idx": 13572, "task_index": 9}, {"db_idx": 13573, "episode_idx": 49, "frame_idx": 187, "global_frame_idx": 13573, "task_index": 9}, {"db_idx": 13574, "episode_idx": 49, "frame_idx": 188, "global_frame_idx": 13574, "task_index": 9}, {"db_idx": 13575, "episode_idx": 49, "frame_idx": 189, "global_frame_idx": 13575, "task_index": 9}, {"db_idx": 13576, "episode_idx": 49, "frame_idx": 190, "global_frame_idx": 13576, "task_index": 9}, {"db_idx": 13577, "episode_idx": 49, "frame_idx": 191, "global_frame_idx": 13577, "task_index": 9}, {"db_idx": 13578, "episode_idx": 49, "frame_idx": 192, "global_frame_idx": 13578, "task_index": 9}, {"db_idx": 13579, "episode_idx": 49, "frame_idx": 193, "global_frame_idx": 13579, "task_index": 9}, {"db_idx": 13580, "episode_idx": 49, 
"frame_idx": 194, "global_frame_idx": 13580, "task_index": 9}, {"db_idx": 13581, "episode_idx": 49, "frame_idx": 195, "global_frame_idx": 13581, "task_index": 9}, {"db_idx": 13582, "episode_idx": 49, "frame_idx": 196, "global_frame_idx": 13582, "task_index": 9}, {"db_idx": 13583, "episode_idx": 49, "frame_idx": 197, "global_frame_idx": 13583, "task_index": 9}, {"db_idx": 13584, "episode_idx": 49, "frame_idx": 198, "global_frame_idx": 13584, "task_index": 9}, {"db_idx": 13585, "episode_idx": 49, "frame_idx": 199, "global_frame_idx": 13585, "task_index": 9}, {"db_idx": 13586, "episode_idx": 49, "frame_idx": 200, "global_frame_idx": 13586, "task_index": 9}, {"db_idx": 13587, "episode_idx": 49, "frame_idx": 201, "global_frame_idx": 13587, "task_index": 9}, {"db_idx": 13588, "episode_idx": 49, "frame_idx": 202, "global_frame_idx": 13588, "task_index": 9}, {"db_idx": 13589, "episode_idx": 49, "frame_idx": 203, "global_frame_idx": 13589, "task_index": 9}, {"db_idx": 13590, "episode_idx": 49, "frame_idx": 204, "global_frame_idx": 13590, "task_index": 9}, {"db_idx": 13591, "episode_idx": 49, "frame_idx": 205, "global_frame_idx": 13591, "task_index": 9}, {"db_idx": 13592, "episode_idx": 49, "frame_idx": 206, "global_frame_idx": 13592, "task_index": 9}, {"db_idx": 13593, "episode_idx": 49, "frame_idx": 207, "global_frame_idx": 13593, "task_index": 9}, {"db_idx": 13594, "episode_idx": 49, "frame_idx": 208, "global_frame_idx": 13594, "task_index": 9}, {"db_idx": 13595, "episode_idx": 49, "frame_idx": 209, "global_frame_idx": 13595, "task_index": 9}, {"db_idx": 13596, "episode_idx": 49, "frame_idx": 210, "global_frame_idx": 13596, "task_index": 9}, {"db_idx": 13597, "episode_idx": 49, "frame_idx": 211, "global_frame_idx": 13597, "task_index": 9}, {"db_idx": 13598, "episode_idx": 49, "frame_idx": 212, "global_frame_idx": 13598, "task_index": 9}, {"db_idx": 13599, "episode_idx": 49, "frame_idx": 213, "global_frame_idx": 13599, "task_index": 9}, {"db_idx": 13600, "episode_idx": 49, 
"frame_idx": 214, "global_frame_idx": 13600, "task_index": 9}, {"db_idx": 13601, "episode_idx": 49, "frame_idx": 215, "global_frame_idx": 13601, "task_index": 9}, {"db_idx": 13602, "episode_idx": 49, "frame_idx": 216, "global_frame_idx": 13602, "task_index": 9}, {"db_idx": 13603, "episode_idx": 49, "frame_idx": 217, "global_frame_idx": 13603, "task_index": 9}, {"db_idx": 13604, "episode_idx": 49, "frame_idx": 218, "global_frame_idx": 13604, "task_index": 9}, {"db_idx": 13605, "episode_idx": 49, "frame_idx": 219, "global_frame_idx": 13605, "task_index": 9}, {"db_idx": 13606, "episode_idx": 49, "frame_idx": 220, "global_frame_idx": 13606, "task_index": 9}, {"db_idx": 13607, "episode_idx": 49, "frame_idx": 221, "global_frame_idx": 13607, "task_index": 9}, {"db_idx": 13608, "episode_idx": 49, "frame_idx": 222, "global_frame_idx": 13608, "task_index": 9}, {"db_idx": 13609, "episode_idx": 49, "frame_idx": 223, "global_frame_idx": 13609, "task_index": 9}, {"db_idx": 13610, "episode_idx": 49, "frame_idx": 224, "global_frame_idx": 13610, "task_index": 9}, {"db_idx": 13611, "episode_idx": 49, "frame_idx": 225, "global_frame_idx": 13611, "task_index": 9}, {"db_idx": 13612, "episode_idx": 49, "frame_idx": 226, "global_frame_idx": 13612, "task_index": 9}, {"db_idx": 13613, "episode_idx": 49, "frame_idx": 227, "global_frame_idx": 13613, "task_index": 9}, {"db_idx": 13614, "episode_idx": 49, "frame_idx": 228, "global_frame_idx": 13614, "task_index": 9}, {"db_idx": 13615, "episode_idx": 49, "frame_idx": 229, "global_frame_idx": 13615, "task_index": 9}, {"db_idx": 13616, "episode_idx": 49, "frame_idx": 230, "global_frame_idx": 13616, "task_index": 9}, {"db_idx": 13617, "episode_idx": 49, "frame_idx": 231, "global_frame_idx": 13617, "task_index": 9}, {"db_idx": 13618, "episode_idx": 49, "frame_idx": 232, "global_frame_idx": 13618, "task_index": 9}, {"db_idx": 13619, "episode_idx": 49, "frame_idx": 233, "global_frame_idx": 13619, "task_index": 9}, {"db_idx": 13620, "episode_idx": 49, 
"frame_idx": 234, "global_frame_idx": 13620, "task_index": 9}, {"db_idx": 13621, "episode_idx": 49, "frame_idx": 235, "global_frame_idx": 13621, "task_index": 9}, {"db_idx": 13622, "episode_idx": 49, "frame_idx": 236, "global_frame_idx": 13622, "task_index": 9}, {"db_idx": 13623, "episode_idx": 49, "frame_idx": 237, "global_frame_idx": 13623, "task_index": 9}, {"db_idx": 13624, "episode_idx": 49, "frame_idx": 238, "global_frame_idx": 13624, "task_index": 9}, {"db_idx": 13625, "episode_idx": 49, "frame_idx": 239, "global_frame_idx": 13625, "task_index": 9}, {"db_idx": 13626, "episode_idx": 49, "frame_idx": 240, "global_frame_idx": 13626, "task_index": 9}, {"db_idx": 13627, "episode_idx": 49, "frame_idx": 241, "global_frame_idx": 13627, "task_index": 9}, {"db_idx": 13628, "episode_idx": 49, "frame_idx": 242, "global_frame_idx": 13628, "task_index": 9}, {"db_idx": 13629, "episode_idx": 49, "frame_idx": 243, "global_frame_idx": 13629, "task_index": 9}, {"db_idx": 13630, "episode_idx": 49, "frame_idx": 244, "global_frame_idx": 13630, "task_index": 9}, {"db_idx": 13631, "episode_idx": 49, "frame_idx": 245, "global_frame_idx": 13631, "task_index": 9}, {"db_idx": 13632, "episode_idx": 49, "frame_idx": 246, "global_frame_idx": 13632, "task_index": 9}, {"db_idx": 13633, "episode_idx": 49, "frame_idx": 247, "global_frame_idx": 13633, "task_index": 9}, {"db_idx": 13634, "episode_idx": 49, "frame_idx": 248, "global_frame_idx": 13634, "task_index": 9}, {"db_idx": 13635, "episode_idx": 49, "frame_idx": 249, "global_frame_idx": 13635, "task_index": 9}, {"db_idx": 13636, "episode_idx": 50, "frame_idx": 0, "global_frame_idx": 13636, "task_index": 10}, {"db_idx": 13637, "episode_idx": 50, "frame_idx": 1, "global_frame_idx": 13637, "task_index": 10}, {"db_idx": 13638, "episode_idx": 50, "frame_idx": 2, "global_frame_idx": 13638, "task_index": 10}, {"db_idx": 13639, "episode_idx": 50, "frame_idx": 3, "global_frame_idx": 13639, "task_index": 10}, {"db_idx": 13640, "episode_idx": 50, 
"frame_idx": 4, "global_frame_idx": 13640, "task_index": 10}, {"db_idx": 13641, "episode_idx": 50, "frame_idx": 5, "global_frame_idx": 13641, "task_index": 10}, {"db_idx": 13642, "episode_idx": 50, "frame_idx": 6, "global_frame_idx": 13642, "task_index": 10}, {"db_idx": 13643, "episode_idx": 50, "frame_idx": 7, "global_frame_idx": 13643, "task_index": 10}, {"db_idx": 13644, "episode_idx": 50, "frame_idx": 8, "global_frame_idx": 13644, "task_index": 10}, {"db_idx": 13645, "episode_idx": 50, "frame_idx": 9, "global_frame_idx": 13645, "task_index": 10}, {"db_idx": 13646, "episode_idx": 50, "frame_idx": 10, "global_frame_idx": 13646, "task_index": 10}, {"db_idx": 13647, "episode_idx": 50, "frame_idx": 11, "global_frame_idx": 13647, "task_index": 10}, {"db_idx": 13648, "episode_idx": 50, "frame_idx": 12, "global_frame_idx": 13648, "task_index": 10}, {"db_idx": 13649, "episode_idx": 50, "frame_idx": 13, "global_frame_idx": 13649, "task_index": 10}, {"db_idx": 13650, "episode_idx": 50, "frame_idx": 14, "global_frame_idx": 13650, "task_index": 10}, {"db_idx": 13651, "episode_idx": 50, "frame_idx": 15, "global_frame_idx": 13651, "task_index": 10}, {"db_idx": 13652, "episode_idx": 50, "frame_idx": 16, "global_frame_idx": 13652, "task_index": 10}, {"db_idx": 13653, "episode_idx": 50, "frame_idx": 17, "global_frame_idx": 13653, "task_index": 10}, {"db_idx": 13654, "episode_idx": 50, "frame_idx": 18, "global_frame_idx": 13654, "task_index": 10}, {"db_idx": 13655, "episode_idx": 50, "frame_idx": 19, "global_frame_idx": 13655, "task_index": 10}, {"db_idx": 13656, "episode_idx": 50, "frame_idx": 20, "global_frame_idx": 13656, "task_index": 10}, {"db_idx": 13657, "episode_idx": 50, "frame_idx": 21, "global_frame_idx": 13657, "task_index": 10}, {"db_idx": 13658, "episode_idx": 50, "frame_idx": 22, "global_frame_idx": 13658, "task_index": 10}, {"db_idx": 13659, "episode_idx": 50, "frame_idx": 23, "global_frame_idx": 13659, "task_index": 10}, {"db_idx": 13660, "episode_idx": 50, 
"frame_idx": 24, "global_frame_idx": 13660, "task_index": 10}, {"db_idx": 13661, "episode_idx": 50, "frame_idx": 25, "global_frame_idx": 13661, "task_index": 10}, {"db_idx": 13662, "episode_idx": 50, "frame_idx": 26, "global_frame_idx": 13662, "task_index": 10}, {"db_idx": 13663, "episode_idx": 50, "frame_idx": 27, "global_frame_idx": 13663, "task_index": 10}, {"db_idx": 13664, "episode_idx": 50, "frame_idx": 28, "global_frame_idx": 13664, "task_index": 10}, {"db_idx": 13665, "episode_idx": 50, "frame_idx": 29, "global_frame_idx": 13665, "task_index": 10}, {"db_idx": 13666, "episode_idx": 50, "frame_idx": 30, "global_frame_idx": 13666, "task_index": 10}, {"db_idx": 13667, "episode_idx": 50, "frame_idx": 31, "global_frame_idx": 13667, "task_index": 10}, {"db_idx": 13668, "episode_idx": 50, "frame_idx": 32, "global_frame_idx": 13668, "task_index": 10}, {"db_idx": 13669, "episode_idx": 50, "frame_idx": 33, "global_frame_idx": 13669, "task_index": 10}, {"db_idx": 13670, "episode_idx": 50, "frame_idx": 34, "global_frame_idx": 13670, "task_index": 10}, {"db_idx": 13671, "episode_idx": 50, "frame_idx": 35, "global_frame_idx": 13671, "task_index": 10}, {"db_idx": 13672, "episode_idx": 50, "frame_idx": 36, "global_frame_idx": 13672, "task_index": 10}, {"db_idx": 13673, "episode_idx": 50, "frame_idx": 37, "global_frame_idx": 13673, "task_index": 10}, {"db_idx": 13674, "episode_idx": 50, "frame_idx": 38, "global_frame_idx": 13674, "task_index": 10}, {"db_idx": 13675, "episode_idx": 50, "frame_idx": 39, "global_frame_idx": 13675, "task_index": 10}, {"db_idx": 13676, "episode_idx": 50, "frame_idx": 40, "global_frame_idx": 13676, "task_index": 10}, {"db_idx": 13677, "episode_idx": 50, "frame_idx": 41, "global_frame_idx": 13677, "task_index": 10}, {"db_idx": 13678, "episode_idx": 50, "frame_idx": 42, "global_frame_idx": 13678, "task_index": 10}, {"db_idx": 13679, "episode_idx": 50, "frame_idx": 43, "global_frame_idx": 13679, "task_index": 10}, {"db_idx": 13680, "episode_idx": 50, 
"frame_idx": 44, "global_frame_idx": 13680, "task_index": 10}, {"db_idx": 13681, "episode_idx": 50, "frame_idx": 45, "global_frame_idx": 13681, "task_index": 10}, {"db_idx": 13682, "episode_idx": 50, "frame_idx": 46, "global_frame_idx": 13682, "task_index": 10}, {"db_idx": 13683, "episode_idx": 50, "frame_idx": 47, "global_frame_idx": 13683, "task_index": 10}, {"db_idx": 13684, "episode_idx": 50, "frame_idx": 48, "global_frame_idx": 13684, "task_index": 10}, {"db_idx": 13685, "episode_idx": 50, "frame_idx": 49, "global_frame_idx": 13685, "task_index": 10}, {"db_idx": 13686, "episode_idx": 50, "frame_idx": 50, "global_frame_idx": 13686, "task_index": 10}, {"db_idx": 13687, "episode_idx": 50, "frame_idx": 51, "global_frame_idx": 13687, "task_index": 10}, {"db_idx": 13688, "episode_idx": 50, "frame_idx": 52, "global_frame_idx": 13688, "task_index": 10}, {"db_idx": 13689, "episode_idx": 50, "frame_idx": 53, "global_frame_idx": 13689, "task_index": 10}, {"db_idx": 13690, "episode_idx": 50, "frame_idx": 54, "global_frame_idx": 13690, "task_index": 10}, {"db_idx": 13691, "episode_idx": 50, "frame_idx": 55, "global_frame_idx": 13691, "task_index": 10}, {"db_idx": 13692, "episode_idx": 50, "frame_idx": 56, "global_frame_idx": 13692, "task_index": 10}, {"db_idx": 13693, "episode_idx": 50, "frame_idx": 57, "global_frame_idx": 13693, "task_index": 10}, {"db_idx": 13694, "episode_idx": 50, "frame_idx": 58, "global_frame_idx": 13694, "task_index": 10}, {"db_idx": 13695, "episode_idx": 50, "frame_idx": 59, "global_frame_idx": 13695, "task_index": 10}, {"db_idx": 13696, "episode_idx": 50, "frame_idx": 60, "global_frame_idx": 13696, "task_index": 10}, {"db_idx": 13697, "episode_idx": 50, "frame_idx": 61, "global_frame_idx": 13697, "task_index": 10}, {"db_idx": 13698, "episode_idx": 50, "frame_idx": 62, "global_frame_idx": 13698, "task_index": 10}, {"db_idx": 13699, "episode_idx": 50, "frame_idx": 63, "global_frame_idx": 13699, "task_index": 10}, {"db_idx": 13700, "episode_idx": 50, 
"frame_idx": 64, "global_frame_idx": 13700, "task_index": 10}, {"db_idx": 13701, "episode_idx": 50, "frame_idx": 65, "global_frame_idx": 13701, "task_index": 10}, {"db_idx": 13702, "episode_idx": 50, "frame_idx": 66, "global_frame_idx": 13702, "task_index": 10}, {"db_idx": 13703, "episode_idx": 50, "frame_idx": 67, "global_frame_idx": 13703, "task_index": 10}, {"db_idx": 13704, "episode_idx": 50, "frame_idx": 68, "global_frame_idx": 13704, "task_index": 10}, {"db_idx": 13705, "episode_idx": 50, "frame_idx": 69, "global_frame_idx": 13705, "task_index": 10}, {"db_idx": 13706, "episode_idx": 50, "frame_idx": 70, "global_frame_idx": 13706, "task_index": 10}, {"db_idx": 13707, "episode_idx": 50, "frame_idx": 71, "global_frame_idx": 13707, "task_index": 10}, {"db_idx": 13708, "episode_idx": 50, "frame_idx": 72, "global_frame_idx": 13708, "task_index": 10}, {"db_idx": 13709, "episode_idx": 50, "frame_idx": 73, "global_frame_idx": 13709, "task_index": 10}, {"db_idx": 13710, "episode_idx": 50, "frame_idx": 74, "global_frame_idx": 13710, "task_index": 10}, {"db_idx": 13711, "episode_idx": 50, "frame_idx": 75, "global_frame_idx": 13711, "task_index": 10}, {"db_idx": 13712, "episode_idx": 50, "frame_idx": 76, "global_frame_idx": 13712, "task_index": 10}, {"db_idx": 13713, "episode_idx": 50, "frame_idx": 77, "global_frame_idx": 13713, "task_index": 10}, {"db_idx": 13714, "episode_idx": 50, "frame_idx": 78, "global_frame_idx": 13714, "task_index": 10}, {"db_idx": 13715, "episode_idx": 50, "frame_idx": 79, "global_frame_idx": 13715, "task_index": 10}, {"db_idx": 13716, "episode_idx": 50, "frame_idx": 80, "global_frame_idx": 13716, "task_index": 10}, {"db_idx": 13717, "episode_idx": 50, "frame_idx": 81, "global_frame_idx": 13717, "task_index": 10}, {"db_idx": 13718, "episode_idx": 50, "frame_idx": 82, "global_frame_idx": 13718, "task_index": 10}, {"db_idx": 13719, "episode_idx": 50, "frame_idx": 83, "global_frame_idx": 13719, "task_index": 10}, {"db_idx": 13720, "episode_idx": 50, 
"frame_idx": 84, "global_frame_idx": 13720, "task_index": 10}, {"db_idx": 13721, "episode_idx": 50, "frame_idx": 85, "global_frame_idx": 13721, "task_index": 10}, {"db_idx": 13722, "episode_idx": 50, "frame_idx": 86, "global_frame_idx": 13722, "task_index": 10}, {"db_idx": 13723, "episode_idx": 50, "frame_idx": 87, "global_frame_idx": 13723, "task_index": 10}, {"db_idx": 13724, "episode_idx": 50, "frame_idx": 88, "global_frame_idx": 13724, "task_index": 10}, {"db_idx": 13725, "episode_idx": 50, "frame_idx": 89, "global_frame_idx": 13725, "task_index": 10}, {"db_idx": 13726, "episode_idx": 50, "frame_idx": 90, "global_frame_idx": 13726, "task_index": 10}, {"db_idx": 13727, "episode_idx": 50, "frame_idx": 91, "global_frame_idx": 13727, "task_index": 10}, {"db_idx": 13728, "episode_idx": 50, "frame_idx": 92, "global_frame_idx": 13728, "task_index": 10}, {"db_idx": 13729, "episode_idx": 50, "frame_idx": 93, "global_frame_idx": 13729, "task_index": 10}, {"db_idx": 13730, "episode_idx": 50, "frame_idx": 94, "global_frame_idx": 13730, "task_index": 10}, {"db_idx": 13731, "episode_idx": 50, "frame_idx": 95, "global_frame_idx": 13731, "task_index": 10}, {"db_idx": 13732, "episode_idx": 50, "frame_idx": 96, "global_frame_idx": 13732, "task_index": 10}, {"db_idx": 13733, "episode_idx": 50, "frame_idx": 97, "global_frame_idx": 13733, "task_index": 10}, {"db_idx": 13734, "episode_idx": 50, "frame_idx": 98, "global_frame_idx": 13734, "task_index": 10}, {"db_idx": 13735, "episode_idx": 50, "frame_idx": 99, "global_frame_idx": 13735, "task_index": 10}, {"db_idx": 13736, "episode_idx": 50, "frame_idx": 100, "global_frame_idx": 13736, "task_index": 10}, {"db_idx": 13737, "episode_idx": 50, "frame_idx": 101, "global_frame_idx": 13737, "task_index": 10}, {"db_idx": 13738, "episode_idx": 50, "frame_idx": 102, "global_frame_idx": 13738, "task_index": 10}, {"db_idx": 13739, "episode_idx": 50, "frame_idx": 103, "global_frame_idx": 13739, "task_index": 10}, {"db_idx": 13740, "episode_idx": 
50, "frame_idx": 104, "global_frame_idx": 13740, "task_index": 10}, {"db_idx": 13741, "episode_idx": 50, "frame_idx": 105, "global_frame_idx": 13741, "task_index": 10}, {"db_idx": 13742, "episode_idx": 50, "frame_idx": 106, "global_frame_idx": 13742, "task_index": 10}, {"db_idx": 13743, "episode_idx": 50, "frame_idx": 107, "global_frame_idx": 13743, "task_index": 10}, {"db_idx": 13744, "episode_idx": 50, "frame_idx": 108, "global_frame_idx": 13744, "task_index": 10}, {"db_idx": 13745, "episode_idx": 50, "frame_idx": 109, "global_frame_idx": 13745, "task_index": 10}, {"db_idx": 13746, "episode_idx": 50, "frame_idx": 110, "global_frame_idx": 13746, "task_index": 10}, {"db_idx": 13747, "episode_idx": 50, "frame_idx": 111, "global_frame_idx": 13747, "task_index": 10}, {"db_idx": 13748, "episode_idx": 50, "frame_idx": 112, "global_frame_idx": 13748, "task_index": 10}, {"db_idx": 13749, "episode_idx": 50, "frame_idx": 113, "global_frame_idx": 13749, "task_index": 10}, {"db_idx": 13750, "episode_idx": 50, "frame_idx": 114, "global_frame_idx": 13750, "task_index": 10}, {"db_idx": 13751, "episode_idx": 50, "frame_idx": 115, "global_frame_idx": 13751, "task_index": 10}, {"db_idx": 13752, "episode_idx": 50, "frame_idx": 116, "global_frame_idx": 13752, "task_index": 10}, {"db_idx": 13753, "episode_idx": 50, "frame_idx": 117, "global_frame_idx": 13753, "task_index": 10}, {"db_idx": 13754, "episode_idx": 50, "frame_idx": 118, "global_frame_idx": 13754, "task_index": 10}, {"db_idx": 13755, "episode_idx": 50, "frame_idx": 119, "global_frame_idx": 13755, "task_index": 10}, {"db_idx": 13756, "episode_idx": 50, "frame_idx": 120, "global_frame_idx": 13756, "task_index": 10}, {"db_idx": 13757, "episode_idx": 50, "frame_idx": 121, "global_frame_idx": 13757, "task_index": 10}, {"db_idx": 13758, "episode_idx": 50, "frame_idx": 122, "global_frame_idx": 13758, "task_index": 10}, {"db_idx": 13759, "episode_idx": 50, "frame_idx": 123, "global_frame_idx": 13759, "task_index": 10}, {"db_idx": 
13760, "episode_idx": 50, "frame_idx": 124, "global_frame_idx": 13760, "task_index": 10}, {"db_idx": 13761, "episode_idx": 50, "frame_idx": 125, "global_frame_idx": 13761, "task_index": 10}, {"db_idx": 13762, "episode_idx": 50, "frame_idx": 126, "global_frame_idx": 13762, "task_index": 10}, {"db_idx": 13763, "episode_idx": 50, "frame_idx": 127, "global_frame_idx": 13763, "task_index": 10}, {"db_idx": 13764, "episode_idx": 50, "frame_idx": 128, "global_frame_idx": 13764, "task_index": 10}, {"db_idx": 13765, "episode_idx": 50, "frame_idx": 129, "global_frame_idx": 13765, "task_index": 10}, {"db_idx": 13766, "episode_idx": 50, "frame_idx": 130, "global_frame_idx": 13766, "task_index": 10}, {"db_idx": 13767, "episode_idx": 50, "frame_idx": 131, "global_frame_idx": 13767, "task_index": 10}, {"db_idx": 13768, "episode_idx": 50, "frame_idx": 132, "global_frame_idx": 13768, "task_index": 10}, {"db_idx": 13769, "episode_idx": 51, "frame_idx": 0, "global_frame_idx": 13769, "task_index": 10}, {"db_idx": 13770, "episode_idx": 51, "frame_idx": 1, "global_frame_idx": 13770, "task_index": 10}, {"db_idx": 13771, "episode_idx": 51, "frame_idx": 2, "global_frame_idx": 13771, "task_index": 10}, {"db_idx": 13772, "episode_idx": 51, "frame_idx": 3, "global_frame_idx": 13772, "task_index": 10}, {"db_idx": 13773, "episode_idx": 51, "frame_idx": 4, "global_frame_idx": 13773, "task_index": 10}, {"db_idx": 13774, "episode_idx": 51, "frame_idx": 5, "global_frame_idx": 13774, "task_index": 10}, {"db_idx": 13775, "episode_idx": 51, "frame_idx": 6, "global_frame_idx": 13775, "task_index": 10}, {"db_idx": 13776, "episode_idx": 51, "frame_idx": 7, "global_frame_idx": 13776, "task_index": 10}, {"db_idx": 13777, "episode_idx": 51, "frame_idx": 8, "global_frame_idx": 13777, "task_index": 10}, {"db_idx": 13778, "episode_idx": 51, "frame_idx": 9, "global_frame_idx": 13778, "task_index": 10}, {"db_idx": 13779, "episode_idx": 51, "frame_idx": 10, "global_frame_idx": 13779, "task_index": 10}, {"db_idx": 
13780, "episode_idx": 51, "frame_idx": 11, "global_frame_idx": 13780, "task_index": 10}, {"db_idx": 13781, "episode_idx": 51, "frame_idx": 12, "global_frame_idx": 13781, "task_index": 10}, {"db_idx": 13782, "episode_idx": 51, "frame_idx": 13, "global_frame_idx": 13782, "task_index": 10}, {"db_idx": 13783, "episode_idx": 51, "frame_idx": 14, "global_frame_idx": 13783, "task_index": 10}, {"db_idx": 13784, "episode_idx": 51, "frame_idx": 15, "global_frame_idx": 13784, "task_index": 10}, {"db_idx": 13785, "episode_idx": 51, "frame_idx": 16, "global_frame_idx": 13785, "task_index": 10}, {"db_idx": 13786, "episode_idx": 51, "frame_idx": 17, "global_frame_idx": 13786, "task_index": 10}, {"db_idx": 13787, "episode_idx": 51, "frame_idx": 18, "global_frame_idx": 13787, "task_index": 10}, {"db_idx": 13788, "episode_idx": 51, "frame_idx": 19, "global_frame_idx": 13788, "task_index": 10}, {"db_idx": 13789, "episode_idx": 51, "frame_idx": 20, "global_frame_idx": 13789, "task_index": 10}, {"db_idx": 13790, "episode_idx": 51, "frame_idx": 21, "global_frame_idx": 13790, "task_index": 10}, {"db_idx": 13791, "episode_idx": 51, "frame_idx": 22, "global_frame_idx": 13791, "task_index": 10}, {"db_idx": 13792, "episode_idx": 51, "frame_idx": 23, "global_frame_idx": 13792, "task_index": 10}, {"db_idx": 13793, "episode_idx": 51, "frame_idx": 24, "global_frame_idx": 13793, "task_index": 10}, {"db_idx": 13794, "episode_idx": 51, "frame_idx": 25, "global_frame_idx": 13794, "task_index": 10}, {"db_idx": 13795, "episode_idx": 51, "frame_idx": 26, "global_frame_idx": 13795, "task_index": 10}, {"db_idx": 13796, "episode_idx": 51, "frame_idx": 27, "global_frame_idx": 13796, "task_index": 10}, {"db_idx": 13797, "episode_idx": 51, "frame_idx": 28, "global_frame_idx": 13797, "task_index": 10}, {"db_idx": 13798, "episode_idx": 51, "frame_idx": 29, "global_frame_idx": 13798, "task_index": 10}, {"db_idx": 13799, "episode_idx": 51, "frame_idx": 30, "global_frame_idx": 13799, "task_index": 10}, {"db_idx": 
13800, "episode_idx": 51, "frame_idx": 31, "global_frame_idx": 13800, "task_index": 10}, {"db_idx": 13801, "episode_idx": 51, "frame_idx": 32, "global_frame_idx": 13801, "task_index": 10}, {"db_idx": 13802, "episode_idx": 51, "frame_idx": 33, "global_frame_idx": 13802, "task_index": 10}, {"db_idx": 13803, "episode_idx": 51, "frame_idx": 34, "global_frame_idx": 13803, "task_index": 10}, {"db_idx": 13804, "episode_idx": 51, "frame_idx": 35, "global_frame_idx": 13804, "task_index": 10}, {"db_idx": 13805, "episode_idx": 51, "frame_idx": 36, "global_frame_idx": 13805, "task_index": 10}, {"db_idx": 13806, "episode_idx": 51, "frame_idx": 37, "global_frame_idx": 13806, "task_index": 10}, {"db_idx": 13807, "episode_idx": 51, "frame_idx": 38, "global_frame_idx": 13807, "task_index": 10}, {"db_idx": 13808, "episode_idx": 51, "frame_idx": 39, "global_frame_idx": 13808, "task_index": 10}, {"db_idx": 13809, "episode_idx": 51, "frame_idx": 40, "global_frame_idx": 13809, "task_index": 10}, {"db_idx": 13810, "episode_idx": 51, "frame_idx": 41, "global_frame_idx": 13810, "task_index": 10}, {"db_idx": 13811, "episode_idx": 51, "frame_idx": 42, "global_frame_idx": 13811, "task_index": 10}, {"db_idx": 13812, "episode_idx": 51, "frame_idx": 43, "global_frame_idx": 13812, "task_index": 10}, {"db_idx": 13813, "episode_idx": 51, "frame_idx": 44, "global_frame_idx": 13813, "task_index": 10}, {"db_idx": 13814, "episode_idx": 51, "frame_idx": 45, "global_frame_idx": 13814, "task_index": 10}, {"db_idx": 13815, "episode_idx": 51, "frame_idx": 46, "global_frame_idx": 13815, "task_index": 10}, {"db_idx": 13816, "episode_idx": 51, "frame_idx": 47, "global_frame_idx": 13816, "task_index": 10}, {"db_idx": 13817, "episode_idx": 51, "frame_idx": 48, "global_frame_idx": 13817, "task_index": 10}, {"db_idx": 13818, "episode_idx": 51, "frame_idx": 49, "global_frame_idx": 13818, "task_index": 10}, {"db_idx": 13819, "episode_idx": 51, "frame_idx": 50, "global_frame_idx": 13819, "task_index": 10}, {"db_idx": 
13820, "episode_idx": 51, "frame_idx": 51, "global_frame_idx": 13820, "task_index": 10}, {"db_idx": 13821, "episode_idx": 51, "frame_idx": 52, "global_frame_idx": 13821, "task_index": 10}, {"db_idx": 13822, "episode_idx": 51, "frame_idx": 53, "global_frame_idx": 13822, "task_index": 10}, {"db_idx": 13823, "episode_idx": 51, "frame_idx": 54, "global_frame_idx": 13823, "task_index": 10}, {"db_idx": 13824, "episode_idx": 51, "frame_idx": 55, "global_frame_idx": 13824, "task_index": 10}, {"db_idx": 13825, "episode_idx": 51, "frame_idx": 56, "global_frame_idx": 13825, "task_index": 10}, {"db_idx": 13826, "episode_idx": 51, "frame_idx": 57, "global_frame_idx": 13826, "task_index": 10}, {"db_idx": 13827, "episode_idx": 51, "frame_idx": 58, "global_frame_idx": 13827, "task_index": 10}, {"db_idx": 13828, "episode_idx": 51, "frame_idx": 59, "global_frame_idx": 13828, "task_index": 10}, {"db_idx": 13829, "episode_idx": 51, "frame_idx": 60, "global_frame_idx": 13829, "task_index": 10}, {"db_idx": 13830, "episode_idx": 51, "frame_idx": 61, "global_frame_idx": 13830, "task_index": 10}, {"db_idx": 13831, "episode_idx": 51, "frame_idx": 62, "global_frame_idx": 13831, "task_index": 10}, {"db_idx": 13832, "episode_idx": 51, "frame_idx": 63, "global_frame_idx": 13832, "task_index": 10}, {"db_idx": 13833, "episode_idx": 51, "frame_idx": 64, "global_frame_idx": 13833, "task_index": 10}, {"db_idx": 13834, "episode_idx": 51, "frame_idx": 65, "global_frame_idx": 13834, "task_index": 10}, {"db_idx": 13835, "episode_idx": 51, "frame_idx": 66, "global_frame_idx": 13835, "task_index": 10}, {"db_idx": 13836, "episode_idx": 51, "frame_idx": 67, "global_frame_idx": 13836, "task_index": 10}, {"db_idx": 13837, "episode_idx": 51, "frame_idx": 68, "global_frame_idx": 13837, "task_index": 10}, {"db_idx": 13838, "episode_idx": 51, "frame_idx": 69, "global_frame_idx": 13838, "task_index": 10}, {"db_idx": 13839, "episode_idx": 51, "frame_idx": 70, "global_frame_idx": 13839, "task_index": 10}, {"db_idx": 
13840, "episode_idx": 51, "frame_idx": 71, "global_frame_idx": 13840, "task_index": 10}, {"db_idx": 13841, "episode_idx": 51, "frame_idx": 72, "global_frame_idx": 13841, "task_index": 10}, {"db_idx": 13842, "episode_idx": 51, "frame_idx": 73, "global_frame_idx": 13842, "task_index": 10}, {"db_idx": 13843, "episode_idx": 51, "frame_idx": 74, "global_frame_idx": 13843, "task_index": 10}, {"db_idx": 13844, "episode_idx": 51, "frame_idx": 75, "global_frame_idx": 13844, "task_index": 10}, {"db_idx": 13845, "episode_idx": 51, "frame_idx": 76, "global_frame_idx": 13845, "task_index": 10}, {"db_idx": 13846, "episode_idx": 51, "frame_idx": 77, "global_frame_idx": 13846, "task_index": 10}, {"db_idx": 13847, "episode_idx": 51, "frame_idx": 78, "global_frame_idx": 13847, "task_index": 10}, {"db_idx": 13848, "episode_idx": 51, "frame_idx": 79, "global_frame_idx": 13848, "task_index": 10}, {"db_idx": 13849, "episode_idx": 51, "frame_idx": 80, "global_frame_idx": 13849, "task_index": 10}, {"db_idx": 13850, "episode_idx": 51, "frame_idx": 81, "global_frame_idx": 13850, "task_index": 10}, {"db_idx": 13851, "episode_idx": 51, "frame_idx": 82, "global_frame_idx": 13851, "task_index": 10}, {"db_idx": 13852, "episode_idx": 51, "frame_idx": 83, "global_frame_idx": 13852, "task_index": 10}, {"db_idx": 13853, "episode_idx": 51, "frame_idx": 84, "global_frame_idx": 13853, "task_index": 10}, {"db_idx": 13854, "episode_idx": 51, "frame_idx": 85, "global_frame_idx": 13854, "task_index": 10}, {"db_idx": 13855, "episode_idx": 51, "frame_idx": 86, "global_frame_idx": 13855, "task_index": 10}, {"db_idx": 13856, "episode_idx": 51, "frame_idx": 87, "global_frame_idx": 13856, "task_index": 10}, {"db_idx": 13857, "episode_idx": 51, "frame_idx": 88, "global_frame_idx": 13857, "task_index": 10}, {"db_idx": 13858, "episode_idx": 51, "frame_idx": 89, "global_frame_idx": 13858, "task_index": 10}, {"db_idx": 13859, "episode_idx": 51, "frame_idx": 90, "global_frame_idx": 13859, "task_index": 10}, {"db_idx": 
13860, "episode_idx": 51, "frame_idx": 91, "global_frame_idx": 13860, "task_index": 10}, {"db_idx": 13861, "episode_idx": 51, "frame_idx": 92, "global_frame_idx": 13861, "task_index": 10}, {"db_idx": 13862, "episode_idx": 51, "frame_idx": 93, "global_frame_idx": 13862, "task_index": 10}, {"db_idx": 13863, "episode_idx": 51, "frame_idx": 94, "global_frame_idx": 13863, "task_index": 10}, {"db_idx": 13864, "episode_idx": 51, "frame_idx": 95, "global_frame_idx": 13864, "task_index": 10}, {"db_idx": 13865, "episode_idx": 51, "frame_idx": 96, "global_frame_idx": 13865, "task_index": 10}, {"db_idx": 13866, "episode_idx": 51, "frame_idx": 97, "global_frame_idx": 13866, "task_index": 10}, {"db_idx": 13867, "episode_idx": 51, "frame_idx": 98, "global_frame_idx": 13867, "task_index": 10}, {"db_idx": 13868, "episode_idx": 51, "frame_idx": 99, "global_frame_idx": 13868, "task_index": 10}, {"db_idx": 13869, "episode_idx": 51, "frame_idx": 100, "global_frame_idx": 13869, "task_index": 10}, {"db_idx": 13870, "episode_idx": 51, "frame_idx": 101, "global_frame_idx": 13870, "task_index": 10}, {"db_idx": 13871, "episode_idx": 51, "frame_idx": 102, "global_frame_idx": 13871, "task_index": 10}, {"db_idx": 13872, "episode_idx": 51, "frame_idx": 103, "global_frame_idx": 13872, "task_index": 10}, {"db_idx": 13873, "episode_idx": 51, "frame_idx": 104, "global_frame_idx": 13873, "task_index": 10}, {"db_idx": 13874, "episode_idx": 51, "frame_idx": 105, "global_frame_idx": 13874, "task_index": 10}, {"db_idx": 13875, "episode_idx": 51, "frame_idx": 106, "global_frame_idx": 13875, "task_index": 10}, {"db_idx": 13876, "episode_idx": 51, "frame_idx": 107, "global_frame_idx": 13876, "task_index": 10}, {"db_idx": 13877, "episode_idx": 51, "frame_idx": 108, "global_frame_idx": 13877, "task_index": 10}, {"db_idx": 13878, "episode_idx": 51, "frame_idx": 109, "global_frame_idx": 13878, "task_index": 10}, {"db_idx": 13879, "episode_idx": 51, "frame_idx": 110, "global_frame_idx": 13879, "task_index": 10}, 
{"db_idx": 13880, "episode_idx": 51, "frame_idx": 111, "global_frame_idx": 13880, "task_index": 10}, {"db_idx": 13881, "episode_idx": 51, "frame_idx": 112, "global_frame_idx": 13881, "task_index": 10}, {"db_idx": 13882, "episode_idx": 51, "frame_idx": 113, "global_frame_idx": 13882, "task_index": 10}, {"db_idx": 13883, "episode_idx": 51, "frame_idx": 114, "global_frame_idx": 13883, "task_index": 10}, {"db_idx": 13884, "episode_idx": 51, "frame_idx": 115, "global_frame_idx": 13884, "task_index": 10}, {"db_idx": 13885, "episode_idx": 51, "frame_idx": 116, "global_frame_idx": 13885, "task_index": 10}, {"db_idx": 13886, "episode_idx": 51, "frame_idx": 117, "global_frame_idx": 13886, "task_index": 10}, {"db_idx": 13887, "episode_idx": 51, "frame_idx": 118, "global_frame_idx": 13887, "task_index": 10}, {"db_idx": 13888, "episode_idx": 51, "frame_idx": 119, "global_frame_idx": 13888, "task_index": 10}, {"db_idx": 13889, "episode_idx": 51, "frame_idx": 120, "global_frame_idx": 13889, "task_index": 10}, {"db_idx": 13890, "episode_idx": 51, "frame_idx": 121, "global_frame_idx": 13890, "task_index": 10}, {"db_idx": 13891, "episode_idx": 51, "frame_idx": 122, "global_frame_idx": 13891, "task_index": 10}, {"db_idx": 13892, "episode_idx": 51, "frame_idx": 123, "global_frame_idx": 13892, "task_index": 10}, {"db_idx": 13893, "episode_idx": 51, "frame_idx": 124, "global_frame_idx": 13893, "task_index": 10}, {"db_idx": 13894, "episode_idx": 51, "frame_idx": 125, "global_frame_idx": 13894, "task_index": 10}, {"db_idx": 13895, "episode_idx": 51, "frame_idx": 126, "global_frame_idx": 13895, "task_index": 10}, {"db_idx": 13896, "episode_idx": 51, "frame_idx": 127, "global_frame_idx": 13896, "task_index": 10}, {"db_idx": 13897, "episode_idx": 51, "frame_idx": 128, "global_frame_idx": 13897, "task_index": 10}, {"db_idx": 13898, "episode_idx": 51, "frame_idx": 129, "global_frame_idx": 13898, "task_index": 10}, {"db_idx": 13899, "episode_idx": 51, "frame_idx": 130, "global_frame_idx": 
13899, "task_index": 10}, {"db_idx": 13900, "episode_idx": 51, "frame_idx": 131, "global_frame_idx": 13900, "task_index": 10}, {"db_idx": 13901, "episode_idx": 51, "frame_idx": 132, "global_frame_idx": 13901, "task_index": 10}, {"db_idx": 13902, "episode_idx": 51, "frame_idx": 133, "global_frame_idx": 13902, "task_index": 10}, {"db_idx": 13903, "episode_idx": 52, "frame_idx": 0, "global_frame_idx": 13903, "task_index": 10}, {"db_idx": 13904, "episode_idx": 52, "frame_idx": 1, "global_frame_idx": 13904, "task_index": 10}, {"db_idx": 13905, "episode_idx": 52, "frame_idx": 2, "global_frame_idx": 13905, "task_index": 10}, {"db_idx": 13906, "episode_idx": 52, "frame_idx": 3, "global_frame_idx": 13906, "task_index": 10}, {"db_idx": 13907, "episode_idx": 52, "frame_idx": 4, "global_frame_idx": 13907, "task_index": 10}, {"db_idx": 13908, "episode_idx": 52, "frame_idx": 5, "global_frame_idx": 13908, "task_index": 10}, {"db_idx": 13909, "episode_idx": 52, "frame_idx": 6, "global_frame_idx": 13909, "task_index": 10}, {"db_idx": 13910, "episode_idx": 52, "frame_idx": 7, "global_frame_idx": 13910, "task_index": 10}, {"db_idx": 13911, "episode_idx": 52, "frame_idx": 8, "global_frame_idx": 13911, "task_index": 10}, {"db_idx": 13912, "episode_idx": 52, "frame_idx": 9, "global_frame_idx": 13912, "task_index": 10}, {"db_idx": 13913, "episode_idx": 52, "frame_idx": 10, "global_frame_idx": 13913, "task_index": 10}, {"db_idx": 13914, "episode_idx": 52, "frame_idx": 11, "global_frame_idx": 13914, "task_index": 10}, {"db_idx": 13915, "episode_idx": 52, "frame_idx": 12, "global_frame_idx": 13915, "task_index": 10}, {"db_idx": 13916, "episode_idx": 52, "frame_idx": 13, "global_frame_idx": 13916, "task_index": 10}, {"db_idx": 13917, "episode_idx": 52, "frame_idx": 14, "global_frame_idx": 13917, "task_index": 10}, {"db_idx": 13918, "episode_idx": 52, "frame_idx": 15, "global_frame_idx": 13918, "task_index": 10}, {"db_idx": 13919, "episode_idx": 52, "frame_idx": 16, "global_frame_idx": 13919, 
"task_index": 10}, {"db_idx": 13920, "episode_idx": 52, "frame_idx": 17, "global_frame_idx": 13920, "task_index": 10}, {"db_idx": 13921, "episode_idx": 52, "frame_idx": 18, "global_frame_idx": 13921, "task_index": 10}, {"db_idx": 13922, "episode_idx": 52, "frame_idx": 19, "global_frame_idx": 13922, "task_index": 10}, {"db_idx": 13923, "episode_idx": 52, "frame_idx": 20, "global_frame_idx": 13923, "task_index": 10}, {"db_idx": 13924, "episode_idx": 52, "frame_idx": 21, "global_frame_idx": 13924, "task_index": 10}, {"db_idx": 13925, "episode_idx": 52, "frame_idx": 22, "global_frame_idx": 13925, "task_index": 10}, {"db_idx": 13926, "episode_idx": 52, "frame_idx": 23, "global_frame_idx": 13926, "task_index": 10}, {"db_idx": 13927, "episode_idx": 52, "frame_idx": 24, "global_frame_idx": 13927, "task_index": 10}, {"db_idx": 13928, "episode_idx": 52, "frame_idx": 25, "global_frame_idx": 13928, "task_index": 10}, {"db_idx": 13929, "episode_idx": 52, "frame_idx": 26, "global_frame_idx": 13929, "task_index": 10}, {"db_idx": 13930, "episode_idx": 52, "frame_idx": 27, "global_frame_idx": 13930, "task_index": 10}, {"db_idx": 13931, "episode_idx": 52, "frame_idx": 28, "global_frame_idx": 13931, "task_index": 10}, {"db_idx": 13932, "episode_idx": 52, "frame_idx": 29, "global_frame_idx": 13932, "task_index": 10}, {"db_idx": 13933, "episode_idx": 52, "frame_idx": 30, "global_frame_idx": 13933, "task_index": 10}, {"db_idx": 13934, "episode_idx": 52, "frame_idx": 31, "global_frame_idx": 13934, "task_index": 10}, {"db_idx": 13935, "episode_idx": 52, "frame_idx": 32, "global_frame_idx": 13935, "task_index": 10}, {"db_idx": 13936, "episode_idx": 52, "frame_idx": 33, "global_frame_idx": 13936, "task_index": 10}, {"db_idx": 13937, "episode_idx": 52, "frame_idx": 34, "global_frame_idx": 13937, "task_index": 10}, {"db_idx": 13938, "episode_idx": 52, "frame_idx": 35, "global_frame_idx": 13938, "task_index": 10}, {"db_idx": 13939, "episode_idx": 52, "frame_idx": 36, "global_frame_idx": 13939, 
"task_index": 10}, {"db_idx": 13940, "episode_idx": 52, "frame_idx": 37, "global_frame_idx": 13940, "task_index": 10}, {"db_idx": 13941, "episode_idx": 52, "frame_idx": 38, "global_frame_idx": 13941, "task_index": 10}, {"db_idx": 13942, "episode_idx": 52, "frame_idx": 39, "global_frame_idx": 13942, "task_index": 10}, {"db_idx": 13943, "episode_idx": 52, "frame_idx": 40, "global_frame_idx": 13943, "task_index": 10}, {"db_idx": 13944, "episode_idx": 52, "frame_idx": 41, "global_frame_idx": 13944, "task_index": 10}, {"db_idx": 13945, "episode_idx": 52, "frame_idx": 42, "global_frame_idx": 13945, "task_index": 10}, {"db_idx": 13946, "episode_idx": 52, "frame_idx": 43, "global_frame_idx": 13946, "task_index": 10}, {"db_idx": 13947, "episode_idx": 52, "frame_idx": 44, "global_frame_idx": 13947, "task_index": 10}, {"db_idx": 13948, "episode_idx": 52, "frame_idx": 45, "global_frame_idx": 13948, "task_index": 10}, {"db_idx": 13949, "episode_idx": 52, "frame_idx": 46, "global_frame_idx": 13949, "task_index": 10}, {"db_idx": 13950, "episode_idx": 52, "frame_idx": 47, "global_frame_idx": 13950, "task_index": 10}, {"db_idx": 13951, "episode_idx": 52, "frame_idx": 48, "global_frame_idx": 13951, "task_index": 10}, {"db_idx": 13952, "episode_idx": 52, "frame_idx": 49, "global_frame_idx": 13952, "task_index": 10}, {"db_idx": 13953, "episode_idx": 52, "frame_idx": 50, "global_frame_idx": 13953, "task_index": 10}, {"db_idx": 13954, "episode_idx": 52, "frame_idx": 51, "global_frame_idx": 13954, "task_index": 10}, {"db_idx": 13955, "episode_idx": 52, "frame_idx": 52, "global_frame_idx": 13955, "task_index": 10}, {"db_idx": 13956, "episode_idx": 52, "frame_idx": 53, "global_frame_idx": 13956, "task_index": 10}, {"db_idx": 13957, "episode_idx": 52, "frame_idx": 54, "global_frame_idx": 13957, "task_index": 10}, {"db_idx": 13958, "episode_idx": 52, "frame_idx": 55, "global_frame_idx": 13958, "task_index": 10}, {"db_idx": 13959, "episode_idx": 52, "frame_idx": 56, "global_frame_idx": 13959, 
"task_index": 10}, {"db_idx": 13960, "episode_idx": 52, "frame_idx": 57, "global_frame_idx": 13960, "task_index": 10}, {"db_idx": 13961, "episode_idx": 52, "frame_idx": 58, "global_frame_idx": 13961, "task_index": 10}, {"db_idx": 13962, "episode_idx": 52, "frame_idx": 59, "global_frame_idx": 13962, "task_index": 10}, {"db_idx": 13963, "episode_idx": 52, "frame_idx": 60, "global_frame_idx": 13963, "task_index": 10}, {"db_idx": 13964, "episode_idx": 52, "frame_idx": 61, "global_frame_idx": 13964, "task_index": 10}, {"db_idx": 13965, "episode_idx": 52, "frame_idx": 62, "global_frame_idx": 13965, "task_index": 10}, {"db_idx": 13966, "episode_idx": 52, "frame_idx": 63, "global_frame_idx": 13966, "task_index": 10}, {"db_idx": 13967, "episode_idx": 52, "frame_idx": 64, "global_frame_idx": 13967, "task_index": 10}, {"db_idx": 13968, "episode_idx": 52, "frame_idx": 65, "global_frame_idx": 13968, "task_index": 10}, {"db_idx": 13969, "episode_idx": 52, "frame_idx": 66, "global_frame_idx": 13969, "task_index": 10}, {"db_idx": 13970, "episode_idx": 52, "frame_idx": 67, "global_frame_idx": 13970, "task_index": 10}, {"db_idx": 13971, "episode_idx": 52, "frame_idx": 68, "global_frame_idx": 13971, "task_index": 10}, {"db_idx": 13972, "episode_idx": 52, "frame_idx": 69, "global_frame_idx": 13972, "task_index": 10}, {"db_idx": 13973, "episode_idx": 52, "frame_idx": 70, "global_frame_idx": 13973, "task_index": 10}, {"db_idx": 13974, "episode_idx": 52, "frame_idx": 71, "global_frame_idx": 13974, "task_index": 10}, {"db_idx": 13975, "episode_idx": 52, "frame_idx": 72, "global_frame_idx": 13975, "task_index": 10}, {"db_idx": 13976, "episode_idx": 52, "frame_idx": 73, "global_frame_idx": 13976, "task_index": 10}, {"db_idx": 13977, "episode_idx": 52, "frame_idx": 74, "global_frame_idx": 13977, "task_index": 10}, {"db_idx": 13978, "episode_idx": 52, "frame_idx": 75, "global_frame_idx": 13978, "task_index": 10}, {"db_idx": 13979, "episode_idx": 52, "frame_idx": 76, "global_frame_idx": 13979, 
"task_index": 10}, {"db_idx": 13980, "episode_idx": 52, "frame_idx": 77, "global_frame_idx": 13980, "task_index": 10}, {"db_idx": 13981, "episode_idx": 52, "frame_idx": 78, "global_frame_idx": 13981, "task_index": 10}, {"db_idx": 13982, "episode_idx": 52, "frame_idx": 79, "global_frame_idx": 13982, "task_index": 10}, {"db_idx": 13983, "episode_idx": 52, "frame_idx": 80, "global_frame_idx": 13983, "task_index": 10}, {"db_idx": 13984, "episode_idx": 52, "frame_idx": 81, "global_frame_idx": 13984, "task_index": 10}, {"db_idx": 13985, "episode_idx": 52, "frame_idx": 82, "global_frame_idx": 13985, "task_index": 10}, {"db_idx": 13986, "episode_idx": 52, "frame_idx": 83, "global_frame_idx": 13986, "task_index": 10}, {"db_idx": 13987, "episode_idx": 52, "frame_idx": 84, "global_frame_idx": 13987, "task_index": 10}, {"db_idx": 13988, "episode_idx": 52, "frame_idx": 85, "global_frame_idx": 13988, "task_index": 10}, {"db_idx": 13989, "episode_idx": 52, "frame_idx": 86, "global_frame_idx": 13989, "task_index": 10}, {"db_idx": 13990, "episode_idx": 52, "frame_idx": 87, "global_frame_idx": 13990, "task_index": 10}, {"db_idx": 13991, "episode_idx": 52, "frame_idx": 88, "global_frame_idx": 13991, "task_index": 10}, {"db_idx": 13992, "episode_idx": 52, "frame_idx": 89, "global_frame_idx": 13992, "task_index": 10}, {"db_idx": 13993, "episode_idx": 52, "frame_idx": 90, "global_frame_idx": 13993, "task_index": 10}, {"db_idx": 13994, "episode_idx": 52, "frame_idx": 91, "global_frame_idx": 13994, "task_index": 10}, {"db_idx": 13995, "episode_idx": 52, "frame_idx": 92, "global_frame_idx": 13995, "task_index": 10}, {"db_idx": 13996, "episode_idx": 52, "frame_idx": 93, "global_frame_idx": 13996, "task_index": 10}, {"db_idx": 13997, "episode_idx": 52, "frame_idx": 94, "global_frame_idx": 13997, "task_index": 10}, {"db_idx": 13998, "episode_idx": 52, "frame_idx": 95, "global_frame_idx": 13998, "task_index": 10}, {"db_idx": 13999, "episode_idx": 52, "frame_idx": 96, "global_frame_idx": 13999, 
"task_index": 10}, {"db_idx": 14000, "episode_idx": 52, "frame_idx": 97, "global_frame_idx": 14000, "task_index": 10}, {"db_idx": 14001, "episode_idx": 52, "frame_idx": 98, "global_frame_idx": 14001, "task_index": 10}, {"db_idx": 14002, "episode_idx": 52, "frame_idx": 99, "global_frame_idx": 14002, "task_index": 10}, {"db_idx": 14003, "episode_idx": 52, "frame_idx": 100, "global_frame_idx": 14003, "task_index": 10}, {"db_idx": 14004, "episode_idx": 52, "frame_idx": 101, "global_frame_idx": 14004, "task_index": 10}, {"db_idx": 14005, "episode_idx": 52, "frame_idx": 102, "global_frame_idx": 14005, "task_index": 10}, {"db_idx": 14006, "episode_idx": 52, "frame_idx": 103, "global_frame_idx": 14006, "task_index": 10}, {"db_idx": 14007, "episode_idx": 52, "frame_idx": 104, "global_frame_idx": 14007, "task_index": 10}, {"db_idx": 14008, "episode_idx": 52, "frame_idx": 105, "global_frame_idx": 14008, "task_index": 10}, {"db_idx": 14009, "episode_idx": 52, "frame_idx": 106, "global_frame_idx": 14009, "task_index": 10}, {"db_idx": 14010, "episode_idx": 52, "frame_idx": 107, "global_frame_idx": 14010, "task_index": 10}, {"db_idx": 14011, "episode_idx": 52, "frame_idx": 108, "global_frame_idx": 14011, "task_index": 10}, {"db_idx": 14012, "episode_idx": 52, "frame_idx": 109, "global_frame_idx": 14012, "task_index": 10}, {"db_idx": 14013, "episode_idx": 52, "frame_idx": 110, "global_frame_idx": 14013, "task_index": 10}, {"db_idx": 14014, "episode_idx": 52, "frame_idx": 111, "global_frame_idx": 14014, "task_index": 10}, {"db_idx": 14015, "episode_idx": 52, "frame_idx": 112, "global_frame_idx": 14015, "task_index": 10}, {"db_idx": 14016, "episode_idx": 52, "frame_idx": 113, "global_frame_idx": 14016, "task_index": 10}, {"db_idx": 14017, "episode_idx": 52, "frame_idx": 114, "global_frame_idx": 14017, "task_index": 10}, {"db_idx": 14018, "episode_idx": 52, "frame_idx": 115, "global_frame_idx": 14018, "task_index": 10}, {"db_idx": 14019, "episode_idx": 52, "frame_idx": 116, 
"global_frame_idx": 14019, "task_index": 10}, {"db_idx": 14020, "episode_idx": 52, "frame_idx": 117, "global_frame_idx": 14020, "task_index": 10}, {"db_idx": 14021, "episode_idx": 52, "frame_idx": 118, "global_frame_idx": 14021, "task_index": 10}, {"db_idx": 14022, "episode_idx": 52, "frame_idx": 119, "global_frame_idx": 14022, "task_index": 10}, {"db_idx": 14023, "episode_idx": 52, "frame_idx": 120, "global_frame_idx": 14023, "task_index": 10}, {"db_idx": 14024, "episode_idx": 52, "frame_idx": 121, "global_frame_idx": 14024, "task_index": 10}, {"db_idx": 14025, "episode_idx": 53, "frame_idx": 0, "global_frame_idx": 14025, "task_index": 10}, {"db_idx": 14026, "episode_idx": 53, "frame_idx": 1, "global_frame_idx": 14026, "task_index": 10}, {"db_idx": 14027, "episode_idx": 53, "frame_idx": 2, "global_frame_idx": 14027, "task_index": 10}, {"db_idx": 14028, "episode_idx": 53, "frame_idx": 3, "global_frame_idx": 14028, "task_index": 10}, {"db_idx": 14029, "episode_idx": 53, "frame_idx": 4, "global_frame_idx": 14029, "task_index": 10}, {"db_idx": 14030, "episode_idx": 53, "frame_idx": 5, "global_frame_idx": 14030, "task_index": 10}, {"db_idx": 14031, "episode_idx": 53, "frame_idx": 6, "global_frame_idx": 14031, "task_index": 10}, {"db_idx": 14032, "episode_idx": 53, "frame_idx": 7, "global_frame_idx": 14032, "task_index": 10}, {"db_idx": 14033, "episode_idx": 53, "frame_idx": 8, "global_frame_idx": 14033, "task_index": 10}, {"db_idx": 14034, "episode_idx": 53, "frame_idx": 9, "global_frame_idx": 14034, "task_index": 10}, {"db_idx": 14035, "episode_idx": 53, "frame_idx": 10, "global_frame_idx": 14035, "task_index": 10}, {"db_idx": 14036, "episode_idx": 53, "frame_idx": 11, "global_frame_idx": 14036, "task_index": 10}, {"db_idx": 14037, "episode_idx": 53, "frame_idx": 12, "global_frame_idx": 14037, "task_index": 10}, {"db_idx": 14038, "episode_idx": 53, "frame_idx": 13, "global_frame_idx": 14038, "task_index": 10}, {"db_idx": 14039, "episode_idx": 53, "frame_idx": 14, 
"global_frame_idx": 14039, "task_index": 10}, {"db_idx": 14040, "episode_idx": 53, "frame_idx": 15, "global_frame_idx": 14040, "task_index": 10}, {"db_idx": 14041, "episode_idx": 53, "frame_idx": 16, "global_frame_idx": 14041, "task_index": 10}, {"db_idx": 14042, "episode_idx": 53, "frame_idx": 17, "global_frame_idx": 14042, "task_index": 10}, {"db_idx": 14043, "episode_idx": 53, "frame_idx": 18, "global_frame_idx": 14043, "task_index": 10}, {"db_idx": 14044, "episode_idx": 53, "frame_idx": 19, "global_frame_idx": 14044, "task_index": 10}, {"db_idx": 14045, "episode_idx": 53, "frame_idx": 20, "global_frame_idx": 14045, "task_index": 10}, {"db_idx": 14046, "episode_idx": 53, "frame_idx": 21, "global_frame_idx": 14046, "task_index": 10}, {"db_idx": 14047, "episode_idx": 53, "frame_idx": 22, "global_frame_idx": 14047, "task_index": 10}, {"db_idx": 14048, "episode_idx": 53, "frame_idx": 23, "global_frame_idx": 14048, "task_index": 10}, {"db_idx": 14049, "episode_idx": 53, "frame_idx": 24, "global_frame_idx": 14049, "task_index": 10}, {"db_idx": 14050, "episode_idx": 53, "frame_idx": 25, "global_frame_idx": 14050, "task_index": 10}, {"db_idx": 14051, "episode_idx": 53, "frame_idx": 26, "global_frame_idx": 14051, "task_index": 10}, {"db_idx": 14052, "episode_idx": 53, "frame_idx": 27, "global_frame_idx": 14052, "task_index": 10}, {"db_idx": 14053, "episode_idx": 53, "frame_idx": 28, "global_frame_idx": 14053, "task_index": 10}, {"db_idx": 14054, "episode_idx": 53, "frame_idx": 29, "global_frame_idx": 14054, "task_index": 10}, {"db_idx": 14055, "episode_idx": 53, "frame_idx": 30, "global_frame_idx": 14055, "task_index": 10}, {"db_idx": 14056, "episode_idx": 53, "frame_idx": 31, "global_frame_idx": 14056, "task_index": 10}, {"db_idx": 14057, "episode_idx": 53, "frame_idx": 32, "global_frame_idx": 14057, "task_index": 10}, {"db_idx": 14058, "episode_idx": 53, "frame_idx": 33, "global_frame_idx": 14058, "task_index": 10}, {"db_idx": 14059, "episode_idx": 53, "frame_idx": 34, 
"global_frame_idx": 14059, "task_index": 10}, {"db_idx": 14060, "episode_idx": 53, "frame_idx": 35, "global_frame_idx": 14060, "task_index": 10}, {"db_idx": 14061, "episode_idx": 53, "frame_idx": 36, "global_frame_idx": 14061, "task_index": 10}, {"db_idx": 14062, "episode_idx": 53, "frame_idx": 37, "global_frame_idx": 14062, "task_index": 10}, {"db_idx": 14063, "episode_idx": 53, "frame_idx": 38, "global_frame_idx": 14063, "task_index": 10}, {"db_idx": 14064, "episode_idx": 53, "frame_idx": 39, "global_frame_idx": 14064, "task_index": 10}, {"db_idx": 14065, "episode_idx": 53, "frame_idx": 40, "global_frame_idx": 14065, "task_index": 10}, {"db_idx": 14066, "episode_idx": 53, "frame_idx": 41, "global_frame_idx": 14066, "task_index": 10}, {"db_idx": 14067, "episode_idx": 53, "frame_idx": 42, "global_frame_idx": 14067, "task_index": 10}, {"db_idx": 14068, "episode_idx": 53, "frame_idx": 43, "global_frame_idx": 14068, "task_index": 10}, {"db_idx": 14069, "episode_idx": 53, "frame_idx": 44, "global_frame_idx": 14069, "task_index": 10}, {"db_idx": 14070, "episode_idx": 53, "frame_idx": 45, "global_frame_idx": 14070, "task_index": 10}, {"db_idx": 14071, "episode_idx": 53, "frame_idx": 46, "global_frame_idx": 14071, "task_index": 10}, {"db_idx": 14072, "episode_idx": 53, "frame_idx": 47, "global_frame_idx": 14072, "task_index": 10}, {"db_idx": 14073, "episode_idx": 53, "frame_idx": 48, "global_frame_idx": 14073, "task_index": 10}, {"db_idx": 14074, "episode_idx": 53, "frame_idx": 49, "global_frame_idx": 14074, "task_index": 10}, {"db_idx": 14075, "episode_idx": 53, "frame_idx": 50, "global_frame_idx": 14075, "task_index": 10}, {"db_idx": 14076, "episode_idx": 53, "frame_idx": 51, "global_frame_idx": 14076, "task_index": 10}, {"db_idx": 14077, "episode_idx": 53, "frame_idx": 52, "global_frame_idx": 14077, "task_index": 10}, {"db_idx": 14078, "episode_idx": 53, "frame_idx": 53, "global_frame_idx": 14078, "task_index": 10}, {"db_idx": 14079, "episode_idx": 53, "frame_idx": 54, 
"global_frame_idx": 14079, "task_index": 10}, {"db_idx": 14080, "episode_idx": 53, "frame_idx": 55, "global_frame_idx": 14080, "task_index": 10}, {"db_idx": 14081, "episode_idx": 53, "frame_idx": 56, "global_frame_idx": 14081, "task_index": 10}, {"db_idx": 14082, "episode_idx": 53, "frame_idx": 57, "global_frame_idx": 14082, "task_index": 10}, {"db_idx": 14083, "episode_idx": 53, "frame_idx": 58, "global_frame_idx": 14083, "task_index": 10}, {"db_idx": 14084, "episode_idx": 53, "frame_idx": 59, "global_frame_idx": 14084, "task_index": 10}, {"db_idx": 14085, "episode_idx": 53, "frame_idx": 60, "global_frame_idx": 14085, "task_index": 10}, {"db_idx": 14086, "episode_idx": 53, "frame_idx": 61, "global_frame_idx": 14086, "task_index": 10}, {"db_idx": 14087, "episode_idx": 53, "frame_idx": 62, "global_frame_idx": 14087, "task_index": 10}, {"db_idx": 14088, "episode_idx": 53, "frame_idx": 63, "global_frame_idx": 14088, "task_index": 10}, {"db_idx": 14089, "episode_idx": 53, "frame_idx": 64, "global_frame_idx": 14089, "task_index": 10}, {"db_idx": 14090, "episode_idx": 53, "frame_idx": 65, "global_frame_idx": 14090, "task_index": 10}, {"db_idx": 14091, "episode_idx": 53, "frame_idx": 66, "global_frame_idx": 14091, "task_index": 10}, {"db_idx": 14092, "episode_idx": 53, "frame_idx": 67, "global_frame_idx": 14092, "task_index": 10}, {"db_idx": 14093, "episode_idx": 53, "frame_idx": 68, "global_frame_idx": 14093, "task_index": 10}, {"db_idx": 14094, "episode_idx": 53, "frame_idx": 69, "global_frame_idx": 14094, "task_index": 10}, {"db_idx": 14095, "episode_idx": 53, "frame_idx": 70, "global_frame_idx": 14095, "task_index": 10}, {"db_idx": 14096, "episode_idx": 53, "frame_idx": 71, "global_frame_idx": 14096, "task_index": 10}, {"db_idx": 14097, "episode_idx": 53, "frame_idx": 72, "global_frame_idx": 14097, "task_index": 10}, {"db_idx": 14098, "episode_idx": 53, "frame_idx": 73, "global_frame_idx": 14098, "task_index": 10}, {"db_idx": 14099, "episode_idx": 53, "frame_idx": 74, 
"global_frame_idx": 14099, "task_index": 10}, {"db_idx": 14100, "episode_idx": 53, "frame_idx": 75, "global_frame_idx": 14100, "task_index": 10}, {"db_idx": 14101, "episode_idx": 53, "frame_idx": 76, "global_frame_idx": 14101, "task_index": 10}, {"db_idx": 14102, "episode_idx": 53, "frame_idx": 77, "global_frame_idx": 14102, "task_index": 10}, {"db_idx": 14103, "episode_idx": 53, "frame_idx": 78, "global_frame_idx": 14103, "task_index": 10}, {"db_idx": 14104, "episode_idx": 53, "frame_idx": 79, "global_frame_idx": 14104, "task_index": 10}, {"db_idx": 14105, "episode_idx": 53, "frame_idx": 80, "global_frame_idx": 14105, "task_index": 10}, {"db_idx": 14106, "episode_idx": 53, "frame_idx": 81, "global_frame_idx": 14106, "task_index": 10}, {"db_idx": 14107, "episode_idx": 53, "frame_idx": 82, "global_frame_idx": 14107, "task_index": 10}, {"db_idx": 14108, "episode_idx": 53, "frame_idx": 83, "global_frame_idx": 14108, "task_index": 10}, {"db_idx": 14109, "episode_idx": 53, "frame_idx": 84, "global_frame_idx": 14109, "task_index": 10}, {"db_idx": 14110, "episode_idx": 53, "frame_idx": 85, "global_frame_idx": 14110, "task_index": 10}, {"db_idx": 14111, "episode_idx": 53, "frame_idx": 86, "global_frame_idx": 14111, "task_index": 10}, {"db_idx": 14112, "episode_idx": 53, "frame_idx": 87, "global_frame_idx": 14112, "task_index": 10}, {"db_idx": 14113, "episode_idx": 53, "frame_idx": 88, "global_frame_idx": 14113, "task_index": 10}, {"db_idx": 14114, "episode_idx": 53, "frame_idx": 89, "global_frame_idx": 14114, "task_index": 10}, {"db_idx": 14115, "episode_idx": 53, "frame_idx": 90, "global_frame_idx": 14115, "task_index": 10}, {"db_idx": 14116, "episode_idx": 53, "frame_idx": 91, "global_frame_idx": 14116, "task_index": 10}, {"db_idx": 14117, "episode_idx": 53, "frame_idx": 92, "global_frame_idx": 14117, "task_index": 10}, {"db_idx": 14118, "episode_idx": 53, "frame_idx": 93, "global_frame_idx": 14118, "task_index": 10}, {"db_idx": 14119, "episode_idx": 53, "frame_idx": 94, 
"global_frame_idx": 14119, "task_index": 10}, {"db_idx": 14120, "episode_idx": 53, "frame_idx": 95, "global_frame_idx": 14120, "task_index": 10}, {"db_idx": 14121, "episode_idx": 53, "frame_idx": 96, "global_frame_idx": 14121, "task_index": 10}, {"db_idx": 14122, "episode_idx": 53, "frame_idx": 97, "global_frame_idx": 14122, "task_index": 10}, {"db_idx": 14123, "episode_idx": 53, "frame_idx": 98, "global_frame_idx": 14123, "task_index": 10}, {"db_idx": 14124, "episode_idx": 53, "frame_idx": 99, "global_frame_idx": 14124, "task_index": 10}, {"db_idx": 14125, "episode_idx": 53, "frame_idx": 100, "global_frame_idx": 14125, "task_index": 10}, {"db_idx": 14126, "episode_idx": 53, "frame_idx": 101, "global_frame_idx": 14126, "task_index": 10}, {"db_idx": 14127, "episode_idx": 53, "frame_idx": 102, "global_frame_idx": 14127, "task_index": 10}, {"db_idx": 14128, "episode_idx": 53, "frame_idx": 103, "global_frame_idx": 14128, "task_index": 10}, {"db_idx": 14129, "episode_idx": 53, "frame_idx": 104, "global_frame_idx": 14129, "task_index": 10}, {"db_idx": 14130, "episode_idx": 53, "frame_idx": 105, "global_frame_idx": 14130, "task_index": 10}, {"db_idx": 14131, "episode_idx": 53, "frame_idx": 106, "global_frame_idx": 14131, "task_index": 10}, {"db_idx": 14132, "episode_idx": 53, "frame_idx": 107, "global_frame_idx": 14132, "task_index": 10}, {"db_idx": 14133, "episode_idx": 53, "frame_idx": 108, "global_frame_idx": 14133, "task_index": 10}, {"db_idx": 14134, "episode_idx": 53, "frame_idx": 109, "global_frame_idx": 14134, "task_index": 10}, {"db_idx": 14135, "episode_idx": 53, "frame_idx": 110, "global_frame_idx": 14135, "task_index": 10}, {"db_idx": 14136, "episode_idx": 53, "frame_idx": 111, "global_frame_idx": 14136, "task_index": 10}, {"db_idx": 14137, "episode_idx": 53, "frame_idx": 112, "global_frame_idx": 14137, "task_index": 10}, {"db_idx": 14138, "episode_idx": 53, "frame_idx": 113, "global_frame_idx": 14138, "task_index": 10}, {"db_idx": 14139, "episode_idx": 53, 
"frame_idx": 114, "global_frame_idx": 14139, "task_index": 10}, {"db_idx": 14140, "episode_idx": 53, "frame_idx": 115, "global_frame_idx": 14140, "task_index": 10}, {"db_idx": 14141, "episode_idx": 53, "frame_idx": 116, "global_frame_idx": 14141, "task_index": 10}, {"db_idx": 14142, "episode_idx": 53, "frame_idx": 117, "global_frame_idx": 14142, "task_index": 10}, {"db_idx": 14143, "episode_idx": 53, "frame_idx": 118, "global_frame_idx": 14143, "task_index": 10}, {"db_idx": 14144, "episode_idx": 53, "frame_idx": 119, "global_frame_idx": 14144, "task_index": 10}, {"db_idx": 14145, "episode_idx": 53, "frame_idx": 120, "global_frame_idx": 14145, "task_index": 10}, {"db_idx": 14146, "episode_idx": 53, "frame_idx": 121, "global_frame_idx": 14146, "task_index": 10}, {"db_idx": 14147, "episode_idx": 53, "frame_idx": 122, "global_frame_idx": 14147, "task_index": 10}, {"db_idx": 14148, "episode_idx": 53, "frame_idx": 123, "global_frame_idx": 14148, "task_index": 10}, {"db_idx": 14149, "episode_idx": 53, "frame_idx": 124, "global_frame_idx": 14149, "task_index": 10}, {"db_idx": 14150, "episode_idx": 53, "frame_idx": 125, "global_frame_idx": 14150, "task_index": 10}, {"db_idx": 14151, "episode_idx": 53, "frame_idx": 126, "global_frame_idx": 14151, "task_index": 10}, {"db_idx": 14152, "episode_idx": 53, "frame_idx": 127, "global_frame_idx": 14152, "task_index": 10}, {"db_idx": 14153, "episode_idx": 54, "frame_idx": 0, "global_frame_idx": 14153, "task_index": 10}, {"db_idx": 14154, "episode_idx": 54, "frame_idx": 1, "global_frame_idx": 14154, "task_index": 10}, {"db_idx": 14155, "episode_idx": 54, "frame_idx": 2, "global_frame_idx": 14155, "task_index": 10}, {"db_idx": 14156, "episode_idx": 54, "frame_idx": 3, "global_frame_idx": 14156, "task_index": 10}, {"db_idx": 14157, "episode_idx": 54, "frame_idx": 4, "global_frame_idx": 14157, "task_index": 10}, {"db_idx": 14158, "episode_idx": 54, "frame_idx": 5, "global_frame_idx": 14158, "task_index": 10}, {"db_idx": 14159, 
"episode_idx": 54, "frame_idx": 6, "global_frame_idx": 14159, "task_index": 10}, {"db_idx": 14160, "episode_idx": 54, "frame_idx": 7, "global_frame_idx": 14160, "task_index": 10}, {"db_idx": 14161, "episode_idx": 54, "frame_idx": 8, "global_frame_idx": 14161, "task_index": 10}, {"db_idx": 14162, "episode_idx": 54, "frame_idx": 9, "global_frame_idx": 14162, "task_index": 10}, {"db_idx": 14163, "episode_idx": 54, "frame_idx": 10, "global_frame_idx": 14163, "task_index": 10}, {"db_idx": 14164, "episode_idx": 54, "frame_idx": 11, "global_frame_idx": 14164, "task_index": 10}, {"db_idx": 14165, "episode_idx": 54, "frame_idx": 12, "global_frame_idx": 14165, "task_index": 10}, {"db_idx": 14166, "episode_idx": 54, "frame_idx": 13, "global_frame_idx": 14166, "task_index": 10}, {"db_idx": 14167, "episode_idx": 54, "frame_idx": 14, "global_frame_idx": 14167, "task_index": 10}, {"db_idx": 14168, "episode_idx": 54, "frame_idx": 15, "global_frame_idx": 14168, "task_index": 10}, {"db_idx": 14169, "episode_idx": 54, "frame_idx": 16, "global_frame_idx": 14169, "task_index": 10}, {"db_idx": 14170, "episode_idx": 54, "frame_idx": 17, "global_frame_idx": 14170, "task_index": 10}, {"db_idx": 14171, "episode_idx": 54, "frame_idx": 18, "global_frame_idx": 14171, "task_index": 10}, {"db_idx": 14172, "episode_idx": 54, "frame_idx": 19, "global_frame_idx": 14172, "task_index": 10}, {"db_idx": 14173, "episode_idx": 54, "frame_idx": 20, "global_frame_idx": 14173, "task_index": 10}, {"db_idx": 14174, "episode_idx": 54, "frame_idx": 21, "global_frame_idx": 14174, "task_index": 10}, {"db_idx": 14175, "episode_idx": 54, "frame_idx": 22, "global_frame_idx": 14175, "task_index": 10}, {"db_idx": 14176, "episode_idx": 54, "frame_idx": 23, "global_frame_idx": 14176, "task_index": 10}, {"db_idx": 14177, "episode_idx": 54, "frame_idx": 24, "global_frame_idx": 14177, "task_index": 10}, {"db_idx": 14178, "episode_idx": 54, "frame_idx": 25, "global_frame_idx": 14178, "task_index": 10}, {"db_idx": 14179, 
"episode_idx": 54, "frame_idx": 26, "global_frame_idx": 14179, "task_index": 10}, {"db_idx": 14180, "episode_idx": 54, "frame_idx": 27, "global_frame_idx": 14180, "task_index": 10}, {"db_idx": 14181, "episode_idx": 54, "frame_idx": 28, "global_frame_idx": 14181, "task_index": 10}, {"db_idx": 14182, "episode_idx": 54, "frame_idx": 29, "global_frame_idx": 14182, "task_index": 10}, {"db_idx": 14183, "episode_idx": 54, "frame_idx": 30, "global_frame_idx": 14183, "task_index": 10}, {"db_idx": 14184, "episode_idx": 54, "frame_idx": 31, "global_frame_idx": 14184, "task_index": 10}, {"db_idx": 14185, "episode_idx": 54, "frame_idx": 32, "global_frame_idx": 14185, "task_index": 10}, {"db_idx": 14186, "episode_idx": 54, "frame_idx": 33, "global_frame_idx": 14186, "task_index": 10}, {"db_idx": 14187, "episode_idx": 54, "frame_idx": 34, "global_frame_idx": 14187, "task_index": 10}, {"db_idx": 14188, "episode_idx": 54, "frame_idx": 35, "global_frame_idx": 14188, "task_index": 10}, {"db_idx": 14189, "episode_idx": 54, "frame_idx": 36, "global_frame_idx": 14189, "task_index": 10}, {"db_idx": 14190, "episode_idx": 54, "frame_idx": 37, "global_frame_idx": 14190, "task_index": 10}, {"db_idx": 14191, "episode_idx": 54, "frame_idx": 38, "global_frame_idx": 14191, "task_index": 10}, {"db_idx": 14192, "episode_idx": 54, "frame_idx": 39, "global_frame_idx": 14192, "task_index": 10}, {"db_idx": 14193, "episode_idx": 54, "frame_idx": 40, "global_frame_idx": 14193, "task_index": 10}, {"db_idx": 14194, "episode_idx": 54, "frame_idx": 41, "global_frame_idx": 14194, "task_index": 10}, {"db_idx": 14195, "episode_idx": 54, "frame_idx": 42, "global_frame_idx": 14195, "task_index": 10}, {"db_idx": 14196, "episode_idx": 54, "frame_idx": 43, "global_frame_idx": 14196, "task_index": 10}, {"db_idx": 14197, "episode_idx": 54, "frame_idx": 44, "global_frame_idx": 14197, "task_index": 10}, {"db_idx": 14198, "episode_idx": 54, "frame_idx": 45, "global_frame_idx": 14198, "task_index": 10}, {"db_idx": 14199, 
"episode_idx": 54, "frame_idx": 46, "global_frame_idx": 14199, "task_index": 10}, {"db_idx": 14200, "episode_idx": 54, "frame_idx": 47, "global_frame_idx": 14200, "task_index": 10}, {"db_idx": 14201, "episode_idx": 54, "frame_idx": 48, "global_frame_idx": 14201, "task_index": 10}, {"db_idx": 14202, "episode_idx": 54, "frame_idx": 49, "global_frame_idx": 14202, "task_index": 10}, {"db_idx": 14203, "episode_idx": 54, "frame_idx": 50, "global_frame_idx": 14203, "task_index": 10}, {"db_idx": 14204, "episode_idx": 54, "frame_idx": 51, "global_frame_idx": 14204, "task_index": 10}, {"db_idx": 14205, "episode_idx": 54, "frame_idx": 52, "global_frame_idx": 14205, "task_index": 10}, {"db_idx": 14206, "episode_idx": 54, "frame_idx": 53, "global_frame_idx": 14206, "task_index": 10}, {"db_idx": 14207, "episode_idx": 54, "frame_idx": 54, "global_frame_idx": 14207, "task_index": 10}, {"db_idx": 14208, "episode_idx": 54, "frame_idx": 55, "global_frame_idx": 14208, "task_index": 10}, {"db_idx": 14209, "episode_idx": 54, "frame_idx": 56, "global_frame_idx": 14209, "task_index": 10}, {"db_idx": 14210, "episode_idx": 54, "frame_idx": 57, "global_frame_idx": 14210, "task_index": 10}, {"db_idx": 14211, "episode_idx": 54, "frame_idx": 58, "global_frame_idx": 14211, "task_index": 10}, {"db_idx": 14212, "episode_idx": 54, "frame_idx": 59, "global_frame_idx": 14212, "task_index": 10}, {"db_idx": 14213, "episode_idx": 54, "frame_idx": 60, "global_frame_idx": 14213, "task_index": 10}, {"db_idx": 14214, "episode_idx": 54, "frame_idx": 61, "global_frame_idx": 14214, "task_index": 10}, {"db_idx": 14215, "episode_idx": 54, "frame_idx": 62, "global_frame_idx": 14215, "task_index": 10}, {"db_idx": 14216, "episode_idx": 54, "frame_idx": 63, "global_frame_idx": 14216, "task_index": 10}, {"db_idx": 14217, "episode_idx": 54, "frame_idx": 64, "global_frame_idx": 14217, "task_index": 10}, {"db_idx": 14218, "episode_idx": 54, "frame_idx": 65, "global_frame_idx": 14218, "task_index": 10}, {"db_idx": 14219, 
"episode_idx": 54, "frame_idx": 66, "global_frame_idx": 14219, "task_index": 10}, {"db_idx": 14220, "episode_idx": 54, "frame_idx": 67, "global_frame_idx": 14220, "task_index": 10}, {"db_idx": 14221, "episode_idx": 54, "frame_idx": 68, "global_frame_idx": 14221, "task_index": 10}, {"db_idx": 14222, "episode_idx": 54, "frame_idx": 69, "global_frame_idx": 14222, "task_index": 10}, {"db_idx": 14223, "episode_idx": 54, "frame_idx": 70, "global_frame_idx": 14223, "task_index": 10}, {"db_idx": 14224, "episode_idx": 54, "frame_idx": 71, "global_frame_idx": 14224, "task_index": 10}, {"db_idx": 14225, "episode_idx": 54, "frame_idx": 72, "global_frame_idx": 14225, "task_index": 10}, {"db_idx": 14226, "episode_idx": 54, "frame_idx": 73, "global_frame_idx": 14226, "task_index": 10}, {"db_idx": 14227, "episode_idx": 54, "frame_idx": 74, "global_frame_idx": 14227, "task_index": 10}, {"db_idx": 14228, "episode_idx": 54, "frame_idx": 75, "global_frame_idx": 14228, "task_index": 10}, {"db_idx": 14229, "episode_idx": 54, "frame_idx": 76, "global_frame_idx": 14229, "task_index": 10}, {"db_idx": 14230, "episode_idx": 54, "frame_idx": 77, "global_frame_idx": 14230, "task_index": 10}, {"db_idx": 14231, "episode_idx": 54, "frame_idx": 78, "global_frame_idx": 14231, "task_index": 10}, {"db_idx": 14232, "episode_idx": 54, "frame_idx": 79, "global_frame_idx": 14232, "task_index": 10}, {"db_idx": 14233, "episode_idx": 54, "frame_idx": 80, "global_frame_idx": 14233, "task_index": 10}, {"db_idx": 14234, "episode_idx": 54, "frame_idx": 81, "global_frame_idx": 14234, "task_index": 10}, {"db_idx": 14235, "episode_idx": 54, "frame_idx": 82, "global_frame_idx": 14235, "task_index": 10}, {"db_idx": 14236, "episode_idx": 54, "frame_idx": 83, "global_frame_idx": 14236, "task_index": 10}, {"db_idx": 14237, "episode_idx": 54, "frame_idx": 84, "global_frame_idx": 14237, "task_index": 10}, {"db_idx": 14238, "episode_idx": 54, "frame_idx": 85, "global_frame_idx": 14238, "task_index": 10}, {"db_idx": 14239, 
"episode_idx": 54, "frame_idx": 86, "global_frame_idx": 14239, "task_index": 10}, {"db_idx": 14240, "episode_idx": 54, "frame_idx": 87, "global_frame_idx": 14240, "task_index": 10}, {"db_idx": 14241, "episode_idx": 54, "frame_idx": 88, "global_frame_idx": 14241, "task_index": 10}, {"db_idx": 14242, "episode_idx": 54, "frame_idx": 89, "global_frame_idx": 14242, "task_index": 10}, {"db_idx": 14243, "episode_idx": 54, "frame_idx": 90, "global_frame_idx": 14243, "task_index": 10}, {"db_idx": 14244, "episode_idx": 54, "frame_idx": 91, "global_frame_idx": 14244, "task_index": 10}, {"db_idx": 14245, "episode_idx": 54, "frame_idx": 92, "global_frame_idx": 14245, "task_index": 10}, {"db_idx": 14246, "episode_idx": 54, "frame_idx": 93, "global_frame_idx": 14246, "task_index": 10}, {"db_idx": 14247, "episode_idx": 54, "frame_idx": 94, "global_frame_idx": 14247, "task_index": 10}, {"db_idx": 14248, "episode_idx": 54, "frame_idx": 95, "global_frame_idx": 14248, "task_index": 10}, {"db_idx": 14249, "episode_idx": 54, "frame_idx": 96, "global_frame_idx": 14249, "task_index": 10}, {"db_idx": 14250, "episode_idx": 54, "frame_idx": 97, "global_frame_idx": 14250, "task_index": 10}, {"db_idx": 14251, "episode_idx": 54, "frame_idx": 98, "global_frame_idx": 14251, "task_index": 10}, {"db_idx": 14252, "episode_idx": 54, "frame_idx": 99, "global_frame_idx": 14252, "task_index": 10}, {"db_idx": 14253, "episode_idx": 54, "frame_idx": 100, "global_frame_idx": 14253, "task_index": 10}, {"db_idx": 14254, "episode_idx": 54, "frame_idx": 101, "global_frame_idx": 14254, "task_index": 10}, {"db_idx": 14255, "episode_idx": 54, "frame_idx": 102, "global_frame_idx": 14255, "task_index": 10}, {"db_idx": 14256, "episode_idx": 54, "frame_idx": 103, "global_frame_idx": 14256, "task_index": 10}, {"db_idx": 14257, "episode_idx": 54, "frame_idx": 104, "global_frame_idx": 14257, "task_index": 10}, {"db_idx": 14258, "episode_idx": 54, "frame_idx": 105, "global_frame_idx": 14258, "task_index": 10}, {"db_idx": 
14259, "episode_idx": 54, "frame_idx": 106, "global_frame_idx": 14259, "task_index": 10}, {"db_idx": 14260, "episode_idx": 54, "frame_idx": 107, "global_frame_idx": 14260, "task_index": 10}, {"db_idx": 14261, "episode_idx": 54, "frame_idx": 108, "global_frame_idx": 14261, "task_index": 10}, {"db_idx": 14262, "episode_idx": 54, "frame_idx": 109, "global_frame_idx": 14262, "task_index": 10}, {"db_idx": 14263, "episode_idx": 54, "frame_idx": 110, "global_frame_idx": 14263, "task_index": 10}, {"db_idx": 14264, "episode_idx": 54, "frame_idx": 111, "global_frame_idx": 14264, "task_index": 10}, {"db_idx": 14265, "episode_idx": 54, "frame_idx": 112, "global_frame_idx": 14265, "task_index": 10}, {"db_idx": 14266, "episode_idx": 54, "frame_idx": 113, "global_frame_idx": 14266, "task_index": 10}, {"db_idx": 14267, "episode_idx": 54, "frame_idx": 114, "global_frame_idx": 14267, "task_index": 10}, {"db_idx": 14268, "episode_idx": 54, "frame_idx": 115, "global_frame_idx": 14268, "task_index": 10}, {"db_idx": 14269, "episode_idx": 54, "frame_idx": 116, "global_frame_idx": 14269, "task_index": 10}, {"db_idx": 14270, "episode_idx": 54, "frame_idx": 117, "global_frame_idx": 14270, "task_index": 10}, {"db_idx": 14271, "episode_idx": 54, "frame_idx": 118, "global_frame_idx": 14271, "task_index": 10}, {"db_idx": 14272, "episode_idx": 54, "frame_idx": 119, "global_frame_idx": 14272, "task_index": 10}, {"db_idx": 14273, "episode_idx": 54, "frame_idx": 120, "global_frame_idx": 14273, "task_index": 10}, {"db_idx": 14274, "episode_idx": 54, "frame_idx": 121, "global_frame_idx": 14274, "task_index": 10}, {"db_idx": 14275, "episode_idx": 54, "frame_idx": 122, "global_frame_idx": 14275, "task_index": 10}, {"db_idx": 14276, "episode_idx": 54, "frame_idx": 123, "global_frame_idx": 14276, "task_index": 10}, {"db_idx": 14277, "episode_idx": 54, "frame_idx": 124, "global_frame_idx": 14277, "task_index": 10}, {"db_idx": 14278, "episode_idx": 54, "frame_idx": 125, "global_frame_idx": 14278, 
"task_index": 10}, {"db_idx": 14279, "episode_idx": 54, "frame_idx": 126, "global_frame_idx": 14279, "task_index": 10}, {"db_idx": 14280, "episode_idx": 54, "frame_idx": 127, "global_frame_idx": 14280, "task_index": 10}, {"db_idx": 14281, "episode_idx": 54, "frame_idx": 128, "global_frame_idx": 14281, "task_index": 10}, {"db_idx": 14282, "episode_idx": 54, "frame_idx": 129, "global_frame_idx": 14282, "task_index": 10}, {"db_idx": 14283, "episode_idx": 54, "frame_idx": 130, "global_frame_idx": 14283, "task_index": 10}, {"db_idx": 14284, "episode_idx": 54, "frame_idx": 131, "global_frame_idx": 14284, "task_index": 10}, {"db_idx": 14285, "episode_idx": 54, "frame_idx": 132, "global_frame_idx": 14285, "task_index": 10}, {"db_idx": 14286, "episode_idx": 54, "frame_idx": 133, "global_frame_idx": 14286, "task_index": 10}, {"db_idx": 14287, "episode_idx": 54, "frame_idx": 134, "global_frame_idx": 14287, "task_index": 10}, {"db_idx": 14288, "episode_idx": 54, "frame_idx": 135, "global_frame_idx": 14288, "task_index": 10}, {"db_idx": 14289, "episode_idx": 54, "frame_idx": 136, "global_frame_idx": 14289, "task_index": 10}, {"db_idx": 14290, "episode_idx": 54, "frame_idx": 137, "global_frame_idx": 14290, "task_index": 10}, {"db_idx": 14291, "episode_idx": 54, "frame_idx": 138, "global_frame_idx": 14291, "task_index": 10}, {"db_idx": 14292, "episode_idx": 54, "frame_idx": 139, "global_frame_idx": 14292, "task_index": 10}, {"db_idx": 14293, "episode_idx": 54, "frame_idx": 140, "global_frame_idx": 14293, "task_index": 10}, {"db_idx": 14294, "episode_idx": 54, "frame_idx": 141, "global_frame_idx": 14294, "task_index": 10}, {"db_idx": 14295, "episode_idx": 54, "frame_idx": 142, "global_frame_idx": 14295, "task_index": 10}, {"db_idx": 14296, "episode_idx": 54, "frame_idx": 143, "global_frame_idx": 14296, "task_index": 10}, {"db_idx": 14297, "episode_idx": 54, "frame_idx": 144, "global_frame_idx": 14297, "task_index": 10}, {"db_idx": 14298, "episode_idx": 54, "frame_idx": 145, 
"global_frame_idx": 14298, "task_index": 10}, {"db_idx": 14299, "episode_idx": 54, "frame_idx": 146, "global_frame_idx": 14299, "task_index": 10}, {"db_idx": 14300, "episode_idx": 54, "frame_idx": 147, "global_frame_idx": 14300, "task_index": 10}, {"db_idx": 14301, "episode_idx": 54, "frame_idx": 148, "global_frame_idx": 14301, "task_index": 10}, {"db_idx": 14302, "episode_idx": 54, "frame_idx": 149, "global_frame_idx": 14302, "task_index": 10}, {"db_idx": 14303, "episode_idx": 54, "frame_idx": 150, "global_frame_idx": 14303, "task_index": 10}, {"db_idx": 14304, "episode_idx": 54, "frame_idx": 151, "global_frame_idx": 14304, "task_index": 10}, {"db_idx": 14305, "episode_idx": 54, "frame_idx": 152, "global_frame_idx": 14305, "task_index": 10}, {"db_idx": 14306, "episode_idx": 54, "frame_idx": 153, "global_frame_idx": 14306, "task_index": 10}, {"db_idx": 14307, "episode_idx": 54, "frame_idx": 154, "global_frame_idx": 14307, "task_index": 10}, {"db_idx": 14308, "episode_idx": 54, "frame_idx": 155, "global_frame_idx": 14308, "task_index": 10}, {"db_idx": 14309, "episode_idx": 54, "frame_idx": 156, "global_frame_idx": 14309, "task_index": 10}, {"db_idx": 14310, "episode_idx": 54, "frame_idx": 157, "global_frame_idx": 14310, "task_index": 10}, {"db_idx": 14311, "episode_idx": 54, "frame_idx": 158, "global_frame_idx": 14311, "task_index": 10}, {"db_idx": 14312, "episode_idx": 54, "frame_idx": 159, "global_frame_idx": 14312, "task_index": 10}, {"db_idx": 14313, "episode_idx": 54, "frame_idx": 160, "global_frame_idx": 14313, "task_index": 10}, {"db_idx": 14314, "episode_idx": 54, "frame_idx": 161, "global_frame_idx": 14314, "task_index": 10}, {"db_idx": 14315, "episode_idx": 54, "frame_idx": 162, "global_frame_idx": 14315, "task_index": 10}, {"db_idx": 14316, "episode_idx": 54, "frame_idx": 163, "global_frame_idx": 14316, "task_index": 10}, {"db_idx": 14317, "episode_idx": 54, "frame_idx": 164, "global_frame_idx": 14317, "task_index": 10}, {"db_idx": 14318, "episode_idx": 
54, "frame_idx": 165, "global_frame_idx": 14318, "task_index": 10}, {"db_idx": 14319, "episode_idx": 54, "frame_idx": 166, "global_frame_idx": 14319, "task_index": 10}, {"db_idx": 14320, "episode_idx": 54, "frame_idx": 167, "global_frame_idx": 14320, "task_index": 10}, {"db_idx": 14321, "episode_idx": 54, "frame_idx": 168, "global_frame_idx": 14321, "task_index": 10}, {"db_idx": 14322, "episode_idx": 54, "frame_idx": 169, "global_frame_idx": 14322, "task_index": 10}, {"db_idx": 14323, "episode_idx": 54, "frame_idx": 170, "global_frame_idx": 14323, "task_index": 10}, {"db_idx": 14324, "episode_idx": 54, "frame_idx": 171, "global_frame_idx": 14324, "task_index": 10}, {"db_idx": 14325, "episode_idx": 54, "frame_idx": 172, "global_frame_idx": 14325, "task_index": 10}, {"db_idx": 14326, "episode_idx": 54, "frame_idx": 173, "global_frame_idx": 14326, "task_index": 10}, {"db_idx": 14327, "episode_idx": 54, "frame_idx": 174, "global_frame_idx": 14327, "task_index": 10}, {"db_idx": 14328, "episode_idx": 54, "frame_idx": 175, "global_frame_idx": 14328, "task_index": 10}, {"db_idx": 14329, "episode_idx": 54, "frame_idx": 176, "global_frame_idx": 14329, "task_index": 10}, {"db_idx": 14330, "episode_idx": 54, "frame_idx": 177, "global_frame_idx": 14330, "task_index": 10}, {"db_idx": 14331, "episode_idx": 54, "frame_idx": 178, "global_frame_idx": 14331, "task_index": 10}, {"db_idx": 14332, "episode_idx": 54, "frame_idx": 179, "global_frame_idx": 14332, "task_index": 10}, {"db_idx": 14333, "episode_idx": 54, "frame_idx": 180, "global_frame_idx": 14333, "task_index": 10}, {"db_idx": 14334, "episode_idx": 54, "frame_idx": 181, "global_frame_idx": 14334, "task_index": 10}, {"db_idx": 14335, "episode_idx": 54, "frame_idx": 182, "global_frame_idx": 14335, "task_index": 10}, {"db_idx": 14336, "episode_idx": 54, "frame_idx": 183, "global_frame_idx": 14336, "task_index": 10}, {"db_idx": 14337, "episode_idx": 54, "frame_idx": 184, "global_frame_idx": 14337, "task_index": 10}, {"db_idx": 
14338, "episode_idx": 54, "frame_idx": 185, "global_frame_idx": 14338, "task_index": 10}, {"db_idx": 14339, "episode_idx": 54, "frame_idx": 186, "global_frame_idx": 14339, "task_index": 10}, {"db_idx": 14340, "episode_idx": 54, "frame_idx": 187, "global_frame_idx": 14340, "task_index": 10}, {"db_idx": 14341, "episode_idx": 54, "frame_idx": 188, "global_frame_idx": 14341, "task_index": 10}, {"db_idx": 14342, "episode_idx": 54, "frame_idx": 189, "global_frame_idx": 14342, "task_index": 10}, {"db_idx": 14343, "episode_idx": 54, "frame_idx": 190, "global_frame_idx": 14343, "task_index": 10}, {"db_idx": 14344, "episode_idx": 54, "frame_idx": 191, "global_frame_idx": 14344, "task_index": 10}, {"db_idx": 14345, "episode_idx": 54, "frame_idx": 192, "global_frame_idx": 14345, "task_index": 10}, {"db_idx": 14346, "episode_idx": 54, "frame_idx": 193, "global_frame_idx": 14346, "task_index": 10}, {"db_idx": 14347, "episode_idx": 54, "frame_idx": 194, "global_frame_idx": 14347, "task_index": 10}, {"db_idx": 14348, "episode_idx": 54, "frame_idx": 195, "global_frame_idx": 14348, "task_index": 10}, {"db_idx": 14349, "episode_idx": 54, "frame_idx": 196, "global_frame_idx": 14349, "task_index": 10}, {"db_idx": 14350, "episode_idx": 54, "frame_idx": 197, "global_frame_idx": 14350, "task_index": 10}, {"db_idx": 14351, "episode_idx": 54, "frame_idx": 198, "global_frame_idx": 14351, "task_index": 10}, {"db_idx": 14352, "episode_idx": 54, "frame_idx": 199, "global_frame_idx": 14352, "task_index": 10}, {"db_idx": 14353, "episode_idx": 54, "frame_idx": 200, "global_frame_idx": 14353, "task_index": 10}, {"db_idx": 14354, "episode_idx": 54, "frame_idx": 201, "global_frame_idx": 14354, "task_index": 10}, {"db_idx": 14355, "episode_idx": 54, "frame_idx": 202, "global_frame_idx": 14355, "task_index": 10}, {"db_idx": 14356, "episode_idx": 54, "frame_idx": 203, "global_frame_idx": 14356, "task_index": 10}, {"db_idx": 14357, "episode_idx": 54, "frame_idx": 204, "global_frame_idx": 14357, 
"task_index": 10}, {"db_idx": 14358, "episode_idx": 54, "frame_idx": 205, "global_frame_idx": 14358, "task_index": 10}, {"db_idx": 14359, "episode_idx": 54, "frame_idx": 206, "global_frame_idx": 14359, "task_index": 10}, {"db_idx": 14360, "episode_idx": 54, "frame_idx": 207, "global_frame_idx": 14360, "task_index": 10}, {"db_idx": 14361, "episode_idx": 54, "frame_idx": 208, "global_frame_idx": 14361, "task_index": 10}, {"db_idx": 14362, "episode_idx": 54, "frame_idx": 209, "global_frame_idx": 14362, "task_index": 10}, {"db_idx": 14363, "episode_idx": 54, "frame_idx": 210, "global_frame_idx": 14363, "task_index": 10}, {"db_idx": 14364, "episode_idx": 54, "frame_idx": 211, "global_frame_idx": 14364, "task_index": 10}, {"db_idx": 14365, "episode_idx": 54, "frame_idx": 212, "global_frame_idx": 14365, "task_index": 10}, {"db_idx": 14366, "episode_idx": 54, "frame_idx": 213, "global_frame_idx": 14366, "task_index": 10}, {"db_idx": 14367, "episode_idx": 54, "frame_idx": 214, "global_frame_idx": 14367, "task_index": 10}, {"db_idx": 14368, "episode_idx": 54, "frame_idx": 215, "global_frame_idx": 14368, "task_index": 10}, {"db_idx": 14369, "episode_idx": 55, "frame_idx": 0, "global_frame_idx": 14369, "task_index": 11}, {"db_idx": 14370, "episode_idx": 55, "frame_idx": 1, "global_frame_idx": 14370, "task_index": 11}, {"db_idx": 14371, "episode_idx": 55, "frame_idx": 2, "global_frame_idx": 14371, "task_index": 11}, {"db_idx": 14372, "episode_idx": 55, "frame_idx": 3, "global_frame_idx": 14372, "task_index": 11}, {"db_idx": 14373, "episode_idx": 55, "frame_idx": 4, "global_frame_idx": 14373, "task_index": 11}, {"db_idx": 14374, "episode_idx": 55, "frame_idx": 5, "global_frame_idx": 14374, "task_index": 11}, {"db_idx": 14375, "episode_idx": 55, "frame_idx": 6, "global_frame_idx": 14375, "task_index": 11}, {"db_idx": 14376, "episode_idx": 55, "frame_idx": 7, "global_frame_idx": 14376, "task_index": 11}, {"db_idx": 14377, "episode_idx": 55, "frame_idx": 8, "global_frame_idx": 
14377, "task_index": 11}, {"db_idx": 14378, "episode_idx": 55, "frame_idx": 9, "global_frame_idx": 14378, "task_index": 11}, {"db_idx": 14379, "episode_idx": 55, "frame_idx": 10, "global_frame_idx": 14379, "task_index": 11}, {"db_idx": 14380, "episode_idx": 55, "frame_idx": 11, "global_frame_idx": 14380, "task_index": 11}, {"db_idx": 14381, "episode_idx": 55, "frame_idx": 12, "global_frame_idx": 14381, "task_index": 11}, {"db_idx": 14382, "episode_idx": 55, "frame_idx": 13, "global_frame_idx": 14382, "task_index": 11}, {"db_idx": 14383, "episode_idx": 55, "frame_idx": 14, "global_frame_idx": 14383, "task_index": 11}, {"db_idx": 14384, "episode_idx": 55, "frame_idx": 15, "global_frame_idx": 14384, "task_index": 11}, {"db_idx": 14385, "episode_idx": 55, "frame_idx": 16, "global_frame_idx": 14385, "task_index": 11}, {"db_idx": 14386, "episode_idx": 55, "frame_idx": 17, "global_frame_idx": 14386, "task_index": 11}, {"db_idx": 14387, "episode_idx": 55, "frame_idx": 18, "global_frame_idx": 14387, "task_index": 11}, {"db_idx": 14388, "episode_idx": 55, "frame_idx": 19, "global_frame_idx": 14388, "task_index": 11}, {"db_idx": 14389, "episode_idx": 55, "frame_idx": 20, "global_frame_idx": 14389, "task_index": 11}, {"db_idx": 14390, "episode_idx": 55, "frame_idx": 21, "global_frame_idx": 14390, "task_index": 11}, {"db_idx": 14391, "episode_idx": 55, "frame_idx": 22, "global_frame_idx": 14391, "task_index": 11}, {"db_idx": 14392, "episode_idx": 55, "frame_idx": 23, "global_frame_idx": 14392, "task_index": 11}, {"db_idx": 14393, "episode_idx": 55, "frame_idx": 24, "global_frame_idx": 14393, "task_index": 11}, {"db_idx": 14394, "episode_idx": 55, "frame_idx": 25, "global_frame_idx": 14394, "task_index": 11}, {"db_idx": 14395, "episode_idx": 55, "frame_idx": 26, "global_frame_idx": 14395, "task_index": 11}, {"db_idx": 14396, "episode_idx": 55, "frame_idx": 27, "global_frame_idx": 14396, "task_index": 11}, {"db_idx": 14397, "episode_idx": 55, "frame_idx": 28, "global_frame_idx": 
14397, "task_index": 11}, {"db_idx": 14398, "episode_idx": 55, "frame_idx": 29, "global_frame_idx": 14398, "task_index": 11}, {"db_idx": 14399, "episode_idx": 55, "frame_idx": 30, "global_frame_idx": 14399, "task_index": 11}, {"db_idx": 14400, "episode_idx": 55, "frame_idx": 31, "global_frame_idx": 14400, "task_index": 11}, {"db_idx": 14401, "episode_idx": 55, "frame_idx": 32, "global_frame_idx": 14401, "task_index": 11}, {"db_idx": 14402, "episode_idx": 55, "frame_idx": 33, "global_frame_idx": 14402, "task_index": 11}, {"db_idx": 14403, "episode_idx": 55, "frame_idx": 34, "global_frame_idx": 14403, "task_index": 11}, {"db_idx": 14404, "episode_idx": 55, "frame_idx": 35, "global_frame_idx": 14404, "task_index": 11}, {"db_idx": 14405, "episode_idx": 55, "frame_idx": 36, "global_frame_idx": 14405, "task_index": 11}, {"db_idx": 14406, "episode_idx": 55, "frame_idx": 37, "global_frame_idx": 14406, "task_index": 11}, {"db_idx": 14407, "episode_idx": 55, "frame_idx": 38, "global_frame_idx": 14407, "task_index": 11}, {"db_idx": 14408, "episode_idx": 55, "frame_idx": 39, "global_frame_idx": 14408, "task_index": 11}, {"db_idx": 14409, "episode_idx": 55, "frame_idx": 40, "global_frame_idx": 14409, "task_index": 11}, {"db_idx": 14410, "episode_idx": 55, "frame_idx": 41, "global_frame_idx": 14410, "task_index": 11}, {"db_idx": 14411, "episode_idx": 55, "frame_idx": 42, "global_frame_idx": 14411, "task_index": 11}, {"db_idx": 14412, "episode_idx": 55, "frame_idx": 43, "global_frame_idx": 14412, "task_index": 11}, {"db_idx": 14413, "episode_idx": 55, "frame_idx": 44, "global_frame_idx": 14413, "task_index": 11}, {"db_idx": 14414, "episode_idx": 55, "frame_idx": 45, "global_frame_idx": 14414, "task_index": 11}, {"db_idx": 14415, "episode_idx": 55, "frame_idx": 46, "global_frame_idx": 14415, "task_index": 11}, {"db_idx": 14416, "episode_idx": 55, "frame_idx": 47, "global_frame_idx": 14416, "task_index": 11}, {"db_idx": 14417, "episode_idx": 55, "frame_idx": 48, "global_frame_idx": 
14417, "task_index": 11}, {"db_idx": 14418, "episode_idx": 55, "frame_idx": 49, "global_frame_idx": 14418, "task_index": 11}, {"db_idx": 14419, "episode_idx": 55, "frame_idx": 50, "global_frame_idx": 14419, "task_index": 11}, {"db_idx": 14420, "episode_idx": 55, "frame_idx": 51, "global_frame_idx": 14420, "task_index": 11}, {"db_idx": 14421, "episode_idx": 55, "frame_idx": 52, "global_frame_idx": 14421, "task_index": 11}, {"db_idx": 14422, "episode_idx": 55, "frame_idx": 53, "global_frame_idx": 14422, "task_index": 11}, {"db_idx": 14423, "episode_idx": 55, "frame_idx": 54, "global_frame_idx": 14423, "task_index": 11}, {"db_idx": 14424, "episode_idx": 55, "frame_idx": 55, "global_frame_idx": 14424, "task_index": 11}, {"db_idx": 14425, "episode_idx": 55, "frame_idx": 56, "global_frame_idx": 14425, "task_index": 11}, {"db_idx": 14426, "episode_idx": 55, "frame_idx": 57, "global_frame_idx": 14426, "task_index": 11}, {"db_idx": 14427, "episode_idx": 55, "frame_idx": 58, "global_frame_idx": 14427, "task_index": 11}, {"db_idx": 14428, "episode_idx": 55, "frame_idx": 59, "global_frame_idx": 14428, "task_index": 11}, {"db_idx": 14429, "episode_idx": 55, "frame_idx": 60, "global_frame_idx": 14429, "task_index": 11}, {"db_idx": 14430, "episode_idx": 55, "frame_idx": 61, "global_frame_idx": 14430, "task_index": 11}, {"db_idx": 14431, "episode_idx": 55, "frame_idx": 62, "global_frame_idx": 14431, "task_index": 11}, {"db_idx": 14432, "episode_idx": 55, "frame_idx": 63, "global_frame_idx": 14432, "task_index": 11}, {"db_idx": 14433, "episode_idx": 55, "frame_idx": 64, "global_frame_idx": 14433, "task_index": 11}, {"db_idx": 14434, "episode_idx": 55, "frame_idx": 65, "global_frame_idx": 14434, "task_index": 11}, {"db_idx": 14435, "episode_idx": 55, "frame_idx": 66, "global_frame_idx": 14435, "task_index": 11}, {"db_idx": 14436, "episode_idx": 55, "frame_idx": 67, "global_frame_idx": 14436, "task_index": 11}, {"db_idx": 14437, "episode_idx": 55, "frame_idx": 68, "global_frame_idx": 
14437, "task_index": 11}, {"db_idx": 14438, "episode_idx": 55, "frame_idx": 69, "global_frame_idx": 14438, "task_index": 11}, {"db_idx": 14439, "episode_idx": 55, "frame_idx": 70, "global_frame_idx": 14439, "task_index": 11}, {"db_idx": 14440, "episode_idx": 55, "frame_idx": 71, "global_frame_idx": 14440, "task_index": 11}, {"db_idx": 14441, "episode_idx": 55, "frame_idx": 72, "global_frame_idx": 14441, "task_index": 11}, {"db_idx": 14442, "episode_idx": 55, "frame_idx": 73, "global_frame_idx": 14442, "task_index": 11}, {"db_idx": 14443, "episode_idx": 55, "frame_idx": 74, "global_frame_idx": 14443, "task_index": 11}, {"db_idx": 14444, "episode_idx": 55, "frame_idx": 75, "global_frame_idx": 14444, "task_index": 11}, {"db_idx": 14445, "episode_idx": 55, "frame_idx": 76, "global_frame_idx": 14445, "task_index": 11}, {"db_idx": 14446, "episode_idx": 55, "frame_idx": 77, "global_frame_idx": 14446, "task_index": 11}, {"db_idx": 14447, "episode_idx": 55, "frame_idx": 78, "global_frame_idx": 14447, "task_index": 11}, {"db_idx": 14448, "episode_idx": 55, "frame_idx": 79, "global_frame_idx": 14448, "task_index": 11}, {"db_idx": 14449, "episode_idx": 55, "frame_idx": 80, "global_frame_idx": 14449, "task_index": 11}, {"db_idx": 14450, "episode_idx": 55, "frame_idx": 81, "global_frame_idx": 14450, "task_index": 11}, {"db_idx": 14451, "episode_idx": 55, "frame_idx": 82, "global_frame_idx": 14451, "task_index": 11}, {"db_idx": 14452, "episode_idx": 55, "frame_idx": 83, "global_frame_idx": 14452, "task_index": 11}, {"db_idx": 14453, "episode_idx": 55, "frame_idx": 84, "global_frame_idx": 14453, "task_index": 11}, {"db_idx": 14454, "episode_idx": 55, "frame_idx": 85, "global_frame_idx": 14454, "task_index": 11}, {"db_idx": 14455, "episode_idx": 55, "frame_idx": 86, "global_frame_idx": 14455, "task_index": 11}, {"db_idx": 14456, "episode_idx": 55, "frame_idx": 87, "global_frame_idx": 14456, "task_index": 11}, {"db_idx": 14457, "episode_idx": 55, "frame_idx": 88, "global_frame_idx": 
14457, "task_index": 11}, {"db_idx": 14458, "episode_idx": 55, "frame_idx": 89, "global_frame_idx": 14458, "task_index": 11}, {"db_idx": 14459, "episode_idx": 55, "frame_idx": 90, "global_frame_idx": 14459, "task_index": 11}, {"db_idx": 14460, "episode_idx": 55, "frame_idx": 91, "global_frame_idx": 14460, "task_index": 11}, {"db_idx": 14461, "episode_idx": 55, "frame_idx": 92, "global_frame_idx": 14461, "task_index": 11}, {"db_idx": 14462, "episode_idx": 55, "frame_idx": 93, "global_frame_idx": 14462, "task_index": 11}, {"db_idx": 14463, "episode_idx": 55, "frame_idx": 94, "global_frame_idx": 14463, "task_index": 11}, {"db_idx": 14464, "episode_idx": 55, "frame_idx": 95, "global_frame_idx": 14464, "task_index": 11}, {"db_idx": 14465, "episode_idx": 55, "frame_idx": 96, "global_frame_idx": 14465, "task_index": 11}, {"db_idx": 14466, "episode_idx": 55, "frame_idx": 97, "global_frame_idx": 14466, "task_index": 11}, {"db_idx": 14467, "episode_idx": 55, "frame_idx": 98, "global_frame_idx": 14467, "task_index": 11}, {"db_idx": 14468, "episode_idx": 55, "frame_idx": 99, "global_frame_idx": 14468, "task_index": 11}, {"db_idx": 14469, "episode_idx": 55, "frame_idx": 100, "global_frame_idx": 14469, "task_index": 11}, {"db_idx": 14470, "episode_idx": 55, "frame_idx": 101, "global_frame_idx": 14470, "task_index": 11}, {"db_idx": 14471, "episode_idx": 55, "frame_idx": 102, "global_frame_idx": 14471, "task_index": 11}, {"db_idx": 14472, "episode_idx": 55, "frame_idx": 103, "global_frame_idx": 14472, "task_index": 11}, {"db_idx": 14473, "episode_idx": 55, "frame_idx": 104, "global_frame_idx": 14473, "task_index": 11}, {"db_idx": 14474, "episode_idx": 55, "frame_idx": 105, "global_frame_idx": 14474, "task_index": 11}, {"db_idx": 14475, "episode_idx": 55, "frame_idx": 106, "global_frame_idx": 14475, "task_index": 11}, {"db_idx": 14476, "episode_idx": 55, "frame_idx": 107, "global_frame_idx": 14476, "task_index": 11}, {"db_idx": 14477, "episode_idx": 55, "frame_idx": 108, 
"global_frame_idx": 14477, "task_index": 11}, {"db_idx": 14478, "episode_idx": 55, "frame_idx": 109, "global_frame_idx": 14478, "task_index": 11}, {"db_idx": 14479, "episode_idx": 55, "frame_idx": 110, "global_frame_idx": 14479, "task_index": 11}, {"db_idx": 14480, "episode_idx": 55, "frame_idx": 111, "global_frame_idx": 14480, "task_index": 11}, {"db_idx": 14481, "episode_idx": 55, "frame_idx": 112, "global_frame_idx": 14481, "task_index": 11}, {"db_idx": 14482, "episode_idx": 55, "frame_idx": 113, "global_frame_idx": 14482, "task_index": 11}, {"db_idx": 14483, "episode_idx": 55, "frame_idx": 114, "global_frame_idx": 14483, "task_index": 11}, {"db_idx": 14484, "episode_idx": 55, "frame_idx": 115, "global_frame_idx": 14484, "task_index": 11}, {"db_idx": 14485, "episode_idx": 55, "frame_idx": 116, "global_frame_idx": 14485, "task_index": 11}, {"db_idx": 14486, "episode_idx": 55, "frame_idx": 117, "global_frame_idx": 14486, "task_index": 11}, {"db_idx": 14487, "episode_idx": 55, "frame_idx": 118, "global_frame_idx": 14487, "task_index": 11}, {"db_idx": 14488, "episode_idx": 55, "frame_idx": 119, "global_frame_idx": 14488, "task_index": 11}, {"db_idx": 14489, "episode_idx": 55, "frame_idx": 120, "global_frame_idx": 14489, "task_index": 11}, {"db_idx": 14490, "episode_idx": 55, "frame_idx": 121, "global_frame_idx": 14490, "task_index": 11}, {"db_idx": 14491, "episode_idx": 55, "frame_idx": 122, "global_frame_idx": 14491, "task_index": 11}, {"db_idx": 14492, "episode_idx": 55, "frame_idx": 123, "global_frame_idx": 14492, "task_index": 11}, {"db_idx": 14493, "episode_idx": 55, "frame_idx": 124, "global_frame_idx": 14493, "task_index": 11}, {"db_idx": 14494, "episode_idx": 55, "frame_idx": 125, "global_frame_idx": 14494, "task_index": 11}, {"db_idx": 14495, "episode_idx": 55, "frame_idx": 126, "global_frame_idx": 14495, "task_index": 11}, {"db_idx": 14496, "episode_idx": 55, "frame_idx": 127, "global_frame_idx": 14496, "task_index": 11}, {"db_idx": 14497, "episode_idx": 
55, "frame_idx": 128, "global_frame_idx": 14497, "task_index": 11}, {"db_idx": 14498, "episode_idx": 55, "frame_idx": 129, "global_frame_idx": 14498, "task_index": 11}, {"db_idx": 14499, "episode_idx": 55, "frame_idx": 130, "global_frame_idx": 14499, "task_index": 11}, {"db_idx": 14500, "episode_idx": 55, "frame_idx": 131, "global_frame_idx": 14500, "task_index": 11}, {"db_idx": 14501, "episode_idx": 55, "frame_idx": 132, "global_frame_idx": 14501, "task_index": 11}, {"db_idx": 14502, "episode_idx": 55, "frame_idx": 133, "global_frame_idx": 14502, "task_index": 11}, {"db_idx": 14503, "episode_idx": 55, "frame_idx": 134, "global_frame_idx": 14503, "task_index": 11}, {"db_idx": 14504, "episode_idx": 55, "frame_idx": 135, "global_frame_idx": 14504, "task_index": 11}, {"db_idx": 14505, "episode_idx": 55, "frame_idx": 136, "global_frame_idx": 14505, "task_index": 11}, {"db_idx": 14506, "episode_idx": 55, "frame_idx": 137, "global_frame_idx": 14506, "task_index": 11}, {"db_idx": 14507, "episode_idx": 55, "frame_idx": 138, "global_frame_idx": 14507, "task_index": 11}, {"db_idx": 14508, "episode_idx": 55, "frame_idx": 139, "global_frame_idx": 14508, "task_index": 11}, {"db_idx": 14509, "episode_idx": 55, "frame_idx": 140, "global_frame_idx": 14509, "task_index": 11}, {"db_idx": 14510, "episode_idx": 55, "frame_idx": 141, "global_frame_idx": 14510, "task_index": 11}, {"db_idx": 14511, "episode_idx": 55, "frame_idx": 142, "global_frame_idx": 14511, "task_index": 11}, {"db_idx": 14512, "episode_idx": 55, "frame_idx": 143, "global_frame_idx": 14512, "task_index": 11}, {"db_idx": 14513, "episode_idx": 55, "frame_idx": 144, "global_frame_idx": 14513, "task_index": 11}, {"db_idx": 14514, "episode_idx": 55, "frame_idx": 145, "global_frame_idx": 14514, "task_index": 11}, {"db_idx": 14515, "episode_idx": 55, "frame_idx": 146, "global_frame_idx": 14515, "task_index": 11}, {"db_idx": 14516, "episode_idx": 55, "frame_idx": 147, "global_frame_idx": 14516, "task_index": 11}, {"db_idx": 
14517, "episode_idx": 55, "frame_idx": 148, "global_frame_idx": 14517, "task_index": 11}, {"db_idx": 14518, "episode_idx": 55, "frame_idx": 149, "global_frame_idx": 14518, "task_index": 11}, {"db_idx": 14519, "episode_idx": 56, "frame_idx": 0, "global_frame_idx": 14519, "task_index": 11}, {"db_idx": 14520, "episode_idx": 56, "frame_idx": 1, "global_frame_idx": 14520, "task_index": 11}, {"db_idx": 14521, "episode_idx": 56, "frame_idx": 2, "global_frame_idx": 14521, "task_index": 11}, {"db_idx": 14522, "episode_idx": 56, "frame_idx": 3, "global_frame_idx": 14522, "task_index": 11}, {"db_idx": 14523, "episode_idx": 56, "frame_idx": 4, "global_frame_idx": 14523, "task_index": 11}, {"db_idx": 14524, "episode_idx": 56, "frame_idx": 5, "global_frame_idx": 14524, "task_index": 11}, {"db_idx": 14525, "episode_idx": 56, "frame_idx": 6, "global_frame_idx": 14525, "task_index": 11}, {"db_idx": 14526, "episode_idx": 56, "frame_idx": 7, "global_frame_idx": 14526, "task_index": 11}, {"db_idx": 14527, "episode_idx": 56, "frame_idx": 8, "global_frame_idx": 14527, "task_index": 11}, {"db_idx": 14528, "episode_idx": 56, "frame_idx": 9, "global_frame_idx": 14528, "task_index": 11}, {"db_idx": 14529, "episode_idx": 56, "frame_idx": 10, "global_frame_idx": 14529, "task_index": 11}, {"db_idx": 14530, "episode_idx": 56, "frame_idx": 11, "global_frame_idx": 14530, "task_index": 11}, {"db_idx": 14531, "episode_idx": 56, "frame_idx": 12, "global_frame_idx": 14531, "task_index": 11}, {"db_idx": 14532, "episode_idx": 56, "frame_idx": 13, "global_frame_idx": 14532, "task_index": 11}, {"db_idx": 14533, "episode_idx": 56, "frame_idx": 14, "global_frame_idx": 14533, "task_index": 11}, {"db_idx": 14534, "episode_idx": 56, "frame_idx": 15, "global_frame_idx": 14534, "task_index": 11}, {"db_idx": 14535, "episode_idx": 56, "frame_idx": 16, "global_frame_idx": 14535, "task_index": 11}, {"db_idx": 14536, "episode_idx": 56, "frame_idx": 17, "global_frame_idx": 14536, "task_index": 11}, {"db_idx": 14537, 
"episode_idx": 56, "frame_idx": 18, "global_frame_idx": 14537, "task_index": 11}, {"db_idx": 14538, "episode_idx": 56, "frame_idx": 19, "global_frame_idx": 14538, "task_index": 11}, {"db_idx": 14539, "episode_idx": 56, "frame_idx": 20, "global_frame_idx": 14539, "task_index": 11}, {"db_idx": 14540, "episode_idx": 56, "frame_idx": 21, "global_frame_idx": 14540, "task_index": 11}, {"db_idx": 14541, "episode_idx": 56, "frame_idx": 22, "global_frame_idx": 14541, "task_index": 11}, {"db_idx": 14542, "episode_idx": 56, "frame_idx": 23, "global_frame_idx": 14542, "task_index": 11}, {"db_idx": 14543, "episode_idx": 56, "frame_idx": 24, "global_frame_idx": 14543, "task_index": 11}, {"db_idx": 14544, "episode_idx": 56, "frame_idx": 25, "global_frame_idx": 14544, "task_index": 11}, {"db_idx": 14545, "episode_idx": 56, "frame_idx": 26, "global_frame_idx": 14545, "task_index": 11}, {"db_idx": 14546, "episode_idx": 56, "frame_idx": 27, "global_frame_idx": 14546, "task_index": 11}, {"db_idx": 14547, "episode_idx": 56, "frame_idx": 28, "global_frame_idx": 14547, "task_index": 11}, {"db_idx": 14548, "episode_idx": 56, "frame_idx": 29, "global_frame_idx": 14548, "task_index": 11}, {"db_idx": 14549, "episode_idx": 56, "frame_idx": 30, "global_frame_idx": 14549, "task_index": 11}, {"db_idx": 14550, "episode_idx": 56, "frame_idx": 31, "global_frame_idx": 14550, "task_index": 11}, {"db_idx": 14551, "episode_idx": 56, "frame_idx": 32, "global_frame_idx": 14551, "task_index": 11}, {"db_idx": 14552, "episode_idx": 56, "frame_idx": 33, "global_frame_idx": 14552, "task_index": 11}, {"db_idx": 14553, "episode_idx": 56, "frame_idx": 34, "global_frame_idx": 14553, "task_index": 11}, {"db_idx": 14554, "episode_idx": 56, "frame_idx": 35, "global_frame_idx": 14554, "task_index": 11}, {"db_idx": 14555, "episode_idx": 56, "frame_idx": 36, "global_frame_idx": 14555, "task_index": 11}, {"db_idx": 14556, "episode_idx": 56, "frame_idx": 37, "global_frame_idx": 14556, "task_index": 11}, {"db_idx": 14557, 
"episode_idx": 56, "frame_idx": 38, "global_frame_idx": 14557, "task_index": 11}, {"db_idx": 14558, "episode_idx": 56, "frame_idx": 39, "global_frame_idx": 14558, "task_index": 11}, {"db_idx": 14559, "episode_idx": 56, "frame_idx": 40, "global_frame_idx": 14559, "task_index": 11}, {"db_idx": 14560, "episode_idx": 56, "frame_idx": 41, "global_frame_idx": 14560, "task_index": 11}, {"db_idx": 14561, "episode_idx": 56, "frame_idx": 42, "global_frame_idx": 14561, "task_index": 11}, {"db_idx": 14562, "episode_idx": 56, "frame_idx": 43, "global_frame_idx": 14562, "task_index": 11}, {"db_idx": 14563, "episode_idx": 56, "frame_idx": 44, "global_frame_idx": 14563, "task_index": 11}, {"db_idx": 14564, "episode_idx": 56, "frame_idx": 45, "global_frame_idx": 14564, "task_index": 11}, {"db_idx": 14565, "episode_idx": 56, "frame_idx": 46, "global_frame_idx": 14565, "task_index": 11}, {"db_idx": 14566, "episode_idx": 56, "frame_idx": 47, "global_frame_idx": 14566, "task_index": 11}, {"db_idx": 14567, "episode_idx": 56, "frame_idx": 48, "global_frame_idx": 14567, "task_index": 11}, {"db_idx": 14568, "episode_idx": 56, "frame_idx": 49, "global_frame_idx": 14568, "task_index": 11}, {"db_idx": 14569, "episode_idx": 56, "frame_idx": 50, "global_frame_idx": 14569, "task_index": 11}, {"db_idx": 14570, "episode_idx": 56, "frame_idx": 51, "global_frame_idx": 14570, "task_index": 11}, {"db_idx": 14571, "episode_idx": 56, "frame_idx": 52, "global_frame_idx": 14571, "task_index": 11}, {"db_idx": 14572, "episode_idx": 56, "frame_idx": 53, "global_frame_idx": 14572, "task_index": 11}, {"db_idx": 14573, "episode_idx": 56, "frame_idx": 54, "global_frame_idx": 14573, "task_index": 11}, {"db_idx": 14574, "episode_idx": 56, "frame_idx": 55, "global_frame_idx": 14574, "task_index": 11}, {"db_idx": 14575, "episode_idx": 56, "frame_idx": 56, "global_frame_idx": 14575, "task_index": 11}, {"db_idx": 14576, "episode_idx": 56, "frame_idx": 57, "global_frame_idx": 14576, "task_index": 11}, {"db_idx": 14577, 
"episode_idx": 56, "frame_idx": 58, "global_frame_idx": 14577, "task_index": 11}, {"db_idx": 14578, "episode_idx": 56, "frame_idx": 59, "global_frame_idx": 14578, "task_index": 11}, {"db_idx": 14579, "episode_idx": 56, "frame_idx": 60, "global_frame_idx": 14579, "task_index": 11}, {"db_idx": 14580, "episode_idx": 56, "frame_idx": 61, "global_frame_idx": 14580, "task_index": 11}, {"db_idx": 14581, "episode_idx": 56, "frame_idx": 62, "global_frame_idx": 14581, "task_index": 11}, {"db_idx": 14582, "episode_idx": 56, "frame_idx": 63, "global_frame_idx": 14582, "task_index": 11}, {"db_idx": 14583, "episode_idx": 56, "frame_idx": 64, "global_frame_idx": 14583, "task_index": 11}, {"db_idx": 14584, "episode_idx": 56, "frame_idx": 65, "global_frame_idx": 14584, "task_index": 11}, {"db_idx": 14585, "episode_idx": 56, "frame_idx": 66, "global_frame_idx": 14585, "task_index": 11}, {"db_idx": 14586, "episode_idx": 56, "frame_idx": 67, "global_frame_idx": 14586, "task_index": 11}, {"db_idx": 14587, "episode_idx": 56, "frame_idx": 68, "global_frame_idx": 14587, "task_index": 11}, {"db_idx": 14588, "episode_idx": 56, "frame_idx": 69, "global_frame_idx": 14588, "task_index": 11}, {"db_idx": 14589, "episode_idx": 56, "frame_idx": 70, "global_frame_idx": 14589, "task_index": 11}, {"db_idx": 14590, "episode_idx": 56, "frame_idx": 71, "global_frame_idx": 14590, "task_index": 11}, {"db_idx": 14591, "episode_idx": 56, "frame_idx": 72, "global_frame_idx": 14591, "task_index": 11}, {"db_idx": 14592, "episode_idx": 56, "frame_idx": 73, "global_frame_idx": 14592, "task_index": 11}, {"db_idx": 14593, "episode_idx": 56, "frame_idx": 74, "global_frame_idx": 14593, "task_index": 11}, {"db_idx": 14594, "episode_idx": 56, "frame_idx": 75, "global_frame_idx": 14594, "task_index": 11}, {"db_idx": 14595, "episode_idx": 56, "frame_idx": 76, "global_frame_idx": 14595, "task_index": 11}, {"db_idx": 14596, "episode_idx": 56, "frame_idx": 77, "global_frame_idx": 14596, "task_index": 11}, {"db_idx": 14597, 
"episode_idx": 56, "frame_idx": 78, "global_frame_idx": 14597, "task_index": 11}, {"db_idx": 14598, "episode_idx": 56, "frame_idx": 79, "global_frame_idx": 14598, "task_index": 11}, {"db_idx": 14599, "episode_idx": 56, "frame_idx": 80, "global_frame_idx": 14599, "task_index": 11}, {"db_idx": 14600, "episode_idx": 56, "frame_idx": 81, "global_frame_idx": 14600, "task_index": 11}, {"db_idx": 14601, "episode_idx": 56, "frame_idx": 82, "global_frame_idx": 14601, "task_index": 11}, {"db_idx": 14602, "episode_idx": 56, "frame_idx": 83, "global_frame_idx": 14602, "task_index": 11}, {"db_idx": 14603, "episode_idx": 56, "frame_idx": 84, "global_frame_idx": 14603, "task_index": 11}, {"db_idx": 14604, "episode_idx": 56, "frame_idx": 85, "global_frame_idx": 14604, "task_index": 11}, {"db_idx": 14605, "episode_idx": 56, "frame_idx": 86, "global_frame_idx": 14605, "task_index": 11}, {"db_idx": 14606, "episode_idx": 56, "frame_idx": 87, "global_frame_idx": 14606, "task_index": 11}, {"db_idx": 14607, "episode_idx": 56, "frame_idx": 88, "global_frame_idx": 14607, "task_index": 11}, {"db_idx": 14608, "episode_idx": 56, "frame_idx": 89, "global_frame_idx": 14608, "task_index": 11}, {"db_idx": 14609, "episode_idx": 56, "frame_idx": 90, "global_frame_idx": 14609, "task_index": 11}, {"db_idx": 14610, "episode_idx": 56, "frame_idx": 91, "global_frame_idx": 14610, "task_index": 11}, {"db_idx": 14611, "episode_idx": 56, "frame_idx": 92, "global_frame_idx": 14611, "task_index": 11}, {"db_idx": 14612, "episode_idx": 56, "frame_idx": 93, "global_frame_idx": 14612, "task_index": 11}, {"db_idx": 14613, "episode_idx": 56, "frame_idx": 94, "global_frame_idx": 14613, "task_index": 11}, {"db_idx": 14614, "episode_idx": 56, "frame_idx": 95, "global_frame_idx": 14614, "task_index": 11}, {"db_idx": 14615, "episode_idx": 56, "frame_idx": 96, "global_frame_idx": 14615, "task_index": 11}, {"db_idx": 14616, "episode_idx": 56, "frame_idx": 97, "global_frame_idx": 14616, "task_index": 11}, {"db_idx": 14617, 
"episode_idx": 56, "frame_idx": 98, "global_frame_idx": 14617, "task_index": 11}, {"db_idx": 14618, "episode_idx": 56, "frame_idx": 99, "global_frame_idx": 14618, "task_index": 11}, {"db_idx": 14619, "episode_idx": 56, "frame_idx": 100, "global_frame_idx": 14619, "task_index": 11}, {"db_idx": 14620, "episode_idx": 56, "frame_idx": 101, "global_frame_idx": 14620, "task_index": 11}, {"db_idx": 14621, "episode_idx": 56, "frame_idx": 102, "global_frame_idx": 14621, "task_index": 11}, {"db_idx": 14622, "episode_idx": 56, "frame_idx": 103, "global_frame_idx": 14622, "task_index": 11}, {"db_idx": 14623, "episode_idx": 56, "frame_idx": 104, "global_frame_idx": 14623, "task_index": 11}, {"db_idx": 14624, "episode_idx": 56, "frame_idx": 105, "global_frame_idx": 14624, "task_index": 11}, {"db_idx": 14625, "episode_idx": 56, "frame_idx": 106, "global_frame_idx": 14625, "task_index": 11}, {"db_idx": 14626, "episode_idx": 56, "frame_idx": 107, "global_frame_idx": 14626, "task_index": 11}, {"db_idx": 14627, "episode_idx": 56, "frame_idx": 108, "global_frame_idx": 14627, "task_index": 11}, {"db_idx": 14628, "episode_idx": 56, "frame_idx": 109, "global_frame_idx": 14628, "task_index": 11}, {"db_idx": 14629, "episode_idx": 56, "frame_idx": 110, "global_frame_idx": 14629, "task_index": 11}, {"db_idx": 14630, "episode_idx": 56, "frame_idx": 111, "global_frame_idx": 14630, "task_index": 11}, {"db_idx": 14631, "episode_idx": 56, "frame_idx": 112, "global_frame_idx": 14631, "task_index": 11}, {"db_idx": 14632, "episode_idx": 56, "frame_idx": 113, "global_frame_idx": 14632, "task_index": 11}, {"db_idx": 14633, "episode_idx": 56, "frame_idx": 114, "global_frame_idx": 14633, "task_index": 11}, {"db_idx": 14634, "episode_idx": 56, "frame_idx": 115, "global_frame_idx": 14634, "task_index": 11}, {"db_idx": 14635, "episode_idx": 56, "frame_idx": 116, "global_frame_idx": 14635, "task_index": 11}, {"db_idx": 14636, "episode_idx": 56, "frame_idx": 117, "global_frame_idx": 14636, "task_index": 11}, 
{"db_idx": 14637, "episode_idx": 56, "frame_idx": 118, "global_frame_idx": 14637, "task_index": 11}, {"db_idx": 14638, "episode_idx": 56, "frame_idx": 119, "global_frame_idx": 14638, "task_index": 11}, {"db_idx": 14639, "episode_idx": 56, "frame_idx": 120, "global_frame_idx": 14639, "task_index": 11}, {"db_idx": 14640, "episode_idx": 56, "frame_idx": 121, "global_frame_idx": 14640, "task_index": 11}, {"db_idx": 14641, "episode_idx": 56, "frame_idx": 122, "global_frame_idx": 14641, "task_index": 11}, {"db_idx": 14642, "episode_idx": 56, "frame_idx": 123, "global_frame_idx": 14642, "task_index": 11}, {"db_idx": 14643, "episode_idx": 56, "frame_idx": 124, "global_frame_idx": 14643, "task_index": 11}, {"db_idx": 14644, "episode_idx": 56, "frame_idx": 125, "global_frame_idx": 14644, "task_index": 11}, {"db_idx": 14645, "episode_idx": 56, "frame_idx": 126, "global_frame_idx": 14645, "task_index": 11}, {"db_idx": 14646, "episode_idx": 56, "frame_idx": 127, "global_frame_idx": 14646, "task_index": 11}, {"db_idx": 14647, "episode_idx": 56, "frame_idx": 128, "global_frame_idx": 14647, "task_index": 11}, {"db_idx": 14648, "episode_idx": 56, "frame_idx": 129, "global_frame_idx": 14648, "task_index": 11}, {"db_idx": 14649, "episode_idx": 56, "frame_idx": 130, "global_frame_idx": 14649, "task_index": 11}, {"db_idx": 14650, "episode_idx": 56, "frame_idx": 131, "global_frame_idx": 14650, "task_index": 11}, {"db_idx": 14651, "episode_idx": 56, "frame_idx": 132, "global_frame_idx": 14651, "task_index": 11}, {"db_idx": 14652, "episode_idx": 56, "frame_idx": 133, "global_frame_idx": 14652, "task_index": 11}, {"db_idx": 14653, "episode_idx": 56, "frame_idx": 134, "global_frame_idx": 14653, "task_index": 11}, {"db_idx": 14654, "episode_idx": 56, "frame_idx": 135, "global_frame_idx": 14654, "task_index": 11}, {"db_idx": 14655, "episode_idx": 56, "frame_idx": 136, "global_frame_idx": 14655, "task_index": 11}, {"db_idx": 14656, "episode_idx": 56, "frame_idx": 137, "global_frame_idx": 
14656, "task_index": 11}, {"db_idx": 14657, "episode_idx": 56, "frame_idx": 138, "global_frame_idx": 14657, "task_index": 11}, {"db_idx": 14658, "episode_idx": 56, "frame_idx": 139, "global_frame_idx": 14658, "task_index": 11}, {"db_idx": 14659, "episode_idx": 56, "frame_idx": 140, "global_frame_idx": 14659, "task_index": 11}, {"db_idx": 14660, "episode_idx": 56, "frame_idx": 141, "global_frame_idx": 14660, "task_index": 11}, {"db_idx": 14661, "episode_idx": 56, "frame_idx": 142, "global_frame_idx": 14661, "task_index": 11}, {"db_idx": 14662, "episode_idx": 56, "frame_idx": 143, "global_frame_idx": 14662, "task_index": 11}, {"db_idx": 14663, "episode_idx": 56, "frame_idx": 144, "global_frame_idx": 14663, "task_index": 11}, {"db_idx": 14664, "episode_idx": 56, "frame_idx": 145, "global_frame_idx": 14664, "task_index": 11}, {"db_idx": 14665, "episode_idx": 56, "frame_idx": 146, "global_frame_idx": 14665, "task_index": 11}, {"db_idx": 14666, "episode_idx": 56, "frame_idx": 147, "global_frame_idx": 14666, "task_index": 11}, {"db_idx": 14667, "episode_idx": 56, "frame_idx": 148, "global_frame_idx": 14667, "task_index": 11}, {"db_idx": 14668, "episode_idx": 56, "frame_idx": 149, "global_frame_idx": 14668, "task_index": 11}, {"db_idx": 14669, "episode_idx": 56, "frame_idx": 150, "global_frame_idx": 14669, "task_index": 11}, {"db_idx": 14670, "episode_idx": 56, "frame_idx": 151, "global_frame_idx": 14670, "task_index": 11}, {"db_idx": 14671, "episode_idx": 56, "frame_idx": 152, "global_frame_idx": 14671, "task_index": 11}, {"db_idx": 14672, "episode_idx": 56, "frame_idx": 153, "global_frame_idx": 14672, "task_index": 11}, {"db_idx": 14673, "episode_idx": 56, "frame_idx": 154, "global_frame_idx": 14673, "task_index": 11}, {"db_idx": 14674, "episode_idx": 56, "frame_idx": 155, "global_frame_idx": 14674, "task_index": 11}, {"db_idx": 14675, "episode_idx": 56, "frame_idx": 156, "global_frame_idx": 14675, "task_index": 11}, {"db_idx": 14676, "episode_idx": 56, "frame_idx": 157, 
"global_frame_idx": 14676, "task_index": 11}, {"db_idx": 14677, "episode_idx": 56, "frame_idx": 158, "global_frame_idx": 14677, "task_index": 11}, {"db_idx": 14678, "episode_idx": 56, "frame_idx": 159, "global_frame_idx": 14678, "task_index": 11}, {"db_idx": 14679, "episode_idx": 56, "frame_idx": 160, "global_frame_idx": 14679, "task_index": 11}, {"db_idx": 14680, "episode_idx": 56, "frame_idx": 161, "global_frame_idx": 14680, "task_index": 11}, {"db_idx": 14681, "episode_idx": 56, "frame_idx": 162, "global_frame_idx": 14681, "task_index": 11}, {"db_idx": 14682, "episode_idx": 56, "frame_idx": 163, "global_frame_idx": 14682, "task_index": 11}, {"db_idx": 14683, "episode_idx": 56, "frame_idx": 164, "global_frame_idx": 14683, "task_index": 11}, {"db_idx": 14684, "episode_idx": 56, "frame_idx": 165, "global_frame_idx": 14684, "task_index": 11}, {"db_idx": 14685, "episode_idx": 56, "frame_idx": 166, "global_frame_idx": 14685, "task_index": 11}, {"db_idx": 14686, "episode_idx": 56, "frame_idx": 167, "global_frame_idx": 14686, "task_index": 11}, {"db_idx": 14687, "episode_idx": 56, "frame_idx": 168, "global_frame_idx": 14687, "task_index": 11}, {"db_idx": 14688, "episode_idx": 56, "frame_idx": 169, "global_frame_idx": 14688, "task_index": 11}, {"db_idx": 14689, "episode_idx": 56, "frame_idx": 170, "global_frame_idx": 14689, "task_index": 11}, {"db_idx": 14690, "episode_idx": 56, "frame_idx": 171, "global_frame_idx": 14690, "task_index": 11}, {"db_idx": 14691, "episode_idx": 56, "frame_idx": 172, "global_frame_idx": 14691, "task_index": 11}, {"db_idx": 14692, "episode_idx": 56, "frame_idx": 173, "global_frame_idx": 14692, "task_index": 11}, {"db_idx": 14693, "episode_idx": 56, "frame_idx": 174, "global_frame_idx": 14693, "task_index": 11}, {"db_idx": 14694, "episode_idx": 57, "frame_idx": 0, "global_frame_idx": 14694, "task_index": 11}, {"db_idx": 14695, "episode_idx": 57, "frame_idx": 1, "global_frame_idx": 14695, "task_index": 11}, {"db_idx": 14696, "episode_idx": 57, 
"frame_idx": 2, "global_frame_idx": 14696, "task_index": 11}, {"db_idx": 14697, "episode_idx": 57, "frame_idx": 3, "global_frame_idx": 14697, "task_index": 11}, {"db_idx": 14698, "episode_idx": 57, "frame_idx": 4, "global_frame_idx": 14698, "task_index": 11}, {"db_idx": 14699, "episode_idx": 57, "frame_idx": 5, "global_frame_idx": 14699, "task_index": 11}, {"db_idx": 14700, "episode_idx": 57, "frame_idx": 6, "global_frame_idx": 14700, "task_index": 11}, {"db_idx": 14701, "episode_idx": 57, "frame_idx": 7, "global_frame_idx": 14701, "task_index": 11}, {"db_idx": 14702, "episode_idx": 57, "frame_idx": 8, "global_frame_idx": 14702, "task_index": 11}, {"db_idx": 14703, "episode_idx": 57, "frame_idx": 9, "global_frame_idx": 14703, "task_index": 11}, {"db_idx": 14704, "episode_idx": 57, "frame_idx": 10, "global_frame_idx": 14704, "task_index": 11}, {"db_idx": 14705, "episode_idx": 57, "frame_idx": 11, "global_frame_idx": 14705, "task_index": 11}, {"db_idx": 14706, "episode_idx": 57, "frame_idx": 12, "global_frame_idx": 14706, "task_index": 11}, {"db_idx": 14707, "episode_idx": 57, "frame_idx": 13, "global_frame_idx": 14707, "task_index": 11}, {"db_idx": 14708, "episode_idx": 57, "frame_idx": 14, "global_frame_idx": 14708, "task_index": 11}, {"db_idx": 14709, "episode_idx": 57, "frame_idx": 15, "global_frame_idx": 14709, "task_index": 11}, {"db_idx": 14710, "episode_idx": 57, "frame_idx": 16, "global_frame_idx": 14710, "task_index": 11}, {"db_idx": 14711, "episode_idx": 57, "frame_idx": 17, "global_frame_idx": 14711, "task_index": 11}, {"db_idx": 14712, "episode_idx": 57, "frame_idx": 18, "global_frame_idx": 14712, "task_index": 11}, {"db_idx": 14713, "episode_idx": 57, "frame_idx": 19, "global_frame_idx": 14713, "task_index": 11}, {"db_idx": 14714, "episode_idx": 57, "frame_idx": 20, "global_frame_idx": 14714, "task_index": 11}, {"db_idx": 14715, "episode_idx": 57, "frame_idx": 21, "global_frame_idx": 14715, "task_index": 11}, {"db_idx": 14716, "episode_idx": 57, 
"frame_idx": 22, "global_frame_idx": 14716, "task_index": 11}, {"db_idx": 14717, "episode_idx": 57, "frame_idx": 23, "global_frame_idx": 14717, "task_index": 11}, {"db_idx": 14718, "episode_idx": 57, "frame_idx": 24, "global_frame_idx": 14718, "task_index": 11}, {"db_idx": 14719, "episode_idx": 57, "frame_idx": 25, "global_frame_idx": 14719, "task_index": 11}, {"db_idx": 14720, "episode_idx": 57, "frame_idx": 26, "global_frame_idx": 14720, "task_index": 11}, {"db_idx": 14721, "episode_idx": 57, "frame_idx": 27, "global_frame_idx": 14721, "task_index": 11}, {"db_idx": 14722, "episode_idx": 57, "frame_idx": 28, "global_frame_idx": 14722, "task_index": 11}, {"db_idx": 14723, "episode_idx": 57, "frame_idx": 29, "global_frame_idx": 14723, "task_index": 11}, {"db_idx": 14724, "episode_idx": 57, "frame_idx": 30, "global_frame_idx": 14724, "task_index": 11}, {"db_idx": 14725, "episode_idx": 57, "frame_idx": 31, "global_frame_idx": 14725, "task_index": 11}, {"db_idx": 14726, "episode_idx": 57, "frame_idx": 32, "global_frame_idx": 14726, "task_index": 11}, {"db_idx": 14727, "episode_idx": 57, "frame_idx": 33, "global_frame_idx": 14727, "task_index": 11}, {"db_idx": 14728, "episode_idx": 57, "frame_idx": 34, "global_frame_idx": 14728, "task_index": 11}, {"db_idx": 14729, "episode_idx": 57, "frame_idx": 35, "global_frame_idx": 14729, "task_index": 11}, {"db_idx": 14730, "episode_idx": 57, "frame_idx": 36, "global_frame_idx": 14730, "task_index": 11}, {"db_idx": 14731, "episode_idx": 57, "frame_idx": 37, "global_frame_idx": 14731, "task_index": 11}, {"db_idx": 14732, "episode_idx": 57, "frame_idx": 38, "global_frame_idx": 14732, "task_index": 11}, {"db_idx": 14733, "episode_idx": 57, "frame_idx": 39, "global_frame_idx": 14733, "task_index": 11}, {"db_idx": 14734, "episode_idx": 57, "frame_idx": 40, "global_frame_idx": 14734, "task_index": 11}, {"db_idx": 14735, "episode_idx": 57, "frame_idx": 41, "global_frame_idx": 14735, "task_index": 11}, {"db_idx": 14736, "episode_idx": 57, 
"frame_idx": 42, "global_frame_idx": 14736, "task_index": 11}, {"db_idx": 14737, "episode_idx": 57, "frame_idx": 43, "global_frame_idx": 14737, "task_index": 11}, {"db_idx": 14738, "episode_idx": 57, "frame_idx": 44, "global_frame_idx": 14738, "task_index": 11}, {"db_idx": 14739, "episode_idx": 57, "frame_idx": 45, "global_frame_idx": 14739, "task_index": 11}, {"db_idx": 14740, "episode_idx": 57, "frame_idx": 46, "global_frame_idx": 14740, "task_index": 11}, {"db_idx": 14741, "episode_idx": 57, "frame_idx": 47, "global_frame_idx": 14741, "task_index": 11}, {"db_idx": 14742, "episode_idx": 57, "frame_idx": 48, "global_frame_idx": 14742, "task_index": 11}, {"db_idx": 14743, "episode_idx": 57, "frame_idx": 49, "global_frame_idx": 14743, "task_index": 11}, {"db_idx": 14744, "episode_idx": 57, "frame_idx": 50, "global_frame_idx": 14744, "task_index": 11}, {"db_idx": 14745, "episode_idx": 57, "frame_idx": 51, "global_frame_idx": 14745, "task_index": 11}, {"db_idx": 14746, "episode_idx": 57, "frame_idx": 52, "global_frame_idx": 14746, "task_index": 11}, {"db_idx": 14747, "episode_idx": 57, "frame_idx": 53, "global_frame_idx": 14747, "task_index": 11}, {"db_idx": 14748, "episode_idx": 57, "frame_idx": 54, "global_frame_idx": 14748, "task_index": 11}, {"db_idx": 14749, "episode_idx": 57, "frame_idx": 55, "global_frame_idx": 14749, "task_index": 11}, {"db_idx": 14750, "episode_idx": 57, "frame_idx": 56, "global_frame_idx": 14750, "task_index": 11}, {"db_idx": 14751, "episode_idx": 57, "frame_idx": 57, "global_frame_idx": 14751, "task_index": 11}, {"db_idx": 14752, "episode_idx": 57, "frame_idx": 58, "global_frame_idx": 14752, "task_index": 11}, {"db_idx": 14753, "episode_idx": 57, "frame_idx": 59, "global_frame_idx": 14753, "task_index": 11}, {"db_idx": 14754, "episode_idx": 57, "frame_idx": 60, "global_frame_idx": 14754, "task_index": 11}, {"db_idx": 14755, "episode_idx": 57, "frame_idx": 61, "global_frame_idx": 14755, "task_index": 11}, {"db_idx": 14756, "episode_idx": 57, 
"frame_idx": 62, "global_frame_idx": 14756, "task_index": 11}, {"db_idx": 14757, "episode_idx": 57, "frame_idx": 63, "global_frame_idx": 14757, "task_index": 11}, {"db_idx": 14758, "episode_idx": 57, "frame_idx": 64, "global_frame_idx": 14758, "task_index": 11}, {"db_idx": 14759, "episode_idx": 57, "frame_idx": 65, "global_frame_idx": 14759, "task_index": 11}, {"db_idx": 14760, "episode_idx": 57, "frame_idx": 66, "global_frame_idx": 14760, "task_index": 11}, {"db_idx": 14761, "episode_idx": 57, "frame_idx": 67, "global_frame_idx": 14761, "task_index": 11}, {"db_idx": 14762, "episode_idx": 57, "frame_idx": 68, "global_frame_idx": 14762, "task_index": 11}, {"db_idx": 14763, "episode_idx": 57, "frame_idx": 69, "global_frame_idx": 14763, "task_index": 11}, {"db_idx": 14764, "episode_idx": 57, "frame_idx": 70, "global_frame_idx": 14764, "task_index": 11}, {"db_idx": 14765, "episode_idx": 57, "frame_idx": 71, "global_frame_idx": 14765, "task_index": 11}, {"db_idx": 14766, "episode_idx": 57, "frame_idx": 72, "global_frame_idx": 14766, "task_index": 11}, {"db_idx": 14767, "episode_idx": 57, "frame_idx": 73, "global_frame_idx": 14767, "task_index": 11}, {"db_idx": 14768, "episode_idx": 57, "frame_idx": 74, "global_frame_idx": 14768, "task_index": 11}, {"db_idx": 14769, "episode_idx": 57, "frame_idx": 75, "global_frame_idx": 14769, "task_index": 11}, {"db_idx": 14770, "episode_idx": 57, "frame_idx": 76, "global_frame_idx": 14770, "task_index": 11}, {"db_idx": 14771, "episode_idx": 57, "frame_idx": 77, "global_frame_idx": 14771, "task_index": 11}, {"db_idx": 14772, "episode_idx": 57, "frame_idx": 78, "global_frame_idx": 14772, "task_index": 11}, {"db_idx": 14773, "episode_idx": 57, "frame_idx": 79, "global_frame_idx": 14773, "task_index": 11}, {"db_idx": 14774, "episode_idx": 57, "frame_idx": 80, "global_frame_idx": 14774, "task_index": 11}, {"db_idx": 14775, "episode_idx": 57, "frame_idx": 81, "global_frame_idx": 14775, "task_index": 11}, {"db_idx": 14776, "episode_idx": 57, 
"frame_idx": 82, "global_frame_idx": 14776, "task_index": 11}, {"db_idx": 14777, "episode_idx": 57, "frame_idx": 83, "global_frame_idx": 14777, "task_index": 11}, {"db_idx": 14778, "episode_idx": 57, "frame_idx": 84, "global_frame_idx": 14778, "task_index": 11}, {"db_idx": 14779, "episode_idx": 57, "frame_idx": 85, "global_frame_idx": 14779, "task_index": 11}, {"db_idx": 14780, "episode_idx": 57, "frame_idx": 86, "global_frame_idx": 14780, "task_index": 11}, {"db_idx": 14781, "episode_idx": 57, "frame_idx": 87, "global_frame_idx": 14781, "task_index": 11}, {"db_idx": 14782, "episode_idx": 57, "frame_idx": 88, "global_frame_idx": 14782, "task_index": 11}, {"db_idx": 14783, "episode_idx": 57, "frame_idx": 89, "global_frame_idx": 14783, "task_index": 11}, {"db_idx": 14784, "episode_idx": 57, "frame_idx": 90, "global_frame_idx": 14784, "task_index": 11}, {"db_idx": 14785, "episode_idx": 57, "frame_idx": 91, "global_frame_idx": 14785, "task_index": 11}, {"db_idx": 14786, "episode_idx": 57, "frame_idx": 92, "global_frame_idx": 14786, "task_index": 11}, {"db_idx": 14787, "episode_idx": 57, "frame_idx": 93, "global_frame_idx": 14787, "task_index": 11}, {"db_idx": 14788, "episode_idx": 57, "frame_idx": 94, "global_frame_idx": 14788, "task_index": 11}, {"db_idx": 14789, "episode_idx": 57, "frame_idx": 95, "global_frame_idx": 14789, "task_index": 11}, {"db_idx": 14790, "episode_idx": 57, "frame_idx": 96, "global_frame_idx": 14790, "task_index": 11}, {"db_idx": 14791, "episode_idx": 57, "frame_idx": 97, "global_frame_idx": 14791, "task_index": 11}, {"db_idx": 14792, "episode_idx": 57, "frame_idx": 98, "global_frame_idx": 14792, "task_index": 11}, {"db_idx": 14793, "episode_idx": 57, "frame_idx": 99, "global_frame_idx": 14793, "task_index": 11}, {"db_idx": 14794, "episode_idx": 57, "frame_idx": 100, "global_frame_idx": 14794, "task_index": 11}, {"db_idx": 14795, "episode_idx": 57, "frame_idx": 101, "global_frame_idx": 14795, "task_index": 11}, {"db_idx": 14796, "episode_idx": 
57, "frame_idx": 102, "global_frame_idx": 14796, "task_index": 11}, {"db_idx": 14797, "episode_idx": 57, "frame_idx": 103, "global_frame_idx": 14797, "task_index": 11}, {"db_idx": 14798, "episode_idx": 57, "frame_idx": 104, "global_frame_idx": 14798, "task_index": 11}, {"db_idx": 14799, "episode_idx": 57, "frame_idx": 105, "global_frame_idx": 14799, "task_index": 11}, {"db_idx": 14800, "episode_idx": 57, "frame_idx": 106, "global_frame_idx": 14800, "task_index": 11}, {"db_idx": 14801, "episode_idx": 57, "frame_idx": 107, "global_frame_idx": 14801, "task_index": 11}, {"db_idx": 14802, "episode_idx": 57, "frame_idx": 108, "global_frame_idx": 14802, "task_index": 11}, {"db_idx": 14803, "episode_idx": 57, "frame_idx": 109, "global_frame_idx": 14803, "task_index": 11}, {"db_idx": 14804, "episode_idx": 57, "frame_idx": 110, "global_frame_idx": 14804, "task_index": 11}, {"db_idx": 14805, "episode_idx": 57, "frame_idx": 111, "global_frame_idx": 14805, "task_index": 11}, {"db_idx": 14806, "episode_idx": 57, "frame_idx": 112, "global_frame_idx": 14806, "task_index": 11}, {"db_idx": 14807, "episode_idx": 57, "frame_idx": 113, "global_frame_idx": 14807, "task_index": 11}, {"db_idx": 14808, "episode_idx": 57, "frame_idx": 114, "global_frame_idx": 14808, "task_index": 11}, {"db_idx": 14809, "episode_idx": 57, "frame_idx": 115, "global_frame_idx": 14809, "task_index": 11}, {"db_idx": 14810, "episode_idx": 57, "frame_idx": 116, "global_frame_idx": 14810, "task_index": 11}, {"db_idx": 14811, "episode_idx": 57, "frame_idx": 117, "global_frame_idx": 14811, "task_index": 11}, {"db_idx": 14812, "episode_idx": 57, "frame_idx": 118, "global_frame_idx": 14812, "task_index": 11}, {"db_idx": 14813, "episode_idx": 57, "frame_idx": 119, "global_frame_idx": 14813, "task_index": 11}, {"db_idx": 14814, "episode_idx": 57, "frame_idx": 120, "global_frame_idx": 14814, "task_index": 11}, {"db_idx": 14815, "episode_idx": 57, "frame_idx": 121, "global_frame_idx": 14815, "task_index": 11}, {"db_idx": 
14816, "episode_idx": 57, "frame_idx": 122, "global_frame_idx": 14816, "task_index": 11}, {"db_idx": 14817, "episode_idx": 57, "frame_idx": 123, "global_frame_idx": 14817, "task_index": 11}, {"db_idx": 14818, "episode_idx": 57, "frame_idx": 124, "global_frame_idx": 14818, "task_index": 11}, {"db_idx": 14819, "episode_idx": 57, "frame_idx": 125, "global_frame_idx": 14819, "task_index": 11}, {"db_idx": 14820, "episode_idx": 57, "frame_idx": 126, "global_frame_idx": 14820, "task_index": 11}, {"db_idx": 14821, "episode_idx": 57, "frame_idx": 127, "global_frame_idx": 14821, "task_index": 11}, {"db_idx": 14822, "episode_idx": 57, "frame_idx": 128, "global_frame_idx": 14822, "task_index": 11}, {"db_idx": 14823, "episode_idx": 57, "frame_idx": 129, "global_frame_idx": 14823, "task_index": 11}, {"db_idx": 14824, "episode_idx": 57, "frame_idx": 130, "global_frame_idx": 14824, "task_index": 11}, {"db_idx": 14825, "episode_idx": 57, "frame_idx": 131, "global_frame_idx": 14825, "task_index": 11}, {"db_idx": 14826, "episode_idx": 57, "frame_idx": 132, "global_frame_idx": 14826, "task_index": 11}, {"db_idx": 14827, "episode_idx": 58, "frame_idx": 0, "global_frame_idx": 14827, "task_index": 11}, {"db_idx": 14828, "episode_idx": 58, "frame_idx": 1, "global_frame_idx": 14828, "task_index": 11}, {"db_idx": 14829, "episode_idx": 58, "frame_idx": 2, "global_frame_idx": 14829, "task_index": 11}, {"db_idx": 14830, "episode_idx": 58, "frame_idx": 3, "global_frame_idx": 14830, "task_index": 11}, {"db_idx": 14831, "episode_idx": 58, "frame_idx": 4, "global_frame_idx": 14831, "task_index": 11}, {"db_idx": 14832, "episode_idx": 58, "frame_idx": 5, "global_frame_idx": 14832, "task_index": 11}, {"db_idx": 14833, "episode_idx": 58, "frame_idx": 6, "global_frame_idx": 14833, "task_index": 11}, {"db_idx": 14834, "episode_idx": 58, "frame_idx": 7, "global_frame_idx": 14834, "task_index": 11}, {"db_idx": 14835, "episode_idx": 58, "frame_idx": 8, "global_frame_idx": 14835, "task_index": 11}, 
{"db_idx": 14836, "episode_idx": 58, "frame_idx": 9, "global_frame_idx": 14836, "task_index": 11}, {"db_idx": 14837, "episode_idx": 58, "frame_idx": 10, "global_frame_idx": 14837, "task_index": 11}, {"db_idx": 14838, "episode_idx": 58, "frame_idx": 11, "global_frame_idx": 14838, "task_index": 11}, {"db_idx": 14839, "episode_idx": 58, "frame_idx": 12, "global_frame_idx": 14839, "task_index": 11}, {"db_idx": 14840, "episode_idx": 58, "frame_idx": 13, "global_frame_idx": 14840, "task_index": 11}, {"db_idx": 14841, "episode_idx": 58, "frame_idx": 14, "global_frame_idx": 14841, "task_index": 11}, {"db_idx": 14842, "episode_idx": 58, "frame_idx": 15, "global_frame_idx": 14842, "task_index": 11}, {"db_idx": 14843, "episode_idx": 58, "frame_idx": 16, "global_frame_idx": 14843, "task_index": 11}, {"db_idx": 14844, "episode_idx": 58, "frame_idx": 17, "global_frame_idx": 14844, "task_index": 11}, {"db_idx": 14845, "episode_idx": 58, "frame_idx": 18, "global_frame_idx": 14845, "task_index": 11}, {"db_idx": 14846, "episode_idx": 58, "frame_idx": 19, "global_frame_idx": 14846, "task_index": 11}, {"db_idx": 14847, "episode_idx": 58, "frame_idx": 20, "global_frame_idx": 14847, "task_index": 11}, {"db_idx": 14848, "episode_idx": 58, "frame_idx": 21, "global_frame_idx": 14848, "task_index": 11}, {"db_idx": 14849, "episode_idx": 58, "frame_idx": 22, "global_frame_idx": 14849, "task_index": 11}, {"db_idx": 14850, "episode_idx": 58, "frame_idx": 23, "global_frame_idx": 14850, "task_index": 11}, {"db_idx": 14851, "episode_idx": 58, "frame_idx": 24, "global_frame_idx": 14851, "task_index": 11}, {"db_idx": 14852, "episode_idx": 58, "frame_idx": 25, "global_frame_idx": 14852, "task_index": 11}, {"db_idx": 14853, "episode_idx": 58, "frame_idx": 26, "global_frame_idx": 14853, "task_index": 11}, {"db_idx": 14854, "episode_idx": 58, "frame_idx": 27, "global_frame_idx": 14854, "task_index": 11}, {"db_idx": 14855, "episode_idx": 58, "frame_idx": 28, "global_frame_idx": 14855, "task_index": 11}, 
{"db_idx": 14856, "episode_idx": 58, "frame_idx": 29, "global_frame_idx": 14856, "task_index": 11}, {"db_idx": 14857, "episode_idx": 58, "frame_idx": 30, "global_frame_idx": 14857, "task_index": 11}, {"db_idx": 14858, "episode_idx": 58, "frame_idx": 31, "global_frame_idx": 14858, "task_index": 11}, {"db_idx": 14859, "episode_idx": 58, "frame_idx": 32, "global_frame_idx": 14859, "task_index": 11}, {"db_idx": 14860, "episode_idx": 58, "frame_idx": 33, "global_frame_idx": 14860, "task_index": 11}, {"db_idx": 14861, "episode_idx": 58, "frame_idx": 34, "global_frame_idx": 14861, "task_index": 11}, {"db_idx": 14862, "episode_idx": 58, "frame_idx": 35, "global_frame_idx": 14862, "task_index": 11}, {"db_idx": 14863, "episode_idx": 58, "frame_idx": 36, "global_frame_idx": 14863, "task_index": 11}, {"db_idx": 14864, "episode_idx": 58, "frame_idx": 37, "global_frame_idx": 14864, "task_index": 11}, {"db_idx": 14865, "episode_idx": 58, "frame_idx": 38, "global_frame_idx": 14865, "task_index": 11}, {"db_idx": 14866, "episode_idx": 58, "frame_idx": 39, "global_frame_idx": 14866, "task_index": 11}, {"db_idx": 14867, "episode_idx": 58, "frame_idx": 40, "global_frame_idx": 14867, "task_index": 11}, {"db_idx": 14868, "episode_idx": 58, "frame_idx": 41, "global_frame_idx": 14868, "task_index": 11}, {"db_idx": 14869, "episode_idx": 58, "frame_idx": 42, "global_frame_idx": 14869, "task_index": 11}, {"db_idx": 14870, "episode_idx": 58, "frame_idx": 43, "global_frame_idx": 14870, "task_index": 11}, {"db_idx": 14871, "episode_idx": 58, "frame_idx": 44, "global_frame_idx": 14871, "task_index": 11}, {"db_idx": 14872, "episode_idx": 58, "frame_idx": 45, "global_frame_idx": 14872, "task_index": 11}, {"db_idx": 14873, "episode_idx": 58, "frame_idx": 46, "global_frame_idx": 14873, "task_index": 11}, {"db_idx": 14874, "episode_idx": 58, "frame_idx": 47, "global_frame_idx": 14874, "task_index": 11}, {"db_idx": 14875, "episode_idx": 58, "frame_idx": 48, "global_frame_idx": 14875, "task_index": 11}, 
{"db_idx": 14876, "episode_idx": 58, "frame_idx": 49, "global_frame_idx": 14876, "task_index": 11}, {"db_idx": 14877, "episode_idx": 58, "frame_idx": 50, "global_frame_idx": 14877, "task_index": 11}, {"db_idx": 14878, "episode_idx": 58, "frame_idx": 51, "global_frame_idx": 14878, "task_index": 11}, {"db_idx": 14879, "episode_idx": 58, "frame_idx": 52, "global_frame_idx": 14879, "task_index": 11}, {"db_idx": 14880, "episode_idx": 58, "frame_idx": 53, "global_frame_idx": 14880, "task_index": 11}, {"db_idx": 14881, "episode_idx": 58, "frame_idx": 54, "global_frame_idx": 14881, "task_index": 11}, {"db_idx": 14882, "episode_idx": 58, "frame_idx": 55, "global_frame_idx": 14882, "task_index": 11}, {"db_idx": 14883, "episode_idx": 58, "frame_idx": 56, "global_frame_idx": 14883, "task_index": 11}, {"db_idx": 14884, "episode_idx": 58, "frame_idx": 57, "global_frame_idx": 14884, "task_index": 11}, {"db_idx": 14885, "episode_idx": 58, "frame_idx": 58, "global_frame_idx": 14885, "task_index": 11}, {"db_idx": 14886, "episode_idx": 58, "frame_idx": 59, "global_frame_idx": 14886, "task_index": 11}, {"db_idx": 14887, "episode_idx": 58, "frame_idx": 60, "global_frame_idx": 14887, "task_index": 11}, {"db_idx": 14888, "episode_idx": 58, "frame_idx": 61, "global_frame_idx": 14888, "task_index": 11}, {"db_idx": 14889, "episode_idx": 58, "frame_idx": 62, "global_frame_idx": 14889, "task_index": 11}, {"db_idx": 14890, "episode_idx": 58, "frame_idx": 63, "global_frame_idx": 14890, "task_index": 11}, {"db_idx": 14891, "episode_idx": 58, "frame_idx": 64, "global_frame_idx": 14891, "task_index": 11}, {"db_idx": 14892, "episode_idx": 58, "frame_idx": 65, "global_frame_idx": 14892, "task_index": 11}, {"db_idx": 14893, "episode_idx": 58, "frame_idx": 66, "global_frame_idx": 14893, "task_index": 11}, {"db_idx": 14894, "episode_idx": 58, "frame_idx": 67, "global_frame_idx": 14894, "task_index": 11}, {"db_idx": 14895, "episode_idx": 58, "frame_idx": 68, "global_frame_idx": 14895, "task_index": 11}, 
{"db_idx": 14896, "episode_idx": 58, "frame_idx": 69, "global_frame_idx": 14896, "task_index": 11}, {"db_idx": 14897, "episode_idx": 58, "frame_idx": 70, "global_frame_idx": 14897, "task_index": 11}, {"db_idx": 14898, "episode_idx": 58, "frame_idx": 71, "global_frame_idx": 14898, "task_index": 11}, {"db_idx": 14899, "episode_idx": 58, "frame_idx": 72, "global_frame_idx": 14899, "task_index": 11}, {"db_idx": 14900, "episode_idx": 58, "frame_idx": 73, "global_frame_idx": 14900, "task_index": 11}, {"db_idx": 14901, "episode_idx": 58, "frame_idx": 74, "global_frame_idx": 14901, "task_index": 11}, {"db_idx": 14902, "episode_idx": 58, "frame_idx": 75, "global_frame_idx": 14902, "task_index": 11}, {"db_idx": 14903, "episode_idx": 58, "frame_idx": 76, "global_frame_idx": 14903, "task_index": 11}, {"db_idx": 14904, "episode_idx": 58, "frame_idx": 77, "global_frame_idx": 14904, "task_index": 11}, {"db_idx": 14905, "episode_idx": 58, "frame_idx": 78, "global_frame_idx": 14905, "task_index": 11}, {"db_idx": 14906, "episode_idx": 58, "frame_idx": 79, "global_frame_idx": 14906, "task_index": 11}, {"db_idx": 14907, "episode_idx": 58, "frame_idx": 80, "global_frame_idx": 14907, "task_index": 11}, {"db_idx": 14908, "episode_idx": 58, "frame_idx": 81, "global_frame_idx": 14908, "task_index": 11}, {"db_idx": 14909, "episode_idx": 58, "frame_idx": 82, "global_frame_idx": 14909, "task_index": 11}, {"db_idx": 14910, "episode_idx": 58, "frame_idx": 83, "global_frame_idx": 14910, "task_index": 11}, {"db_idx": 14911, "episode_idx": 58, "frame_idx": 84, "global_frame_idx": 14911, "task_index": 11}, {"db_idx": 14912, "episode_idx": 58, "frame_idx": 85, "global_frame_idx": 14912, "task_index": 11}, {"db_idx": 14913, "episode_idx": 58, "frame_idx": 86, "global_frame_idx": 14913, "task_index": 11}, {"db_idx": 14914, "episode_idx": 58, "frame_idx": 87, "global_frame_idx": 14914, "task_index": 11}, {"db_idx": 14915, "episode_idx": 58, "frame_idx": 88, "global_frame_idx": 14915, "task_index": 11}, 
{"db_idx": 14916, "episode_idx": 58, "frame_idx": 89, "global_frame_idx": 14916, "task_index": 11}, {"db_idx": 14917, "episode_idx": 58, "frame_idx": 90, "global_frame_idx": 14917, "task_index": 11}, {"db_idx": 14918, "episode_idx": 58, "frame_idx": 91, "global_frame_idx": 14918, "task_index": 11}, {"db_idx": 14919, "episode_idx": 58, "frame_idx": 92, "global_frame_idx": 14919, "task_index": 11}, {"db_idx": 14920, "episode_idx": 58, "frame_idx": 93, "global_frame_idx": 14920, "task_index": 11}, {"db_idx": 14921, "episode_idx": 58, "frame_idx": 94, "global_frame_idx": 14921, "task_index": 11}, {"db_idx": 14922, "episode_idx": 58, "frame_idx": 95, "global_frame_idx": 14922, "task_index": 11}, {"db_idx": 14923, "episode_idx": 58, "frame_idx": 96, "global_frame_idx": 14923, "task_index": 11}, {"db_idx": 14924, "episode_idx": 58, "frame_idx": 97, "global_frame_idx": 14924, "task_index": 11}, {"db_idx": 14925, "episode_idx": 58, "frame_idx": 98, "global_frame_idx": 14925, "task_index": 11}, {"db_idx": 14926, "episode_idx": 58, "frame_idx": 99, "global_frame_idx": 14926, "task_index": 11}, {"db_idx": 14927, "episode_idx": 58, "frame_idx": 100, "global_frame_idx": 14927, "task_index": 11}, {"db_idx": 14928, "episode_idx": 58, "frame_idx": 101, "global_frame_idx": 14928, "task_index": 11}, {"db_idx": 14929, "episode_idx": 58, "frame_idx": 102, "global_frame_idx": 14929, "task_index": 11}, {"db_idx": 14930, "episode_idx": 58, "frame_idx": 103, "global_frame_idx": 14930, "task_index": 11}, {"db_idx": 14931, "episode_idx": 58, "frame_idx": 104, "global_frame_idx": 14931, "task_index": 11}, {"db_idx": 14932, "episode_idx": 58, "frame_idx": 105, "global_frame_idx": 14932, "task_index": 11}, {"db_idx": 14933, "episode_idx": 58, "frame_idx": 106, "global_frame_idx": 14933, "task_index": 11}, {"db_idx": 14934, "episode_idx": 58, "frame_idx": 107, "global_frame_idx": 14934, "task_index": 11}, {"db_idx": 14935, "episode_idx": 58, "frame_idx": 108, "global_frame_idx": 14935, 
"task_index": 11}, {"db_idx": 14936, "episode_idx": 58, "frame_idx": 109, "global_frame_idx": 14936, "task_index": 11}, {"db_idx": 14937, "episode_idx": 58, "frame_idx": 110, "global_frame_idx": 14937, "task_index": 11}, {"db_idx": 14938, "episode_idx": 58, "frame_idx": 111, "global_frame_idx": 14938, "task_index": 11}, {"db_idx": 14939, "episode_idx": 58, "frame_idx": 112, "global_frame_idx": 14939, "task_index": 11}, {"db_idx": 14940, "episode_idx": 58, "frame_idx": 113, "global_frame_idx": 14940, "task_index": 11}, {"db_idx": 14941, "episode_idx": 58, "frame_idx": 114, "global_frame_idx": 14941, "task_index": 11}, {"db_idx": 14942, "episode_idx": 58, "frame_idx": 115, "global_frame_idx": 14942, "task_index": 11}, {"db_idx": 14943, "episode_idx": 58, "frame_idx": 116, "global_frame_idx": 14943, "task_index": 11}, {"db_idx": 14944, "episode_idx": 58, "frame_idx": 117, "global_frame_idx": 14944, "task_index": 11}, {"db_idx": 14945, "episode_idx": 58, "frame_idx": 118, "global_frame_idx": 14945, "task_index": 11}, {"db_idx": 14946, "episode_idx": 58, "frame_idx": 119, "global_frame_idx": 14946, "task_index": 11}, {"db_idx": 14947, "episode_idx": 58, "frame_idx": 120, "global_frame_idx": 14947, "task_index": 11}, {"db_idx": 14948, "episode_idx": 58, "frame_idx": 121, "global_frame_idx": 14948, "task_index": 11}, {"db_idx": 14949, "episode_idx": 58, "frame_idx": 122, "global_frame_idx": 14949, "task_index": 11}, {"db_idx": 14950, "episode_idx": 58, "frame_idx": 123, "global_frame_idx": 14950, "task_index": 11}, {"db_idx": 14951, "episode_idx": 58, "frame_idx": 124, "global_frame_idx": 14951, "task_index": 11}, {"db_idx": 14952, "episode_idx": 58, "frame_idx": 125, "global_frame_idx": 14952, "task_index": 11}, {"db_idx": 14953, "episode_idx": 58, "frame_idx": 126, "global_frame_idx": 14953, "task_index": 11}, {"db_idx": 14954, "episode_idx": 58, "frame_idx": 127, "global_frame_idx": 14954, "task_index": 11}, {"db_idx": 14955, "episode_idx": 58, "frame_idx": 128, 
"global_frame_idx": 14955, "task_index": 11}, {"db_idx": 14956, "episode_idx": 58, "frame_idx": 129, "global_frame_idx": 14956, "task_index": 11}, {"db_idx": 14957, "episode_idx": 58, "frame_idx": 130, "global_frame_idx": 14957, "task_index": 11}, {"db_idx": 14958, "episode_idx": 58, "frame_idx": 131, "global_frame_idx": 14958, "task_index": 11}, {"db_idx": 14959, "episode_idx": 59, "frame_idx": 0, "global_frame_idx": 14959, "task_index": 11}, {"db_idx": 14960, "episode_idx": 59, "frame_idx": 1, "global_frame_idx": 14960, "task_index": 11}, {"db_idx": 14961, "episode_idx": 59, "frame_idx": 2, "global_frame_idx": 14961, "task_index": 11}, {"db_idx": 14962, "episode_idx": 59, "frame_idx": 3, "global_frame_idx": 14962, "task_index": 11}, {"db_idx": 14963, "episode_idx": 59, "frame_idx": 4, "global_frame_idx": 14963, "task_index": 11}, {"db_idx": 14964, "episode_idx": 59, "frame_idx": 5, "global_frame_idx": 14964, "task_index": 11}, {"db_idx": 14965, "episode_idx": 59, "frame_idx": 6, "global_frame_idx": 14965, "task_index": 11}, {"db_idx": 14966, "episode_idx": 59, "frame_idx": 7, "global_frame_idx": 14966, "task_index": 11}, {"db_idx": 14967, "episode_idx": 59, "frame_idx": 8, "global_frame_idx": 14967, "task_index": 11}, {"db_idx": 14968, "episode_idx": 59, "frame_idx": 9, "global_frame_idx": 14968, "task_index": 11}, {"db_idx": 14969, "episode_idx": 59, "frame_idx": 10, "global_frame_idx": 14969, "task_index": 11}, {"db_idx": 14970, "episode_idx": 59, "frame_idx": 11, "global_frame_idx": 14970, "task_index": 11}, {"db_idx": 14971, "episode_idx": 59, "frame_idx": 12, "global_frame_idx": 14971, "task_index": 11}, {"db_idx": 14972, "episode_idx": 59, "frame_idx": 13, "global_frame_idx": 14972, "task_index": 11}, {"db_idx": 14973, "episode_idx": 59, "frame_idx": 14, "global_frame_idx": 14973, "task_index": 11}, {"db_idx": 14974, "episode_idx": 59, "frame_idx": 15, "global_frame_idx": 14974, "task_index": 11}, {"db_idx": 14975, "episode_idx": 59, "frame_idx": 16, 
"global_frame_idx": 14975, "task_index": 11}, {"db_idx": 14976, "episode_idx": 59, "frame_idx": 17, "global_frame_idx": 14976, "task_index": 11}, {"db_idx": 14977, "episode_idx": 59, "frame_idx": 18, "global_frame_idx": 14977, "task_index": 11}, {"db_idx": 14978, "episode_idx": 59, "frame_idx": 19, "global_frame_idx": 14978, "task_index": 11}, {"db_idx": 14979, "episode_idx": 59, "frame_idx": 20, "global_frame_idx": 14979, "task_index": 11}, {"db_idx": 14980, "episode_idx": 59, "frame_idx": 21, "global_frame_idx": 14980, "task_index": 11}, {"db_idx": 14981, "episode_idx": 59, "frame_idx": 22, "global_frame_idx": 14981, "task_index": 11}, {"db_idx": 14982, "episode_idx": 59, "frame_idx": 23, "global_frame_idx": 14982, "task_index": 11}, {"db_idx": 14983, "episode_idx": 59, "frame_idx": 24, "global_frame_idx": 14983, "task_index": 11}, {"db_idx": 14984, "episode_idx": 59, "frame_idx": 25, "global_frame_idx": 14984, "task_index": 11}, {"db_idx": 14985, "episode_idx": 59, "frame_idx": 26, "global_frame_idx": 14985, "task_index": 11}, {"db_idx": 14986, "episode_idx": 59, "frame_idx": 27, "global_frame_idx": 14986, "task_index": 11}, {"db_idx": 14987, "episode_idx": 59, "frame_idx": 28, "global_frame_idx": 14987, "task_index": 11}, {"db_idx": 14988, "episode_idx": 59, "frame_idx": 29, "global_frame_idx": 14988, "task_index": 11}, {"db_idx": 14989, "episode_idx": 59, "frame_idx": 30, "global_frame_idx": 14989, "task_index": 11}, {"db_idx": 14990, "episode_idx": 59, "frame_idx": 31, "global_frame_idx": 14990, "task_index": 11}, {"db_idx": 14991, "episode_idx": 59, "frame_idx": 32, "global_frame_idx": 14991, "task_index": 11}, {"db_idx": 14992, "episode_idx": 59, "frame_idx": 33, "global_frame_idx": 14992, "task_index": 11}, {"db_idx": 14993, "episode_idx": 59, "frame_idx": 34, "global_frame_idx": 14993, "task_index": 11}, {"db_idx": 14994, "episode_idx": 59, "frame_idx": 35, "global_frame_idx": 14994, "task_index": 11}, {"db_idx": 14995, "episode_idx": 59, "frame_idx": 36, 
"global_frame_idx": 14995, "task_index": 11}, {"db_idx": 14996, "episode_idx": 59, "frame_idx": 37, "global_frame_idx": 14996, "task_index": 11}, {"db_idx": 14997, "episode_idx": 59, "frame_idx": 38, "global_frame_idx": 14997, "task_index": 11}, {"db_idx": 14998, "episode_idx": 59, "frame_idx": 39, "global_frame_idx": 14998, "task_index": 11}, {"db_idx": 14999, "episode_idx": 59, "frame_idx": 40, "global_frame_idx": 14999, "task_index": 11}, {"db_idx": 15000, "episode_idx": 59, "frame_idx": 41, "global_frame_idx": 15000, "task_index": 11}, {"db_idx": 15001, "episode_idx": 59, "frame_idx": 42, "global_frame_idx": 15001, "task_index": 11}, {"db_idx": 15002, "episode_idx": 59, "frame_idx": 43, "global_frame_idx": 15002, "task_index": 11}, {"db_idx": 15003, "episode_idx": 59, "frame_idx": 44, "global_frame_idx": 15003, "task_index": 11}, {"db_idx": 15004, "episode_idx": 59, "frame_idx": 45, "global_frame_idx": 15004, "task_index": 11}, {"db_idx": 15005, "episode_idx": 59, "frame_idx": 46, "global_frame_idx": 15005, "task_index": 11}, {"db_idx": 15006, "episode_idx": 59, "frame_idx": 47, "global_frame_idx": 15006, "task_index": 11}, {"db_idx": 15007, "episode_idx": 59, "frame_idx": 48, "global_frame_idx": 15007, "task_index": 11}, {"db_idx": 15008, "episode_idx": 59, "frame_idx": 49, "global_frame_idx": 15008, "task_index": 11}, {"db_idx": 15009, "episode_idx": 59, "frame_idx": 50, "global_frame_idx": 15009, "task_index": 11}, {"db_idx": 15010, "episode_idx": 59, "frame_idx": 51, "global_frame_idx": 15010, "task_index": 11}, {"db_idx": 15011, "episode_idx": 59, "frame_idx": 52, "global_frame_idx": 15011, "task_index": 11}, {"db_idx": 15012, "episode_idx": 59, "frame_idx": 53, "global_frame_idx": 15012, "task_index": 11}, {"db_idx": 15013, "episode_idx": 59, "frame_idx": 54, "global_frame_idx": 15013, "task_index": 11}, {"db_idx": 15014, "episode_idx": 59, "frame_idx": 55, "global_frame_idx": 15014, "task_index": 11}, {"db_idx": 15015, "episode_idx": 59, "frame_idx": 56, 
"global_frame_idx": 15015, "task_index": 11}, {"db_idx": 15016, "episode_idx": 59, "frame_idx": 57, "global_frame_idx": 15016, "task_index": 11}, {"db_idx": 15017, "episode_idx": 59, "frame_idx": 58, "global_frame_idx": 15017, "task_index": 11}, {"db_idx": 15018, "episode_idx": 59, "frame_idx": 59, "global_frame_idx": 15018, "task_index": 11}, {"db_idx": 15019, "episode_idx": 59, "frame_idx": 60, "global_frame_idx": 15019, "task_index": 11}, {"db_idx": 15020, "episode_idx": 59, "frame_idx": 61, "global_frame_idx": 15020, "task_index": 11}, {"db_idx": 15021, "episode_idx": 59, "frame_idx": 62, "global_frame_idx": 15021, "task_index": 11}, {"db_idx": 15022, "episode_idx": 59, "frame_idx": 63, "global_frame_idx": 15022, "task_index": 11}, {"db_idx": 15023, "episode_idx": 59, "frame_idx": 64, "global_frame_idx": 15023, "task_index": 11}, {"db_idx": 15024, "episode_idx": 59, "frame_idx": 65, "global_frame_idx": 15024, "task_index": 11}, {"db_idx": 15025, "episode_idx": 59, "frame_idx": 66, "global_frame_idx": 15025, "task_index": 11}, {"db_idx": 15026, "episode_idx": 59, "frame_idx": 67, "global_frame_idx": 15026, "task_index": 11}, {"db_idx": 15027, "episode_idx": 59, "frame_idx": 68, "global_frame_idx": 15027, "task_index": 11}, {"db_idx": 15028, "episode_idx": 59, "frame_idx": 69, "global_frame_idx": 15028, "task_index": 11}, {"db_idx": 15029, "episode_idx": 59, "frame_idx": 70, "global_frame_idx": 15029, "task_index": 11}, {"db_idx": 15030, "episode_idx": 59, "frame_idx": 71, "global_frame_idx": 15030, "task_index": 11}, {"db_idx": 15031, "episode_idx": 59, "frame_idx": 72, "global_frame_idx": 15031, "task_index": 11}, {"db_idx": 15032, "episode_idx": 59, "frame_idx": 73, "global_frame_idx": 15032, "task_index": 11}, {"db_idx": 15033, "episode_idx": 59, "frame_idx": 74, "global_frame_idx": 15033, "task_index": 11}, {"db_idx": 15034, "episode_idx": 59, "frame_idx": 75, "global_frame_idx": 15034, "task_index": 11}, {"db_idx": 15035, "episode_idx": 59, "frame_idx": 76, 
"global_frame_idx": 15035, "task_index": 11}, {"db_idx": 15036, "episode_idx": 59, "frame_idx": 77, "global_frame_idx": 15036, "task_index": 11}, {"db_idx": 15037, "episode_idx": 59, "frame_idx": 78, "global_frame_idx": 15037, "task_index": 11}, {"db_idx": 15038, "episode_idx": 59, "frame_idx": 79, "global_frame_idx": 15038, "task_index": 11}, {"db_idx": 15039, "episode_idx": 59, "frame_idx": 80, "global_frame_idx": 15039, "task_index": 11}, {"db_idx": 15040, "episode_idx": 59, "frame_idx": 81, "global_frame_idx": 15040, "task_index": 11}, {"db_idx": 15041, "episode_idx": 59, "frame_idx": 82, "global_frame_idx": 15041, "task_index": 11}, {"db_idx": 15042, "episode_idx": 59, "frame_idx": 83, "global_frame_idx": 15042, "task_index": 11}, {"db_idx": 15043, "episode_idx": 59, "frame_idx": 84, "global_frame_idx": 15043, "task_index": 11}, {"db_idx": 15044, "episode_idx": 59, "frame_idx": 85, "global_frame_idx": 15044, "task_index": 11}, {"db_idx": 15045, "episode_idx": 59, "frame_idx": 86, "global_frame_idx": 15045, "task_index": 11}, {"db_idx": 15046, "episode_idx": 59, "frame_idx": 87, "global_frame_idx": 15046, "task_index": 11}, {"db_idx": 15047, "episode_idx": 59, "frame_idx": 88, "global_frame_idx": 15047, "task_index": 11}, {"db_idx": 15048, "episode_idx": 59, "frame_idx": 89, "global_frame_idx": 15048, "task_index": 11}, {"db_idx": 15049, "episode_idx": 59, "frame_idx": 90, "global_frame_idx": 15049, "task_index": 11}, {"db_idx": 15050, "episode_idx": 59, "frame_idx": 91, "global_frame_idx": 15050, "task_index": 11}, {"db_idx": 15051, "episode_idx": 59, "frame_idx": 92, "global_frame_idx": 15051, "task_index": 11}, {"db_idx": 15052, "episode_idx": 59, "frame_idx": 93, "global_frame_idx": 15052, "task_index": 11}, {"db_idx": 15053, "episode_idx": 59, "frame_idx": 94, "global_frame_idx": 15053, "task_index": 11}, {"db_idx": 15054, "episode_idx": 59, "frame_idx": 95, "global_frame_idx": 15054, "task_index": 11}, {"db_idx": 15055, "episode_idx": 59, "frame_idx": 96, 
"global_frame_idx": 15055, "task_index": 11}, {"db_idx": 15056, "episode_idx": 59, "frame_idx": 97, "global_frame_idx": 15056, "task_index": 11}, {"db_idx": 15057, "episode_idx": 59, "frame_idx": 98, "global_frame_idx": 15057, "task_index": 11}, {"db_idx": 15058, "episode_idx": 59, "frame_idx": 99, "global_frame_idx": 15058, "task_index": 11}, {"db_idx": 15059, "episode_idx": 59, "frame_idx": 100, "global_frame_idx": 15059, "task_index": 11}, {"db_idx": 15060, "episode_idx": 59, "frame_idx": 101, "global_frame_idx": 15060, "task_index": 11}, {"db_idx": 15061, "episode_idx": 59, "frame_idx": 102, "global_frame_idx": 15061, "task_index": 11}, {"db_idx": 15062, "episode_idx": 59, "frame_idx": 103, "global_frame_idx": 15062, "task_index": 11}, {"db_idx": 15063, "episode_idx": 59, "frame_idx": 104, "global_frame_idx": 15063, "task_index": 11}, {"db_idx": 15064, "episode_idx": 59, "frame_idx": 105, "global_frame_idx": 15064, "task_index": 11}, {"db_idx": 15065, "episode_idx": 59, "frame_idx": 106, "global_frame_idx": 15065, "task_index": 11}, {"db_idx": 15066, "episode_idx": 59, "frame_idx": 107, "global_frame_idx": 15066, "task_index": 11}, {"db_idx": 15067, "episode_idx": 59, "frame_idx": 108, "global_frame_idx": 15067, "task_index": 11}, {"db_idx": 15068, "episode_idx": 59, "frame_idx": 109, "global_frame_idx": 15068, "task_index": 11}, {"db_idx": 15069, "episode_idx": 59, "frame_idx": 110, "global_frame_idx": 15069, "task_index": 11}, {"db_idx": 15070, "episode_idx": 59, "frame_idx": 111, "global_frame_idx": 15070, "task_index": 11}, {"db_idx": 15071, "episode_idx": 59, "frame_idx": 112, "global_frame_idx": 15071, "task_index": 11}, {"db_idx": 15072, "episode_idx": 59, "frame_idx": 113, "global_frame_idx": 15072, "task_index": 11}, {"db_idx": 15073, "episode_idx": 59, "frame_idx": 114, "global_frame_idx": 15073, "task_index": 11}, {"db_idx": 15074, "episode_idx": 59, "frame_idx": 115, "global_frame_idx": 15074, "task_index": 11}, {"db_idx": 15075, "episode_idx": 59, 
"frame_idx": 116, "global_frame_idx": 15075, "task_index": 11}, {"db_idx": 15076, "episode_idx": 59, "frame_idx": 117, "global_frame_idx": 15076, "task_index": 11}, {"db_idx": 15077, "episode_idx": 59, "frame_idx": 118, "global_frame_idx": 15077, "task_index": 11}, {"db_idx": 15078, "episode_idx": 59, "frame_idx": 119, "global_frame_idx": 15078, "task_index": 11}, {"db_idx": 15079, "episode_idx": 59, "frame_idx": 120, "global_frame_idx": 15079, "task_index": 11}, {"db_idx": 15080, "episode_idx": 59, "frame_idx": 121, "global_frame_idx": 15080, "task_index": 11}, {"db_idx": 15081, "episode_idx": 59, "frame_idx": 122, "global_frame_idx": 15081, "task_index": 11}, {"db_idx": 15082, "episode_idx": 59, "frame_idx": 123, "global_frame_idx": 15082, "task_index": 11}, {"db_idx": 15083, "episode_idx": 59, "frame_idx": 124, "global_frame_idx": 15083, "task_index": 11}, {"db_idx": 15084, "episode_idx": 59, "frame_idx": 125, "global_frame_idx": 15084, "task_index": 11}, {"db_idx": 15085, "episode_idx": 59, "frame_idx": 126, "global_frame_idx": 15085, "task_index": 11}, {"db_idx": 15086, "episode_idx": 59, "frame_idx": 127, "global_frame_idx": 15086, "task_index": 11}, {"db_idx": 15087, "episode_idx": 59, "frame_idx": 128, "global_frame_idx": 15087, "task_index": 11}, {"db_idx": 15088, "episode_idx": 59, "frame_idx": 129, "global_frame_idx": 15088, "task_index": 11}, {"db_idx": 15089, "episode_idx": 59, "frame_idx": 130, "global_frame_idx": 15089, "task_index": 11}, {"db_idx": 15090, "episode_idx": 59, "frame_idx": 131, "global_frame_idx": 15090, "task_index": 11}, {"db_idx": 15091, "episode_idx": 59, "frame_idx": 132, "global_frame_idx": 15091, "task_index": 11}, {"db_idx": 15092, "episode_idx": 59, "frame_idx": 133, "global_frame_idx": 15092, "task_index": 11}, {"db_idx": 15093, "episode_idx": 59, "frame_idx": 134, "global_frame_idx": 15093, "task_index": 11}, {"db_idx": 15094, "episode_idx": 59, "frame_idx": 135, "global_frame_idx": 15094, "task_index": 11}, {"db_idx": 
15095, "episode_idx": 59, "frame_idx": 136, "global_frame_idx": 15095, "task_index": 11}, {"db_idx": 15096, "episode_idx": 59, "frame_idx": 137, "global_frame_idx": 15096, "task_index": 11}, {"db_idx": 15097, "episode_idx": 59, "frame_idx": 138, "global_frame_idx": 15097, "task_index": 11}, {"db_idx": 15098, "episode_idx": 59, "frame_idx": 139, "global_frame_idx": 15098, "task_index": 11}, {"db_idx": 15099, "episode_idx": 59, "frame_idx": 140, "global_frame_idx": 15099, "task_index": 11}, {"db_idx": 15100, "episode_idx": 59, "frame_idx": 141, "global_frame_idx": 15100, "task_index": 11}, {"db_idx": 15101, "episode_idx": 59, "frame_idx": 142, "global_frame_idx": 15101, "task_index": 11}, {"db_idx": 15102, "episode_idx": 59, "frame_idx": 143, "global_frame_idx": 15102, "task_index": 11}, {"db_idx": 15103, "episode_idx": 59, "frame_idx": 144, "global_frame_idx": 15103, "task_index": 11}, {"db_idx": 15104, "episode_idx": 59, "frame_idx": 145, "global_frame_idx": 15104, "task_index": 11}, {"db_idx": 15105, "episode_idx": 59, "frame_idx": 146, "global_frame_idx": 15105, "task_index": 11}, {"db_idx": 15106, "episode_idx": 59, "frame_idx": 147, "global_frame_idx": 15106, "task_index": 11}, {"db_idx": 15107, "episode_idx": 59, "frame_idx": 148, "global_frame_idx": 15107, "task_index": 11}, {"db_idx": 15108, "episode_idx": 59, "frame_idx": 149, "global_frame_idx": 15108, "task_index": 11}, {"db_idx": 15109, "episode_idx": 59, "frame_idx": 150, "global_frame_idx": 15109, "task_index": 11}, {"db_idx": 15110, "episode_idx": 59, "frame_idx": 151, "global_frame_idx": 15110, "task_index": 11}, {"db_idx": 15111, "episode_idx": 59, "frame_idx": 152, "global_frame_idx": 15111, "task_index": 11}, {"db_idx": 15112, "episode_idx": 59, "frame_idx": 153, "global_frame_idx": 15112, "task_index": 11}, {"db_idx": 15113, "episode_idx": 59, "frame_idx": 154, "global_frame_idx": 15113, "task_index": 11}, {"db_idx": 15114, "episode_idx": 59, "frame_idx": 155, "global_frame_idx": 15114, 
"task_index": 11}, {"db_idx": 15115, "episode_idx": 59, "frame_idx": 156, "global_frame_idx": 15115, "task_index": 11}, {"db_idx": 15116, "episode_idx": 59, "frame_idx": 157, "global_frame_idx": 15116, "task_index": 11}, {"db_idx": 15117, "episode_idx": 59, "frame_idx": 158, "global_frame_idx": 15117, "task_index": 11}, {"db_idx": 15118, "episode_idx": 59, "frame_idx": 159, "global_frame_idx": 15118, "task_index": 11}, {"db_idx": 15119, "episode_idx": 59, "frame_idx": 160, "global_frame_idx": 15119, "task_index": 11}, {"db_idx": 15120, "episode_idx": 59, "frame_idx": 161, "global_frame_idx": 15120, "task_index": 11}, {"db_idx": 15121, "episode_idx": 59, "frame_idx": 162, "global_frame_idx": 15121, "task_index": 11}, {"db_idx": 15122, "episode_idx": 59, "frame_idx": 163, "global_frame_idx": 15122, "task_index": 11}, {"db_idx": 15123, "episode_idx": 59, "frame_idx": 164, "global_frame_idx": 15123, "task_index": 11}, {"db_idx": 15124, "episode_idx": 59, "frame_idx": 165, "global_frame_idx": 15124, "task_index": 11}, {"db_idx": 15125, "episode_idx": 59, "frame_idx": 166, "global_frame_idx": 15125, "task_index": 11}, {"db_idx": 15126, "episode_idx": 59, "frame_idx": 167, "global_frame_idx": 15126, "task_index": 11}, {"db_idx": 15127, "episode_idx": 59, "frame_idx": 168, "global_frame_idx": 15127, "task_index": 11}, {"db_idx": 15128, "episode_idx": 59, "frame_idx": 169, "global_frame_idx": 15128, "task_index": 11}, {"db_idx": 15129, "episode_idx": 59, "frame_idx": 170, "global_frame_idx": 15129, "task_index": 11}, {"db_idx": 15130, "episode_idx": 59, "frame_idx": 171, "global_frame_idx": 15130, "task_index": 11}, {"db_idx": 15131, "episode_idx": 60, "frame_idx": 0, "global_frame_idx": 15131, "task_index": 12}, {"db_idx": 15132, "episode_idx": 60, "frame_idx": 1, "global_frame_idx": 15132, "task_index": 12}, {"db_idx": 15133, "episode_idx": 60, "frame_idx": 2, "global_frame_idx": 15133, "task_index": 12}, {"db_idx": 15134, "episode_idx": 60, "frame_idx": 3, 
"global_frame_idx": 15134, "task_index": 12}, {"db_idx": 15135, "episode_idx": 60, "frame_idx": 4, "global_frame_idx": 15135, "task_index": 12}, {"db_idx": 15136, "episode_idx": 60, "frame_idx": 5, "global_frame_idx": 15136, "task_index": 12}, {"db_idx": 15137, "episode_idx": 60, "frame_idx": 6, "global_frame_idx": 15137, "task_index": 12}, {"db_idx": 15138, "episode_idx": 60, "frame_idx": 7, "global_frame_idx": 15138, "task_index": 12}, {"db_idx": 15139, "episode_idx": 60, "frame_idx": 8, "global_frame_idx": 15139, "task_index": 12}, {"db_idx": 15140, "episode_idx": 60, "frame_idx": 9, "global_frame_idx": 15140, "task_index": 12}, {"db_idx": 15141, "episode_idx": 60, "frame_idx": 10, "global_frame_idx": 15141, "task_index": 12}, {"db_idx": 15142, "episode_idx": 60, "frame_idx": 11, "global_frame_idx": 15142, "task_index": 12}, {"db_idx": 15143, "episode_idx": 60, "frame_idx": 12, "global_frame_idx": 15143, "task_index": 12}, {"db_idx": 15144, "episode_idx": 60, "frame_idx": 13, "global_frame_idx": 15144, "task_index": 12}, {"db_idx": 15145, "episode_idx": 60, "frame_idx": 14, "global_frame_idx": 15145, "task_index": 12}, {"db_idx": 15146, "episode_idx": 60, "frame_idx": 15, "global_frame_idx": 15146, "task_index": 12}, {"db_idx": 15147, "episode_idx": 60, "frame_idx": 16, "global_frame_idx": 15147, "task_index": 12}, {"db_idx": 15148, "episode_idx": 60, "frame_idx": 17, "global_frame_idx": 15148, "task_index": 12}, {"db_idx": 15149, "episode_idx": 60, "frame_idx": 18, "global_frame_idx": 15149, "task_index": 12}, {"db_idx": 15150, "episode_idx": 60, "frame_idx": 19, "global_frame_idx": 15150, "task_index": 12}, {"db_idx": 15151, "episode_idx": 60, "frame_idx": 20, "global_frame_idx": 15151, "task_index": 12}, {"db_idx": 15152, "episode_idx": 60, "frame_idx": 21, "global_frame_idx": 15152, "task_index": 12}, {"db_idx": 15153, "episode_idx": 60, "frame_idx": 22, "global_frame_idx": 15153, "task_index": 12}, {"db_idx": 15154, "episode_idx": 60, "frame_idx": 23, 
"global_frame_idx": 15154, "task_index": 12}, {"db_idx": 15155, "episode_idx": 60, "frame_idx": 24, "global_frame_idx": 15155, "task_index": 12}, {"db_idx": 15156, "episode_idx": 60, "frame_idx": 25, "global_frame_idx": 15156, "task_index": 12}, {"db_idx": 15157, "episode_idx": 60, "frame_idx": 26, "global_frame_idx": 15157, "task_index": 12}, {"db_idx": 15158, "episode_idx": 60, "frame_idx": 27, "global_frame_idx": 15158, "task_index": 12}, {"db_idx": 15159, "episode_idx": 60, "frame_idx": 28, "global_frame_idx": 15159, "task_index": 12}, {"db_idx": 15160, "episode_idx": 60, "frame_idx": 29, "global_frame_idx": 15160, "task_index": 12}, {"db_idx": 15161, "episode_idx": 60, "frame_idx": 30, "global_frame_idx": 15161, "task_index": 12}, {"db_idx": 15162, "episode_idx": 60, "frame_idx": 31, "global_frame_idx": 15162, "task_index": 12}, {"db_idx": 15163, "episode_idx": 60, "frame_idx": 32, "global_frame_idx": 15163, "task_index": 12}, {"db_idx": 15164, "episode_idx": 60, "frame_idx": 33, "global_frame_idx": 15164, "task_index": 12}, {"db_idx": 15165, "episode_idx": 60, "frame_idx": 34, "global_frame_idx": 15165, "task_index": 12}, {"db_idx": 15166, "episode_idx": 60, "frame_idx": 35, "global_frame_idx": 15166, "task_index": 12}, {"db_idx": 15167, "episode_idx": 60, "frame_idx": 36, "global_frame_idx": 15167, "task_index": 12}, {"db_idx": 15168, "episode_idx": 60, "frame_idx": 37, "global_frame_idx": 15168, "task_index": 12}, {"db_idx": 15169, "episode_idx": 60, "frame_idx": 38, "global_frame_idx": 15169, "task_index": 12}, {"db_idx": 15170, "episode_idx": 60, "frame_idx": 39, "global_frame_idx": 15170, "task_index": 12}, {"db_idx": 15171, "episode_idx": 60, "frame_idx": 40, "global_frame_idx": 15171, "task_index": 12}, {"db_idx": 15172, "episode_idx": 60, "frame_idx": 41, "global_frame_idx": 15172, "task_index": 12}, {"db_idx": 15173, "episode_idx": 60, "frame_idx": 42, "global_frame_idx": 15173, "task_index": 12}, {"db_idx": 15174, "episode_idx": 60, "frame_idx": 43, 
"global_frame_idx": 15174, "task_index": 12}, {"db_idx": 15175, "episode_idx": 60, "frame_idx": 44, "global_frame_idx": 15175, "task_index": 12}, {"db_idx": 15176, "episode_idx": 60, "frame_idx": 45, "global_frame_idx": 15176, "task_index": 12}, {"db_idx": 15177, "episode_idx": 60, "frame_idx": 46, "global_frame_idx": 15177, "task_index": 12}, {"db_idx": 15178, "episode_idx": 60, "frame_idx": 47, "global_frame_idx": 15178, "task_index": 12}, {"db_idx": 15179, "episode_idx": 60, "frame_idx": 48, "global_frame_idx": 15179, "task_index": 12}, {"db_idx": 15180, "episode_idx": 60, "frame_idx": 49, "global_frame_idx": 15180, "task_index": 12}, {"db_idx": 15181, "episode_idx": 60, "frame_idx": 50, "global_frame_idx": 15181, "task_index": 12}, {"db_idx": 15182, "episode_idx": 60, "frame_idx": 51, "global_frame_idx": 15182, "task_index": 12}, {"db_idx": 15183, "episode_idx": 60, "frame_idx": 52, "global_frame_idx": 15183, "task_index": 12}, {"db_idx": 15184, "episode_idx": 60, "frame_idx": 53, "global_frame_idx": 15184, "task_index": 12}, {"db_idx": 15185, "episode_idx": 60, "frame_idx": 54, "global_frame_idx": 15185, "task_index": 12}, {"db_idx": 15186, "episode_idx": 60, "frame_idx": 55, "global_frame_idx": 15186, "task_index": 12}, {"db_idx": 15187, "episode_idx": 60, "frame_idx": 56, "global_frame_idx": 15187, "task_index": 12}, {"db_idx": 15188, "episode_idx": 60, "frame_idx": 57, "global_frame_idx": 15188, "task_index": 12}, {"db_idx": 15189, "episode_idx": 60, "frame_idx": 58, "global_frame_idx": 15189, "task_index": 12}, {"db_idx": 15190, "episode_idx": 60, "frame_idx": 59, "global_frame_idx": 15190, "task_index": 12}, {"db_idx": 15191, "episode_idx": 60, "frame_idx": 60, "global_frame_idx": 15191, "task_index": 12}, {"db_idx": 15192, "episode_idx": 60, "frame_idx": 61, "global_frame_idx": 15192, "task_index": 12}, {"db_idx": 15193, "episode_idx": 60, "frame_idx": 62, "global_frame_idx": 15193, "task_index": 12}, {"db_idx": 15194, "episode_idx": 60, "frame_idx": 63, 
"global_frame_idx": 15194, "task_index": 12}, {"db_idx": 15195, "episode_idx": 60, "frame_idx": 64, "global_frame_idx": 15195, "task_index": 12}, {"db_idx": 15196, "episode_idx": 60, "frame_idx": 65, "global_frame_idx": 15196, "task_index": 12}, {"db_idx": 15197, "episode_idx": 60, "frame_idx": 66, "global_frame_idx": 15197, "task_index": 12}, {"db_idx": 15198, "episode_idx": 60, "frame_idx": 67, "global_frame_idx": 15198, "task_index": 12}, {"db_idx": 15199, "episode_idx": 60, "frame_idx": 68, "global_frame_idx": 15199, "task_index": 12}, {"db_idx": 15200, "episode_idx": 60, "frame_idx": 69, "global_frame_idx": 15200, "task_index": 12}, {"db_idx": 15201, "episode_idx": 60, "frame_idx": 70, "global_frame_idx": 15201, "task_index": 12}, {"db_idx": 15202, "episode_idx": 60, "frame_idx": 71, "global_frame_idx": 15202, "task_index": 12}, {"db_idx": 15203, "episode_idx": 60, "frame_idx": 72, "global_frame_idx": 15203, "task_index": 12}, {"db_idx": 15204, "episode_idx": 60, "frame_idx": 73, "global_frame_idx": 15204, "task_index": 12}, {"db_idx": 15205, "episode_idx": 60, "frame_idx": 74, "global_frame_idx": 15205, "task_index": 12}, {"db_idx": 15206, "episode_idx": 60, "frame_idx": 75, "global_frame_idx": 15206, "task_index": 12}, {"db_idx": 15207, "episode_idx": 60, "frame_idx": 76, "global_frame_idx": 15207, "task_index": 12}, {"db_idx": 15208, "episode_idx": 60, "frame_idx": 77, "global_frame_idx": 15208, "task_index": 12}, {"db_idx": 15209, "episode_idx": 60, "frame_idx": 78, "global_frame_idx": 15209, "task_index": 12}, {"db_idx": 15210, "episode_idx": 60, "frame_idx": 79, "global_frame_idx": 15210, "task_index": 12}, {"db_idx": 15211, "episode_idx": 60, "frame_idx": 80, "global_frame_idx": 15211, "task_index": 12}, {"db_idx": 15212, "episode_idx": 60, "frame_idx": 81, "global_frame_idx": 15212, "task_index": 12}, {"db_idx": 15213, "episode_idx": 60, "frame_idx": 82, "global_frame_idx": 15213, "task_index": 12}, {"db_idx": 15214, "episode_idx": 60, "frame_idx": 83, 
"global_frame_idx": 15214, "task_index": 12}, {"db_idx": 15215, "episode_idx": 60, "frame_idx": 84, "global_frame_idx": 15215, "task_index": 12}, {"db_idx": 15216, "episode_idx": 60, "frame_idx": 85, "global_frame_idx": 15216, "task_index": 12}, {"db_idx": 15217, "episode_idx": 60, "frame_idx": 86, "global_frame_idx": 15217, "task_index": 12}, {"db_idx": 15218, "episode_idx": 60, "frame_idx": 87, "global_frame_idx": 15218, "task_index": 12}, {"db_idx": 15219, "episode_idx": 60, "frame_idx": 88, "global_frame_idx": 15219, "task_index": 12}, {"db_idx": 15220, "episode_idx": 60, "frame_idx": 89, "global_frame_idx": 15220, "task_index": 12}, {"db_idx": 15221, "episode_idx": 60, "frame_idx": 90, "global_frame_idx": 15221, "task_index": 12}, {"db_idx": 15222, "episode_idx": 60, "frame_idx": 91, "global_frame_idx": 15222, "task_index": 12}, {"db_idx": 15223, "episode_idx": 60, "frame_idx": 92, "global_frame_idx": 15223, "task_index": 12}, {"db_idx": 15224, "episode_idx": 60, "frame_idx": 93, "global_frame_idx": 15224, "task_index": 12}, {"db_idx": 15225, "episode_idx": 60, "frame_idx": 94, "global_frame_idx": 15225, "task_index": 12}, {"db_idx": 15226, "episode_idx": 60, "frame_idx": 95, "global_frame_idx": 15226, "task_index": 12}, {"db_idx": 15227, "episode_idx": 60, "frame_idx": 96, "global_frame_idx": 15227, "task_index": 12}, {"db_idx": 15228, "episode_idx": 60, "frame_idx": 97, "global_frame_idx": 15228, "task_index": 12}, {"db_idx": 15229, "episode_idx": 60, "frame_idx": 98, "global_frame_idx": 15229, "task_index": 12}, {"db_idx": 15230, "episode_idx": 60, "frame_idx": 99, "global_frame_idx": 15230, "task_index": 12}, {"db_idx": 15231, "episode_idx": 60, "frame_idx": 100, "global_frame_idx": 15231, "task_index": 12}, {"db_idx": 15232, "episode_idx": 60, "frame_idx": 101, "global_frame_idx": 15232, "task_index": 12}, {"db_idx": 15233, "episode_idx": 60, "frame_idx": 102, "global_frame_idx": 15233, "task_index": 12}, {"db_idx": 15234, "episode_idx": 60, "frame_idx": 
103, "global_frame_idx": 15234, "task_index": 12}, {"db_idx": 15235, "episode_idx": 60, "frame_idx": 104, "global_frame_idx": 15235, "task_index": 12}, {"db_idx": 15236, "episode_idx": 60, "frame_idx": 105, "global_frame_idx": 15236, "task_index": 12}, {"db_idx": 15237, "episode_idx": 60, "frame_idx": 106, "global_frame_idx": 15237, "task_index": 12}, {"db_idx": 15238, "episode_idx": 60, "frame_idx": 107, "global_frame_idx": 15238, "task_index": 12}, {"db_idx": 15239, "episode_idx": 60, "frame_idx": 108, "global_frame_idx": 15239, "task_index": 12}, {"db_idx": 15240, "episode_idx": 60, "frame_idx": 109, "global_frame_idx": 15240, "task_index": 12}, {"db_idx": 15241, "episode_idx": 60, "frame_idx": 110, "global_frame_idx": 15241, "task_index": 12}, {"db_idx": 15242, "episode_idx": 60, "frame_idx": 111, "global_frame_idx": 15242, "task_index": 12}, {"db_idx": 15243, "episode_idx": 60, "frame_idx": 112, "global_frame_idx": 15243, "task_index": 12}, {"db_idx": 15244, "episode_idx": 60, "frame_idx": 113, "global_frame_idx": 15244, "task_index": 12}, {"db_idx": 15245, "episode_idx": 60, "frame_idx": 114, "global_frame_idx": 15245, "task_index": 12}, {"db_idx": 15246, "episode_idx": 60, "frame_idx": 115, "global_frame_idx": 15246, "task_index": 12}, {"db_idx": 15247, "episode_idx": 60, "frame_idx": 116, "global_frame_idx": 15247, "task_index": 12}, {"db_idx": 15248, "episode_idx": 60, "frame_idx": 117, "global_frame_idx": 15248, "task_index": 12}, {"db_idx": 15249, "episode_idx": 60, "frame_idx": 118, "global_frame_idx": 15249, "task_index": 12}, {"db_idx": 15250, "episode_idx": 60, "frame_idx": 119, "global_frame_idx": 15250, "task_index": 12}, {"db_idx": 15251, "episode_idx": 60, "frame_idx": 120, "global_frame_idx": 15251, "task_index": 12}, {"db_idx": 15252, "episode_idx": 60, "frame_idx": 121, "global_frame_idx": 15252, "task_index": 12}, {"db_idx": 15253, "episode_idx": 60, "frame_idx": 122, "global_frame_idx": 15253, "task_index": 12}, {"db_idx": 15254, 
"episode_idx": 60, "frame_idx": 123, "global_frame_idx": 15254, "task_index": 12}, {"db_idx": 15255, "episode_idx": 60, "frame_idx": 124, "global_frame_idx": 15255, "task_index": 12}, {"db_idx": 15256, "episode_idx": 60, "frame_idx": 125, "global_frame_idx": 15256, "task_index": 12}, {"db_idx": 15257, "episode_idx": 60, "frame_idx": 126, "global_frame_idx": 15257, "task_index": 12}, {"db_idx": 15258, "episode_idx": 60, "frame_idx": 127, "global_frame_idx": 15258, "task_index": 12}, {"db_idx": 15259, "episode_idx": 60, "frame_idx": 128, "global_frame_idx": 15259, "task_index": 12}, {"db_idx": 15260, "episode_idx": 60, "frame_idx": 129, "global_frame_idx": 15260, "task_index": 12}, {"db_idx": 15261, "episode_idx": 60, "frame_idx": 130, "global_frame_idx": 15261, "task_index": 12}, {"db_idx": 15262, "episode_idx": 60, "frame_idx": 131, "global_frame_idx": 15262, "task_index": 12}, {"db_idx": 15263, "episode_idx": 60, "frame_idx": 132, "global_frame_idx": 15263, "task_index": 12}, {"db_idx": 15264, "episode_idx": 60, "frame_idx": 133, "global_frame_idx": 15264, "task_index": 12}, {"db_idx": 15265, "episode_idx": 60, "frame_idx": 134, "global_frame_idx": 15265, "task_index": 12}, {"db_idx": 15266, "episode_idx": 60, "frame_idx": 135, "global_frame_idx": 15266, "task_index": 12}, {"db_idx": 15267, "episode_idx": 61, "frame_idx": 0, "global_frame_idx": 15267, "task_index": 12}, {"db_idx": 15268, "episode_idx": 61, "frame_idx": 1, "global_frame_idx": 15268, "task_index": 12}, {"db_idx": 15269, "episode_idx": 61, "frame_idx": 2, "global_frame_idx": 15269, "task_index": 12}, {"db_idx": 15270, "episode_idx": 61, "frame_idx": 3, "global_frame_idx": 15270, "task_index": 12}, {"db_idx": 15271, "episode_idx": 61, "frame_idx": 4, "global_frame_idx": 15271, "task_index": 12}, {"db_idx": 15272, "episode_idx": 61, "frame_idx": 5, "global_frame_idx": 15272, "task_index": 12}, {"db_idx": 15273, "episode_idx": 61, "frame_idx": 6, "global_frame_idx": 15273, "task_index": 12}, {"db_idx": 
15274, "episode_idx": 61, "frame_idx": 7, "global_frame_idx": 15274, "task_index": 12}, {"db_idx": 15275, "episode_idx": 61, "frame_idx": 8, "global_frame_idx": 15275, "task_index": 12}, {"db_idx": 15276, "episode_idx": 61, "frame_idx": 9, "global_frame_idx": 15276, "task_index": 12}, {"db_idx": 15277, "episode_idx": 61, "frame_idx": 10, "global_frame_idx": 15277, "task_index": 12}, {"db_idx": 15278, "episode_idx": 61, "frame_idx": 11, "global_frame_idx": 15278, "task_index": 12}, {"db_idx": 15279, "episode_idx": 61, "frame_idx": 12, "global_frame_idx": 15279, "task_index": 12}, {"db_idx": 15280, "episode_idx": 61, "frame_idx": 13, "global_frame_idx": 15280, "task_index": 12}, {"db_idx": 15281, "episode_idx": 61, "frame_idx": 14, "global_frame_idx": 15281, "task_index": 12}, {"db_idx": 15282, "episode_idx": 61, "frame_idx": 15, "global_frame_idx": 15282, "task_index": 12}, {"db_idx": 15283, "episode_idx": 61, "frame_idx": 16, "global_frame_idx": 15283, "task_index": 12}, {"db_idx": 15284, "episode_idx": 61, "frame_idx": 17, "global_frame_idx": 15284, "task_index": 12}, {"db_idx": 15285, "episode_idx": 61, "frame_idx": 18, "global_frame_idx": 15285, "task_index": 12}, {"db_idx": 15286, "episode_idx": 61, "frame_idx": 19, "global_frame_idx": 15286, "task_index": 12}, {"db_idx": 15287, "episode_idx": 61, "frame_idx": 20, "global_frame_idx": 15287, "task_index": 12}, {"db_idx": 15288, "episode_idx": 61, "frame_idx": 21, "global_frame_idx": 15288, "task_index": 12}, {"db_idx": 15289, "episode_idx": 61, "frame_idx": 22, "global_frame_idx": 15289, "task_index": 12}, {"db_idx": 15290, "episode_idx": 61, "frame_idx": 23, "global_frame_idx": 15290, "task_index": 12}, {"db_idx": 15291, "episode_idx": 61, "frame_idx": 24, "global_frame_idx": 15291, "task_index": 12}, {"db_idx": 15292, "episode_idx": 61, "frame_idx": 25, "global_frame_idx": 15292, "task_index": 12}, {"db_idx": 15293, "episode_idx": 61, "frame_idx": 26, "global_frame_idx": 15293, "task_index": 12}, {"db_idx": 
15294, "episode_idx": 61, "frame_idx": 27, "global_frame_idx": 15294, "task_index": 12}, {"db_idx": 15295, "episode_idx": 61, "frame_idx": 28, "global_frame_idx": 15295, "task_index": 12}, {"db_idx": 15296, "episode_idx": 61, "frame_idx": 29, "global_frame_idx": 15296, "task_index": 12}, {"db_idx": 15297, "episode_idx": 61, "frame_idx": 30, "global_frame_idx": 15297, "task_index": 12}, {"db_idx": 15298, "episode_idx": 61, "frame_idx": 31, "global_frame_idx": 15298, "task_index": 12}, {"db_idx": 15299, "episode_idx": 61, "frame_idx": 32, "global_frame_idx": 15299, "task_index": 12}, {"db_idx": 15300, "episode_idx": 61, "frame_idx": 33, "global_frame_idx": 15300, "task_index": 12}, {"db_idx": 15301, "episode_idx": 61, "frame_idx": 34, "global_frame_idx": 15301, "task_index": 12}, {"db_idx": 15302, "episode_idx": 61, "frame_idx": 35, "global_frame_idx": 15302, "task_index": 12}, {"db_idx": 15303, "episode_idx": 61, "frame_idx": 36, "global_frame_idx": 15303, "task_index": 12}, {"db_idx": 15304, "episode_idx": 61, "frame_idx": 37, "global_frame_idx": 15304, "task_index": 12}, {"db_idx": 15305, "episode_idx": 61, "frame_idx": 38, "global_frame_idx": 15305, "task_index": 12}, {"db_idx": 15306, "episode_idx": 61, "frame_idx": 39, "global_frame_idx": 15306, "task_index": 12}, {"db_idx": 15307, "episode_idx": 61, "frame_idx": 40, "global_frame_idx": 15307, "task_index": 12}, {"db_idx": 15308, "episode_idx": 61, "frame_idx": 41, "global_frame_idx": 15308, "task_index": 12}, {"db_idx": 15309, "episode_idx": 61, "frame_idx": 42, "global_frame_idx": 15309, "task_index": 12}, {"db_idx": 15310, "episode_idx": 61, "frame_idx": 43, "global_frame_idx": 15310, "task_index": 12}, {"db_idx": 15311, "episode_idx": 61, "frame_idx": 44, "global_frame_idx": 15311, "task_index": 12}, {"db_idx": 15312, "episode_idx": 61, "frame_idx": 45, "global_frame_idx": 15312, "task_index": 12}, {"db_idx": 15313, "episode_idx": 61, "frame_idx": 46, "global_frame_idx": 15313, "task_index": 12}, {"db_idx": 
15314, "episode_idx": 61, "frame_idx": 47, "global_frame_idx": 15314, "task_index": 12}, {"db_idx": 15315, "episode_idx": 61, "frame_idx": 48, "global_frame_idx": 15315, "task_index": 12}, {"db_idx": 15316, "episode_idx": 61, "frame_idx": 49, "global_frame_idx": 15316, "task_index": 12}, {"db_idx": 15317, "episode_idx": 61, "frame_idx": 50, "global_frame_idx": 15317, "task_index": 12}, {"db_idx": 15318, "episode_idx": 61, "frame_idx": 51, "global_frame_idx": 15318, "task_index": 12}, {"db_idx": 15319, "episode_idx": 61, "frame_idx": 52, "global_frame_idx": 15319, "task_index": 12}, {"db_idx": 15320, "episode_idx": 61, "frame_idx": 53, "global_frame_idx": 15320, "task_index": 12}, {"db_idx": 15321, "episode_idx": 61, "frame_idx": 54, "global_frame_idx": 15321, "task_index": 12}, {"db_idx": 15322, "episode_idx": 61, "frame_idx": 55, "global_frame_idx": 15322, "task_index": 12}, {"db_idx": 15323, "episode_idx": 61, "frame_idx": 56, "global_frame_idx": 15323, "task_index": 12}, {"db_idx": 15324, "episode_idx": 61, "frame_idx": 57, "global_frame_idx": 15324, "task_index": 12}, {"db_idx": 15325, "episode_idx": 61, "frame_idx": 58, "global_frame_idx": 15325, "task_index": 12}, {"db_idx": 15326, "episode_idx": 61, "frame_idx": 59, "global_frame_idx": 15326, "task_index": 12}, {"db_idx": 15327, "episode_idx": 61, "frame_idx": 60, "global_frame_idx": 15327, "task_index": 12}, {"db_idx": 15328, "episode_idx": 61, "frame_idx": 61, "global_frame_idx": 15328, "task_index": 12}, {"db_idx": 15329, "episode_idx": 61, "frame_idx": 62, "global_frame_idx": 15329, "task_index": 12}, {"db_idx": 15330, "episode_idx": 61, "frame_idx": 63, "global_frame_idx": 15330, "task_index": 12}, {"db_idx": 15331, "episode_idx": 61, "frame_idx": 64, "global_frame_idx": 15331, "task_index": 12}, {"db_idx": 15332, "episode_idx": 61, "frame_idx": 65, "global_frame_idx": 15332, "task_index": 12}, {"db_idx": 15333, "episode_idx": 61, "frame_idx": 66, "global_frame_idx": 15333, "task_index": 12}, {"db_idx": 
15334, "episode_idx": 61, "frame_idx": 67, "global_frame_idx": 15334, "task_index": 12}, {"db_idx": 15335, "episode_idx": 61, "frame_idx": 68, "global_frame_idx": 15335, "task_index": 12}, {"db_idx": 15336, "episode_idx": 61, "frame_idx": 69, "global_frame_idx": 15336, "task_index": 12}, {"db_idx": 15337, "episode_idx": 61, "frame_idx": 70, "global_frame_idx": 15337, "task_index": 12}, {"db_idx": 15338, "episode_idx": 61, "frame_idx": 71, "global_frame_idx": 15338, "task_index": 12}, {"db_idx": 15339, "episode_idx": 61, "frame_idx": 72, "global_frame_idx": 15339, "task_index": 12}, {"db_idx": 15340, "episode_idx": 61, "frame_idx": 73, "global_frame_idx": 15340, "task_index": 12}, {"db_idx": 15341, "episode_idx": 61, "frame_idx": 74, "global_frame_idx": 15341, "task_index": 12}, {"db_idx": 15342, "episode_idx": 61, "frame_idx": 75, "global_frame_idx": 15342, "task_index": 12}, {"db_idx": 15343, "episode_idx": 61, "frame_idx": 76, "global_frame_idx": 15343, "task_index": 12}, {"db_idx": 15344, "episode_idx": 61, "frame_idx": 77, "global_frame_idx": 15344, "task_index": 12}, {"db_idx": 15345, "episode_idx": 61, "frame_idx": 78, "global_frame_idx": 15345, "task_index": 12}, {"db_idx": 15346, "episode_idx": 61, "frame_idx": 79, "global_frame_idx": 15346, "task_index": 12}, {"db_idx": 15347, "episode_idx": 61, "frame_idx": 80, "global_frame_idx": 15347, "task_index": 12}, {"db_idx": 15348, "episode_idx": 61, "frame_idx": 81, "global_frame_idx": 15348, "task_index": 12}, {"db_idx": 15349, "episode_idx": 61, "frame_idx": 82, "global_frame_idx": 15349, "task_index": 12}, {"db_idx": 15350, "episode_idx": 61, "frame_idx": 83, "global_frame_idx": 15350, "task_index": 12}, {"db_idx": 15351, "episode_idx": 61, "frame_idx": 84, "global_frame_idx": 15351, "task_index": 12}, {"db_idx": 15352, "episode_idx": 61, "frame_idx": 85, "global_frame_idx": 15352, "task_index": 12}, {"db_idx": 15353, "episode_idx": 61, "frame_idx": 86, "global_frame_idx": 15353, "task_index": 12}, {"db_idx": 
15354, "episode_idx": 61, "frame_idx": 87, "global_frame_idx": 15354, "task_index": 12}, {"db_idx": 15355, "episode_idx": 61, "frame_idx": 88, "global_frame_idx": 15355, "task_index": 12}, {"db_idx": 15356, "episode_idx": 61, "frame_idx": 89, "global_frame_idx": 15356, "task_index": 12}, {"db_idx": 15357, "episode_idx": 61, "frame_idx": 90, "global_frame_idx": 15357, "task_index": 12}, {"db_idx": 15358, "episode_idx": 61, "frame_idx": 91, "global_frame_idx": 15358, "task_index": 12}, {"db_idx": 15359, "episode_idx": 61, "frame_idx": 92, "global_frame_idx": 15359, "task_index": 12}, {"db_idx": 15360, "episode_idx": 61, "frame_idx": 93, "global_frame_idx": 15360, "task_index": 12}, {"db_idx": 15361, "episode_idx": 61, "frame_idx": 94, "global_frame_idx": 15361, "task_index": 12}, {"db_idx": 15362, "episode_idx": 61, "frame_idx": 95, "global_frame_idx": 15362, "task_index": 12}, {"db_idx": 15363, "episode_idx": 61, "frame_idx": 96, "global_frame_idx": 15363, "task_index": 12}, {"db_idx": 15364, "episode_idx": 61, "frame_idx": 97, "global_frame_idx": 15364, "task_index": 12}, {"db_idx": 15365, "episode_idx": 61, "frame_idx": 98, "global_frame_idx": 15365, "task_index": 12}, {"db_idx": 15366, "episode_idx": 61, "frame_idx": 99, "global_frame_idx": 15366, "task_index": 12}, {"db_idx": 15367, "episode_idx": 61, "frame_idx": 100, "global_frame_idx": 15367, "task_index": 12}, {"db_idx": 15368, "episode_idx": 61, "frame_idx": 101, "global_frame_idx": 15368, "task_index": 12}, {"db_idx": 15369, "episode_idx": 61, "frame_idx": 102, "global_frame_idx": 15369, "task_index": 12}, {"db_idx": 15370, "episode_idx": 61, "frame_idx": 103, "global_frame_idx": 15370, "task_index": 12}, {"db_idx": 15371, "episode_idx": 61, "frame_idx": 104, "global_frame_idx": 15371, "task_index": 12}, {"db_idx": 15372, "episode_idx": 61, "frame_idx": 105, "global_frame_idx": 15372, "task_index": 12}, {"db_idx": 15373, "episode_idx": 61, "frame_idx": 106, "global_frame_idx": 15373, "task_index": 12}, 
{"db_idx": 15374, "episode_idx": 61, "frame_idx": 107, "global_frame_idx": 15374, "task_index": 12}, {"db_idx": 15375, "episode_idx": 61, "frame_idx": 108, "global_frame_idx": 15375, "task_index": 12}, {"db_idx": 15376, "episode_idx": 61, "frame_idx": 109, "global_frame_idx": 15376, "task_index": 12}, {"db_idx": 15377, "episode_idx": 61, "frame_idx": 110, "global_frame_idx": 15377, "task_index": 12}, {"db_idx": 15378, "episode_idx": 61, "frame_idx": 111, "global_frame_idx": 15378, "task_index": 12}, {"db_idx": 15379, "episode_idx": 61, "frame_idx": 112, "global_frame_idx": 15379, "task_index": 12}, {"db_idx": 15380, "episode_idx": 61, "frame_idx": 113, "global_frame_idx": 15380, "task_index": 12}, {"db_idx": 15381, "episode_idx": 61, "frame_idx": 114, "global_frame_idx": 15381, "task_index": 12}, {"db_idx": 15382, "episode_idx": 61, "frame_idx": 115, "global_frame_idx": 15382, "task_index": 12}, {"db_idx": 15383, "episode_idx": 61, "frame_idx": 116, "global_frame_idx": 15383, "task_index": 12}, {"db_idx": 15384, "episode_idx": 61, "frame_idx": 117, "global_frame_idx": 15384, "task_index": 12}, {"db_idx": 15385, "episode_idx": 61, "frame_idx": 118, "global_frame_idx": 15385, "task_index": 12}, {"db_idx": 15386, "episode_idx": 61, "frame_idx": 119, "global_frame_idx": 15386, "task_index": 12}, {"db_idx": 15387, "episode_idx": 61, "frame_idx": 120, "global_frame_idx": 15387, "task_index": 12}, {"db_idx": 15388, "episode_idx": 61, "frame_idx": 121, "global_frame_idx": 15388, "task_index": 12}, {"db_idx": 15389, "episode_idx": 61, "frame_idx": 122, "global_frame_idx": 15389, "task_index": 12}, {"db_idx": 15390, "episode_idx": 61, "frame_idx": 123, "global_frame_idx": 15390, "task_index": 12}, {"db_idx": 15391, "episode_idx": 61, "frame_idx": 124, "global_frame_idx": 15391, "task_index": 12}, {"db_idx": 15392, "episode_idx": 61, "frame_idx": 125, "global_frame_idx": 15392, "task_index": 12}, {"db_idx": 15393, "episode_idx": 61, "frame_idx": 126, "global_frame_idx": 
15393, "task_index": 12}, {"db_idx": 15394, "episode_idx": 61, "frame_idx": 127, "global_frame_idx": 15394, "task_index": 12}, {"db_idx": 15395, "episode_idx": 61, "frame_idx": 128, "global_frame_idx": 15395, "task_index": 12}, {"db_idx": 15396, "episode_idx": 61, "frame_idx": 129, "global_frame_idx": 15396, "task_index": 12}, {"db_idx": 15397, "episode_idx": 61, "frame_idx": 130, "global_frame_idx": 15397, "task_index": 12}, {"db_idx": 15398, "episode_idx": 61, "frame_idx": 131, "global_frame_idx": 15398, "task_index": 12}, {"db_idx": 15399, "episode_idx": 61, "frame_idx": 132, "global_frame_idx": 15399, "task_index": 12}, {"db_idx": 15400, "episode_idx": 61, "frame_idx": 133, "global_frame_idx": 15400, "task_index": 12}, {"db_idx": 15401, "episode_idx": 61, "frame_idx": 134, "global_frame_idx": 15401, "task_index": 12}, {"db_idx": 15402, "episode_idx": 61, "frame_idx": 135, "global_frame_idx": 15402, "task_index": 12}, {"db_idx": 15403, "episode_idx": 61, "frame_idx": 136, "global_frame_idx": 15403, "task_index": 12}, {"db_idx": 15404, "episode_idx": 61, "frame_idx": 137, "global_frame_idx": 15404, "task_index": 12}, {"db_idx": 15405, "episode_idx": 61, "frame_idx": 138, "global_frame_idx": 15405, "task_index": 12}, {"db_idx": 15406, "episode_idx": 61, "frame_idx": 139, "global_frame_idx": 15406, "task_index": 12}, {"db_idx": 15407, "episode_idx": 61, "frame_idx": 140, "global_frame_idx": 15407, "task_index": 12}, {"db_idx": 15408, "episode_idx": 61, "frame_idx": 141, "global_frame_idx": 15408, "task_index": 12}, {"db_idx": 15409, "episode_idx": 62, "frame_idx": 0, "global_frame_idx": 15409, "task_index": 12}, {"db_idx": 15410, "episode_idx": 62, "frame_idx": 1, "global_frame_idx": 15410, "task_index": 12}, {"db_idx": 15411, "episode_idx": 62, "frame_idx": 2, "global_frame_idx": 15411, "task_index": 12}, {"db_idx": 15412, "episode_idx": 62, "frame_idx": 3, "global_frame_idx": 15412, "task_index": 12}, {"db_idx": 15413, "episode_idx": 62, "frame_idx": 4, 
"global_frame_idx": 15413, "task_index": 12}, {"db_idx": 15414, "episode_idx": 62, "frame_idx": 5, "global_frame_idx": 15414, "task_index": 12}, {"db_idx": 15415, "episode_idx": 62, "frame_idx": 6, "global_frame_idx": 15415, "task_index": 12}, {"db_idx": 15416, "episode_idx": 62, "frame_idx": 7, "global_frame_idx": 15416, "task_index": 12}, {"db_idx": 15417, "episode_idx": 62, "frame_idx": 8, "global_frame_idx": 15417, "task_index": 12}, {"db_idx": 15418, "episode_idx": 62, "frame_idx": 9, "global_frame_idx": 15418, "task_index": 12}, {"db_idx": 15419, "episode_idx": 62, "frame_idx": 10, "global_frame_idx": 15419, "task_index": 12}, {"db_idx": 15420, "episode_idx": 62, "frame_idx": 11, "global_frame_idx": 15420, "task_index": 12}, {"db_idx": 15421, "episode_idx": 62, "frame_idx": 12, "global_frame_idx": 15421, "task_index": 12}, {"db_idx": 15422, "episode_idx": 62, "frame_idx": 13, "global_frame_idx": 15422, "task_index": 12}, {"db_idx": 15423, "episode_idx": 62, "frame_idx": 14, "global_frame_idx": 15423, "task_index": 12}, {"db_idx": 15424, "episode_idx": 62, "frame_idx": 15, "global_frame_idx": 15424, "task_index": 12}, {"db_idx": 15425, "episode_idx": 62, "frame_idx": 16, "global_frame_idx": 15425, "task_index": 12}, {"db_idx": 15426, "episode_idx": 62, "frame_idx": 17, "global_frame_idx": 15426, "task_index": 12}, {"db_idx": 15427, "episode_idx": 62, "frame_idx": 18, "global_frame_idx": 15427, "task_index": 12}, {"db_idx": 15428, "episode_idx": 62, "frame_idx": 19, "global_frame_idx": 15428, "task_index": 12}, {"db_idx": 15429, "episode_idx": 62, "frame_idx": 20, "global_frame_idx": 15429, "task_index": 12}, {"db_idx": 15430, "episode_idx": 62, "frame_idx": 21, "global_frame_idx": 15430, "task_index": 12}, {"db_idx": 15431, "episode_idx": 62, "frame_idx": 22, "global_frame_idx": 15431, "task_index": 12}, {"db_idx": 15432, "episode_idx": 62, "frame_idx": 23, "global_frame_idx": 15432, "task_index": 12}, {"db_idx": 15433, "episode_idx": 62, "frame_idx": 24, 
"global_frame_idx": 15433, "task_index": 12}, {"db_idx": 15434, "episode_idx": 62, "frame_idx": 25, "global_frame_idx": 15434, "task_index": 12}, {"db_idx": 15435, "episode_idx": 62, "frame_idx": 26, "global_frame_idx": 15435, "task_index": 12}, {"db_idx": 15436, "episode_idx": 62, "frame_idx": 27, "global_frame_idx": 15436, "task_index": 12}, {"db_idx": 15437, "episode_idx": 62, "frame_idx": 28, "global_frame_idx": 15437, "task_index": 12}, {"db_idx": 15438, "episode_idx": 62, "frame_idx": 29, "global_frame_idx": 15438, "task_index": 12}, {"db_idx": 15439, "episode_idx": 62, "frame_idx": 30, "global_frame_idx": 15439, "task_index": 12}, {"db_idx": 15440, "episode_idx": 62, "frame_idx": 31, "global_frame_idx": 15440, "task_index": 12}, {"db_idx": 15441, "episode_idx": 62, "frame_idx": 32, "global_frame_idx": 15441, "task_index": 12}, {"db_idx": 15442, "episode_idx": 62, "frame_idx": 33, "global_frame_idx": 15442, "task_index": 12}, {"db_idx": 15443, "episode_idx": 62, "frame_idx": 34, "global_frame_idx": 15443, "task_index": 12}, {"db_idx": 15444, "episode_idx": 62, "frame_idx": 35, "global_frame_idx": 15444, "task_index": 12}, {"db_idx": 15445, "episode_idx": 62, "frame_idx": 36, "global_frame_idx": 15445, "task_index": 12}, {"db_idx": 15446, "episode_idx": 62, "frame_idx": 37, "global_frame_idx": 15446, "task_index": 12}, {"db_idx": 15447, "episode_idx": 62, "frame_idx": 38, "global_frame_idx": 15447, "task_index": 12}, {"db_idx": 15448, "episode_idx": 62, "frame_idx": 39, "global_frame_idx": 15448, "task_index": 12}, {"db_idx": 15449, "episode_idx": 62, "frame_idx": 40, "global_frame_idx": 15449, "task_index": 12}, {"db_idx": 15450, "episode_idx": 62, "frame_idx": 41, "global_frame_idx": 15450, "task_index": 12}, {"db_idx": 15451, "episode_idx": 62, "frame_idx": 42, "global_frame_idx": 15451, "task_index": 12}, {"db_idx": 15452, "episode_idx": 62, "frame_idx": 43, "global_frame_idx": 15452, "task_index": 12}, {"db_idx": 15453, "episode_idx": 62, "frame_idx": 44, 
"global_frame_idx": 15453, "task_index": 12}, {"db_idx": 15454, "episode_idx": 62, "frame_idx": 45, "global_frame_idx": 15454, "task_index": 12}, {"db_idx": 15455, "episode_idx": 62, "frame_idx": 46, "global_frame_idx": 15455, "task_index": 12}, {"db_idx": 15456, "episode_idx": 62, "frame_idx": 47, "global_frame_idx": 15456, "task_index": 12}, {"db_idx": 15457, "episode_idx": 62, "frame_idx": 48, "global_frame_idx": 15457, "task_index": 12}, {"db_idx": 15458, "episode_idx": 62, "frame_idx": 49, "global_frame_idx": 15458, "task_index": 12}, {"db_idx": 15459, "episode_idx": 62, "frame_idx": 50, "global_frame_idx": 15459, "task_index": 12}, {"db_idx": 15460, "episode_idx": 62, "frame_idx": 51, "global_frame_idx": 15460, "task_index": 12}, {"db_idx": 15461, "episode_idx": 62, "frame_idx": 52, "global_frame_idx": 15461, "task_index": 12}, {"db_idx": 15462, "episode_idx": 62, "frame_idx": 53, "global_frame_idx": 15462, "task_index": 12}, {"db_idx": 15463, "episode_idx": 62, "frame_idx": 54, "global_frame_idx": 15463, "task_index": 12}, {"db_idx": 15464, "episode_idx": 62, "frame_idx": 55, "global_frame_idx": 15464, "task_index": 12}, {"db_idx": 15465, "episode_idx": 62, "frame_idx": 56, "global_frame_idx": 15465, "task_index": 12}, {"db_idx": 15466, "episode_idx": 62, "frame_idx": 57, "global_frame_idx": 15466, "task_index": 12}, {"db_idx": 15467, "episode_idx": 62, "frame_idx": 58, "global_frame_idx": 15467, "task_index": 12}, {"db_idx": 15468, "episode_idx": 62, "frame_idx": 59, "global_frame_idx": 15468, "task_index": 12}, {"db_idx": 15469, "episode_idx": 62, "frame_idx": 60, "global_frame_idx": 15469, "task_index": 12}, {"db_idx": 15470, "episode_idx": 62, "frame_idx": 61, "global_frame_idx": 15470, "task_index": 12}, {"db_idx": 15471, "episode_idx": 62, "frame_idx": 62, "global_frame_idx": 15471, "task_index": 12}, {"db_idx": 15472, "episode_idx": 62, "frame_idx": 63, "global_frame_idx": 15472, "task_index": 12}, {"db_idx": 15473, "episode_idx": 62, "frame_idx": 64, 
"global_frame_idx": 15473, "task_index": 12}, {"db_idx": 15474, "episode_idx": 62, "frame_idx": 65, "global_frame_idx": 15474, "task_index": 12}, {"db_idx": 15475, "episode_idx": 62, "frame_idx": 66, "global_frame_idx": 15475, "task_index": 12}, {"db_idx": 15476, "episode_idx": 62, "frame_idx": 67, "global_frame_idx": 15476, "task_index": 12}, {"db_idx": 15477, "episode_idx": 62, "frame_idx": 68, "global_frame_idx": 15477, "task_index": 12}, {"db_idx": 15478, "episode_idx": 62, "frame_idx": 69, "global_frame_idx": 15478, "task_index": 12}, {"db_idx": 15479, "episode_idx": 62, "frame_idx": 70, "global_frame_idx": 15479, "task_index": 12}, {"db_idx": 15480, "episode_idx": 62, "frame_idx": 71, "global_frame_idx": 15480, "task_index": 12}, {"db_idx": 15481, "episode_idx": 62, "frame_idx": 72, "global_frame_idx": 15481, "task_index": 12}, {"db_idx": 15482, "episode_idx": 62, "frame_idx": 73, "global_frame_idx": 15482, "task_index": 12}, {"db_idx": 15483, "episode_idx": 62, "frame_idx": 74, "global_frame_idx": 15483, "task_index": 12}, {"db_idx": 15484, "episode_idx": 62, "frame_idx": 75, "global_frame_idx": 15484, "task_index": 12}, {"db_idx": 15485, "episode_idx": 62, "frame_idx": 76, "global_frame_idx": 15485, "task_index": 12}, {"db_idx": 15486, "episode_idx": 62, "frame_idx": 77, "global_frame_idx": 15486, "task_index": 12}, {"db_idx": 15487, "episode_idx": 62, "frame_idx": 78, "global_frame_idx": 15487, "task_index": 12}, {"db_idx": 15488, "episode_idx": 62, "frame_idx": 79, "global_frame_idx": 15488, "task_index": 12}, {"db_idx": 15489, "episode_idx": 62, "frame_idx": 80, "global_frame_idx": 15489, "task_index": 12}, {"db_idx": 15490, "episode_idx": 62, "frame_idx": 81, "global_frame_idx": 15490, "task_index": 12}, {"db_idx": 15491, "episode_idx": 62, "frame_idx": 82, "global_frame_idx": 15491, "task_index": 12}, {"db_idx": 15492, "episode_idx": 62, "frame_idx": 83, "global_frame_idx": 15492, "task_index": 12}, {"db_idx": 15493, "episode_idx": 62, "frame_idx": 84, 
"global_frame_idx": 15493, "task_index": 12}, {"db_idx": 15494, "episode_idx": 62, "frame_idx": 85, "global_frame_idx": 15494, "task_index": 12}, {"db_idx": 15495, "episode_idx": 62, "frame_idx": 86, "global_frame_idx": 15495, "task_index": 12}, {"db_idx": 15496, "episode_idx": 62, "frame_idx": 87, "global_frame_idx": 15496, "task_index": 12}, {"db_idx": 15497, "episode_idx": 62, "frame_idx": 88, "global_frame_idx": 15497, "task_index": 12}, {"db_idx": 15498, "episode_idx": 62, "frame_idx": 89, "global_frame_idx": 15498, "task_index": 12}, {"db_idx": 15499, "episode_idx": 62, "frame_idx": 90, "global_frame_idx": 15499, "task_index": 12}, {"db_idx": 15500, "episode_idx": 62, "frame_idx": 91, "global_frame_idx": 15500, "task_index": 12}, {"db_idx": 15501, "episode_idx": 62, "frame_idx": 92, "global_frame_idx": 15501, "task_index": 12}, {"db_idx": 15502, "episode_idx": 62, "frame_idx": 93, "global_frame_idx": 15502, "task_index": 12}, {"db_idx": 15503, "episode_idx": 62, "frame_idx": 94, "global_frame_idx": 15503, "task_index": 12}, {"db_idx": 15504, "episode_idx": 62, "frame_idx": 95, "global_frame_idx": 15504, "task_index": 12}, {"db_idx": 15505, "episode_idx": 62, "frame_idx": 96, "global_frame_idx": 15505, "task_index": 12}, {"db_idx": 15506, "episode_idx": 62, "frame_idx": 97, "global_frame_idx": 15506, "task_index": 12}, {"db_idx": 15507, "episode_idx": 62, "frame_idx": 98, "global_frame_idx": 15507, "task_index": 12}, {"db_idx": 15508, "episode_idx": 62, "frame_idx": 99, "global_frame_idx": 15508, "task_index": 12}, {"db_idx": 15509, "episode_idx": 62, "frame_idx": 100, "global_frame_idx": 15509, "task_index": 12}, {"db_idx": 15510, "episode_idx": 62, "frame_idx": 101, "global_frame_idx": 15510, "task_index": 12}, {"db_idx": 15511, "episode_idx": 62, "frame_idx": 102, "global_frame_idx": 15511, "task_index": 12}, {"db_idx": 15512, "episode_idx": 62, "frame_idx": 103, "global_frame_idx": 15512, "task_index": 12}, {"db_idx": 15513, "episode_idx": 62, "frame_idx": 
104, "global_frame_idx": 15513, "task_index": 12}, {"db_idx": 15514, "episode_idx": 62, "frame_idx": 105, "global_frame_idx": 15514, "task_index": 12}, {"db_idx": 15515, "episode_idx": 62, "frame_idx": 106, "global_frame_idx": 15515, "task_index": 12}, {"db_idx": 15516, "episode_idx": 62, "frame_idx": 107, "global_frame_idx": 15516, "task_index": 12}, {"db_idx": 15517, "episode_idx": 62, "frame_idx": 108, "global_frame_idx": 15517, "task_index": 12}, {"db_idx": 15518, "episode_idx": 62, "frame_idx": 109, "global_frame_idx": 15518, "task_index": 12}, {"db_idx": 15519, "episode_idx": 62, "frame_idx": 110, "global_frame_idx": 15519, "task_index": 12}, {"db_idx": 15520, "episode_idx": 62, "frame_idx": 111, "global_frame_idx": 15520, "task_index": 12}, {"db_idx": 15521, "episode_idx": 62, "frame_idx": 112, "global_frame_idx": 15521, "task_index": 12}, {"db_idx": 15522, "episode_idx": 62, "frame_idx": 113, "global_frame_idx": 15522, "task_index": 12}, {"db_idx": 15523, "episode_idx": 62, "frame_idx": 114, "global_frame_idx": 15523, "task_index": 12}, {"db_idx": 15524, "episode_idx": 62, "frame_idx": 115, "global_frame_idx": 15524, "task_index": 12}, {"db_idx": 15525, "episode_idx": 62, "frame_idx": 116, "global_frame_idx": 15525, "task_index": 12}, {"db_idx": 15526, "episode_idx": 62, "frame_idx": 117, "global_frame_idx": 15526, "task_index": 12}, {"db_idx": 15527, "episode_idx": 62, "frame_idx": 118, "global_frame_idx": 15527, "task_index": 12}, {"db_idx": 15528, "episode_idx": 62, "frame_idx": 119, "global_frame_idx": 15528, "task_index": 12}, {"db_idx": 15529, "episode_idx": 62, "frame_idx": 120, "global_frame_idx": 15529, "task_index": 12}, {"db_idx": 15530, "episode_idx": 62, "frame_idx": 121, "global_frame_idx": 15530, "task_index": 12}, {"db_idx": 15531, "episode_idx": 62, "frame_idx": 122, "global_frame_idx": 15531, "task_index": 12}, {"db_idx": 15532, "episode_idx": 62, "frame_idx": 123, "global_frame_idx": 15532, "task_index": 12}, {"db_idx": 15533, 
"episode_idx": 62, "frame_idx": 124, "global_frame_idx": 15533, "task_index": 12}, {"db_idx": 15534, "episode_idx": 62, "frame_idx": 125, "global_frame_idx": 15534, "task_index": 12}, {"db_idx": 15535, "episode_idx": 62, "frame_idx": 126, "global_frame_idx": 15535, "task_index": 12}, {"db_idx": 15536, "episode_idx": 62, "frame_idx": 127, "global_frame_idx": 15536, "task_index": 12}, {"db_idx": 15537, "episode_idx": 62, "frame_idx": 128, "global_frame_idx": 15537, "task_index": 12}, {"db_idx": 15538, "episode_idx": 62, "frame_idx": 129, "global_frame_idx": 15538, "task_index": 12}, {"db_idx": 15539, "episode_idx": 62, "frame_idx": 130, "global_frame_idx": 15539, "task_index": 12}, {"db_idx": 15540, "episode_idx": 62, "frame_idx": 131, "global_frame_idx": 15540, "task_index": 12}, {"db_idx": 15541, "episode_idx": 62, "frame_idx": 132, "global_frame_idx": 15541, "task_index": 12}, {"db_idx": 15542, "episode_idx": 62, "frame_idx": 133, "global_frame_idx": 15542, "task_index": 12}, {"db_idx": 15543, "episode_idx": 62, "frame_idx": 134, "global_frame_idx": 15543, "task_index": 12}, {"db_idx": 15544, "episode_idx": 62, "frame_idx": 135, "global_frame_idx": 15544, "task_index": 12}, {"db_idx": 15545, "episode_idx": 62, "frame_idx": 136, "global_frame_idx": 15545, "task_index": 12}, {"db_idx": 15546, "episode_idx": 63, "frame_idx": 0, "global_frame_idx": 15546, "task_index": 12}, {"db_idx": 15547, "episode_idx": 63, "frame_idx": 1, "global_frame_idx": 15547, "task_index": 12}, {"db_idx": 15548, "episode_idx": 63, "frame_idx": 2, "global_frame_idx": 15548, "task_index": 12}, {"db_idx": 15549, "episode_idx": 63, "frame_idx": 3, "global_frame_idx": 15549, "task_index": 12}, {"db_idx": 15550, "episode_idx": 63, "frame_idx": 4, "global_frame_idx": 15550, "task_index": 12}, {"db_idx": 15551, "episode_idx": 63, "frame_idx": 5, "global_frame_idx": 15551, "task_index": 12}, {"db_idx": 15552, "episode_idx": 63, "frame_idx": 6, "global_frame_idx": 15552, "task_index": 12}, {"db_idx": 
15553, "episode_idx": 63, "frame_idx": 7, "global_frame_idx": 15553, "task_index": 12}, {"db_idx": 15554, "episode_idx": 63, "frame_idx": 8, "global_frame_idx": 15554, "task_index": 12}, {"db_idx": 15555, "episode_idx": 63, "frame_idx": 9, "global_frame_idx": 15555, "task_index": 12}, {"db_idx": 15556, "episode_idx": 63, "frame_idx": 10, "global_frame_idx": 15556, "task_index": 12}, {"db_idx": 15557, "episode_idx": 63, "frame_idx": 11, "global_frame_idx": 15557, "task_index": 12}, {"db_idx": 15558, "episode_idx": 63, "frame_idx": 12, "global_frame_idx": 15558, "task_index": 12}, {"db_idx": 15559, "episode_idx": 63, "frame_idx": 13, "global_frame_idx": 15559, "task_index": 12}, {"db_idx": 15560, "episode_idx": 63, "frame_idx": 14, "global_frame_idx": 15560, "task_index": 12}, {"db_idx": 15561, "episode_idx": 63, "frame_idx": 15, "global_frame_idx": 15561, "task_index": 12}, {"db_idx": 15562, "episode_idx": 63, "frame_idx": 16, "global_frame_idx": 15562, "task_index": 12}, {"db_idx": 15563, "episode_idx": 63, "frame_idx": 17, "global_frame_idx": 15563, "task_index": 12}, {"db_idx": 15564, "episode_idx": 63, "frame_idx": 18, "global_frame_idx": 15564, "task_index": 12}, {"db_idx": 15565, "episode_idx": 63, "frame_idx": 19, "global_frame_idx": 15565, "task_index": 12}, {"db_idx": 15566, "episode_idx": 63, "frame_idx": 20, "global_frame_idx": 15566, "task_index": 12}, {"db_idx": 15567, "episode_idx": 63, "frame_idx": 21, "global_frame_idx": 15567, "task_index": 12}, {"db_idx": 15568, "episode_idx": 63, "frame_idx": 22, "global_frame_idx": 15568, "task_index": 12}, {"db_idx": 15569, "episode_idx": 63, "frame_idx": 23, "global_frame_idx": 15569, "task_index": 12}, {"db_idx": 15570, "episode_idx": 63, "frame_idx": 24, "global_frame_idx": 15570, "task_index": 12}, {"db_idx": 15571, "episode_idx": 63, "frame_idx": 25, "global_frame_idx": 15571, "task_index": 12}, {"db_idx": 15572, "episode_idx": 63, "frame_idx": 26, "global_frame_idx": 15572, "task_index": 12}, {"db_idx": 
15573, "episode_idx": 63, "frame_idx": 27, "global_frame_idx": 15573, "task_index": 12}, {"db_idx": 15574, "episode_idx": 63, "frame_idx": 28, "global_frame_idx": 15574, "task_index": 12}, {"db_idx": 15575, "episode_idx": 63, "frame_idx": 29, "global_frame_idx": 15575, "task_index": 12}, {"db_idx": 15576, "episode_idx": 63, "frame_idx": 30, "global_frame_idx": 15576, "task_index": 12}, {"db_idx": 15577, "episode_idx": 63, "frame_idx": 31, "global_frame_idx": 15577, "task_index": 12}, {"db_idx": 15578, "episode_idx": 63, "frame_idx": 32, "global_frame_idx": 15578, "task_index": 12}, {"db_idx": 15579, "episode_idx": 63, "frame_idx": 33, "global_frame_idx": 15579, "task_index": 12}, {"db_idx": 15580, "episode_idx": 63, "frame_idx": 34, "global_frame_idx": 15580, "task_index": 12}, {"db_idx": 15581, "episode_idx": 63, "frame_idx": 35, "global_frame_idx": 15581, "task_index": 12}, {"db_idx": 15582, "episode_idx": 63, "frame_idx": 36, "global_frame_idx": 15582, "task_index": 12}, {"db_idx": 15583, "episode_idx": 63, "frame_idx": 37, "global_frame_idx": 15583, "task_index": 12}, {"db_idx": 15584, "episode_idx": 63, "frame_idx": 38, "global_frame_idx": 15584, "task_index": 12}, {"db_idx": 15585, "episode_idx": 63, "frame_idx": 39, "global_frame_idx": 15585, "task_index": 12}, {"db_idx": 15586, "episode_idx": 63, "frame_idx": 40, "global_frame_idx": 15586, "task_index": 12}, {"db_idx": 15587, "episode_idx": 63, "frame_idx": 41, "global_frame_idx": 15587, "task_index": 12}, {"db_idx": 15588, "episode_idx": 63, "frame_idx": 42, "global_frame_idx": 15588, "task_index": 12}, {"db_idx": 15589, "episode_idx": 63, "frame_idx": 43, "global_frame_idx": 15589, "task_index": 12}, {"db_idx": 15590, "episode_idx": 63, "frame_idx": 44, "global_frame_idx": 15590, "task_index": 12}, {"db_idx": 15591, "episode_idx": 63, "frame_idx": 45, "global_frame_idx": 15591, "task_index": 12}, {"db_idx": 15592, "episode_idx": 63, "frame_idx": 46, "global_frame_idx": 15592, "task_index": 12}, {"db_idx": 
15593, "episode_idx": 63, "frame_idx": 47, "global_frame_idx": 15593, "task_index": 12}, {"db_idx": 15594, "episode_idx": 63, "frame_idx": 48, "global_frame_idx": 15594, "task_index": 12}, {"db_idx": 15595, "episode_idx": 63, "frame_idx": 49, "global_frame_idx": 15595, "task_index": 12}, {"db_idx": 15596, "episode_idx": 63, "frame_idx": 50, "global_frame_idx": 15596, "task_index": 12}, {"db_idx": 15597, "episode_idx": 63, "frame_idx": 51, "global_frame_idx": 15597, "task_index": 12}, {"db_idx": 15598, "episode_idx": 63, "frame_idx": 52, "global_frame_idx": 15598, "task_index": 12}, {"db_idx": 15599, "episode_idx": 63, "frame_idx": 53, "global_frame_idx": 15599, "task_index": 12}, {"db_idx": 15600, "episode_idx": 63, "frame_idx": 54, "global_frame_idx": 15600, "task_index": 12}, {"db_idx": 15601, "episode_idx": 63, "frame_idx": 55, "global_frame_idx": 15601, "task_index": 12}, {"db_idx": 15602, "episode_idx": 63, "frame_idx": 56, "global_frame_idx": 15602, "task_index": 12}, {"db_idx": 15603, "episode_idx": 63, "frame_idx": 57, "global_frame_idx": 15603, "task_index": 12}, {"db_idx": 15604, "episode_idx": 63, "frame_idx": 58, "global_frame_idx": 15604, "task_index": 12}, {"db_idx": 15605, "episode_idx": 63, "frame_idx": 59, "global_frame_idx": 15605, "task_index": 12}, {"db_idx": 15606, "episode_idx": 63, "frame_idx": 60, "global_frame_idx": 15606, "task_index": 12}, {"db_idx": 15607, "episode_idx": 63, "frame_idx": 61, "global_frame_idx": 15607, "task_index": 12}, {"db_idx": 15608, "episode_idx": 63, "frame_idx": 62, "global_frame_idx": 15608, "task_index": 12}, {"db_idx": 15609, "episode_idx": 63, "frame_idx": 63, "global_frame_idx": 15609, "task_index": 12}, {"db_idx": 15610, "episode_idx": 63, "frame_idx": 64, "global_frame_idx": 15610, "task_index": 12}, {"db_idx": 15611, "episode_idx": 63, "frame_idx": 65, "global_frame_idx": 15611, "task_index": 12}, {"db_idx": 15612, "episode_idx": 63, "frame_idx": 66, "global_frame_idx": 15612, "task_index": 12}, {"db_idx": 
15613, "episode_idx": 63, "frame_idx": 67, "global_frame_idx": 15613, "task_index": 12}, {"db_idx": 15614, "episode_idx": 63, "frame_idx": 68, "global_frame_idx": 15614, "task_index": 12}, {"db_idx": 15615, "episode_idx": 63, "frame_idx": 69, "global_frame_idx": 15615, "task_index": 12}, {"db_idx": 15616, "episode_idx": 63, "frame_idx": 70, "global_frame_idx": 15616, "task_index": 12}, {"db_idx": 15617, "episode_idx": 63, "frame_idx": 71, "global_frame_idx": 15617, "task_index": 12}, {"db_idx": 15618, "episode_idx": 63, "frame_idx": 72, "global_frame_idx": 15618, "task_index": 12}, {"db_idx": 15619, "episode_idx": 63, "frame_idx": 73, "global_frame_idx": 15619, "task_index": 12}, {"db_idx": 15620, "episode_idx": 63, "frame_idx": 74, "global_frame_idx": 15620, "task_index": 12}, {"db_idx": 15621, "episode_idx": 63, "frame_idx": 75, "global_frame_idx": 15621, "task_index": 12}, {"db_idx": 15622, "episode_idx": 63, "frame_idx": 76, "global_frame_idx": 15622, "task_index": 12}, {"db_idx": 15623, "episode_idx": 63, "frame_idx": 77, "global_frame_idx": 15623, "task_index": 12}, {"db_idx": 15624, "episode_idx": 63, "frame_idx": 78, "global_frame_idx": 15624, "task_index": 12}, {"db_idx": 15625, "episode_idx": 63, "frame_idx": 79, "global_frame_idx": 15625, "task_index": 12}, {"db_idx": 15626, "episode_idx": 63, "frame_idx": 80, "global_frame_idx": 15626, "task_index": 12}, {"db_idx": 15627, "episode_idx": 63, "frame_idx": 81, "global_frame_idx": 15627, "task_index": 12}, {"db_idx": 15628, "episode_idx": 63, "frame_idx": 82, "global_frame_idx": 15628, "task_index": 12}, {"db_idx": 15629, "episode_idx": 63, "frame_idx": 83, "global_frame_idx": 15629, "task_index": 12}, {"db_idx": 15630, "episode_idx": 63, "frame_idx": 84, "global_frame_idx": 15630, "task_index": 12}, {"db_idx": 15631, "episode_idx": 63, "frame_idx": 85, "global_frame_idx": 15631, "task_index": 12}, {"db_idx": 15632, "episode_idx": 63, "frame_idx": 86, "global_frame_idx": 15632, "task_index": 12}, {"db_idx": 
15633, "episode_idx": 63, "frame_idx": 87, "global_frame_idx": 15633, "task_index": 12}, {"db_idx": 15634, "episode_idx": 63, "frame_idx": 88, "global_frame_idx": 15634, "task_index": 12}, {"db_idx": 15635, "episode_idx": 63, "frame_idx": 89, "global_frame_idx": 15635, "task_index": 12}, {"db_idx": 15636, "episode_idx": 63, "frame_idx": 90, "global_frame_idx": 15636, "task_index": 12}, {"db_idx": 15637, "episode_idx": 63, "frame_idx": 91, "global_frame_idx": 15637, "task_index": 12}, {"db_idx": 15638, "episode_idx": 63, "frame_idx": 92, "global_frame_idx": 15638, "task_index": 12}, {"db_idx": 15639, "episode_idx": 63, "frame_idx": 93, "global_frame_idx": 15639, "task_index": 12}, {"db_idx": 15640, "episode_idx": 63, "frame_idx": 94, "global_frame_idx": 15640, "task_index": 12}, {"db_idx": 15641, "episode_idx": 63, "frame_idx": 95, "global_frame_idx": 15641, "task_index": 12}, {"db_idx": 15642, "episode_idx": 63, "frame_idx": 96, "global_frame_idx": 15642, "task_index": 12}, {"db_idx": 15643, "episode_idx": 63, "frame_idx": 97, "global_frame_idx": 15643, "task_index": 12}, {"db_idx": 15644, "episode_idx": 63, "frame_idx": 98, "global_frame_idx": 15644, "task_index": 12}, {"db_idx": 15645, "episode_idx": 63, "frame_idx": 99, "global_frame_idx": 15645, "task_index": 12}, {"db_idx": 15646, "episode_idx": 63, "frame_idx": 100, "global_frame_idx": 15646, "task_index": 12}, {"db_idx": 15647, "episode_idx": 63, "frame_idx": 101, "global_frame_idx": 15647, "task_index": 12}, {"db_idx": 15648, "episode_idx": 63, "frame_idx": 102, "global_frame_idx": 15648, "task_index": 12}, {"db_idx": 15649, "episode_idx": 63, "frame_idx": 103, "global_frame_idx": 15649, "task_index": 12}, {"db_idx": 15650, "episode_idx": 63, "frame_idx": 104, "global_frame_idx": 15650, "task_index": 12}, {"db_idx": 15651, "episode_idx": 63, "frame_idx": 105, "global_frame_idx": 15651, "task_index": 12}, {"db_idx": 15652, "episode_idx": 63, "frame_idx": 106, "global_frame_idx": 15652, "task_index": 12}, 
{"db_idx": 15653, "episode_idx": 63, "frame_idx": 107, "global_frame_idx": 15653, "task_index": 12}, {"db_idx": 15654, "episode_idx": 63, "frame_idx": 108, "global_frame_idx": 15654, "task_index": 12}, {"db_idx": 15655, "episode_idx": 63, "frame_idx": 109, "global_frame_idx": 15655, "task_index": 12}, {"db_idx": 15656, "episode_idx": 63, "frame_idx": 110, "global_frame_idx": 15656, "task_index": 12}, {"db_idx": 15657, "episode_idx": 63, "frame_idx": 111, "global_frame_idx": 15657, "task_index": 12}, {"db_idx": 15658, "episode_idx": 63, "frame_idx": 112, "global_frame_idx": 15658, "task_index": 12}, {"db_idx": 15659, "episode_idx": 63, "frame_idx": 113, "global_frame_idx": 15659, "task_index": 12}, {"db_idx": 15660, "episode_idx": 63, "frame_idx": 114, "global_frame_idx": 15660, "task_index": 12}, {"db_idx": 15661, "episode_idx": 63, "frame_idx": 115, "global_frame_idx": 15661, "task_index": 12}, {"db_idx": 15662, "episode_idx": 63, "frame_idx": 116, "global_frame_idx": 15662, "task_index": 12}, {"db_idx": 15663, "episode_idx": 63, "frame_idx": 117, "global_frame_idx": 15663, "task_index": 12}, {"db_idx": 15664, "episode_idx": 63, "frame_idx": 118, "global_frame_idx": 15664, "task_index": 12}, {"db_idx": 15665, "episode_idx": 63, "frame_idx": 119, "global_frame_idx": 15665, "task_index": 12}, {"db_idx": 15666, "episode_idx": 63, "frame_idx": 120, "global_frame_idx": 15666, "task_index": 12}, {"db_idx": 15667, "episode_idx": 63, "frame_idx": 121, "global_frame_idx": 15667, "task_index": 12}, {"db_idx": 15668, "episode_idx": 63, "frame_idx": 122, "global_frame_idx": 15668, "task_index": 12}, {"db_idx": 15669, "episode_idx": 63, "frame_idx": 123, "global_frame_idx": 15669, "task_index": 12}, {"db_idx": 15670, "episode_idx": 63, "frame_idx": 124, "global_frame_idx": 15670, "task_index": 12}, {"db_idx": 15671, "episode_idx": 63, "frame_idx": 125, "global_frame_idx": 15671, "task_index": 12}, {"db_idx": 15672, "episode_idx": 63, "frame_idx": 126, "global_frame_idx": 
15672, "task_index": 12}, {"db_idx": 15673, "episode_idx": 63, "frame_idx": 127, "global_frame_idx": 15673, "task_index": 12}, {"db_idx": 15674, "episode_idx": 63, "frame_idx": 128, "global_frame_idx": 15674, "task_index": 12}, {"db_idx": 15675, "episode_idx": 63, "frame_idx": 129, "global_frame_idx": 15675, "task_index": 12}, {"db_idx": 15676, "episode_idx": 63, "frame_idx": 130, "global_frame_idx": 15676, "task_index": 12}, {"db_idx": 15677, "episode_idx": 63, "frame_idx": 131, "global_frame_idx": 15677, "task_index": 12}, {"db_idx": 15678, "episode_idx": 63, "frame_idx": 132, "global_frame_idx": 15678, "task_index": 12}, {"db_idx": 15679, "episode_idx": 63, "frame_idx": 133, "global_frame_idx": 15679, "task_index": 12}, {"db_idx": 15680, "episode_idx": 63, "frame_idx": 134, "global_frame_idx": 15680, "task_index": 12}, {"db_idx": 15681, "episode_idx": 63, "frame_idx": 135, "global_frame_idx": 15681, "task_index": 12}, {"db_idx": 15682, "episode_idx": 63, "frame_idx": 136, "global_frame_idx": 15682, "task_index": 12}, {"db_idx": 15683, "episode_idx": 63, "frame_idx": 137, "global_frame_idx": 15683, "task_index": 12}, {"db_idx": 15684, "episode_idx": 63, "frame_idx": 138, "global_frame_idx": 15684, "task_index": 12}, {"db_idx": 15685, "episode_idx": 63, "frame_idx": 139, "global_frame_idx": 15685, "task_index": 12}, {"db_idx": 15686, "episode_idx": 63, "frame_idx": 140, "global_frame_idx": 15686, "task_index": 12}, {"db_idx": 15687, "episode_idx": 64, "frame_idx": 0, "global_frame_idx": 15687, "task_index": 12}, {"db_idx": 15688, "episode_idx": 64, "frame_idx": 1, "global_frame_idx": 15688, "task_index": 12}, {"db_idx": 15689, "episode_idx": 64, "frame_idx": 2, "global_frame_idx": 15689, "task_index": 12}, {"db_idx": 15690, "episode_idx": 64, "frame_idx": 3, "global_frame_idx": 15690, "task_index": 12}, {"db_idx": 15691, "episode_idx": 64, "frame_idx": 4, "global_frame_idx": 15691, "task_index": 12}, {"db_idx": 15692, "episode_idx": 64, "frame_idx": 5, 
"global_frame_idx": 15692, "task_index": 12}, {"db_idx": 15693, "episode_idx": 64, "frame_idx": 6, "global_frame_idx": 15693, "task_index": 12}, {"db_idx": 15694, "episode_idx": 64, "frame_idx": 7, "global_frame_idx": 15694, "task_index": 12}, {"db_idx": 15695, "episode_idx": 64, "frame_idx": 8, "global_frame_idx": 15695, "task_index": 12}, {"db_idx": 15696, "episode_idx": 64, "frame_idx": 9, "global_frame_idx": 15696, "task_index": 12}, {"db_idx": 15697, "episode_idx": 64, "frame_idx": 10, "global_frame_idx": 15697, "task_index": 12}, {"db_idx": 15698, "episode_idx": 64, "frame_idx": 11, "global_frame_idx": 15698, "task_index": 12}, {"db_idx": 15699, "episode_idx": 64, "frame_idx": 12, "global_frame_idx": 15699, "task_index": 12}, {"db_idx": 15700, "episode_idx": 64, "frame_idx": 13, "global_frame_idx": 15700, "task_index": 12}, {"db_idx": 15701, "episode_idx": 64, "frame_idx": 14, "global_frame_idx": 15701, "task_index": 12}, {"db_idx": 15702, "episode_idx": 64, "frame_idx": 15, "global_frame_idx": 15702, "task_index": 12}, {"db_idx": 15703, "episode_idx": 64, "frame_idx": 16, "global_frame_idx": 15703, "task_index": 12}, {"db_idx": 15704, "episode_idx": 64, "frame_idx": 17, "global_frame_idx": 15704, "task_index": 12}, {"db_idx": 15705, "episode_idx": 64, "frame_idx": 18, "global_frame_idx": 15705, "task_index": 12}, {"db_idx": 15706, "episode_idx": 64, "frame_idx": 19, "global_frame_idx": 15706, "task_index": 12}, {"db_idx": 15707, "episode_idx": 64, "frame_idx": 20, "global_frame_idx": 15707, "task_index": 12}, {"db_idx": 15708, "episode_idx": 64, "frame_idx": 21, "global_frame_idx": 15708, "task_index": 12}, {"db_idx": 15709, "episode_idx": 64, "frame_idx": 22, "global_frame_idx": 15709, "task_index": 12}, {"db_idx": 15710, "episode_idx": 64, "frame_idx": 23, "global_frame_idx": 15710, "task_index": 12}, {"db_idx": 15711, "episode_idx": 64, "frame_idx": 24, "global_frame_idx": 15711, "task_index": 12}, {"db_idx": 15712, "episode_idx": 64, "frame_idx": 25, 
"global_frame_idx": 15712, "task_index": 12}, {"db_idx": 15713, "episode_idx": 64, "frame_idx": 26, "global_frame_idx": 15713, "task_index": 12}, {"db_idx": 15714, "episode_idx": 64, "frame_idx": 27, "global_frame_idx": 15714, "task_index": 12}, {"db_idx": 15715, "episode_idx": 64, "frame_idx": 28, "global_frame_idx": 15715, "task_index": 12}, {"db_idx": 15716, "episode_idx": 64, "frame_idx": 29, "global_frame_idx": 15716, "task_index": 12}, {"db_idx": 15717, "episode_idx": 64, "frame_idx": 30, "global_frame_idx": 15717, "task_index": 12}, {"db_idx": 15718, "episode_idx": 64, "frame_idx": 31, "global_frame_idx": 15718, "task_index": 12}, {"db_idx": 15719, "episode_idx": 64, "frame_idx": 32, "global_frame_idx": 15719, "task_index": 12}, {"db_idx": 15720, "episode_idx": 64, "frame_idx": 33, "global_frame_idx": 15720, "task_index": 12}, {"db_idx": 15721, "episode_idx": 64, "frame_idx": 34, "global_frame_idx": 15721, "task_index": 12}, {"db_idx": 15722, "episode_idx": 64, "frame_idx": 35, "global_frame_idx": 15722, "task_index": 12}, {"db_idx": 15723, "episode_idx": 64, "frame_idx": 36, "global_frame_idx": 15723, "task_index": 12}, {"db_idx": 15724, "episode_idx": 64, "frame_idx": 37, "global_frame_idx": 15724, "task_index": 12}, {"db_idx": 15725, "episode_idx": 64, "frame_idx": 38, "global_frame_idx": 15725, "task_index": 12}, {"db_idx": 15726, "episode_idx": 64, "frame_idx": 39, "global_frame_idx": 15726, "task_index": 12}, {"db_idx": 15727, "episode_idx": 64, "frame_idx": 40, "global_frame_idx": 15727, "task_index": 12}, {"db_idx": 15728, "episode_idx": 64, "frame_idx": 41, "global_frame_idx": 15728, "task_index": 12}, {"db_idx": 15729, "episode_idx": 64, "frame_idx": 42, "global_frame_idx": 15729, "task_index": 12}, {"db_idx": 15730, "episode_idx": 64, "frame_idx": 43, "global_frame_idx": 15730, "task_index": 12}, {"db_idx": 15731, "episode_idx": 64, "frame_idx": 44, "global_frame_idx": 15731, "task_index": 12}, {"db_idx": 15732, "episode_idx": 64, "frame_idx": 45, 
"global_frame_idx": 15732, "task_index": 12}, {"db_idx": 15733, "episode_idx": 64, "frame_idx": 46, "global_frame_idx": 15733, "task_index": 12}, {"db_idx": 15734, "episode_idx": 64, "frame_idx": 47, "global_frame_idx": 15734, "task_index": 12}, {"db_idx": 15735, "episode_idx": 64, "frame_idx": 48, "global_frame_idx": 15735, "task_index": 12}, {"db_idx": 15736, "episode_idx": 64, "frame_idx": 49, "global_frame_idx": 15736, "task_index": 12}, {"db_idx": 15737, "episode_idx": 64, "frame_idx": 50, "global_frame_idx": 15737, "task_index": 12}, {"db_idx": 15738, "episode_idx": 64, "frame_idx": 51, "global_frame_idx": 15738, "task_index": 12}, {"db_idx": 15739, "episode_idx": 64, "frame_idx": 52, "global_frame_idx": 15739, "task_index": 12}, {"db_idx": 15740, "episode_idx": 64, "frame_idx": 53, "global_frame_idx": 15740, "task_index": 12}, {"db_idx": 15741, "episode_idx": 64, "frame_idx": 54, "global_frame_idx": 15741, "task_index": 12}, {"db_idx": 15742, "episode_idx": 64, "frame_idx": 55, "global_frame_idx": 15742, "task_index": 12}, {"db_idx": 15743, "episode_idx": 64, "frame_idx": 56, "global_frame_idx": 15743, "task_index": 12}, {"db_idx": 15744, "episode_idx": 64, "frame_idx": 57, "global_frame_idx": 15744, "task_index": 12}, {"db_idx": 15745, "episode_idx": 64, "frame_idx": 58, "global_frame_idx": 15745, "task_index": 12}, {"db_idx": 15746, "episode_idx": 64, "frame_idx": 59, "global_frame_idx": 15746, "task_index": 12}, {"db_idx": 15747, "episode_idx": 64, "frame_idx": 60, "global_frame_idx": 15747, "task_index": 12}, {"db_idx": 15748, "episode_idx": 64, "frame_idx": 61, "global_frame_idx": 15748, "task_index": 12}, {"db_idx": 15749, "episode_idx": 64, "frame_idx": 62, "global_frame_idx": 15749, "task_index": 12}, {"db_idx": 15750, "episode_idx": 64, "frame_idx": 63, "global_frame_idx": 15750, "task_index": 12}, {"db_idx": 15751, "episode_idx": 64, "frame_idx": 64, "global_frame_idx": 15751, "task_index": 12}, {"db_idx": 15752, "episode_idx": 64, "frame_idx": 65, 
"global_frame_idx": 15752, "task_index": 12}, {"db_idx": 15753, "episode_idx": 64, "frame_idx": 66, "global_frame_idx": 15753, "task_index": 12}, {"db_idx": 15754, "episode_idx": 64, "frame_idx": 67, "global_frame_idx": 15754, "task_index": 12}, {"db_idx": 15755, "episode_idx": 64, "frame_idx": 68, "global_frame_idx": 15755, "task_index": 12}, {"db_idx": 15756, "episode_idx": 64, "frame_idx": 69, "global_frame_idx": 15756, "task_index": 12}, {"db_idx": 15757, "episode_idx": 64, "frame_idx": 70, "global_frame_idx": 15757, "task_index": 12}, {"db_idx": 15758, "episode_idx": 64, "frame_idx": 71, "global_frame_idx": 15758, "task_index": 12}, {"db_idx": 15759, "episode_idx": 64, "frame_idx": 72, "global_frame_idx": 15759, "task_index": 12}, {"db_idx": 15760, "episode_idx": 64, "frame_idx": 73, "global_frame_idx": 15760, "task_index": 12}, {"db_idx": 15761, "episode_idx": 64, "frame_idx": 74, "global_frame_idx": 15761, "task_index": 12}, {"db_idx": 15762, "episode_idx": 64, "frame_idx": 75, "global_frame_idx": 15762, "task_index": 12}, {"db_idx": 15763, "episode_idx": 64, "frame_idx": 76, "global_frame_idx": 15763, "task_index": 12}, {"db_idx": 15764, "episode_idx": 64, "frame_idx": 77, "global_frame_idx": 15764, "task_index": 12}, {"db_idx": 15765, "episode_idx": 64, "frame_idx": 78, "global_frame_idx": 15765, "task_index": 12}, {"db_idx": 15766, "episode_idx": 64, "frame_idx": 79, "global_frame_idx": 15766, "task_index": 12}, {"db_idx": 15767, "episode_idx": 64, "frame_idx": 80, "global_frame_idx": 15767, "task_index": 12}, {"db_idx": 15768, "episode_idx": 64, "frame_idx": 81, "global_frame_idx": 15768, "task_index": 12}, {"db_idx": 15769, "episode_idx": 64, "frame_idx": 82, "global_frame_idx": 15769, "task_index": 12}, {"db_idx": 15770, "episode_idx": 64, "frame_idx": 83, "global_frame_idx": 15770, "task_index": 12}, {"db_idx": 15771, "episode_idx": 64, "frame_idx": 84, "global_frame_idx": 15771, "task_index": 12}, {"db_idx": 15772, "episode_idx": 64, "frame_idx": 85, 
"global_frame_idx": 15772, "task_index": 12}, {"db_idx": 15773, "episode_idx": 64, "frame_idx": 86, "global_frame_idx": 15773, "task_index": 12}, {"db_idx": 15774, "episode_idx": 64, "frame_idx": 87, "global_frame_idx": 15774, "task_index": 12}, {"db_idx": 15775, "episode_idx": 64, "frame_idx": 88, "global_frame_idx": 15775, "task_index": 12}, {"db_idx": 15776, "episode_idx": 64, "frame_idx": 89, "global_frame_idx": 15776, "task_index": 12}, {"db_idx": 15777, "episode_idx": 64, "frame_idx": 90, "global_frame_idx": 15777, "task_index": 12}, {"db_idx": 15778, "episode_idx": 64, "frame_idx": 91, "global_frame_idx": 15778, "task_index": 12}, {"db_idx": 15779, "episode_idx": 64, "frame_idx": 92, "global_frame_idx": 15779, "task_index": 12}, {"db_idx": 15780, "episode_idx": 64, "frame_idx": 93, "global_frame_idx": 15780, "task_index": 12}, {"db_idx": 15781, "episode_idx": 64, "frame_idx": 94, "global_frame_idx": 15781, "task_index": 12}, {"db_idx": 15782, "episode_idx": 64, "frame_idx": 95, "global_frame_idx": 15782, "task_index": 12}, {"db_idx": 15783, "episode_idx": 64, "frame_idx": 96, "global_frame_idx": 15783, "task_index": 12}, {"db_idx": 15784, "episode_idx": 64, "frame_idx": 97, "global_frame_idx": 15784, "task_index": 12}, {"db_idx": 15785, "episode_idx": 64, "frame_idx": 98, "global_frame_idx": 15785, "task_index": 12}, {"db_idx": 15786, "episode_idx": 64, "frame_idx": 99, "global_frame_idx": 15786, "task_index": 12}, {"db_idx": 15787, "episode_idx": 64, "frame_idx": 100, "global_frame_idx": 15787, "task_index": 12}, {"db_idx": 15788, "episode_idx": 64, "frame_idx": 101, "global_frame_idx": 15788, "task_index": 12}, {"db_idx": 15789, "episode_idx": 64, "frame_idx": 102, "global_frame_idx": 15789, "task_index": 12}, {"db_idx": 15790, "episode_idx": 64, "frame_idx": 103, "global_frame_idx": 15790, "task_index": 12}, {"db_idx": 15791, "episode_idx": 64, "frame_idx": 104, "global_frame_idx": 15791, "task_index": 12}, {"db_idx": 15792, "episode_idx": 64, 
"frame_idx": 105, "global_frame_idx": 15792, "task_index": 12}, {"db_idx": 15793, "episode_idx": 64, "frame_idx": 106, "global_frame_idx": 15793, "task_index": 12}, {"db_idx": 15794, "episode_idx": 64, "frame_idx": 107, "global_frame_idx": 15794, "task_index": 12}, {"db_idx": 15795, "episode_idx": 64, "frame_idx": 108, "global_frame_idx": 15795, "task_index": 12}, {"db_idx": 15796, "episode_idx": 64, "frame_idx": 109, "global_frame_idx": 15796, "task_index": 12}, {"db_idx": 15797, "episode_idx": 64, "frame_idx": 110, "global_frame_idx": 15797, "task_index": 12}, {"db_idx": 15798, "episode_idx": 64, "frame_idx": 111, "global_frame_idx": 15798, "task_index": 12}, {"db_idx": 15799, "episode_idx": 64, "frame_idx": 112, "global_frame_idx": 15799, "task_index": 12}, {"db_idx": 15800, "episode_idx": 64, "frame_idx": 113, "global_frame_idx": 15800, "task_index": 12}, {"db_idx": 15801, "episode_idx": 64, "frame_idx": 114, "global_frame_idx": 15801, "task_index": 12}, {"db_idx": 15802, "episode_idx": 64, "frame_idx": 115, "global_frame_idx": 15802, "task_index": 12}, {"db_idx": 15803, "episode_idx": 64, "frame_idx": 116, "global_frame_idx": 15803, "task_index": 12}, {"db_idx": 15804, "episode_idx": 64, "frame_idx": 117, "global_frame_idx": 15804, "task_index": 12}, {"db_idx": 15805, "episode_idx": 64, "frame_idx": 118, "global_frame_idx": 15805, "task_index": 12}, {"db_idx": 15806, "episode_idx": 64, "frame_idx": 119, "global_frame_idx": 15806, "task_index": 12}, {"db_idx": 15807, "episode_idx": 64, "frame_idx": 120, "global_frame_idx": 15807, "task_index": 12}, {"db_idx": 15808, "episode_idx": 64, "frame_idx": 121, "global_frame_idx": 15808, "task_index": 12}, {"db_idx": 15809, "episode_idx": 64, "frame_idx": 122, "global_frame_idx": 15809, "task_index": 12}, {"db_idx": 15810, "episode_idx": 64, "frame_idx": 123, "global_frame_idx": 15810, "task_index": 12}, {"db_idx": 15811, "episode_idx": 64, "frame_idx": 124, "global_frame_idx": 15811, "task_index": 12}, {"db_idx": 
15812, "episode_idx": 64, "frame_idx": 125, "global_frame_idx": 15812, "task_index": 12}, {"db_idx": 15813, "episode_idx": 64, "frame_idx": 126, "global_frame_idx": 15813, "task_index": 12}, {"db_idx": 15814, "episode_idx": 64, "frame_idx": 127, "global_frame_idx": 15814, "task_index": 12}, {"db_idx": 15815, "episode_idx": 64, "frame_idx": 128, "global_frame_idx": 15815, "task_index": 12}, {"db_idx": 15816, "episode_idx": 64, "frame_idx": 129, "global_frame_idx": 15816, "task_index": 12}, {"db_idx": 15817, "episode_idx": 64, "frame_idx": 130, "global_frame_idx": 15817, "task_index": 12}, {"db_idx": 15818, "episode_idx": 64, "frame_idx": 131, "global_frame_idx": 15818, "task_index": 12}, {"db_idx": 15819, "episode_idx": 64, "frame_idx": 132, "global_frame_idx": 15819, "task_index": 12}, {"db_idx": 15820, "episode_idx": 64, "frame_idx": 133, "global_frame_idx": 15820, "task_index": 12}, {"db_idx": 15821, "episode_idx": 64, "frame_idx": 134, "global_frame_idx": 15821, "task_index": 12}, {"db_idx": 15822, "episode_idx": 64, "frame_idx": 135, "global_frame_idx": 15822, "task_index": 12}, {"db_idx": 15823, "episode_idx": 64, "frame_idx": 136, "global_frame_idx": 15823, "task_index": 12}, {"db_idx": 15824, "episode_idx": 64, "frame_idx": 137, "global_frame_idx": 15824, "task_index": 12}, {"db_idx": 15825, "episode_idx": 64, "frame_idx": 138, "global_frame_idx": 15825, "task_index": 12}, {"db_idx": 15826, "episode_idx": 64, "frame_idx": 139, "global_frame_idx": 15826, "task_index": 12}, {"db_idx": 15827, "episode_idx": 64, "frame_idx": 140, "global_frame_idx": 15827, "task_index": 12}, {"db_idx": 15828, "episode_idx": 64, "frame_idx": 141, "global_frame_idx": 15828, "task_index": 12}, {"db_idx": 15829, "episode_idx": 64, "frame_idx": 142, "global_frame_idx": 15829, "task_index": 12}, {"db_idx": 15830, "episode_idx": 64, "frame_idx": 143, "global_frame_idx": 15830, "task_index": 12}, {"db_idx": 15831, "episode_idx": 64, "frame_idx": 144, "global_frame_idx": 15831, 
"task_index": 12}, {"db_idx": 15832, "episode_idx": 64, "frame_idx": 145, "global_frame_idx": 15832, "task_index": 12}, {"db_idx": 15833, "episode_idx": 64, "frame_idx": 146, "global_frame_idx": 15833, "task_index": 12}, {"db_idx": 15834, "episode_idx": 64, "frame_idx": 147, "global_frame_idx": 15834, "task_index": 12}, {"db_idx": 15835, "episode_idx": 64, "frame_idx": 148, "global_frame_idx": 15835, "task_index": 12}, {"db_idx": 15836, "episode_idx": 64, "frame_idx": 149, "global_frame_idx": 15836, "task_index": 12}, {"db_idx": 15837, "episode_idx": 64, "frame_idx": 150, "global_frame_idx": 15837, "task_index": 12}, {"db_idx": 15838, "episode_idx": 64, "frame_idx": 151, "global_frame_idx": 15838, "task_index": 12}, {"db_idx": 15839, "episode_idx": 64, "frame_idx": 152, "global_frame_idx": 15839, "task_index": 12}, {"db_idx": 15840, "episode_idx": 64, "frame_idx": 153, "global_frame_idx": 15840, "task_index": 12}, {"db_idx": 15841, "episode_idx": 64, "frame_idx": 154, "global_frame_idx": 15841, "task_index": 12}, {"db_idx": 15842, "episode_idx": 64, "frame_idx": 155, "global_frame_idx": 15842, "task_index": 12}, {"db_idx": 15843, "episode_idx": 64, "frame_idx": 156, "global_frame_idx": 15843, "task_index": 12}, {"db_idx": 15844, "episode_idx": 64, "frame_idx": 157, "global_frame_idx": 15844, "task_index": 12}, {"db_idx": 15845, "episode_idx": 64, "frame_idx": 158, "global_frame_idx": 15845, "task_index": 12}, {"db_idx": 15846, "episode_idx": 65, "frame_idx": 0, "global_frame_idx": 15846, "task_index": 13}, {"db_idx": 15847, "episode_idx": 65, "frame_idx": 1, "global_frame_idx": 15847, "task_index": 13}, {"db_idx": 15848, "episode_idx": 65, "frame_idx": 2, "global_frame_idx": 15848, "task_index": 13}, {"db_idx": 15849, "episode_idx": 65, "frame_idx": 3, "global_frame_idx": 15849, "task_index": 13}, {"db_idx": 15850, "episode_idx": 65, "frame_idx": 4, "global_frame_idx": 15850, "task_index": 13}, {"db_idx": 15851, "episode_idx": 65, "frame_idx": 5, 
"global_frame_idx": 15851, "task_index": 13}, {"db_idx": 15852, "episode_idx": 65, "frame_idx": 6, "global_frame_idx": 15852, "task_index": 13}, {"db_idx": 15853, "episode_idx": 65, "frame_idx": 7, "global_frame_idx": 15853, "task_index": 13}, {"db_idx": 15854, "episode_idx": 65, "frame_idx": 8, "global_frame_idx": 15854, "task_index": 13}, {"db_idx": 15855, "episode_idx": 65, "frame_idx": 9, "global_frame_idx": 15855, "task_index": 13}, {"db_idx": 15856, "episode_idx": 65, "frame_idx": 10, "global_frame_idx": 15856, "task_index": 13}, {"db_idx": 15857, "episode_idx": 65, "frame_idx": 11, "global_frame_idx": 15857, "task_index": 13}, {"db_idx": 15858, "episode_idx": 65, "frame_idx": 12, "global_frame_idx": 15858, "task_index": 13}, {"db_idx": 15859, "episode_idx": 65, "frame_idx": 13, "global_frame_idx": 15859, "task_index": 13}, {"db_idx": 15860, "episode_idx": 65, "frame_idx": 14, "global_frame_idx": 15860, "task_index": 13}, {"db_idx": 15861, "episode_idx": 65, "frame_idx": 15, "global_frame_idx": 15861, "task_index": 13}, {"db_idx": 15862, "episode_idx": 65, "frame_idx": 16, "global_frame_idx": 15862, "task_index": 13}, {"db_idx": 15863, "episode_idx": 65, "frame_idx": 17, "global_frame_idx": 15863, "task_index": 13}, {"db_idx": 15864, "episode_idx": 65, "frame_idx": 18, "global_frame_idx": 15864, "task_index": 13}, {"db_idx": 15865, "episode_idx": 65, "frame_idx": 19, "global_frame_idx": 15865, "task_index": 13}, {"db_idx": 15866, "episode_idx": 65, "frame_idx": 20, "global_frame_idx": 15866, "task_index": 13}, {"db_idx": 15867, "episode_idx": 65, "frame_idx": 21, "global_frame_idx": 15867, "task_index": 13}, {"db_idx": 15868, "episode_idx": 65, "frame_idx": 22, "global_frame_idx": 15868, "task_index": 13}, {"db_idx": 15869, "episode_idx": 65, "frame_idx": 23, "global_frame_idx": 15869, "task_index": 13}, {"db_idx": 15870, "episode_idx": 65, "frame_idx": 24, "global_frame_idx": 15870, "task_index": 13}, {"db_idx": 15871, "episode_idx": 65, "frame_idx": 25, 
"global_frame_idx": 15871, "task_index": 13}, {"db_idx": 15872, "episode_idx": 65, "frame_idx": 26, "global_frame_idx": 15872, "task_index": 13}, {"db_idx": 15873, "episode_idx": 65, "frame_idx": 27, "global_frame_idx": 15873, "task_index": 13}, {"db_idx": 15874, "episode_idx": 65, "frame_idx": 28, "global_frame_idx": 15874, "task_index": 13}, {"db_idx": 15875, "episode_idx": 65, "frame_idx": 29, "global_frame_idx": 15875, "task_index": 13}, {"db_idx": 15876, "episode_idx": 65, "frame_idx": 30, "global_frame_idx": 15876, "task_index": 13}, {"db_idx": 15877, "episode_idx": 65, "frame_idx": 31, "global_frame_idx": 15877, "task_index": 13}, {"db_idx": 15878, "episode_idx": 65, "frame_idx": 32, "global_frame_idx": 15878, "task_index": 13}, {"db_idx": 15879, "episode_idx": 65, "frame_idx": 33, "global_frame_idx": 15879, "task_index": 13}, {"db_idx": 15880, "episode_idx": 65, "frame_idx": 34, "global_frame_idx": 15880, "task_index": 13}, {"db_idx": 15881, "episode_idx": 65, "frame_idx": 35, "global_frame_idx": 15881, "task_index": 13}, {"db_idx": 15882, "episode_idx": 65, "frame_idx": 36, "global_frame_idx": 15882, "task_index": 13}, {"db_idx": 15883, "episode_idx": 65, "frame_idx": 37, "global_frame_idx": 15883, "task_index": 13}, {"db_idx": 15884, "episode_idx": 65, "frame_idx": 38, "global_frame_idx": 15884, "task_index": 13}, {"db_idx": 15885, "episode_idx": 65, "frame_idx": 39, "global_frame_idx": 15885, "task_index": 13}, {"db_idx": 15886, "episode_idx": 65, "frame_idx": 40, "global_frame_idx": 15886, "task_index": 13}, {"db_idx": 15887, "episode_idx": 65, "frame_idx": 41, "global_frame_idx": 15887, "task_index": 13}, {"db_idx": 15888, "episode_idx": 65, "frame_idx": 42, "global_frame_idx": 15888, "task_index": 13}, {"db_idx": 15889, "episode_idx": 65, "frame_idx": 43, "global_frame_idx": 15889, "task_index": 13}, {"db_idx": 15890, "episode_idx": 65, "frame_idx": 44, "global_frame_idx": 15890, "task_index": 13}, {"db_idx": 15891, "episode_idx": 65, "frame_idx": 45, 
"global_frame_idx": 15891, "task_index": 13}, {"db_idx": 15892, "episode_idx": 65, "frame_idx": 46, "global_frame_idx": 15892, "task_index": 13}, {"db_idx": 15893, "episode_idx": 65, "frame_idx": 47, "global_frame_idx": 15893, "task_index": 13}, {"db_idx": 15894, "episode_idx": 65, "frame_idx": 48, "global_frame_idx": 15894, "task_index": 13}, {"db_idx": 15895, "episode_idx": 65, "frame_idx": 49, "global_frame_idx": 15895, "task_index": 13}, {"db_idx": 15896, "episode_idx": 65, "frame_idx": 50, "global_frame_idx": 15896, "task_index": 13}, {"db_idx": 15897, "episode_idx": 65, "frame_idx": 51, "global_frame_idx": 15897, "task_index": 13}, {"db_idx": 15898, "episode_idx": 65, "frame_idx": 52, "global_frame_idx": 15898, "task_index": 13}, {"db_idx": 15899, "episode_idx": 65, "frame_idx": 53, "global_frame_idx": 15899, "task_index": 13}, {"db_idx": 15900, "episode_idx": 65, "frame_idx": 54, "global_frame_idx": 15900, "task_index": 13}, {"db_idx": 15901, "episode_idx": 65, "frame_idx": 55, "global_frame_idx": 15901, "task_index": 13}, {"db_idx": 15902, "episode_idx": 65, "frame_idx": 56, "global_frame_idx": 15902, "task_index": 13}, {"db_idx": 15903, "episode_idx": 65, "frame_idx": 57, "global_frame_idx": 15903, "task_index": 13}, {"db_idx": 15904, "episode_idx": 65, "frame_idx": 58, "global_frame_idx": 15904, "task_index": 13}, {"db_idx": 15905, "episode_idx": 65, "frame_idx": 59, "global_frame_idx": 15905, "task_index": 13}, {"db_idx": 15906, "episode_idx": 65, "frame_idx": 60, "global_frame_idx": 15906, "task_index": 13}, {"db_idx": 15907, "episode_idx": 65, "frame_idx": 61, "global_frame_idx": 15907, "task_index": 13}, {"db_idx": 15908, "episode_idx": 65, "frame_idx": 62, "global_frame_idx": 15908, "task_index": 13}, {"db_idx": 15909, "episode_idx": 65, "frame_idx": 63, "global_frame_idx": 15909, "task_index": 13}, {"db_idx": 15910, "episode_idx": 65, "frame_idx": 64, "global_frame_idx": 15910, "task_index": 13}, {"db_idx": 15911, "episode_idx": 65, "frame_idx": 65, 
"global_frame_idx": 15911, "task_index": 13}, {"db_idx": 15912, "episode_idx": 65, "frame_idx": 66, "global_frame_idx": 15912, "task_index": 13}, {"db_idx": 15913, "episode_idx": 65, "frame_idx": 67, "global_frame_idx": 15913, "task_index": 13}, {"db_idx": 15914, "episode_idx": 65, "frame_idx": 68, "global_frame_idx": 15914, "task_index": 13}, {"db_idx": 15915, "episode_idx": 65, "frame_idx": 69, "global_frame_idx": 15915, "task_index": 13}, {"db_idx": 15916, "episode_idx": 65, "frame_idx": 70, "global_frame_idx": 15916, "task_index": 13}, {"db_idx": 15917, "episode_idx": 65, "frame_idx": 71, "global_frame_idx": 15917, "task_index": 13}, {"db_idx": 15918, "episode_idx": 65, "frame_idx": 72, "global_frame_idx": 15918, "task_index": 13}, {"db_idx": 15919, "episode_idx": 65, "frame_idx": 73, "global_frame_idx": 15919, "task_index": 13}, {"db_idx": 15920, "episode_idx": 65, "frame_idx": 74, "global_frame_idx": 15920, "task_index": 13}, {"db_idx": 15921, "episode_idx": 65, "frame_idx": 75, "global_frame_idx": 15921, "task_index": 13}, {"db_idx": 15922, "episode_idx": 65, "frame_idx": 76, "global_frame_idx": 15922, "task_index": 13}, {"db_idx": 15923, "episode_idx": 65, "frame_idx": 77, "global_frame_idx": 15923, "task_index": 13}, {"db_idx": 15924, "episode_idx": 65, "frame_idx": 78, "global_frame_idx": 15924, "task_index": 13}, {"db_idx": 15925, "episode_idx": 65, "frame_idx": 79, "global_frame_idx": 15925, "task_index": 13}, {"db_idx": 15926, "episode_idx": 65, "frame_idx": 80, "global_frame_idx": 15926, "task_index": 13}, {"db_idx": 15927, "episode_idx": 65, "frame_idx": 81, "global_frame_idx": 15927, "task_index": 13}, {"db_idx": 15928, "episode_idx": 65, "frame_idx": 82, "global_frame_idx": 15928, "task_index": 13}, {"db_idx": 15929, "episode_idx": 65, "frame_idx": 83, "global_frame_idx": 15929, "task_index": 13}, {"db_idx": 15930, "episode_idx": 65, "frame_idx": 84, "global_frame_idx": 15930, "task_index": 13}, {"db_idx": 15931, "episode_idx": 65, "frame_idx": 85, 
"global_frame_idx": 15931, "task_index": 13}, {"db_idx": 15932, "episode_idx": 65, "frame_idx": 86, "global_frame_idx": 15932, "task_index": 13}, {"db_idx": 15933, "episode_idx": 65, "frame_idx": 87, "global_frame_idx": 15933, "task_index": 13}, {"db_idx": 15934, "episode_idx": 65, "frame_idx": 88, "global_frame_idx": 15934, "task_index": 13}, {"db_idx": 15935, "episode_idx": 65, "frame_idx": 89, "global_frame_idx": 15935, "task_index": 13}, {"db_idx": 15936, "episode_idx": 65, "frame_idx": 90, "global_frame_idx": 15936, "task_index": 13}, {"db_idx": 15937, "episode_idx": 65, "frame_idx": 91, "global_frame_idx": 15937, "task_index": 13}, {"db_idx": 15938, "episode_idx": 65, "frame_idx": 92, "global_frame_idx": 15938, "task_index": 13}, {"db_idx": 15939, "episode_idx": 65, "frame_idx": 93, "global_frame_idx": 15939, "task_index": 13}, {"db_idx": 15940, "episode_idx": 65, "frame_idx": 94, "global_frame_idx": 15940, "task_index": 13}, {"db_idx": 15941, "episode_idx": 65, "frame_idx": 95, "global_frame_idx": 15941, "task_index": 13}, {"db_idx": 15942, "episode_idx": 65, "frame_idx": 96, "global_frame_idx": 15942, "task_index": 13}, {"db_idx": 15943, "episode_idx": 65, "frame_idx": 97, "global_frame_idx": 15943, "task_index": 13}, {"db_idx": 15944, "episode_idx": 65, "frame_idx": 98, "global_frame_idx": 15944, "task_index": 13}, {"db_idx": 15945, "episode_idx": 65, "frame_idx": 99, "global_frame_idx": 15945, "task_index": 13}, {"db_idx": 15946, "episode_idx": 65, "frame_idx": 100, "global_frame_idx": 15946, "task_index": 13}, {"db_idx": 15947, "episode_idx": 65, "frame_idx": 101, "global_frame_idx": 15947, "task_index": 13}, {"db_idx": 15948, "episode_idx": 65, "frame_idx": 102, "global_frame_idx": 15948, "task_index": 13}, {"db_idx": 15949, "episode_idx": 65, "frame_idx": 103, "global_frame_idx": 15949, "task_index": 13}, {"db_idx": 15950, "episode_idx": 65, "frame_idx": 104, "global_frame_idx": 15950, "task_index": 13}, {"db_idx": 15951, "episode_idx": 65, 
"frame_idx": 105, "global_frame_idx": 15951, "task_index": 13}, {"db_idx": 15952, "episode_idx": 65, "frame_idx": 106, "global_frame_idx": 15952, "task_index": 13}, {"db_idx": 15953, "episode_idx": 65, "frame_idx": 107, "global_frame_idx": 15953, "task_index": 13}, {"db_idx": 15954, "episode_idx": 65, "frame_idx": 108, "global_frame_idx": 15954, "task_index": 13}, {"db_idx": 15955, "episode_idx": 65, "frame_idx": 109, "global_frame_idx": 15955, "task_index": 13}, {"db_idx": 15956, "episode_idx": 65, "frame_idx": 110, "global_frame_idx": 15956, "task_index": 13}, {"db_idx": 15957, "episode_idx": 65, "frame_idx": 111, "global_frame_idx": 15957, "task_index": 13}, {"db_idx": 15958, "episode_idx": 65, "frame_idx": 112, "global_frame_idx": 15958, "task_index": 13}, {"db_idx": 15959, "episode_idx": 65, "frame_idx": 113, "global_frame_idx": 15959, "task_index": 13}, {"db_idx": 15960, "episode_idx": 65, "frame_idx": 114, "global_frame_idx": 15960, "task_index": 13}, {"db_idx": 15961, "episode_idx": 65, "frame_idx": 115, "global_frame_idx": 15961, "task_index": 13}, {"db_idx": 15962, "episode_idx": 65, "frame_idx": 116, "global_frame_idx": 15962, "task_index": 13}, {"db_idx": 15963, "episode_idx": 65, "frame_idx": 117, "global_frame_idx": 15963, "task_index": 13}, {"db_idx": 15964, "episode_idx": 65, "frame_idx": 118, "global_frame_idx": 15964, "task_index": 13}, {"db_idx": 15965, "episode_idx": 65, "frame_idx": 119, "global_frame_idx": 15965, "task_index": 13}, {"db_idx": 15966, "episode_idx": 65, "frame_idx": 120, "global_frame_idx": 15966, "task_index": 13}, {"db_idx": 15967, "episode_idx": 65, "frame_idx": 121, "global_frame_idx": 15967, "task_index": 13}, {"db_idx": 15968, "episode_idx": 65, "frame_idx": 122, "global_frame_idx": 15968, "task_index": 13}, {"db_idx": 15969, "episode_idx": 65, "frame_idx": 123, "global_frame_idx": 15969, "task_index": 13}, {"db_idx": 15970, "episode_idx": 65, "frame_idx": 124, "global_frame_idx": 15970, "task_index": 13}, {"db_idx": 
15971, "episode_idx": 65, "frame_idx": 125, "global_frame_idx": 15971, "task_index": 13}, {"db_idx": 15972, "episode_idx": 65, "frame_idx": 126, "global_frame_idx": 15972, "task_index": 13}, {"db_idx": 15973, "episode_idx": 65, "frame_idx": 127, "global_frame_idx": 15973, "task_index": 13}, {"db_idx": 15974, "episode_idx": 65, "frame_idx": 128, "global_frame_idx": 15974, "task_index": 13}, {"db_idx": 15975, "episode_idx": 65, "frame_idx": 129, "global_frame_idx": 15975, "task_index": 13}, {"db_idx": 15976, "episode_idx": 65, "frame_idx": 130, "global_frame_idx": 15976, "task_index": 13}, {"db_idx": 15977, "episode_idx": 65, "frame_idx": 131, "global_frame_idx": 15977, "task_index": 13}, {"db_idx": 15978, "episode_idx": 65, "frame_idx": 132, "global_frame_idx": 15978, "task_index": 13}, {"db_idx": 15979, "episode_idx": 65, "frame_idx": 133, "global_frame_idx": 15979, "task_index": 13}, {"db_idx": 15980, "episode_idx": 65, "frame_idx": 134, "global_frame_idx": 15980, "task_index": 13}, {"db_idx": 15981, "episode_idx": 65, "frame_idx": 135, "global_frame_idx": 15981, "task_index": 13}, {"db_idx": 15982, "episode_idx": 66, "frame_idx": 0, "global_frame_idx": 15982, "task_index": 13}, {"db_idx": 15983, "episode_idx": 66, "frame_idx": 1, "global_frame_idx": 15983, "task_index": 13}, {"db_idx": 15984, "episode_idx": 66, "frame_idx": 2, "global_frame_idx": 15984, "task_index": 13}, {"db_idx": 15985, "episode_idx": 66, "frame_idx": 3, "global_frame_idx": 15985, "task_index": 13}, {"db_idx": 15986, "episode_idx": 66, "frame_idx": 4, "global_frame_idx": 15986, "task_index": 13}, {"db_idx": 15987, "episode_idx": 66, "frame_idx": 5, "global_frame_idx": 15987, "task_index": 13}, {"db_idx": 15988, "episode_idx": 66, "frame_idx": 6, "global_frame_idx": 15988, "task_index": 13}, {"db_idx": 15989, "episode_idx": 66, "frame_idx": 7, "global_frame_idx": 15989, "task_index": 13}, {"db_idx": 15990, "episode_idx": 66, "frame_idx": 8, "global_frame_idx": 15990, "task_index": 13}, 
{"db_idx": 15991, "episode_idx": 66, "frame_idx": 9, "global_frame_idx": 15991, "task_index": 13}, {"db_idx": 15992, "episode_idx": 66, "frame_idx": 10, "global_frame_idx": 15992, "task_index": 13}, {"db_idx": 15993, "episode_idx": 66, "frame_idx": 11, "global_frame_idx": 15993, "task_index": 13}, {"db_idx": 15994, "episode_idx": 66, "frame_idx": 12, "global_frame_idx": 15994, "task_index": 13}, {"db_idx": 15995, "episode_idx": 66, "frame_idx": 13, "global_frame_idx": 15995, "task_index": 13}, {"db_idx": 15996, "episode_idx": 66, "frame_idx": 14, "global_frame_idx": 15996, "task_index": 13}, {"db_idx": 15997, "episode_idx": 66, "frame_idx": 15, "global_frame_idx": 15997, "task_index": 13}, {"db_idx": 15998, "episode_idx": 66, "frame_idx": 16, "global_frame_idx": 15998, "task_index": 13}, {"db_idx": 15999, "episode_idx": 66, "frame_idx": 17, "global_frame_idx": 15999, "task_index": 13}, {"db_idx": 16000, "episode_idx": 66, "frame_idx": 18, "global_frame_idx": 16000, "task_index": 13}, {"db_idx": 16001, "episode_idx": 66, "frame_idx": 19, "global_frame_idx": 16001, "task_index": 13}, {"db_idx": 16002, "episode_idx": 66, "frame_idx": 20, "global_frame_idx": 16002, "task_index": 13}, {"db_idx": 16003, "episode_idx": 66, "frame_idx": 21, "global_frame_idx": 16003, "task_index": 13}, {"db_idx": 16004, "episode_idx": 66, "frame_idx": 22, "global_frame_idx": 16004, "task_index": 13}, {"db_idx": 16005, "episode_idx": 66, "frame_idx": 23, "global_frame_idx": 16005, "task_index": 13}, {"db_idx": 16006, "episode_idx": 66, "frame_idx": 24, "global_frame_idx": 16006, "task_index": 13}, {"db_idx": 16007, "episode_idx": 66, "frame_idx": 25, "global_frame_idx": 16007, "task_index": 13}, {"db_idx": 16008, "episode_idx": 66, "frame_idx": 26, "global_frame_idx": 16008, "task_index": 13}, {"db_idx": 16009, "episode_idx": 66, "frame_idx": 27, "global_frame_idx": 16009, "task_index": 13}, {"db_idx": 16010, "episode_idx": 66, "frame_idx": 28, "global_frame_idx": 16010, "task_index": 13}, 
{"db_idx": 16011, "episode_idx": 66, "frame_idx": 29, "global_frame_idx": 16011, "task_index": 13}, {"db_idx": 16012, "episode_idx": 66, "frame_idx": 30, "global_frame_idx": 16012, "task_index": 13}, {"db_idx": 16013, "episode_idx": 66, "frame_idx": 31, "global_frame_idx": 16013, "task_index": 13}, {"db_idx": 16014, "episode_idx": 66, "frame_idx": 32, "global_frame_idx": 16014, "task_index": 13}, {"db_idx": 16015, "episode_idx": 66, "frame_idx": 33, "global_frame_idx": 16015, "task_index": 13}, {"db_idx": 16016, "episode_idx": 66, "frame_idx": 34, "global_frame_idx": 16016, "task_index": 13}, {"db_idx": 16017, "episode_idx": 66, "frame_idx": 35, "global_frame_idx": 16017, "task_index": 13}, {"db_idx": 16018, "episode_idx": 66, "frame_idx": 36, "global_frame_idx": 16018, "task_index": 13}, {"db_idx": 16019, "episode_idx": 66, "frame_idx": 37, "global_frame_idx": 16019, "task_index": 13}, {"db_idx": 16020, "episode_idx": 66, "frame_idx": 38, "global_frame_idx": 16020, "task_index": 13}, {"db_idx": 16021, "episode_idx": 66, "frame_idx": 39, "global_frame_idx": 16021, "task_index": 13}, {"db_idx": 16022, "episode_idx": 66, "frame_idx": 40, "global_frame_idx": 16022, "task_index": 13}, {"db_idx": 16023, "episode_idx": 66, "frame_idx": 41, "global_frame_idx": 16023, "task_index": 13}, {"db_idx": 16024, "episode_idx": 66, "frame_idx": 42, "global_frame_idx": 16024, "task_index": 13}, {"db_idx": 16025, "episode_idx": 66, "frame_idx": 43, "global_frame_idx": 16025, "task_index": 13}, {"db_idx": 16026, "episode_idx": 66, "frame_idx": 44, "global_frame_idx": 16026, "task_index": 13}, {"db_idx": 16027, "episode_idx": 66, "frame_idx": 45, "global_frame_idx": 16027, "task_index": 13}, {"db_idx": 16028, "episode_idx": 66, "frame_idx": 46, "global_frame_idx": 16028, "task_index": 13}, {"db_idx": 16029, "episode_idx": 66, "frame_idx": 47, "global_frame_idx": 16029, "task_index": 13}, {"db_idx": 16030, "episode_idx": 66, "frame_idx": 48, "global_frame_idx": 16030, "task_index": 13}, 
{"db_idx": 16031, "episode_idx": 66, "frame_idx": 49, "global_frame_idx": 16031, "task_index": 13}, {"db_idx": 16032, "episode_idx": 66, "frame_idx": 50, "global_frame_idx": 16032, "task_index": 13}, {"db_idx": 16033, "episode_idx": 66, "frame_idx": 51, "global_frame_idx": 16033, "task_index": 13}, {"db_idx": 16034, "episode_idx": 66, "frame_idx": 52, "global_frame_idx": 16034, "task_index": 13}, {"db_idx": 16035, "episode_idx": 66, "frame_idx": 53, "global_frame_idx": 16035, "task_index": 13}, {"db_idx": 16036, "episode_idx": 66, "frame_idx": 54, "global_frame_idx": 16036, "task_index": 13}, {"db_idx": 16037, "episode_idx": 66, "frame_idx": 55, "global_frame_idx": 16037, "task_index": 13}, {"db_idx": 16038, "episode_idx": 66, "frame_idx": 56, "global_frame_idx": 16038, "task_index": 13}, {"db_idx": 16039, "episode_idx": 66, "frame_idx": 57, "global_frame_idx": 16039, "task_index": 13}, {"db_idx": 16040, "episode_idx": 66, "frame_idx": 58, "global_frame_idx": 16040, "task_index": 13}, {"db_idx": 16041, "episode_idx": 66, "frame_idx": 59, "global_frame_idx": 16041, "task_index": 13}, {"db_idx": 16042, "episode_idx": 66, "frame_idx": 60, "global_frame_idx": 16042, "task_index": 13}, {"db_idx": 16043, "episode_idx": 66, "frame_idx": 61, "global_frame_idx": 16043, "task_index": 13}, {"db_idx": 16044, "episode_idx": 66, "frame_idx": 62, "global_frame_idx": 16044, "task_index": 13}, {"db_idx": 16045, "episode_idx": 66, "frame_idx": 63, "global_frame_idx": 16045, "task_index": 13}, {"db_idx": 16046, "episode_idx": 66, "frame_idx": 64, "global_frame_idx": 16046, "task_index": 13}, {"db_idx": 16047, "episode_idx": 66, "frame_idx": 65, "global_frame_idx": 16047, "task_index": 13}, {"db_idx": 16048, "episode_idx": 66, "frame_idx": 66, "global_frame_idx": 16048, "task_index": 13}, {"db_idx": 16049, "episode_idx": 66, "frame_idx": 67, "global_frame_idx": 16049, "task_index": 13}, {"db_idx": 16050, "episode_idx": 66, "frame_idx": 68, "global_frame_idx": 16050, "task_index": 13}, 
{"db_idx": 16051, "episode_idx": 66, "frame_idx": 69, "global_frame_idx": 16051, "task_index": 13}, {"db_idx": 16052, "episode_idx": 66, "frame_idx": 70, "global_frame_idx": 16052, "task_index": 13}, {"db_idx": 16053, "episode_idx": 66, "frame_idx": 71, "global_frame_idx": 16053, "task_index": 13}, {"db_idx": 16054, "episode_idx": 66, "frame_idx": 72, "global_frame_idx": 16054, "task_index": 13}, {"db_idx": 16055, "episode_idx": 66, "frame_idx": 73, "global_frame_idx": 16055, "task_index": 13}, {"db_idx": 16056, "episode_idx": 66, "frame_idx": 74, "global_frame_idx": 16056, "task_index": 13}, {"db_idx": 16057, "episode_idx": 66, "frame_idx": 75, "global_frame_idx": 16057, "task_index": 13}, {"db_idx": 16058, "episode_idx": 66, "frame_idx": 76, "global_frame_idx": 16058, "task_index": 13}, {"db_idx": 16059, "episode_idx": 66, "frame_idx": 77, "global_frame_idx": 16059, "task_index": 13}, {"db_idx": 16060, "episode_idx": 66, "frame_idx": 78, "global_frame_idx": 16060, "task_index": 13}, {"db_idx": 16061, "episode_idx": 66, "frame_idx": 79, "global_frame_idx": 16061, "task_index": 13}, {"db_idx": 16062, "episode_idx": 66, "frame_idx": 80, "global_frame_idx": 16062, "task_index": 13}, {"db_idx": 16063, "episode_idx": 66, "frame_idx": 81, "global_frame_idx": 16063, "task_index": 13}, {"db_idx": 16064, "episode_idx": 66, "frame_idx": 82, "global_frame_idx": 16064, "task_index": 13}, {"db_idx": 16065, "episode_idx": 66, "frame_idx": 83, "global_frame_idx": 16065, "task_index": 13}, {"db_idx": 16066, "episode_idx": 66, "frame_idx": 84, "global_frame_idx": 16066, "task_index": 13}, {"db_idx": 16067, "episode_idx": 66, "frame_idx": 85, "global_frame_idx": 16067, "task_index": 13}, {"db_idx": 16068, "episode_idx": 66, "frame_idx": 86, "global_frame_idx": 16068, "task_index": 13}, {"db_idx": 16069, "episode_idx": 66, "frame_idx": 87, "global_frame_idx": 16069, "task_index": 13}, {"db_idx": 16070, "episode_idx": 66, "frame_idx": 88, "global_frame_idx": 16070, "task_index": 13}, 
{"db_idx": 16071, "episode_idx": 66, "frame_idx": 89, "global_frame_idx": 16071, "task_index": 13}, {"db_idx": 16072, "episode_idx": 66, "frame_idx": 90, "global_frame_idx": 16072, "task_index": 13}, {"db_idx": 16073, "episode_idx": 66, "frame_idx": 91, "global_frame_idx": 16073, "task_index": 13}, {"db_idx": 16074, "episode_idx": 66, "frame_idx": 92, "global_frame_idx": 16074, "task_index": 13}, {"db_idx": 16075, "episode_idx": 66, "frame_idx": 93, "global_frame_idx": 16075, "task_index": 13}, {"db_idx": 16076, "episode_idx": 66, "frame_idx": 94, "global_frame_idx": 16076, "task_index": 13}, {"db_idx": 16077, "episode_idx": 66, "frame_idx": 95, "global_frame_idx": 16077, "task_index": 13}, {"db_idx": 16078, "episode_idx": 66, "frame_idx": 96, "global_frame_idx": 16078, "task_index": 13}, {"db_idx": 16079, "episode_idx": 66, "frame_idx": 97, "global_frame_idx": 16079, "task_index": 13}, {"db_idx": 16080, "episode_idx": 66, "frame_idx": 98, "global_frame_idx": 16080, "task_index": 13}, {"db_idx": 16081, "episode_idx": 66, "frame_idx": 99, "global_frame_idx": 16081, "task_index": 13}, {"db_idx": 16082, "episode_idx": 66, "frame_idx": 100, "global_frame_idx": 16082, "task_index": 13}, {"db_idx": 16083, "episode_idx": 66, "frame_idx": 101, "global_frame_idx": 16083, "task_index": 13}, {"db_idx": 16084, "episode_idx": 66, "frame_idx": 102, "global_frame_idx": 16084, "task_index": 13}, {"db_idx": 16085, "episode_idx": 66, "frame_idx": 103, "global_frame_idx": 16085, "task_index": 13}, {"db_idx": 16086, "episode_idx": 66, "frame_idx": 104, "global_frame_idx": 16086, "task_index": 13}, {"db_idx": 16087, "episode_idx": 66, "frame_idx": 105, "global_frame_idx": 16087, "task_index": 13}, {"db_idx": 16088, "episode_idx": 66, "frame_idx": 106, "global_frame_idx": 16088, "task_index": 13}, {"db_idx": 16089, "episode_idx": 66, "frame_idx": 107, "global_frame_idx": 16089, "task_index": 13}, {"db_idx": 16090, "episode_idx": 66, "frame_idx": 108, "global_frame_idx": 16090, 
"task_index": 13}, {"db_idx": 16091, "episode_idx": 66, "frame_idx": 109, "global_frame_idx": 16091, "task_index": 13}, {"db_idx": 16092, "episode_idx": 66, "frame_idx": 110, "global_frame_idx": 16092, "task_index": 13}, {"db_idx": 16093, "episode_idx": 66, "frame_idx": 111, "global_frame_idx": 16093, "task_index": 13}, {"db_idx": 16094, "episode_idx": 66, "frame_idx": 112, "global_frame_idx": 16094, "task_index": 13}, {"db_idx": 16095, "episode_idx": 66, "frame_idx": 113, "global_frame_idx": 16095, "task_index": 13}, {"db_idx": 16096, "episode_idx": 66, "frame_idx": 114, "global_frame_idx": 16096, "task_index": 13}, {"db_idx": 16097, "episode_idx": 66, "frame_idx": 115, "global_frame_idx": 16097, "task_index": 13}, {"db_idx": 16098, "episode_idx": 66, "frame_idx": 116, "global_frame_idx": 16098, "task_index": 13}, {"db_idx": 16099, "episode_idx": 66, "frame_idx": 117, "global_frame_idx": 16099, "task_index": 13}, {"db_idx": 16100, "episode_idx": 66, "frame_idx": 118, "global_frame_idx": 16100, "task_index": 13}, {"db_idx": 16101, "episode_idx": 66, "frame_idx": 119, "global_frame_idx": 16101, "task_index": 13}, {"db_idx": 16102, "episode_idx": 66, "frame_idx": 120, "global_frame_idx": 16102, "task_index": 13}, {"db_idx": 16103, "episode_idx": 66, "frame_idx": 121, "global_frame_idx": 16103, "task_index": 13}, {"db_idx": 16104, "episode_idx": 66, "frame_idx": 122, "global_frame_idx": 16104, "task_index": 13}, {"db_idx": 16105, "episode_idx": 66, "frame_idx": 123, "global_frame_idx": 16105, "task_index": 13}, {"db_idx": 16106, "episode_idx": 66, "frame_idx": 124, "global_frame_idx": 16106, "task_index": 13}, {"db_idx": 16107, "episode_idx": 66, "frame_idx": 125, "global_frame_idx": 16107, "task_index": 13}, {"db_idx": 16108, "episode_idx": 66, "frame_idx": 126, "global_frame_idx": 16108, "task_index": 13}, {"db_idx": 16109, "episode_idx": 66, "frame_idx": 127, "global_frame_idx": 16109, "task_index": 13}, {"db_idx": 16110, "episode_idx": 66, "frame_idx": 128, 
"global_frame_idx": 16110, "task_index": 13}, {"db_idx": 16111, "episode_idx": 66, "frame_idx": 129, "global_frame_idx": 16111, "task_index": 13}, {"db_idx": 16112, "episode_idx": 66, "frame_idx": 130, "global_frame_idx": 16112, "task_index": 13}, {"db_idx": 16113, "episode_idx": 67, "frame_idx": 0, "global_frame_idx": 16113, "task_index": 13}, {"db_idx": 16114, "episode_idx": 67, "frame_idx": 1, "global_frame_idx": 16114, "task_index": 13}, {"db_idx": 16115, "episode_idx": 67, "frame_idx": 2, "global_frame_idx": 16115, "task_index": 13}, {"db_idx": 16116, "episode_idx": 67, "frame_idx": 3, "global_frame_idx": 16116, "task_index": 13}, {"db_idx": 16117, "episode_idx": 67, "frame_idx": 4, "global_frame_idx": 16117, "task_index": 13}, {"db_idx": 16118, "episode_idx": 67, "frame_idx": 5, "global_frame_idx": 16118, "task_index": 13}, {"db_idx": 16119, "episode_idx": 67, "frame_idx": 6, "global_frame_idx": 16119, "task_index": 13}, {"db_idx": 16120, "episode_idx": 67, "frame_idx": 7, "global_frame_idx": 16120, "task_index": 13}, {"db_idx": 16121, "episode_idx": 67, "frame_idx": 8, "global_frame_idx": 16121, "task_index": 13}, {"db_idx": 16122, "episode_idx": 67, "frame_idx": 9, "global_frame_idx": 16122, "task_index": 13}, {"db_idx": 16123, "episode_idx": 67, "frame_idx": 10, "global_frame_idx": 16123, "task_index": 13}, {"db_idx": 16124, "episode_idx": 67, "frame_idx": 11, "global_frame_idx": 16124, "task_index": 13}, {"db_idx": 16125, "episode_idx": 67, "frame_idx": 12, "global_frame_idx": 16125, "task_index": 13}, {"db_idx": 16126, "episode_idx": 67, "frame_idx": 13, "global_frame_idx": 16126, "task_index": 13}, {"db_idx": 16127, "episode_idx": 67, "frame_idx": 14, "global_frame_idx": 16127, "task_index": 13}, {"db_idx": 16128, "episode_idx": 67, "frame_idx": 15, "global_frame_idx": 16128, "task_index": 13}, {"db_idx": 16129, "episode_idx": 67, "frame_idx": 16, "global_frame_idx": 16129, "task_index": 13}, {"db_idx": 16130, "episode_idx": 67, "frame_idx": 17, 
"global_frame_idx": 16130, "task_index": 13}, {"db_idx": 16131, "episode_idx": 67, "frame_idx": 18, "global_frame_idx": 16131, "task_index": 13}, {"db_idx": 16132, "episode_idx": 67, "frame_idx": 19, "global_frame_idx": 16132, "task_index": 13}, {"db_idx": 16133, "episode_idx": 67, "frame_idx": 20, "global_frame_idx": 16133, "task_index": 13}, {"db_idx": 16134, "episode_idx": 67, "frame_idx": 21, "global_frame_idx": 16134, "task_index": 13}, {"db_idx": 16135, "episode_idx": 67, "frame_idx": 22, "global_frame_idx": 16135, "task_index": 13}, {"db_idx": 16136, "episode_idx": 67, "frame_idx": 23, "global_frame_idx": 16136, "task_index": 13}, {"db_idx": 16137, "episode_idx": 67, "frame_idx": 24, "global_frame_idx": 16137, "task_index": 13}, {"db_idx": 16138, "episode_idx": 67, "frame_idx": 25, "global_frame_idx": 16138, "task_index": 13}, {"db_idx": 16139, "episode_idx": 67, "frame_idx": 26, "global_frame_idx": 16139, "task_index": 13}, {"db_idx": 16140, "episode_idx": 67, "frame_idx": 27, "global_frame_idx": 16140, "task_index": 13}, {"db_idx": 16141, "episode_idx": 67, "frame_idx": 28, "global_frame_idx": 16141, "task_index": 13}, {"db_idx": 16142, "episode_idx": 67, "frame_idx": 29, "global_frame_idx": 16142, "task_index": 13}, {"db_idx": 16143, "episode_idx": 67, "frame_idx": 30, "global_frame_idx": 16143, "task_index": 13}, {"db_idx": 16144, "episode_idx": 67, "frame_idx": 31, "global_frame_idx": 16144, "task_index": 13}, {"db_idx": 16145, "episode_idx": 67, "frame_idx": 32, "global_frame_idx": 16145, "task_index": 13}, {"db_idx": 16146, "episode_idx": 67, "frame_idx": 33, "global_frame_idx": 16146, "task_index": 13}, {"db_idx": 16147, "episode_idx": 67, "frame_idx": 34, "global_frame_idx": 16147, "task_index": 13}, {"db_idx": 16148, "episode_idx": 67, "frame_idx": 35, "global_frame_idx": 16148, "task_index": 13}, {"db_idx": 16149, "episode_idx": 67, "frame_idx": 36, "global_frame_idx": 16149, "task_index": 13}, {"db_idx": 16150, "episode_idx": 67, "frame_idx": 37, 
"global_frame_idx": 16150, "task_index": 13}, {"db_idx": 16151, "episode_idx": 67, "frame_idx": 38, "global_frame_idx": 16151, "task_index": 13}, {"db_idx": 16152, "episode_idx": 67, "frame_idx": 39, "global_frame_idx": 16152, "task_index": 13}, {"db_idx": 16153, "episode_idx": 67, "frame_idx": 40, "global_frame_idx": 16153, "task_index": 13}, {"db_idx": 16154, "episode_idx": 67, "frame_idx": 41, "global_frame_idx": 16154, "task_index": 13}, {"db_idx": 16155, "episode_idx": 67, "frame_idx": 42, "global_frame_idx": 16155, "task_index": 13}, {"db_idx": 16156, "episode_idx": 67, "frame_idx": 43, "global_frame_idx": 16156, "task_index": 13}, {"db_idx": 16157, "episode_idx": 67, "frame_idx": 44, "global_frame_idx": 16157, "task_index": 13}, {"db_idx": 16158, "episode_idx": 67, "frame_idx": 45, "global_frame_idx": 16158, "task_index": 13}, {"db_idx": 16159, "episode_idx": 67, "frame_idx": 46, "global_frame_idx": 16159, "task_index": 13}, {"db_idx": 16160, "episode_idx": 67, "frame_idx": 47, "global_frame_idx": 16160, "task_index": 13}, {"db_idx": 16161, "episode_idx": 67, "frame_idx": 48, "global_frame_idx": 16161, "task_index": 13}, {"db_idx": 16162, "episode_idx": 67, "frame_idx": 49, "global_frame_idx": 16162, "task_index": 13}, {"db_idx": 16163, "episode_idx": 67, "frame_idx": 50, "global_frame_idx": 16163, "task_index": 13}, {"db_idx": 16164, "episode_idx": 67, "frame_idx": 51, "global_frame_idx": 16164, "task_index": 13}, {"db_idx": 16165, "episode_idx": 67, "frame_idx": 52, "global_frame_idx": 16165, "task_index": 13}, {"db_idx": 16166, "episode_idx": 67, "frame_idx": 53, "global_frame_idx": 16166, "task_index": 13}, {"db_idx": 16167, "episode_idx": 67, "frame_idx": 54, "global_frame_idx": 16167, "task_index": 13}, {"db_idx": 16168, "episode_idx": 67, "frame_idx": 55, "global_frame_idx": 16168, "task_index": 13}, {"db_idx": 16169, "episode_idx": 67, "frame_idx": 56, "global_frame_idx": 16169, "task_index": 13}, {"db_idx": 16170, "episode_idx": 67, "frame_idx": 57, 
"global_frame_idx": 16170, "task_index": 13}, {"db_idx": 16171, "episode_idx": 67, "frame_idx": 58, "global_frame_idx": 16171, "task_index": 13}, {"db_idx": 16172, "episode_idx": 67, "frame_idx": 59, "global_frame_idx": 16172, "task_index": 13}, {"db_idx": 16173, "episode_idx": 67, "frame_idx": 60, "global_frame_idx": 16173, "task_index": 13}, {"db_idx": 16174, "episode_idx": 67, "frame_idx": 61, "global_frame_idx": 16174, "task_index": 13}, {"db_idx": 16175, "episode_idx": 67, "frame_idx": 62, "global_frame_idx": 16175, "task_index": 13}, {"db_idx": 16176, "episode_idx": 67, "frame_idx": 63, "global_frame_idx": 16176, "task_index": 13}, {"db_idx": 16177, "episode_idx": 67, "frame_idx": 64, "global_frame_idx": 16177, "task_index": 13}, {"db_idx": 16178, "episode_idx": 67, "frame_idx": 65, "global_frame_idx": 16178, "task_index": 13}, {"db_idx": 16179, "episode_idx": 67, "frame_idx": 66, "global_frame_idx": 16179, "task_index": 13}, {"db_idx": 16180, "episode_idx": 67, "frame_idx": 67, "global_frame_idx": 16180, "task_index": 13}, {"db_idx": 16181, "episode_idx": 67, "frame_idx": 68, "global_frame_idx": 16181, "task_index": 13}, {"db_idx": 16182, "episode_idx": 67, "frame_idx": 69, "global_frame_idx": 16182, "task_index": 13}, {"db_idx": 16183, "episode_idx": 67, "frame_idx": 70, "global_frame_idx": 16183, "task_index": 13}, {"db_idx": 16184, "episode_idx": 67, "frame_idx": 71, "global_frame_idx": 16184, "task_index": 13}, {"db_idx": 16185, "episode_idx": 67, "frame_idx": 72, "global_frame_idx": 16185, "task_index": 13}, {"db_idx": 16186, "episode_idx": 67, "frame_idx": 73, "global_frame_idx": 16186, "task_index": 13}, {"db_idx": 16187, "episode_idx": 67, "frame_idx": 74, "global_frame_idx": 16187, "task_index": 13}, {"db_idx": 16188, "episode_idx": 67, "frame_idx": 75, "global_frame_idx": 16188, "task_index": 13}, {"db_idx": 16189, "episode_idx": 67, "frame_idx": 76, "global_frame_idx": 16189, "task_index": 13}, {"db_idx": 16190, "episode_idx": 67, "frame_idx": 77, 
"global_frame_idx": 16190, "task_index": 13}, {"db_idx": 16191, "episode_idx": 67, "frame_idx": 78, "global_frame_idx": 16191, "task_index": 13}, {"db_idx": 16192, "episode_idx": 67, "frame_idx": 79, "global_frame_idx": 16192, "task_index": 13}, {"db_idx": 16193, "episode_idx": 67, "frame_idx": 80, "global_frame_idx": 16193, "task_index": 13}, {"db_idx": 16194, "episode_idx": 67, "frame_idx": 81, "global_frame_idx": 16194, "task_index": 13}, {"db_idx": 16195, "episode_idx": 67, "frame_idx": 82, "global_frame_idx": 16195, "task_index": 13}, {"db_idx": 16196, "episode_idx": 67, "frame_idx": 83, "global_frame_idx": 16196, "task_index": 13}, {"db_idx": 16197, "episode_idx": 67, "frame_idx": 84, "global_frame_idx": 16197, "task_index": 13}, {"db_idx": 16198, "episode_idx": 67, "frame_idx": 85, "global_frame_idx": 16198, "task_index": 13}, {"db_idx": 16199, "episode_idx": 67, "frame_idx": 86, "global_frame_idx": 16199, "task_index": 13}, {"db_idx": 16200, "episode_idx": 67, "frame_idx": 87, "global_frame_idx": 16200, "task_index": 13}, {"db_idx": 16201, "episode_idx": 67, "frame_idx": 88, "global_frame_idx": 16201, "task_index": 13}, {"db_idx": 16202, "episode_idx": 67, "frame_idx": 89, "global_frame_idx": 16202, "task_index": 13}, {"db_idx": 16203, "episode_idx": 67, "frame_idx": 90, "global_frame_idx": 16203, "task_index": 13}, {"db_idx": 16204, "episode_idx": 67, "frame_idx": 91, "global_frame_idx": 16204, "task_index": 13}, {"db_idx": 16205, "episode_idx": 67, "frame_idx": 92, "global_frame_idx": 16205, "task_index": 13}, {"db_idx": 16206, "episode_idx": 67, "frame_idx": 93, "global_frame_idx": 16206, "task_index": 13}, {"db_idx": 16207, "episode_idx": 67, "frame_idx": 94, "global_frame_idx": 16207, "task_index": 13}, {"db_idx": 16208, "episode_idx": 67, "frame_idx": 95, "global_frame_idx": 16208, "task_index": 13}, {"db_idx": 16209, "episode_idx": 67, "frame_idx": 96, "global_frame_idx": 16209, "task_index": 13}, {"db_idx": 16210, "episode_idx": 67, "frame_idx": 97, 
"global_frame_idx": 16210, "task_index": 13}, {"db_idx": 16211, "episode_idx": 67, "frame_idx": 98, "global_frame_idx": 16211, "task_index": 13}, {"db_idx": 16212, "episode_idx": 67, "frame_idx": 99, "global_frame_idx": 16212, "task_index": 13}, {"db_idx": 16213, "episode_idx": 67, "frame_idx": 100, "global_frame_idx": 16213, "task_index": 13}, {"db_idx": 16214, "episode_idx": 67, "frame_idx": 101, "global_frame_idx": 16214, "task_index": 13}, {"db_idx": 16215, "episode_idx": 67, "frame_idx": 102, "global_frame_idx": 16215, "task_index": 13}, {"db_idx": 16216, "episode_idx": 67, "frame_idx": 103, "global_frame_idx": 16216, "task_index": 13}, {"db_idx": 16217, "episode_idx": 67, "frame_idx": 104, "global_frame_idx": 16217, "task_index": 13}, {"db_idx": 16218, "episode_idx": 67, "frame_idx": 105, "global_frame_idx": 16218, "task_index": 13}, {"db_idx": 16219, "episode_idx": 67, "frame_idx": 106, "global_frame_idx": 16219, "task_index": 13}, {"db_idx": 16220, "episode_idx": 67, "frame_idx": 107, "global_frame_idx": 16220, "task_index": 13}, {"db_idx": 16221, "episode_idx": 67, "frame_idx": 108, "global_frame_idx": 16221, "task_index": 13}, {"db_idx": 16222, "episode_idx": 67, "frame_idx": 109, "global_frame_idx": 16222, "task_index": 13}, {"db_idx": 16223, "episode_idx": 67, "frame_idx": 110, "global_frame_idx": 16223, "task_index": 13}, {"db_idx": 16224, "episode_idx": 67, "frame_idx": 111, "global_frame_idx": 16224, "task_index": 13}, {"db_idx": 16225, "episode_idx": 67, "frame_idx": 112, "global_frame_idx": 16225, "task_index": 13}, {"db_idx": 16226, "episode_idx": 67, "frame_idx": 113, "global_frame_idx": 16226, "task_index": 13}, {"db_idx": 16227, "episode_idx": 67, "frame_idx": 114, "global_frame_idx": 16227, "task_index": 13}, {"db_idx": 16228, "episode_idx": 67, "frame_idx": 115, "global_frame_idx": 16228, "task_index": 13}, {"db_idx": 16229, "episode_idx": 67, "frame_idx": 116, "global_frame_idx": 16229, "task_index": 13}, {"db_idx": 16230, "episode_idx": 67, 
"frame_idx": 117, "global_frame_idx": 16230, "task_index": 13}, {"db_idx": 16231, "episode_idx": 67, "frame_idx": 118, "global_frame_idx": 16231, "task_index": 13}, {"db_idx": 16232, "episode_idx": 67, "frame_idx": 119, "global_frame_idx": 16232, "task_index": 13}, {"db_idx": 16233, "episode_idx": 67, "frame_idx": 120, "global_frame_idx": 16233, "task_index": 13}, {"db_idx": 16234, "episode_idx": 67, "frame_idx": 121, "global_frame_idx": 16234, "task_index": 13}, {"db_idx": 16235, "episode_idx": 67, "frame_idx": 122, "global_frame_idx": 16235, "task_index": 13}, {"db_idx": 16236, "episode_idx": 67, "frame_idx": 123, "global_frame_idx": 16236, "task_index": 13}, {"db_idx": 16237, "episode_idx": 67, "frame_idx": 124, "global_frame_idx": 16237, "task_index": 13}, {"db_idx": 16238, "episode_idx": 67, "frame_idx": 125, "global_frame_idx": 16238, "task_index": 13}, {"db_idx": 16239, "episode_idx": 67, "frame_idx": 126, "global_frame_idx": 16239, "task_index": 13}, {"db_idx": 16240, "episode_idx": 67, "frame_idx": 127, "global_frame_idx": 16240, "task_index": 13}, {"db_idx": 16241, "episode_idx": 67, "frame_idx": 128, "global_frame_idx": 16241, "task_index": 13}, {"db_idx": 16242, "episode_idx": 67, "frame_idx": 129, "global_frame_idx": 16242, "task_index": 13}, {"db_idx": 16243, "episode_idx": 67, "frame_idx": 130, "global_frame_idx": 16243, "task_index": 13}, {"db_idx": 16244, "episode_idx": 67, "frame_idx": 131, "global_frame_idx": 16244, "task_index": 13}, {"db_idx": 16245, "episode_idx": 67, "frame_idx": 132, "global_frame_idx": 16245, "task_index": 13}, {"db_idx": 16246, "episode_idx": 67, "frame_idx": 133, "global_frame_idx": 16246, "task_index": 13}, {"db_idx": 16247, "episode_idx": 67, "frame_idx": 134, "global_frame_idx": 16247, "task_index": 13}, {"db_idx": 16248, "episode_idx": 67, "frame_idx": 135, "global_frame_idx": 16248, "task_index": 13}, {"db_idx": 16249, "episode_idx": 67, "frame_idx": 136, "global_frame_idx": 16249, "task_index": 13}, {"db_idx": 
16250, "episode_idx": 67, "frame_idx": 137, "global_frame_idx": 16250, "task_index": 13}, {"db_idx": 16251, "episode_idx": 67, "frame_idx": 138, "global_frame_idx": 16251, "task_index": 13}, {"db_idx": 16252, "episode_idx": 67, "frame_idx": 139, "global_frame_idx": 16252, "task_index": 13}, {"db_idx": 16253, "episode_idx": 67, "frame_idx": 140, "global_frame_idx": 16253, "task_index": 13}, {"db_idx": 16254, "episode_idx": 67, "frame_idx": 141, "global_frame_idx": 16254, "task_index": 13}, {"db_idx": 16255, "episode_idx": 67, "frame_idx": 142, "global_frame_idx": 16255, "task_index": 13}, {"db_idx": 16256, "episode_idx": 67, "frame_idx": 143, "global_frame_idx": 16256, "task_index": 13}, {"db_idx": 16257, "episode_idx": 67, "frame_idx": 144, "global_frame_idx": 16257, "task_index": 13}, {"db_idx": 16258, "episode_idx": 67, "frame_idx": 145, "global_frame_idx": 16258, "task_index": 13}, {"db_idx": 16259, "episode_idx": 67, "frame_idx": 146, "global_frame_idx": 16259, "task_index": 13}, {"db_idx": 16260, "episode_idx": 67, "frame_idx": 147, "global_frame_idx": 16260, "task_index": 13}, {"db_idx": 16261, "episode_idx": 67, "frame_idx": 148, "global_frame_idx": 16261, "task_index": 13}, {"db_idx": 16262, "episode_idx": 67, "frame_idx": 149, "global_frame_idx": 16262, "task_index": 13}, {"db_idx": 16263, "episode_idx": 67, "frame_idx": 150, "global_frame_idx": 16263, "task_index": 13}, {"db_idx": 16264, "episode_idx": 67, "frame_idx": 151, "global_frame_idx": 16264, "task_index": 13}, {"db_idx": 16265, "episode_idx": 67, "frame_idx": 152, "global_frame_idx": 16265, "task_index": 13}, {"db_idx": 16266, "episode_idx": 67, "frame_idx": 153, "global_frame_idx": 16266, "task_index": 13}, {"db_idx": 16267, "episode_idx": 67, "frame_idx": 154, "global_frame_idx": 16267, "task_index": 13}, {"db_idx": 16268, "episode_idx": 67, "frame_idx": 155, "global_frame_idx": 16268, "task_index": 13}, {"db_idx": 16269, "episode_idx": 67, "frame_idx": 156, "global_frame_idx": 16269, 
"task_index": 13}, {"db_idx": 16270, "episode_idx": 67, "frame_idx": 157, "global_frame_idx": 16270, "task_index": 13}, {"db_idx": 16271, "episode_idx": 67, "frame_idx": 158, "global_frame_idx": 16271, "task_index": 13}, {"db_idx": 16272, "episode_idx": 67, "frame_idx": 159, "global_frame_idx": 16272, "task_index": 13}, {"db_idx": 16273, "episode_idx": 67, "frame_idx": 160, "global_frame_idx": 16273, "task_index": 13}, {"db_idx": 16274, "episode_idx": 67, "frame_idx": 161, "global_frame_idx": 16274, "task_index": 13}, {"db_idx": 16275, "episode_idx": 67, "frame_idx": 162, "global_frame_idx": 16275, "task_index": 13}, {"db_idx": 16276, "episode_idx": 67, "frame_idx": 163, "global_frame_idx": 16276, "task_index": 13}, {"db_idx": 16277, "episode_idx": 67, "frame_idx": 164, "global_frame_idx": 16277, "task_index": 13}, {"db_idx": 16278, "episode_idx": 67, "frame_idx": 165, "global_frame_idx": 16278, "task_index": 13}, {"db_idx": 16279, "episode_idx": 67, "frame_idx": 166, "global_frame_idx": 16279, "task_index": 13}, {"db_idx": 16280, "episode_idx": 67, "frame_idx": 167, "global_frame_idx": 16280, "task_index": 13}, {"db_idx": 16281, "episode_idx": 67, "frame_idx": 168, "global_frame_idx": 16281, "task_index": 13}, {"db_idx": 16282, "episode_idx": 67, "frame_idx": 169, "global_frame_idx": 16282, "task_index": 13}, {"db_idx": 16283, "episode_idx": 67, "frame_idx": 170, "global_frame_idx": 16283, "task_index": 13}, {"db_idx": 16284, "episode_idx": 67, "frame_idx": 171, "global_frame_idx": 16284, "task_index": 13}, {"db_idx": 16285, "episode_idx": 67, "frame_idx": 172, "global_frame_idx": 16285, "task_index": 13}, {"db_idx": 16286, "episode_idx": 67, "frame_idx": 173, "global_frame_idx": 16286, "task_index": 13}, {"db_idx": 16287, "episode_idx": 67, "frame_idx": 174, "global_frame_idx": 16287, "task_index": 13}, {"db_idx": 16288, "episode_idx": 67, "frame_idx": 175, "global_frame_idx": 16288, "task_index": 13}, {"db_idx": 16289, "episode_idx": 67, "frame_idx": 176, 
"global_frame_idx": 16289, "task_index": 13}, {"db_idx": 16290, "episode_idx": 67, "frame_idx": 177, "global_frame_idx": 16290, "task_index": 13}, {"db_idx": 16291, "episode_idx": 67, "frame_idx": 178, "global_frame_idx": 16291, "task_index": 13}, {"db_idx": 16292, "episode_idx": 67, "frame_idx": 179, "global_frame_idx": 16292, "task_index": 13}, {"db_idx": 16293, "episode_idx": 68, "frame_idx": 0, "global_frame_idx": 16293, "task_index": 13}, {"db_idx": 16294, "episode_idx": 68, "frame_idx": 1, "global_frame_idx": 16294, "task_index": 13}, {"db_idx": 16295, "episode_idx": 68, "frame_idx": 2, "global_frame_idx": 16295, "task_index": 13}, {"db_idx": 16296, "episode_idx": 68, "frame_idx": 3, "global_frame_idx": 16296, "task_index": 13}, {"db_idx": 16297, "episode_idx": 68, "frame_idx": 4, "global_frame_idx": 16297, "task_index": 13}, {"db_idx": 16298, "episode_idx": 68, "frame_idx": 5, "global_frame_idx": 16298, "task_index": 13}, {"db_idx": 16299, "episode_idx": 68, "frame_idx": 6, "global_frame_idx": 16299, "task_index": 13}, {"db_idx": 16300, "episode_idx": 68, "frame_idx": 7, "global_frame_idx": 16300, "task_index": 13}, {"db_idx": 16301, "episode_idx": 68, "frame_idx": 8, "global_frame_idx": 16301, "task_index": 13}, {"db_idx": 16302, "episode_idx": 68, "frame_idx": 9, "global_frame_idx": 16302, "task_index": 13}, {"db_idx": 16303, "episode_idx": 68, "frame_idx": 10, "global_frame_idx": 16303, "task_index": 13}, {"db_idx": 16304, "episode_idx": 68, "frame_idx": 11, "global_frame_idx": 16304, "task_index": 13}, {"db_idx": 16305, "episode_idx": 68, "frame_idx": 12, "global_frame_idx": 16305, "task_index": 13}, {"db_idx": 16306, "episode_idx": 68, "frame_idx": 13, "global_frame_idx": 16306, "task_index": 13}, {"db_idx": 16307, "episode_idx": 68, "frame_idx": 14, "global_frame_idx": 16307, "task_index": 13}, {"db_idx": 16308, "episode_idx": 68, "frame_idx": 15, "global_frame_idx": 16308, "task_index": 13}, {"db_idx": 16309, "episode_idx": 68, "frame_idx": 16, 
"global_frame_idx": 16309, "task_index": 13}, {"db_idx": 16310, "episode_idx": 68, "frame_idx": 17, "global_frame_idx": 16310, "task_index": 13}, {"db_idx": 16311, "episode_idx": 68, "frame_idx": 18, "global_frame_idx": 16311, "task_index": 13}, {"db_idx": 16312, "episode_idx": 68, "frame_idx": 19, "global_frame_idx": 16312, "task_index": 13}, {"db_idx": 16313, "episode_idx": 68, "frame_idx": 20, "global_frame_idx": 16313, "task_index": 13}, {"db_idx": 16314, "episode_idx": 68, "frame_idx": 21, "global_frame_idx": 16314, "task_index": 13}, {"db_idx": 16315, "episode_idx": 68, "frame_idx": 22, "global_frame_idx": 16315, "task_index": 13}, {"db_idx": 16316, "episode_idx": 68, "frame_idx": 23, "global_frame_idx": 16316, "task_index": 13}, {"db_idx": 16317, "episode_idx": 68, "frame_idx": 24, "global_frame_idx": 16317, "task_index": 13}, {"db_idx": 16318, "episode_idx": 68, "frame_idx": 25, "global_frame_idx": 16318, "task_index": 13}, {"db_idx": 16319, "episode_idx": 68, "frame_idx": 26, "global_frame_idx": 16319, "task_index": 13}, {"db_idx": 16320, "episode_idx": 68, "frame_idx": 27, "global_frame_idx": 16320, "task_index": 13}, {"db_idx": 16321, "episode_idx": 68, "frame_idx": 28, "global_frame_idx": 16321, "task_index": 13}, {"db_idx": 16322, "episode_idx": 68, "frame_idx": 29, "global_frame_idx": 16322, "task_index": 13}, {"db_idx": 16323, "episode_idx": 68, "frame_idx": 30, "global_frame_idx": 16323, "task_index": 13}, {"db_idx": 16324, "episode_idx": 68, "frame_idx": 31, "global_frame_idx": 16324, "task_index": 13}, {"db_idx": 16325, "episode_idx": 68, "frame_idx": 32, "global_frame_idx": 16325, "task_index": 13}, {"db_idx": 16326, "episode_idx": 68, "frame_idx": 33, "global_frame_idx": 16326, "task_index": 13}, {"db_idx": 16327, "episode_idx": 68, "frame_idx": 34, "global_frame_idx": 16327, "task_index": 13}, {"db_idx": 16328, "episode_idx": 68, "frame_idx": 35, "global_frame_idx": 16328, "task_index": 13}, {"db_idx": 16329, "episode_idx": 68, "frame_idx": 36, 
"global_frame_idx": 16329, "task_index": 13}, {"db_idx": 16330, "episode_idx": 68, "frame_idx": 37, "global_frame_idx": 16330, "task_index": 13}, {"db_idx": 16331, "episode_idx": 68, "frame_idx": 38, "global_frame_idx": 16331, "task_index": 13}, {"db_idx": 16332, "episode_idx": 68, "frame_idx": 39, "global_frame_idx": 16332, "task_index": 13}, {"db_idx": 16333, "episode_idx": 68, "frame_idx": 40, "global_frame_idx": 16333, "task_index": 13}, {"db_idx": 16334, "episode_idx": 68, "frame_idx": 41, "global_frame_idx": 16334, "task_index": 13}, {"db_idx": 16335, "episode_idx": 68, "frame_idx": 42, "global_frame_idx": 16335, "task_index": 13}, {"db_idx": 16336, "episode_idx": 68, "frame_idx": 43, "global_frame_idx": 16336, "task_index": 13}, {"db_idx": 16337, "episode_idx": 68, "frame_idx": 44, "global_frame_idx": 16337, "task_index": 13}, {"db_idx": 16338, "episode_idx": 68, "frame_idx": 45, "global_frame_idx": 16338, "task_index": 13}, {"db_idx": 16339, "episode_idx": 68, "frame_idx": 46, "global_frame_idx": 16339, "task_index": 13}, {"db_idx": 16340, "episode_idx": 68, "frame_idx": 47, "global_frame_idx": 16340, "task_index": 13}, {"db_idx": 16341, "episode_idx": 68, "frame_idx": 48, "global_frame_idx": 16341, "task_index": 13}, {"db_idx": 16342, "episode_idx": 68, "frame_idx": 49, "global_frame_idx": 16342, "task_index": 13}, {"db_idx": 16343, "episode_idx": 68, "frame_idx": 50, "global_frame_idx": 16343, "task_index": 13}, {"db_idx": 16344, "episode_idx": 68, "frame_idx": 51, "global_frame_idx": 16344, "task_index": 13}, {"db_idx": 16345, "episode_idx": 68, "frame_idx": 52, "global_frame_idx": 16345, "task_index": 13}, {"db_idx": 16346, "episode_idx": 68, "frame_idx": 53, "global_frame_idx": 16346, "task_index": 13}, {"db_idx": 16347, "episode_idx": 68, "frame_idx": 54, "global_frame_idx": 16347, "task_index": 13}, {"db_idx": 16348, "episode_idx": 68, "frame_idx": 55, "global_frame_idx": 16348, "task_index": 13}, {"db_idx": 16349, "episode_idx": 68, "frame_idx": 56, 
"global_frame_idx": 16349, "task_index": 13}, {"db_idx": 16350, "episode_idx": 68, "frame_idx": 57, "global_frame_idx": 16350, "task_index": 13}, {"db_idx": 16351, "episode_idx": 68, "frame_idx": 58, "global_frame_idx": 16351, "task_index": 13}, {"db_idx": 16352, "episode_idx": 68, "frame_idx": 59, "global_frame_idx": 16352, "task_index": 13}, {"db_idx": 16353, "episode_idx": 68, "frame_idx": 60, "global_frame_idx": 16353, "task_index": 13}, {"db_idx": 16354, "episode_idx": 68, "frame_idx": 61, "global_frame_idx": 16354, "task_index": 13}, {"db_idx": 16355, "episode_idx": 68, "frame_idx": 62, "global_frame_idx": 16355, "task_index": 13}, {"db_idx": 16356, "episode_idx": 68, "frame_idx": 63, "global_frame_idx": 16356, "task_index": 13}, {"db_idx": 16357, "episode_idx": 68, "frame_idx": 64, "global_frame_idx": 16357, "task_index": 13}, {"db_idx": 16358, "episode_idx": 68, "frame_idx": 65, "global_frame_idx": 16358, "task_index": 13}, {"db_idx": 16359, "episode_idx": 68, "frame_idx": 66, "global_frame_idx": 16359, "task_index": 13}, {"db_idx": 16360, "episode_idx": 68, "frame_idx": 67, "global_frame_idx": 16360, "task_index": 13}, {"db_idx": 16361, "episode_idx": 68, "frame_idx": 68, "global_frame_idx": 16361, "task_index": 13}, {"db_idx": 16362, "episode_idx": 68, "frame_idx": 69, "global_frame_idx": 16362, "task_index": 13}, {"db_idx": 16363, "episode_idx": 68, "frame_idx": 70, "global_frame_idx": 16363, "task_index": 13}, {"db_idx": 16364, "episode_idx": 68, "frame_idx": 71, "global_frame_idx": 16364, "task_index": 13}, {"db_idx": 16365, "episode_idx": 68, "frame_idx": 72, "global_frame_idx": 16365, "task_index": 13}, {"db_idx": 16366, "episode_idx": 68, "frame_idx": 73, "global_frame_idx": 16366, "task_index": 13}, {"db_idx": 16367, "episode_idx": 68, "frame_idx": 74, "global_frame_idx": 16367, "task_index": 13}, {"db_idx": 16368, "episode_idx": 68, "frame_idx": 75, "global_frame_idx": 16368, "task_index": 13}, {"db_idx": 16369, "episode_idx": 68, "frame_idx": 76, 
"global_frame_idx": 16369, "task_index": 13}, {"db_idx": 16370, "episode_idx": 68, "frame_idx": 77, "global_frame_idx": 16370, "task_index": 13}, {"db_idx": 16371, "episode_idx": 68, "frame_idx": 78, "global_frame_idx": 16371, "task_index": 13}, {"db_idx": 16372, "episode_idx": 68, "frame_idx": 79, "global_frame_idx": 16372, "task_index": 13}, {"db_idx": 16373, "episode_idx": 68, "frame_idx": 80, "global_frame_idx": 16373, "task_index": 13}, {"db_idx": 16374, "episode_idx": 68, "frame_idx": 81, "global_frame_idx": 16374, "task_index": 13}, {"db_idx": 16375, "episode_idx": 68, "frame_idx": 82, "global_frame_idx": 16375, "task_index": 13}, {"db_idx": 16376, "episode_idx": 68, "frame_idx": 83, "global_frame_idx": 16376, "task_index": 13}, {"db_idx": 16377, "episode_idx": 68, "frame_idx": 84, "global_frame_idx": 16377, "task_index": 13}, {"db_idx": 16378, "episode_idx": 68, "frame_idx": 85, "global_frame_idx": 16378, "task_index": 13}, {"db_idx": 16379, "episode_idx": 68, "frame_idx": 86, "global_frame_idx": 16379, "task_index": 13}, {"db_idx": 16380, "episode_idx": 68, "frame_idx": 87, "global_frame_idx": 16380, "task_index": 13}, {"db_idx": 16381, "episode_idx": 68, "frame_idx": 88, "global_frame_idx": 16381, "task_index": 13}, {"db_idx": 16382, "episode_idx": 68, "frame_idx": 89, "global_frame_idx": 16382, "task_index": 13}, {"db_idx": 16383, "episode_idx": 68, "frame_idx": 90, "global_frame_idx": 16383, "task_index": 13}, {"db_idx": 16384, "episode_idx": 68, "frame_idx": 91, "global_frame_idx": 16384, "task_index": 13}, {"db_idx": 16385, "episode_idx": 68, "frame_idx": 92, "global_frame_idx": 16385, "task_index": 13}, {"db_idx": 16386, "episode_idx": 68, "frame_idx": 93, "global_frame_idx": 16386, "task_index": 13}, {"db_idx": 16387, "episode_idx": 68, "frame_idx": 94, "global_frame_idx": 16387, "task_index": 13}, {"db_idx": 16388, "episode_idx": 68, "frame_idx": 95, "global_frame_idx": 16388, "task_index": 13}, {"db_idx": 16389, "episode_idx": 68, "frame_idx": 96, 
"global_frame_idx": 16389, "task_index": 13}, {"db_idx": 16390, "episode_idx": 68, "frame_idx": 97, "global_frame_idx": 16390, "task_index": 13}, {"db_idx": 16391, "episode_idx": 68, "frame_idx": 98, "global_frame_idx": 16391, "task_index": 13}, {"db_idx": 16392, "episode_idx": 68, "frame_idx": 99, "global_frame_idx": 16392, "task_index": 13}, {"db_idx": 16393, "episode_idx": 68, "frame_idx": 100, "global_frame_idx": 16393, "task_index": 13}, {"db_idx": 16394, "episode_idx": 68, "frame_idx": 101, "global_frame_idx": 16394, "task_index": 13}, {"db_idx": 16395, "episode_idx": 68, "frame_idx": 102, "global_frame_idx": 16395, "task_index": 13}, {"db_idx": 16396, "episode_idx": 68, "frame_idx": 103, "global_frame_idx": 16396, "task_index": 13}, {"db_idx": 16397, "episode_idx": 68, "frame_idx": 104, "global_frame_idx": 16397, "task_index": 13}, {"db_idx": 16398, "episode_idx": 68, "frame_idx": 105, "global_frame_idx": 16398, "task_index": 13}, {"db_idx": 16399, "episode_idx": 68, "frame_idx": 106, "global_frame_idx": 16399, "task_index": 13}, {"db_idx": 16400, "episode_idx": 68, "frame_idx": 107, "global_frame_idx": 16400, "task_index": 13}, {"db_idx": 16401, "episode_idx": 68, "frame_idx": 108, "global_frame_idx": 16401, "task_index": 13}, {"db_idx": 16402, "episode_idx": 68, "frame_idx": 109, "global_frame_idx": 16402, "task_index": 13}, {"db_idx": 16403, "episode_idx": 68, "frame_idx": 110, "global_frame_idx": 16403, "task_index": 13}, {"db_idx": 16404, "episode_idx": 68, "frame_idx": 111, "global_frame_idx": 16404, "task_index": 13}, {"db_idx": 16405, "episode_idx": 68, "frame_idx": 112, "global_frame_idx": 16405, "task_index": 13}, {"db_idx": 16406, "episode_idx": 68, "frame_idx": 113, "global_frame_idx": 16406, "task_index": 13}, {"db_idx": 16407, "episode_idx": 68, "frame_idx": 114, "global_frame_idx": 16407, "task_index": 13}, {"db_idx": 16408, "episode_idx": 68, "frame_idx": 115, "global_frame_idx": 16408, "task_index": 13}, {"db_idx": 16409, "episode_idx": 68, 
"frame_idx": 116, "global_frame_idx": 16409, "task_index": 13}, {"db_idx": 16410, "episode_idx": 68, "frame_idx": 117, "global_frame_idx": 16410, "task_index": 13}, {"db_idx": 16411, "episode_idx": 68, "frame_idx": 118, "global_frame_idx": 16411, "task_index": 13}, {"db_idx": 16412, "episode_idx": 68, "frame_idx": 119, "global_frame_idx": 16412, "task_index": 13}, {"db_idx": 16413, "episode_idx": 68, "frame_idx": 120, "global_frame_idx": 16413, "task_index": 13}, {"db_idx": 16414, "episode_idx": 68, "frame_idx": 121, "global_frame_idx": 16414, "task_index": 13}, {"db_idx": 16415, "episode_idx": 68, "frame_idx": 122, "global_frame_idx": 16415, "task_index": 13}, {"db_idx": 16416, "episode_idx": 68, "frame_idx": 123, "global_frame_idx": 16416, "task_index": 13}, {"db_idx": 16417, "episode_idx": 68, "frame_idx": 124, "global_frame_idx": 16417, "task_index": 13}, {"db_idx": 16418, "episode_idx": 68, "frame_idx": 125, "global_frame_idx": 16418, "task_index": 13}, {"db_idx": 16419, "episode_idx": 68, "frame_idx": 126, "global_frame_idx": 16419, "task_index": 13}, {"db_idx": 16420, "episode_idx": 68, "frame_idx": 127, "global_frame_idx": 16420, "task_index": 13}, {"db_idx": 16421, "episode_idx": 68, "frame_idx": 128, "global_frame_idx": 16421, "task_index": 13}, {"db_idx": 16422, "episode_idx": 68, "frame_idx": 129, "global_frame_idx": 16422, "task_index": 13}, {"db_idx": 16423, "episode_idx": 68, "frame_idx": 130, "global_frame_idx": 16423, "task_index": 13}, {"db_idx": 16424, "episode_idx": 68, "frame_idx": 131, "global_frame_idx": 16424, "task_index": 13}, {"db_idx": 16425, "episode_idx": 69, "frame_idx": 0, "global_frame_idx": 16425, "task_index": 13}, {"db_idx": 16426, "episode_idx": 69, "frame_idx": 1, "global_frame_idx": 16426, "task_index": 13}, {"db_idx": 16427, "episode_idx": 69, "frame_idx": 2, "global_frame_idx": 16427, "task_index": 13}, {"db_idx": 16428, "episode_idx": 69, "frame_idx": 3, "global_frame_idx": 16428, "task_index": 13}, {"db_idx": 16429, 
"episode_idx": 69, "frame_idx": 4, "global_frame_idx": 16429, "task_index": 13}, {"db_idx": 16430, "episode_idx": 69, "frame_idx": 5, "global_frame_idx": 16430, "task_index": 13}, {"db_idx": 16431, "episode_idx": 69, "frame_idx": 6, "global_frame_idx": 16431, "task_index": 13}, {"db_idx": 16432, "episode_idx": 69, "frame_idx": 7, "global_frame_idx": 16432, "task_index": 13}, {"db_idx": 16433, "episode_idx": 69, "frame_idx": 8, "global_frame_idx": 16433, "task_index": 13}, {"db_idx": 16434, "episode_idx": 69, "frame_idx": 9, "global_frame_idx": 16434, "task_index": 13}, {"db_idx": 16435, "episode_idx": 69, "frame_idx": 10, "global_frame_idx": 16435, "task_index": 13}, {"db_idx": 16436, "episode_idx": 69, "frame_idx": 11, "global_frame_idx": 16436, "task_index": 13}, {"db_idx": 16437, "episode_idx": 69, "frame_idx": 12, "global_frame_idx": 16437, "task_index": 13}, {"db_idx": 16438, "episode_idx": 69, "frame_idx": 13, "global_frame_idx": 16438, "task_index": 13}, {"db_idx": 16439, "episode_idx": 69, "frame_idx": 14, "global_frame_idx": 16439, "task_index": 13}, {"db_idx": 16440, "episode_idx": 69, "frame_idx": 15, "global_frame_idx": 16440, "task_index": 13}, {"db_idx": 16441, "episode_idx": 69, "frame_idx": 16, "global_frame_idx": 16441, "task_index": 13}, {"db_idx": 16442, "episode_idx": 69, "frame_idx": 17, "global_frame_idx": 16442, "task_index": 13}, {"db_idx": 16443, "episode_idx": 69, "frame_idx": 18, "global_frame_idx": 16443, "task_index": 13}, {"db_idx": 16444, "episode_idx": 69, "frame_idx": 19, "global_frame_idx": 16444, "task_index": 13}, {"db_idx": 16445, "episode_idx": 69, "frame_idx": 20, "global_frame_idx": 16445, "task_index": 13}, {"db_idx": 16446, "episode_idx": 69, "frame_idx": 21, "global_frame_idx": 16446, "task_index": 13}, {"db_idx": 16447, "episode_idx": 69, "frame_idx": 22, "global_frame_idx": 16447, "task_index": 13}, {"db_idx": 16448, "episode_idx": 69, "frame_idx": 23, "global_frame_idx": 16448, "task_index": 13}, {"db_idx": 16449, 
"episode_idx": 69, "frame_idx": 24, "global_frame_idx": 16449, "task_index": 13}, {"db_idx": 16450, "episode_idx": 69, "frame_idx": 25, "global_frame_idx": 16450, "task_index": 13}, {"db_idx": 16451, "episode_idx": 69, "frame_idx": 26, "global_frame_idx": 16451, "task_index": 13}, {"db_idx": 16452, "episode_idx": 69, "frame_idx": 27, "global_frame_idx": 16452, "task_index": 13}, {"db_idx": 16453, "episode_idx": 69, "frame_idx": 28, "global_frame_idx": 16453, "task_index": 13}, {"db_idx": 16454, "episode_idx": 69, "frame_idx": 29, "global_frame_idx": 16454, "task_index": 13}, {"db_idx": 16455, "episode_idx": 69, "frame_idx": 30, "global_frame_idx": 16455, "task_index": 13}, {"db_idx": 16456, "episode_idx": 69, "frame_idx": 31, "global_frame_idx": 16456, "task_index": 13}, {"db_idx": 16457, "episode_idx": 69, "frame_idx": 32, "global_frame_idx": 16457, "task_index": 13}, {"db_idx": 16458, "episode_idx": 69, "frame_idx": 33, "global_frame_idx": 16458, "task_index": 13}, {"db_idx": 16459, "episode_idx": 69, "frame_idx": 34, "global_frame_idx": 16459, "task_index": 13}, {"db_idx": 16460, "episode_idx": 69, "frame_idx": 35, "global_frame_idx": 16460, "task_index": 13}, {"db_idx": 16461, "episode_idx": 69, "frame_idx": 36, "global_frame_idx": 16461, "task_index": 13}, {"db_idx": 16462, "episode_idx": 69, "frame_idx": 37, "global_frame_idx": 16462, "task_index": 13}, {"db_idx": 16463, "episode_idx": 69, "frame_idx": 38, "global_frame_idx": 16463, "task_index": 13}, {"db_idx": 16464, "episode_idx": 69, "frame_idx": 39, "global_frame_idx": 16464, "task_index": 13}, {"db_idx": 16465, "episode_idx": 69, "frame_idx": 40, "global_frame_idx": 16465, "task_index": 13}, {"db_idx": 16466, "episode_idx": 69, "frame_idx": 41, "global_frame_idx": 16466, "task_index": 13}, {"db_idx": 16467, "episode_idx": 69, "frame_idx": 42, "global_frame_idx": 16467, "task_index": 13}, {"db_idx": 16468, "episode_idx": 69, "frame_idx": 43, "global_frame_idx": 16468, "task_index": 13}, {"db_idx": 16469, 
"episode_idx": 69, "frame_idx": 44, "global_frame_idx": 16469, "task_index": 13}, {"db_idx": 16470, "episode_idx": 69, "frame_idx": 45, "global_frame_idx": 16470, "task_index": 13}, {"db_idx": 16471, "episode_idx": 69, "frame_idx": 46, "global_frame_idx": 16471, "task_index": 13}, {"db_idx": 16472, "episode_idx": 69, "frame_idx": 47, "global_frame_idx": 16472, "task_index": 13}, {"db_idx": 16473, "episode_idx": 69, "frame_idx": 48, "global_frame_idx": 16473, "task_index": 13}, {"db_idx": 16474, "episode_idx": 69, "frame_idx": 49, "global_frame_idx": 16474, "task_index": 13}, {"db_idx": 16475, "episode_idx": 69, "frame_idx": 50, "global_frame_idx": 16475, "task_index": 13}, {"db_idx": 16476, "episode_idx": 69, "frame_idx": 51, "global_frame_idx": 16476, "task_index": 13}, {"db_idx": 16477, "episode_idx": 69, "frame_idx": 52, "global_frame_idx": 16477, "task_index": 13}, {"db_idx": 16478, "episode_idx": 69, "frame_idx": 53, "global_frame_idx": 16478, "task_index": 13}, {"db_idx": 16479, "episode_idx": 69, "frame_idx": 54, "global_frame_idx": 16479, "task_index": 13}, {"db_idx": 16480, "episode_idx": 69, "frame_idx": 55, "global_frame_idx": 16480, "task_index": 13}, {"db_idx": 16481, "episode_idx": 69, "frame_idx": 56, "global_frame_idx": 16481, "task_index": 13}, {"db_idx": 16482, "episode_idx": 69, "frame_idx": 57, "global_frame_idx": 16482, "task_index": 13}, {"db_idx": 16483, "episode_idx": 69, "frame_idx": 58, "global_frame_idx": 16483, "task_index": 13}, {"db_idx": 16484, "episode_idx": 69, "frame_idx": 59, "global_frame_idx": 16484, "task_index": 13}, {"db_idx": 16485, "episode_idx": 69, "frame_idx": 60, "global_frame_idx": 16485, "task_index": 13}, {"db_idx": 16486, "episode_idx": 69, "frame_idx": 61, "global_frame_idx": 16486, "task_index": 13}, {"db_idx": 16487, "episode_idx": 69, "frame_idx": 62, "global_frame_idx": 16487, "task_index": 13}, {"db_idx": 16488, "episode_idx": 69, "frame_idx": 63, "global_frame_idx": 16488, "task_index": 13}, {"db_idx": 16489, 
"episode_idx": 69, "frame_idx": 64, "global_frame_idx": 16489, "task_index": 13}, {"db_idx": 16490, "episode_idx": 69, "frame_idx": 65, "global_frame_idx": 16490, "task_index": 13}, {"db_idx": 16491, "episode_idx": 69, "frame_idx": 66, "global_frame_idx": 16491, "task_index": 13}, {"db_idx": 16492, "episode_idx": 69, "frame_idx": 67, "global_frame_idx": 16492, "task_index": 13}, {"db_idx": 16493, "episode_idx": 69, "frame_idx": 68, "global_frame_idx": 16493, "task_index": 13}, {"db_idx": 16494, "episode_idx": 69, "frame_idx": 69, "global_frame_idx": 16494, "task_index": 13}, {"db_idx": 16495, "episode_idx": 69, "frame_idx": 70, "global_frame_idx": 16495, "task_index": 13}, {"db_idx": 16496, "episode_idx": 69, "frame_idx": 71, "global_frame_idx": 16496, "task_index": 13}, {"db_idx": 16497, "episode_idx": 69, "frame_idx": 72, "global_frame_idx": 16497, "task_index": 13}, {"db_idx": 16498, "episode_idx": 69, "frame_idx": 73, "global_frame_idx": 16498, "task_index": 13}, {"db_idx": 16499, "episode_idx": 69, "frame_idx": 74, "global_frame_idx": 16499, "task_index": 13}, {"db_idx": 16500, "episode_idx": 69, "frame_idx": 75, "global_frame_idx": 16500, "task_index": 13}, {"db_idx": 16501, "episode_idx": 69, "frame_idx": 76, "global_frame_idx": 16501, "task_index": 13}, {"db_idx": 16502, "episode_idx": 69, "frame_idx": 77, "global_frame_idx": 16502, "task_index": 13}, {"db_idx": 16503, "episode_idx": 69, "frame_idx": 78, "global_frame_idx": 16503, "task_index": 13}, {"db_idx": 16504, "episode_idx": 69, "frame_idx": 79, "global_frame_idx": 16504, "task_index": 13}, {"db_idx": 16505, "episode_idx": 69, "frame_idx": 80, "global_frame_idx": 16505, "task_index": 13}, {"db_idx": 16506, "episode_idx": 69, "frame_idx": 81, "global_frame_idx": 16506, "task_index": 13}, {"db_idx": 16507, "episode_idx": 69, "frame_idx": 82, "global_frame_idx": 16507, "task_index": 13}, {"db_idx": 16508, "episode_idx": 69, "frame_idx": 83, "global_frame_idx": 16508, "task_index": 13}, {"db_idx": 16509, 
"episode_idx": 69, "frame_idx": 84, "global_frame_idx": 16509, "task_index": 13}, {"db_idx": 16510, "episode_idx": 69, "frame_idx": 85, "global_frame_idx": 16510, "task_index": 13}, {"db_idx": 16511, "episode_idx": 69, "frame_idx": 86, "global_frame_idx": 16511, "task_index": 13}, {"db_idx": 16512, "episode_idx": 69, "frame_idx": 87, "global_frame_idx": 16512, "task_index": 13}, {"db_idx": 16513, "episode_idx": 69, "frame_idx": 88, "global_frame_idx": 16513, "task_index": 13}, {"db_idx": 16514, "episode_idx": 69, "frame_idx": 89, "global_frame_idx": 16514, "task_index": 13}, {"db_idx": 16515, "episode_idx": 69, "frame_idx": 90, "global_frame_idx": 16515, "task_index": 13}, {"db_idx": 16516, "episode_idx": 69, "frame_idx": 91, "global_frame_idx": 16516, "task_index": 13}, {"db_idx": 16517, "episode_idx": 69, "frame_idx": 92, "global_frame_idx": 16517, "task_index": 13}, {"db_idx": 16518, "episode_idx": 69, "frame_idx": 93, "global_frame_idx": 16518, "task_index": 13}, {"db_idx": 16519, "episode_idx": 69, "frame_idx": 94, "global_frame_idx": 16519, "task_index": 13}, {"db_idx": 16520, "episode_idx": 69, "frame_idx": 95, "global_frame_idx": 16520, "task_index": 13}, {"db_idx": 16521, "episode_idx": 69, "frame_idx": 96, "global_frame_idx": 16521, "task_index": 13}, {"db_idx": 16522, "episode_idx": 69, "frame_idx": 97, "global_frame_idx": 16522, "task_index": 13}, {"db_idx": 16523, "episode_idx": 69, "frame_idx": 98, "global_frame_idx": 16523, "task_index": 13}, {"db_idx": 16524, "episode_idx": 69, "frame_idx": 99, "global_frame_idx": 16524, "task_index": 13}, {"db_idx": 16525, "episode_idx": 69, "frame_idx": 100, "global_frame_idx": 16525, "task_index": 13}, {"db_idx": 16526, "episode_idx": 69, "frame_idx": 101, "global_frame_idx": 16526, "task_index": 13}, {"db_idx": 16527, "episode_idx": 69, "frame_idx": 102, "global_frame_idx": 16527, "task_index": 13}, {"db_idx": 16528, "episode_idx": 69, "frame_idx": 103, "global_frame_idx": 16528, "task_index": 13}, {"db_idx": 
16529, "episode_idx": 69, "frame_idx": 104, "global_frame_idx": 16529, "task_index": 13}, {"db_idx": 16530, "episode_idx": 69, "frame_idx": 105, "global_frame_idx": 16530, "task_index": 13}, {"db_idx": 16531, "episode_idx": 69, "frame_idx": 106, "global_frame_idx": 16531, "task_index": 13}, {"db_idx": 16532, "episode_idx": 69, "frame_idx": 107, "global_frame_idx": 16532, "task_index": 13}, {"db_idx": 16533, "episode_idx": 69, "frame_idx": 108, "global_frame_idx": 16533, "task_index": 13}, {"db_idx": 16534, "episode_idx": 69, "frame_idx": 109, "global_frame_idx": 16534, "task_index": 13}, {"db_idx": 16535, "episode_idx": 69, "frame_idx": 110, "global_frame_idx": 16535, "task_index": 13}, {"db_idx": 16536, "episode_idx": 69, "frame_idx": 111, "global_frame_idx": 16536, "task_index": 13}, {"db_idx": 16537, "episode_idx": 69, "frame_idx": 112, "global_frame_idx": 16537, "task_index": 13}, {"db_idx": 16538, "episode_idx": 69, "frame_idx": 113, "global_frame_idx": 16538, "task_index": 13}, {"db_idx": 16539, "episode_idx": 69, "frame_idx": 114, "global_frame_idx": 16539, "task_index": 13}, {"db_idx": 16540, "episode_idx": 69, "frame_idx": 115, "global_frame_idx": 16540, "task_index": 13}, {"db_idx": 16541, "episode_idx": 69, "frame_idx": 116, "global_frame_idx": 16541, "task_index": 13}, {"db_idx": 16542, "episode_idx": 69, "frame_idx": 117, "global_frame_idx": 16542, "task_index": 13}, {"db_idx": 16543, "episode_idx": 69, "frame_idx": 118, "global_frame_idx": 16543, "task_index": 13}, {"db_idx": 16544, "episode_idx": 69, "frame_idx": 119, "global_frame_idx": 16544, "task_index": 13}, {"db_idx": 16545, "episode_idx": 69, "frame_idx": 120, "global_frame_idx": 16545, "task_index": 13}, {"db_idx": 16546, "episode_idx": 69, "frame_idx": 121, "global_frame_idx": 16546, "task_index": 13}, {"db_idx": 16547, "episode_idx": 69, "frame_idx": 122, "global_frame_idx": 16547, "task_index": 13}, {"db_idx": 16548, "episode_idx": 69, "frame_idx": 123, "global_frame_idx": 16548, 
"task_index": 13}, {"db_idx": 16549, "episode_idx": 69, "frame_idx": 124, "global_frame_idx": 16549, "task_index": 13}, {"db_idx": 16550, "episode_idx": 69, "frame_idx": 125, "global_frame_idx": 16550, "task_index": 13}, {"db_idx": 16551, "episode_idx": 69, "frame_idx": 126, "global_frame_idx": 16551, "task_index": 13}, {"db_idx": 16552, "episode_idx": 69, "frame_idx": 127, "global_frame_idx": 16552, "task_index": 13}, {"db_idx": 16553, "episode_idx": 69, "frame_idx": 128, "global_frame_idx": 16553, "task_index": 13}, {"db_idx": 16554, "episode_idx": 69, "frame_idx": 129, "global_frame_idx": 16554, "task_index": 13}, {"db_idx": 16555, "episode_idx": 69, "frame_idx": 130, "global_frame_idx": 16555, "task_index": 13}, {"db_idx": 16556, "episode_idx": 69, "frame_idx": 131, "global_frame_idx": 16556, "task_index": 13}, {"db_idx": 16557, "episode_idx": 69, "frame_idx": 132, "global_frame_idx": 16557, "task_index": 13}, {"db_idx": 16558, "episode_idx": 69, "frame_idx": 133, "global_frame_idx": 16558, "task_index": 13}, {"db_idx": 16559, "episode_idx": 69, "frame_idx": 134, "global_frame_idx": 16559, "task_index": 13}, {"db_idx": 16560, "episode_idx": 69, "frame_idx": 135, "global_frame_idx": 16560, "task_index": 13}, {"db_idx": 16561, "episode_idx": 69, "frame_idx": 136, "global_frame_idx": 16561, "task_index": 13}, {"db_idx": 16562, "episode_idx": 69, "frame_idx": 137, "global_frame_idx": 16562, "task_index": 13}, {"db_idx": 16563, "episode_idx": 69, "frame_idx": 138, "global_frame_idx": 16563, "task_index": 13}, {"db_idx": 16564, "episode_idx": 69, "frame_idx": 139, "global_frame_idx": 16564, "task_index": 13}, {"db_idx": 16565, "episode_idx": 69, "frame_idx": 140, "global_frame_idx": 16565, "task_index": 13}, {"db_idx": 16566, "episode_idx": 69, "frame_idx": 141, "global_frame_idx": 16566, "task_index": 13}, {"db_idx": 16567, "episode_idx": 69, "frame_idx": 142, "global_frame_idx": 16567, "task_index": 13}, {"db_idx": 16568, "episode_idx": 69, "frame_idx": 143, 
"global_frame_idx": 16568, "task_index": 13}, {"db_idx": 16569, "episode_idx": 69, "frame_idx": 144, "global_frame_idx": 16569, "task_index": 13}, {"db_idx": 16570, "episode_idx": 69, "frame_idx": 145, "global_frame_idx": 16570, "task_index": 13}, {"db_idx": 16571, "episode_idx": 69, "frame_idx": 146, "global_frame_idx": 16571, "task_index": 13}, {"db_idx": 16572, "episode_idx": 69, "frame_idx": 147, "global_frame_idx": 16572, "task_index": 13}, {"db_idx": 16573, "episode_idx": 69, "frame_idx": 148, "global_frame_idx": 16573, "task_index": 13}, {"db_idx": 16574, "episode_idx": 69, "frame_idx": 149, "global_frame_idx": 16574, "task_index": 13}, {"db_idx": 16575, "episode_idx": 69, "frame_idx": 150, "global_frame_idx": 16575, "task_index": 13}, {"db_idx": 16576, "episode_idx": 69, "frame_idx": 151, "global_frame_idx": 16576, "task_index": 13}, {"db_idx": 16577, "episode_idx": 69, "frame_idx": 152, "global_frame_idx": 16577, "task_index": 13}, {"db_idx": 16578, "episode_idx": 69, "frame_idx": 153, "global_frame_idx": 16578, "task_index": 13}, {"db_idx": 16579, "episode_idx": 69, "frame_idx": 154, "global_frame_idx": 16579, "task_index": 13}, {"db_idx": 16580, "episode_idx": 69, "frame_idx": 155, "global_frame_idx": 16580, "task_index": 13}, {"db_idx": 16581, "episode_idx": 69, "frame_idx": 156, "global_frame_idx": 16581, "task_index": 13}, {"db_idx": 16582, "episode_idx": 69, "frame_idx": 157, "global_frame_idx": 16582, "task_index": 13}, {"db_idx": 16583, "episode_idx": 69, "frame_idx": 158, "global_frame_idx": 16583, "task_index": 13}, {"db_idx": 16584, "episode_idx": 69, "frame_idx": 159, "global_frame_idx": 16584, "task_index": 13}, {"db_idx": 16585, "episode_idx": 69, "frame_idx": 160, "global_frame_idx": 16585, "task_index": 13}, {"db_idx": 16586, "episode_idx": 69, "frame_idx": 161, "global_frame_idx": 16586, "task_index": 13}, {"db_idx": 16587, "episode_idx": 69, "frame_idx": 162, "global_frame_idx": 16587, "task_index": 13}, {"db_idx": 16588, "episode_idx": 
69, "frame_idx": 163, "global_frame_idx": 16588, "task_index": 13}, {"db_idx": 16589, "episode_idx": 69, "frame_idx": 164, "global_frame_idx": 16589, "task_index": 13}, {"db_idx": 16590, "episode_idx": 69, "frame_idx": 165, "global_frame_idx": 16590, "task_index": 13}, {"db_idx": 16591, "episode_idx": 69, "frame_idx": 166, "global_frame_idx": 16591, "task_index": 13}, {"db_idx": 16592, "episode_idx": 69, "frame_idx": 167, "global_frame_idx": 16592, "task_index": 13}, {"db_idx": 16593, "episode_idx": 69, "frame_idx": 168, "global_frame_idx": 16593, "task_index": 13}, {"db_idx": 16594, "episode_idx": 69, "frame_idx": 169, "global_frame_idx": 16594, "task_index": 13}, {"db_idx": 16595, "episode_idx": 69, "frame_idx": 170, "global_frame_idx": 16595, "task_index": 13}, {"db_idx": 16596, "episode_idx": 69, "frame_idx": 171, "global_frame_idx": 16596, "task_index": 13}, {"db_idx": 16597, "episode_idx": 69, "frame_idx": 172, "global_frame_idx": 16597, "task_index": 13}, {"db_idx": 16598, "episode_idx": 69, "frame_idx": 173, "global_frame_idx": 16598, "task_index": 13}, {"db_idx": 16599, "episode_idx": 69, "frame_idx": 174, "global_frame_idx": 16599, "task_index": 13}, {"db_idx": 16600, "episode_idx": 69, "frame_idx": 175, "global_frame_idx": 16600, "task_index": 13}, {"db_idx": 16601, "episode_idx": 69, "frame_idx": 176, "global_frame_idx": 16601, "task_index": 13}, {"db_idx": 16602, "episode_idx": 69, "frame_idx": 177, "global_frame_idx": 16602, "task_index": 13}, {"db_idx": 16603, "episode_idx": 69, "frame_idx": 178, "global_frame_idx": 16603, "task_index": 13}, {"db_idx": 16604, "episode_idx": 69, "frame_idx": 179, "global_frame_idx": 16604, "task_index": 13}, {"db_idx": 16605, "episode_idx": 69, "frame_idx": 180, "global_frame_idx": 16605, "task_index": 13}, {"db_idx": 16606, "episode_idx": 69, "frame_idx": 181, "global_frame_idx": 16606, "task_index": 13}, {"db_idx": 16607, "episode_idx": 69, "frame_idx": 182, "global_frame_idx": 16607, "task_index": 13}, {"db_idx": 
16608, "episode_idx": 70, "frame_idx": 0, "global_frame_idx": 16608, "task_index": 14}, {"db_idx": 16609, "episode_idx": 70, "frame_idx": 1, "global_frame_idx": 16609, "task_index": 14}, {"db_idx": 16610, "episode_idx": 70, "frame_idx": 2, "global_frame_idx": 16610, "task_index": 14}, {"db_idx": 16611, "episode_idx": 70, "frame_idx": 3, "global_frame_idx": 16611, "task_index": 14}, {"db_idx": 16612, "episode_idx": 70, "frame_idx": 4, "global_frame_idx": 16612, "task_index": 14}, {"db_idx": 16613, "episode_idx": 70, "frame_idx": 5, "global_frame_idx": 16613, "task_index": 14}, {"db_idx": 16614, "episode_idx": 70, "frame_idx": 6, "global_frame_idx": 16614, "task_index": 14}, {"db_idx": 16615, "episode_idx": 70, "frame_idx": 7, "global_frame_idx": 16615, "task_index": 14}, {"db_idx": 16616, "episode_idx": 70, "frame_idx": 8, "global_frame_idx": 16616, "task_index": 14}, {"db_idx": 16617, "episode_idx": 70, "frame_idx": 9, "global_frame_idx": 16617, "task_index": 14}, {"db_idx": 16618, "episode_idx": 70, "frame_idx": 10, "global_frame_idx": 16618, "task_index": 14}, {"db_idx": 16619, "episode_idx": 70, "frame_idx": 11, "global_frame_idx": 16619, "task_index": 14}, {"db_idx": 16620, "episode_idx": 70, "frame_idx": 12, "global_frame_idx": 16620, "task_index": 14}, {"db_idx": 16621, "episode_idx": 70, "frame_idx": 13, "global_frame_idx": 16621, "task_index": 14}, {"db_idx": 16622, "episode_idx": 70, "frame_idx": 14, "global_frame_idx": 16622, "task_index": 14}, {"db_idx": 16623, "episode_idx": 70, "frame_idx": 15, "global_frame_idx": 16623, "task_index": 14}, {"db_idx": 16624, "episode_idx": 70, "frame_idx": 16, "global_frame_idx": 16624, "task_index": 14}, {"db_idx": 16625, "episode_idx": 70, "frame_idx": 17, "global_frame_idx": 16625, "task_index": 14}, {"db_idx": 16626, "episode_idx": 70, "frame_idx": 18, "global_frame_idx": 16626, "task_index": 14}, {"db_idx": 16627, "episode_idx": 70, "frame_idx": 19, "global_frame_idx": 16627, "task_index": 14}, {"db_idx": 16628, 
"episode_idx": 70, "frame_idx": 20, "global_frame_idx": 16628, "task_index": 14}, {"db_idx": 16629, "episode_idx": 70, "frame_idx": 21, "global_frame_idx": 16629, "task_index": 14}, {"db_idx": 16630, "episode_idx": 70, "frame_idx": 22, "global_frame_idx": 16630, "task_index": 14}, {"db_idx": 16631, "episode_idx": 70, "frame_idx": 23, "global_frame_idx": 16631, "task_index": 14}, {"db_idx": 16632, "episode_idx": 70, "frame_idx": 24, "global_frame_idx": 16632, "task_index": 14}, {"db_idx": 16633, "episode_idx": 70, "frame_idx": 25, "global_frame_idx": 16633, "task_index": 14}, {"db_idx": 16634, "episode_idx": 70, "frame_idx": 26, "global_frame_idx": 16634, "task_index": 14}, {"db_idx": 16635, "episode_idx": 70, "frame_idx": 27, "global_frame_idx": 16635, "task_index": 14}, {"db_idx": 16636, "episode_idx": 70, "frame_idx": 28, "global_frame_idx": 16636, "task_index": 14}, {"db_idx": 16637, "episode_idx": 70, "frame_idx": 29, "global_frame_idx": 16637, "task_index": 14}, {"db_idx": 16638, "episode_idx": 70, "frame_idx": 30, "global_frame_idx": 16638, "task_index": 14}, {"db_idx": 16639, "episode_idx": 70, "frame_idx": 31, "global_frame_idx": 16639, "task_index": 14}, {"db_idx": 16640, "episode_idx": 70, "frame_idx": 32, "global_frame_idx": 16640, "task_index": 14}, {"db_idx": 16641, "episode_idx": 70, "frame_idx": 33, "global_frame_idx": 16641, "task_index": 14}, {"db_idx": 16642, "episode_idx": 70, "frame_idx": 34, "global_frame_idx": 16642, "task_index": 14}, {"db_idx": 16643, "episode_idx": 70, "frame_idx": 35, "global_frame_idx": 16643, "task_index": 14}, {"db_idx": 16644, "episode_idx": 70, "frame_idx": 36, "global_frame_idx": 16644, "task_index": 14}, {"db_idx": 16645, "episode_idx": 70, "frame_idx": 37, "global_frame_idx": 16645, "task_index": 14}, {"db_idx": 16646, "episode_idx": 70, "frame_idx": 38, "global_frame_idx": 16646, "task_index": 14}, {"db_idx": 16647, "episode_idx": 70, "frame_idx": 39, "global_frame_idx": 16647, "task_index": 14}, {"db_idx": 16648, 
"episode_idx": 70, "frame_idx": 40, "global_frame_idx": 16648, "task_index": 14}, {"db_idx": 16649, "episode_idx": 70, "frame_idx": 41, "global_frame_idx": 16649, "task_index": 14}, {"db_idx": 16650, "episode_idx": 70, "frame_idx": 42, "global_frame_idx": 16650, "task_index": 14}, {"db_idx": 16651, "episode_idx": 70, "frame_idx": 43, "global_frame_idx": 16651, "task_index": 14}, {"db_idx": 16652, "episode_idx": 70, "frame_idx": 44, "global_frame_idx": 16652, "task_index": 14}, {"db_idx": 16653, "episode_idx": 70, "frame_idx": 45, "global_frame_idx": 16653, "task_index": 14}, {"db_idx": 16654, "episode_idx": 70, "frame_idx": 46, "global_frame_idx": 16654, "task_index": 14}, {"db_idx": 16655, "episode_idx": 70, "frame_idx": 47, "global_frame_idx": 16655, "task_index": 14}, {"db_idx": 16656, "episode_idx": 70, "frame_idx": 48, "global_frame_idx": 16656, "task_index": 14}, {"db_idx": 16657, "episode_idx": 70, "frame_idx": 49, "global_frame_idx": 16657, "task_index": 14}, {"db_idx": 16658, "episode_idx": 70, "frame_idx": 50, "global_frame_idx": 16658, "task_index": 14}, {"db_idx": 16659, "episode_idx": 70, "frame_idx": 51, "global_frame_idx": 16659, "task_index": 14}, {"db_idx": 16660, "episode_idx": 70, "frame_idx": 52, "global_frame_idx": 16660, "task_index": 14}, {"db_idx": 16661, "episode_idx": 70, "frame_idx": 53, "global_frame_idx": 16661, "task_index": 14}, {"db_idx": 16662, "episode_idx": 70, "frame_idx": 54, "global_frame_idx": 16662, "task_index": 14}, {"db_idx": 16663, "episode_idx": 70, "frame_idx": 55, "global_frame_idx": 16663, "task_index": 14}, {"db_idx": 16664, "episode_idx": 70, "frame_idx": 56, "global_frame_idx": 16664, "task_index": 14}, {"db_idx": 16665, "episode_idx": 70, "frame_idx": 57, "global_frame_idx": 16665, "task_index": 14}, {"db_idx": 16666, "episode_idx": 70, "frame_idx": 58, "global_frame_idx": 16666, "task_index": 14}, {"db_idx": 16667, "episode_idx": 70, "frame_idx": 59, "global_frame_idx": 16667, "task_index": 14}, {"db_idx": 16668, 
"episode_idx": 70, "frame_idx": 60, "global_frame_idx": 16668, "task_index": 14}, {"db_idx": 16669, "episode_idx": 70, "frame_idx": 61, "global_frame_idx": 16669, "task_index": 14}, {"db_idx": 16670, "episode_idx": 70, "frame_idx": 62, "global_frame_idx": 16670, "task_index": 14}, {"db_idx": 16671, "episode_idx": 70, "frame_idx": 63, "global_frame_idx": 16671, "task_index": 14}, {"db_idx": 16672, "episode_idx": 70, "frame_idx": 64, "global_frame_idx": 16672, "task_index": 14}, {"db_idx": 16673, "episode_idx": 70, "frame_idx": 65, "global_frame_idx": 16673, "task_index": 14}, {"db_idx": 16674, "episode_idx": 70, "frame_idx": 66, "global_frame_idx": 16674, "task_index": 14}, {"db_idx": 16675, "episode_idx": 70, "frame_idx": 67, "global_frame_idx": 16675, "task_index": 14}, {"db_idx": 16676, "episode_idx": 70, "frame_idx": 68, "global_frame_idx": 16676, "task_index": 14}, {"db_idx": 16677, "episode_idx": 70, "frame_idx": 69, "global_frame_idx": 16677, "task_index": 14}, {"db_idx": 16678, "episode_idx": 70, "frame_idx": 70, "global_frame_idx": 16678, "task_index": 14}, {"db_idx": 16679, "episode_idx": 70, "frame_idx": 71, "global_frame_idx": 16679, "task_index": 14}, {"db_idx": 16680, "episode_idx": 70, "frame_idx": 72, "global_frame_idx": 16680, "task_index": 14}, {"db_idx": 16681, "episode_idx": 70, "frame_idx": 73, "global_frame_idx": 16681, "task_index": 14}, {"db_idx": 16682, "episode_idx": 70, "frame_idx": 74, "global_frame_idx": 16682, "task_index": 14}, {"db_idx": 16683, "episode_idx": 70, "frame_idx": 75, "global_frame_idx": 16683, "task_index": 14}, {"db_idx": 16684, "episode_idx": 70, "frame_idx": 76, "global_frame_idx": 16684, "task_index": 14}, {"db_idx": 16685, "episode_idx": 70, "frame_idx": 77, "global_frame_idx": 16685, "task_index": 14}, {"db_idx": 16686, "episode_idx": 70, "frame_idx": 78, "global_frame_idx": 16686, "task_index": 14}, {"db_idx": 16687, "episode_idx": 70, "frame_idx": 79, "global_frame_idx": 16687, "task_index": 14}, {"db_idx": 16688, 
"episode_idx": 70, "frame_idx": 80, "global_frame_idx": 16688, "task_index": 14}, {"db_idx": 16689, "episode_idx": 70, "frame_idx": 81, "global_frame_idx": 16689, "task_index": 14}, {"db_idx": 16690, "episode_idx": 70, "frame_idx": 82, "global_frame_idx": 16690, "task_index": 14}, {"db_idx": 16691, "episode_idx": 70, "frame_idx": 83, "global_frame_idx": 16691, "task_index": 14}, {"db_idx": 16692, "episode_idx": 70, "frame_idx": 84, "global_frame_idx": 16692, "task_index": 14}, {"db_idx": 16693, "episode_idx": 70, "frame_idx": 85, "global_frame_idx": 16693, "task_index": 14}, {"db_idx": 16694, "episode_idx": 70, "frame_idx": 86, "global_frame_idx": 16694, "task_index": 14}, {"db_idx": 16695, "episode_idx": 70, "frame_idx": 87, "global_frame_idx": 16695, "task_index": 14}, {"db_idx": 16696, "episode_idx": 70, "frame_idx": 88, "global_frame_idx": 16696, "task_index": 14}, {"db_idx": 16697, "episode_idx": 70, "frame_idx": 89, "global_frame_idx": 16697, "task_index": 14}, {"db_idx": 16698, "episode_idx": 70, "frame_idx": 90, "global_frame_idx": 16698, "task_index": 14}, {"db_idx": 16699, "episode_idx": 70, "frame_idx": 91, "global_frame_idx": 16699, "task_index": 14}, {"db_idx": 16700, "episode_idx": 70, "frame_idx": 92, "global_frame_idx": 16700, "task_index": 14}, {"db_idx": 16701, "episode_idx": 70, "frame_idx": 93, "global_frame_idx": 16701, "task_index": 14}, {"db_idx": 16702, "episode_idx": 70, "frame_idx": 94, "global_frame_idx": 16702, "task_index": 14}, {"db_idx": 16703, "episode_idx": 70, "frame_idx": 95, "global_frame_idx": 16703, "task_index": 14}, {"db_idx": 16704, "episode_idx": 70, "frame_idx": 96, "global_frame_idx": 16704, "task_index": 14}, {"db_idx": 16705, "episode_idx": 70, "frame_idx": 97, "global_frame_idx": 16705, "task_index": 14}, {"db_idx": 16706, "episode_idx": 70, "frame_idx": 98, "global_frame_idx": 16706, "task_index": 14}, {"db_idx": 16707, "episode_idx": 70, "frame_idx": 99, "global_frame_idx": 16707, "task_index": 14}, {"db_idx": 16708, 
"episode_idx": 70, "frame_idx": 100, "global_frame_idx": 16708, "task_index": 14}, {"db_idx": 16709, "episode_idx": 70, "frame_idx": 101, "global_frame_idx": 16709, "task_index": 14}, {"db_idx": 16710, "episode_idx": 70, "frame_idx": 102, "global_frame_idx": 16710, "task_index": 14}, {"db_idx": 16711, "episode_idx": 70, "frame_idx": 103, "global_frame_idx": 16711, "task_index": 14}, {"db_idx": 16712, "episode_idx": 70, "frame_idx": 104, "global_frame_idx": 16712, "task_index": 14}, {"db_idx": 16713, "episode_idx": 70, "frame_idx": 105, "global_frame_idx": 16713, "task_index": 14}, {"db_idx": 16714, "episode_idx": 70, "frame_idx": 106, "global_frame_idx": 16714, "task_index": 14}, {"db_idx": 16715, "episode_idx": 70, "frame_idx": 107, "global_frame_idx": 16715, "task_index": 14}, {"db_idx": 16716, "episode_idx": 70, "frame_idx": 108, "global_frame_idx": 16716, "task_index": 14}, {"db_idx": 16717, "episode_idx": 70, "frame_idx": 109, "global_frame_idx": 16717, "task_index": 14}, {"db_idx": 16718, "episode_idx": 70, "frame_idx": 110, "global_frame_idx": 16718, "task_index": 14}, {"db_idx": 16719, "episode_idx": 70, "frame_idx": 111, "global_frame_idx": 16719, "task_index": 14}, {"db_idx": 16720, "episode_idx": 70, "frame_idx": 112, "global_frame_idx": 16720, "task_index": 14}, {"db_idx": 16721, "episode_idx": 70, "frame_idx": 113, "global_frame_idx": 16721, "task_index": 14}, {"db_idx": 16722, "episode_idx": 70, "frame_idx": 114, "global_frame_idx": 16722, "task_index": 14}, {"db_idx": 16723, "episode_idx": 70, "frame_idx": 115, "global_frame_idx": 16723, "task_index": 14}, {"db_idx": 16724, "episode_idx": 70, "frame_idx": 116, "global_frame_idx": 16724, "task_index": 14}, {"db_idx": 16725, "episode_idx": 70, "frame_idx": 117, "global_frame_idx": 16725, "task_index": 14}, {"db_idx": 16726, "episode_idx": 70, "frame_idx": 118, "global_frame_idx": 16726, "task_index": 14}, {"db_idx": 16727, "episode_idx": 70, "frame_idx": 119, "global_frame_idx": 16727, "task_index": 
14}, {"db_idx": 16728, "episode_idx": 70, "frame_idx": 120, "global_frame_idx": 16728, "task_index": 14}, {"db_idx": 16729, "episode_idx": 70, "frame_idx": 121, "global_frame_idx": 16729, "task_index": 14}, {"db_idx": 16730, "episode_idx": 70, "frame_idx": 122, "global_frame_idx": 16730, "task_index": 14}, {"db_idx": 16731, "episode_idx": 70, "frame_idx": 123, "global_frame_idx": 16731, "task_index": 14}, {"db_idx": 16732, "episode_idx": 70, "frame_idx": 124, "global_frame_idx": 16732, "task_index": 14}, {"db_idx": 16733, "episode_idx": 70, "frame_idx": 125, "global_frame_idx": 16733, "task_index": 14}, {"db_idx": 16734, "episode_idx": 70, "frame_idx": 126, "global_frame_idx": 16734, "task_index": 14}, {"db_idx": 16735, "episode_idx": 70, "frame_idx": 127, "global_frame_idx": 16735, "task_index": 14}, {"db_idx": 16736, "episode_idx": 70, "frame_idx": 128, "global_frame_idx": 16736, "task_index": 14}, {"db_idx": 16737, "episode_idx": 70, "frame_idx": 129, "global_frame_idx": 16737, "task_index": 14}, {"db_idx": 16738, "episode_idx": 70, "frame_idx": 130, "global_frame_idx": 16738, "task_index": 14}, {"db_idx": 16739, "episode_idx": 70, "frame_idx": 131, "global_frame_idx": 16739, "task_index": 14}, {"db_idx": 16740, "episode_idx": 70, "frame_idx": 132, "global_frame_idx": 16740, "task_index": 14}, {"db_idx": 16741, "episode_idx": 70, "frame_idx": 133, "global_frame_idx": 16741, "task_index": 14}, {"db_idx": 16742, "episode_idx": 70, "frame_idx": 134, "global_frame_idx": 16742, "task_index": 14}, {"db_idx": 16743, "episode_idx": 70, "frame_idx": 135, "global_frame_idx": 16743, "task_index": 14}, {"db_idx": 16744, "episode_idx": 70, "frame_idx": 136, "global_frame_idx": 16744, "task_index": 14}, {"db_idx": 16745, "episode_idx": 70, "frame_idx": 137, "global_frame_idx": 16745, "task_index": 14}, {"db_idx": 16746, "episode_idx": 70, "frame_idx": 138, "global_frame_idx": 16746, "task_index": 14}, {"db_idx": 16747, "episode_idx": 70, "frame_idx": 139, "global_frame_idx": 
16747, "task_index": 14}, {"db_idx": 16748, "episode_idx": 70, "frame_idx": 140, "global_frame_idx": 16748, "task_index": 14}, {"db_idx": 16749, "episode_idx": 70, "frame_idx": 141, "global_frame_idx": 16749, "task_index": 14}, {"db_idx": 16750, "episode_idx": 70, "frame_idx": 142, "global_frame_idx": 16750, "task_index": 14}, {"db_idx": 16751, "episode_idx": 70, "frame_idx": 143, "global_frame_idx": 16751, "task_index": 14}, {"db_idx": 16752, "episode_idx": 70, "frame_idx": 144, "global_frame_idx": 16752, "task_index": 14}, {"db_idx": 16753, "episode_idx": 70, "frame_idx": 145, "global_frame_idx": 16753, "task_index": 14}, {"db_idx": 16754, "episode_idx": 70, "frame_idx": 146, "global_frame_idx": 16754, "task_index": 14}, {"db_idx": 16755, "episode_idx": 70, "frame_idx": 147, "global_frame_idx": 16755, "task_index": 14}, {"db_idx": 16756, "episode_idx": 70, "frame_idx": 148, "global_frame_idx": 16756, "task_index": 14}, {"db_idx": 16757, "episode_idx": 70, "frame_idx": 149, "global_frame_idx": 16757, "task_index": 14}, {"db_idx": 16758, "episode_idx": 70, "frame_idx": 150, "global_frame_idx": 16758, "task_index": 14}, {"db_idx": 16759, "episode_idx": 70, "frame_idx": 151, "global_frame_idx": 16759, "task_index": 14}, {"db_idx": 16760, "episode_idx": 70, "frame_idx": 152, "global_frame_idx": 16760, "task_index": 14}, {"db_idx": 16761, "episode_idx": 71, "frame_idx": 0, "global_frame_idx": 16761, "task_index": 14}, {"db_idx": 16762, "episode_idx": 71, "frame_idx": 1, "global_frame_idx": 16762, "task_index": 14}, {"db_idx": 16763, "episode_idx": 71, "frame_idx": 2, "global_frame_idx": 16763, "task_index": 14}, {"db_idx": 16764, "episode_idx": 71, "frame_idx": 3, "global_frame_idx": 16764, "task_index": 14}, {"db_idx": 16765, "episode_idx": 71, "frame_idx": 4, "global_frame_idx": 16765, "task_index": 14}, {"db_idx": 16766, "episode_idx": 71, "frame_idx": 5, "global_frame_idx": 16766, "task_index": 14}, {"db_idx": 16767, "episode_idx": 71, "frame_idx": 6, 
"global_frame_idx": 16767, "task_index": 14}, {"db_idx": 16768, "episode_idx": 71, "frame_idx": 7, "global_frame_idx": 16768, "task_index": 14}, {"db_idx": 16769, "episode_idx": 71, "frame_idx": 8, "global_frame_idx": 16769, "task_index": 14}, {"db_idx": 16770, "episode_idx": 71, "frame_idx": 9, "global_frame_idx": 16770, "task_index": 14}, {"db_idx": 16771, "episode_idx": 71, "frame_idx": 10, "global_frame_idx": 16771, "task_index": 14}, {"db_idx": 16772, "episode_idx": 71, "frame_idx": 11, "global_frame_idx": 16772, "task_index": 14}, {"db_idx": 16773, "episode_idx": 71, "frame_idx": 12, "global_frame_idx": 16773, "task_index": 14}, {"db_idx": 16774, "episode_idx": 71, "frame_idx": 13, "global_frame_idx": 16774, "task_index": 14}, {"db_idx": 16775, "episode_idx": 71, "frame_idx": 14, "global_frame_idx": 16775, "task_index": 14}, {"db_idx": 16776, "episode_idx": 71, "frame_idx": 15, "global_frame_idx": 16776, "task_index": 14}, {"db_idx": 16777, "episode_idx": 71, "frame_idx": 16, "global_frame_idx": 16777, "task_index": 14}, {"db_idx": 16778, "episode_idx": 71, "frame_idx": 17, "global_frame_idx": 16778, "task_index": 14}, {"db_idx": 16779, "episode_idx": 71, "frame_idx": 18, "global_frame_idx": 16779, "task_index": 14}, {"db_idx": 16780, "episode_idx": 71, "frame_idx": 19, "global_frame_idx": 16780, "task_index": 14}, {"db_idx": 16781, "episode_idx": 71, "frame_idx": 20, "global_frame_idx": 16781, "task_index": 14}, {"db_idx": 16782, "episode_idx": 71, "frame_idx": 21, "global_frame_idx": 16782, "task_index": 14}, {"db_idx": 16783, "episode_idx": 71, "frame_idx": 22, "global_frame_idx": 16783, "task_index": 14}, {"db_idx": 16784, "episode_idx": 71, "frame_idx": 23, "global_frame_idx": 16784, "task_index": 14}, {"db_idx": 16785, "episode_idx": 71, "frame_idx": 24, "global_frame_idx": 16785, "task_index": 14}, {"db_idx": 16786, "episode_idx": 71, "frame_idx": 25, "global_frame_idx": 16786, "task_index": 14}, {"db_idx": 16787, "episode_idx": 71, "frame_idx": 26, 
"global_frame_idx": 16787, "task_index": 14}, {"db_idx": 16788, "episode_idx": 71, "frame_idx": 27, "global_frame_idx": 16788, "task_index": 14}, {"db_idx": 16789, "episode_idx": 71, "frame_idx": 28, "global_frame_idx": 16789, "task_index": 14}, {"db_idx": 16790, "episode_idx": 71, "frame_idx": 29, "global_frame_idx": 16790, "task_index": 14}, {"db_idx": 16791, "episode_idx": 71, "frame_idx": 30, "global_frame_idx": 16791, "task_index": 14}, {"db_idx": 16792, "episode_idx": 71, "frame_idx": 31, "global_frame_idx": 16792, "task_index": 14}, {"db_idx": 16793, "episode_idx": 71, "frame_idx": 32, "global_frame_idx": 16793, "task_index": 14}, {"db_idx": 16794, "episode_idx": 71, "frame_idx": 33, "global_frame_idx": 16794, "task_index": 14}, {"db_idx": 16795, "episode_idx": 71, "frame_idx": 34, "global_frame_idx": 16795, "task_index": 14}, {"db_idx": 16796, "episode_idx": 71, "frame_idx": 35, "global_frame_idx": 16796, "task_index": 14}, {"db_idx": 16797, "episode_idx": 71, "frame_idx": 36, "global_frame_idx": 16797, "task_index": 14}, {"db_idx": 16798, "episode_idx": 71, "frame_idx": 37, "global_frame_idx": 16798, "task_index": 14}, {"db_idx": 16799, "episode_idx": 71, "frame_idx": 38, "global_frame_idx": 16799, "task_index": 14}, {"db_idx": 16800, "episode_idx": 71, "frame_idx": 39, "global_frame_idx": 16800, "task_index": 14}, {"db_idx": 16801, "episode_idx": 71, "frame_idx": 40, "global_frame_idx": 16801, "task_index": 14}, {"db_idx": 16802, "episode_idx": 71, "frame_idx": 41, "global_frame_idx": 16802, "task_index": 14}, {"db_idx": 16803, "episode_idx": 71, "frame_idx": 42, "global_frame_idx": 16803, "task_index": 14}, {"db_idx": 16804, "episode_idx": 71, "frame_idx": 43, "global_frame_idx": 16804, "task_index": 14}, {"db_idx": 16805, "episode_idx": 71, "frame_idx": 44, "global_frame_idx": 16805, "task_index": 14}, {"db_idx": 16806, "episode_idx": 71, "frame_idx": 45, "global_frame_idx": 16806, "task_index": 14}, {"db_idx": 16807, "episode_idx": 71, "frame_idx": 46, 
"global_frame_idx": 16807, "task_index": 14}, {"db_idx": 16808, "episode_idx": 71, "frame_idx": 47, "global_frame_idx": 16808, "task_index": 14}, {"db_idx": 16809, "episode_idx": 71, "frame_idx": 48, "global_frame_idx": 16809, "task_index": 14}, {"db_idx": 16810, "episode_idx": 71, "frame_idx": 49, "global_frame_idx": 16810, "task_index": 14}, {"db_idx": 16811, "episode_idx": 71, "frame_idx": 50, "global_frame_idx": 16811, "task_index": 14}, {"db_idx": 16812, "episode_idx": 71, "frame_idx": 51, "global_frame_idx": 16812, "task_index": 14}, {"db_idx": 16813, "episode_idx": 71, "frame_idx": 52, "global_frame_idx": 16813, "task_index": 14}, {"db_idx": 16814, "episode_idx": 71, "frame_idx": 53, "global_frame_idx": 16814, "task_index": 14}, {"db_idx": 16815, "episode_idx": 71, "frame_idx": 54, "global_frame_idx": 16815, "task_index": 14}, {"db_idx": 16816, "episode_idx": 71, "frame_idx": 55, "global_frame_idx": 16816, "task_index": 14}, {"db_idx": 16817, "episode_idx": 71, "frame_idx": 56, "global_frame_idx": 16817, "task_index": 14}, {"db_idx": 16818, "episode_idx": 71, "frame_idx": 57, "global_frame_idx": 16818, "task_index": 14}, {"db_idx": 16819, "episode_idx": 71, "frame_idx": 58, "global_frame_idx": 16819, "task_index": 14}, {"db_idx": 16820, "episode_idx": 71, "frame_idx": 59, "global_frame_idx": 16820, "task_index": 14}, {"db_idx": 16821, "episode_idx": 71, "frame_idx": 60, "global_frame_idx": 16821, "task_index": 14}, {"db_idx": 16822, "episode_idx": 71, "frame_idx": 61, "global_frame_idx": 16822, "task_index": 14}, {"db_idx": 16823, "episode_idx": 71, "frame_idx": 62, "global_frame_idx": 16823, "task_index": 14}, {"db_idx": 16824, "episode_idx": 71, "frame_idx": 63, "global_frame_idx": 16824, "task_index": 14}, {"db_idx": 16825, "episode_idx": 71, "frame_idx": 64, "global_frame_idx": 16825, "task_index": 14}, {"db_idx": 16826, "episode_idx": 71, "frame_idx": 65, "global_frame_idx": 16826, "task_index": 14}, {"db_idx": 16827, "episode_idx": 71, "frame_idx": 66, 
"global_frame_idx": 16827, "task_index": 14}, {"db_idx": 16828, "episode_idx": 71, "frame_idx": 67, "global_frame_idx": 16828, "task_index": 14}, {"db_idx": 16829, "episode_idx": 71, "frame_idx": 68, "global_frame_idx": 16829, "task_index": 14}, {"db_idx": 16830, "episode_idx": 71, "frame_idx": 69, "global_frame_idx": 16830, "task_index": 14}, {"db_idx": 16831, "episode_idx": 71, "frame_idx": 70, "global_frame_idx": 16831, "task_index": 14}, {"db_idx": 16832, "episode_idx": 71, "frame_idx": 71, "global_frame_idx": 16832, "task_index": 14}, {"db_idx": 16833, "episode_idx": 71, "frame_idx": 72, "global_frame_idx": 16833, "task_index": 14}, {"db_idx": 16834, "episode_idx": 71, "frame_idx": 73, "global_frame_idx": 16834, "task_index": 14}, {"db_idx": 16835, "episode_idx": 71, "frame_idx": 74, "global_frame_idx": 16835, "task_index": 14}, {"db_idx": 16836, "episode_idx": 71, "frame_idx": 75, "global_frame_idx": 16836, "task_index": 14}, {"db_idx": 16837, "episode_idx": 71, "frame_idx": 76, "global_frame_idx": 16837, "task_index": 14}, {"db_idx": 16838, "episode_idx": 71, "frame_idx": 77, "global_frame_idx": 16838, "task_index": 14}, {"db_idx": 16839, "episode_idx": 71, "frame_idx": 78, "global_frame_idx": 16839, "task_index": 14}, {"db_idx": 16840, "episode_idx": 71, "frame_idx": 79, "global_frame_idx": 16840, "task_index": 14}, {"db_idx": 16841, "episode_idx": 71, "frame_idx": 80, "global_frame_idx": 16841, "task_index": 14}, {"db_idx": 16842, "episode_idx": 71, "frame_idx": 81, "global_frame_idx": 16842, "task_index": 14}, {"db_idx": 16843, "episode_idx": 71, "frame_idx": 82, "global_frame_idx": 16843, "task_index": 14}, {"db_idx": 16844, "episode_idx": 71, "frame_idx": 83, "global_frame_idx": 16844, "task_index": 14}, {"db_idx": 16845, "episode_idx": 71, "frame_idx": 84, "global_frame_idx": 16845, "task_index": 14}, {"db_idx": 16846, "episode_idx": 71, "frame_idx": 85, "global_frame_idx": 16846, "task_index": 14}, {"db_idx": 16847, "episode_idx": 71, "frame_idx": 86, 
"global_frame_idx": 16847, "task_index": 14}, {"db_idx": 16848, "episode_idx": 71, "frame_idx": 87, "global_frame_idx": 16848, "task_index": 14}, {"db_idx": 16849, "episode_idx": 71, "frame_idx": 88, "global_frame_idx": 16849, "task_index": 14}, {"db_idx": 16850, "episode_idx": 71, "frame_idx": 89, "global_frame_idx": 16850, "task_index": 14}, {"db_idx": 16851, "episode_idx": 71, "frame_idx": 90, "global_frame_idx": 16851, "task_index": 14}, {"db_idx": 16852, "episode_idx": 71, "frame_idx": 91, "global_frame_idx": 16852, "task_index": 14}, {"db_idx": 16853, "episode_idx": 71, "frame_idx": 92, "global_frame_idx": 16853, "task_index": 14}, {"db_idx": 16854, "episode_idx": 71, "frame_idx": 93, "global_frame_idx": 16854, "task_index": 14}, {"db_idx": 16855, "episode_idx": 71, "frame_idx": 94, "global_frame_idx": 16855, "task_index": 14}, {"db_idx": 16856, "episode_idx": 71, "frame_idx": 95, "global_frame_idx": 16856, "task_index": 14}, {"db_idx": 16857, "episode_idx": 71, "frame_idx": 96, "global_frame_idx": 16857, "task_index": 14}, {"db_idx": 16858, "episode_idx": 71, "frame_idx": 97, "global_frame_idx": 16858, "task_index": 14}, {"db_idx": 16859, "episode_idx": 71, "frame_idx": 98, "global_frame_idx": 16859, "task_index": 14}, {"db_idx": 16860, "episode_idx": 71, "frame_idx": 99, "global_frame_idx": 16860, "task_index": 14}, {"db_idx": 16861, "episode_idx": 71, "frame_idx": 100, "global_frame_idx": 16861, "task_index": 14}, {"db_idx": 16862, "episode_idx": 71, "frame_idx": 101, "global_frame_idx": 16862, "task_index": 14}, {"db_idx": 16863, "episode_idx": 71, "frame_idx": 102, "global_frame_idx": 16863, "task_index": 14}, {"db_idx": 16864, "episode_idx": 71, "frame_idx": 103, "global_frame_idx": 16864, "task_index": 14}, {"db_idx": 16865, "episode_idx": 71, "frame_idx": 104, "global_frame_idx": 16865, "task_index": 14}, {"db_idx": 16866, "episode_idx": 71, "frame_idx": 105, "global_frame_idx": 16866, "task_index": 14}, {"db_idx": 16867, "episode_idx": 71, 
"frame_idx": 106, "global_frame_idx": 16867, "task_index": 14}, {"db_idx": 16868, "episode_idx": 71, "frame_idx": 107, "global_frame_idx": 16868, "task_index": 14}, {"db_idx": 16869, "episode_idx": 71, "frame_idx": 108, "global_frame_idx": 16869, "task_index": 14}, {"db_idx": 16870, "episode_idx": 71, "frame_idx": 109, "global_frame_idx": 16870, "task_index": 14}, {"db_idx": 16871, "episode_idx": 71, "frame_idx": 110, "global_frame_idx": 16871, "task_index": 14}, {"db_idx": 16872, "episode_idx": 71, "frame_idx": 111, "global_frame_idx": 16872, "task_index": 14}, {"db_idx": 16873, "episode_idx": 71, "frame_idx": 112, "global_frame_idx": 16873, "task_index": 14}, {"db_idx": 16874, "episode_idx": 71, "frame_idx": 113, "global_frame_idx": 16874, "task_index": 14}, {"db_idx": 16875, "episode_idx": 71, "frame_idx": 114, "global_frame_idx": 16875, "task_index": 14}, {"db_idx": 16876, "episode_idx": 71, "frame_idx": 115, "global_frame_idx": 16876, "task_index": 14}, {"db_idx": 16877, "episode_idx": 71, "frame_idx": 116, "global_frame_idx": 16877, "task_index": 14}, {"db_idx": 16878, "episode_idx": 71, "frame_idx": 117, "global_frame_idx": 16878, "task_index": 14}, {"db_idx": 16879, "episode_idx": 71, "frame_idx": 118, "global_frame_idx": 16879, "task_index": 14}, {"db_idx": 16880, "episode_idx": 71, "frame_idx": 119, "global_frame_idx": 16880, "task_index": 14}, {"db_idx": 16881, "episode_idx": 71, "frame_idx": 120, "global_frame_idx": 16881, "task_index": 14}, {"db_idx": 16882, "episode_idx": 71, "frame_idx": 121, "global_frame_idx": 16882, "task_index": 14}, {"db_idx": 16883, "episode_idx": 71, "frame_idx": 122, "global_frame_idx": 16883, "task_index": 14}, {"db_idx": 16884, "episode_idx": 71, "frame_idx": 123, "global_frame_idx": 16884, "task_index": 14}, {"db_idx": 16885, "episode_idx": 71, "frame_idx": 124, "global_frame_idx": 16885, "task_index": 14}, {"db_idx": 16886, "episode_idx": 71, "frame_idx": 125, "global_frame_idx": 16886, "task_index": 14}, {"db_idx": 
16887, "episode_idx": 71, "frame_idx": 126, "global_frame_idx": 16887, "task_index": 14}, {"db_idx": 16888, "episode_idx": 71, "frame_idx": 127, "global_frame_idx": 16888, "task_index": 14}, {"db_idx": 16889, "episode_idx": 71, "frame_idx": 128, "global_frame_idx": 16889, "task_index": 14}, {"db_idx": 16890, "episode_idx": 71, "frame_idx": 129, "global_frame_idx": 16890, "task_index": 14}, {"db_idx": 16891, "episode_idx": 71, "frame_idx": 130, "global_frame_idx": 16891, "task_index": 14}, {"db_idx": 16892, "episode_idx": 71, "frame_idx": 131, "global_frame_idx": 16892, "task_index": 14}, {"db_idx": 16893, "episode_idx": 71, "frame_idx": 132, "global_frame_idx": 16893, "task_index": 14}, {"db_idx": 16894, "episode_idx": 71, "frame_idx": 133, "global_frame_idx": 16894, "task_index": 14}, {"db_idx": 16895, "episode_idx": 71, "frame_idx": 134, "global_frame_idx": 16895, "task_index": 14}, {"db_idx": 16896, "episode_idx": 71, "frame_idx": 135, "global_frame_idx": 16896, "task_index": 14}, {"db_idx": 16897, "episode_idx": 71, "frame_idx": 136, "global_frame_idx": 16897, "task_index": 14}, {"db_idx": 16898, "episode_idx": 71, "frame_idx": 137, "global_frame_idx": 16898, "task_index": 14}, {"db_idx": 16899, "episode_idx": 71, "frame_idx": 138, "global_frame_idx": 16899, "task_index": 14}, {"db_idx": 16900, "episode_idx": 71, "frame_idx": 139, "global_frame_idx": 16900, "task_index": 14}, {"db_idx": 16901, "episode_idx": 71, "frame_idx": 140, "global_frame_idx": 16901, "task_index": 14}, {"db_idx": 16902, "episode_idx": 71, "frame_idx": 141, "global_frame_idx": 16902, "task_index": 14}, {"db_idx": 16903, "episode_idx": 71, "frame_idx": 142, "global_frame_idx": 16903, "task_index": 14}, {"db_idx": 16904, "episode_idx": 71, "frame_idx": 143, "global_frame_idx": 16904, "task_index": 14}, {"db_idx": 16905, "episode_idx": 71, "frame_idx": 144, "global_frame_idx": 16905, "task_index": 14}, {"db_idx": 16906, "episode_idx": 71, "frame_idx": 145, "global_frame_idx": 16906, 
"task_index": 14}, {"db_idx": 16907, "episode_idx": 71, "frame_idx": 146, "global_frame_idx": 16907, "task_index": 14}, {"db_idx": 16908, "episode_idx": 71, "frame_idx": 147, "global_frame_idx": 16908, "task_index": 14}, {"db_idx": 16909, "episode_idx": 71, "frame_idx": 148, "global_frame_idx": 16909, "task_index": 14}, {"db_idx": 16910, "episode_idx": 71, "frame_idx": 149, "global_frame_idx": 16910, "task_index": 14}, {"db_idx": 16911, "episode_idx": 71, "frame_idx": 150, "global_frame_idx": 16911, "task_index": 14}, {"db_idx": 16912, "episode_idx": 72, "frame_idx": 0, "global_frame_idx": 16912, "task_index": 14}, {"db_idx": 16913, "episode_idx": 72, "frame_idx": 1, "global_frame_idx": 16913, "task_index": 14}, {"db_idx": 16914, "episode_idx": 72, "frame_idx": 2, "global_frame_idx": 16914, "task_index": 14}, {"db_idx": 16915, "episode_idx": 72, "frame_idx": 3, "global_frame_idx": 16915, "task_index": 14}, {"db_idx": 16916, "episode_idx": 72, "frame_idx": 4, "global_frame_idx": 16916, "task_index": 14}, {"db_idx": 16917, "episode_idx": 72, "frame_idx": 5, "global_frame_idx": 16917, "task_index": 14}, {"db_idx": 16918, "episode_idx": 72, "frame_idx": 6, "global_frame_idx": 16918, "task_index": 14}, {"db_idx": 16919, "episode_idx": 72, "frame_idx": 7, "global_frame_idx": 16919, "task_index": 14}, {"db_idx": 16920, "episode_idx": 72, "frame_idx": 8, "global_frame_idx": 16920, "task_index": 14}, {"db_idx": 16921, "episode_idx": 72, "frame_idx": 9, "global_frame_idx": 16921, "task_index": 14}, {"db_idx": 16922, "episode_idx": 72, "frame_idx": 10, "global_frame_idx": 16922, "task_index": 14}, {"db_idx": 16923, "episode_idx": 72, "frame_idx": 11, "global_frame_idx": 16923, "task_index": 14}, {"db_idx": 16924, "episode_idx": 72, "frame_idx": 12, "global_frame_idx": 16924, "task_index": 14}, {"db_idx": 16925, "episode_idx": 72, "frame_idx": 13, "global_frame_idx": 16925, "task_index": 14}, {"db_idx": 16926, "episode_idx": 72, "frame_idx": 14, "global_frame_idx": 16926, 
"task_index": 14}, {"db_idx": 16927, "episode_idx": 72, "frame_idx": 15, "global_frame_idx": 16927, "task_index": 14}, {"db_idx": 16928, "episode_idx": 72, "frame_idx": 16, "global_frame_idx": 16928, "task_index": 14}, {"db_idx": 16929, "episode_idx": 72, "frame_idx": 17, "global_frame_idx": 16929, "task_index": 14}, {"db_idx": 16930, "episode_idx": 72, "frame_idx": 18, "global_frame_idx": 16930, "task_index": 14}, {"db_idx": 16931, "episode_idx": 72, "frame_idx": 19, "global_frame_idx": 16931, "task_index": 14}, {"db_idx": 16932, "episode_idx": 72, "frame_idx": 20, "global_frame_idx": 16932, "task_index": 14}, {"db_idx": 16933, "episode_idx": 72, "frame_idx": 21, "global_frame_idx": 16933, "task_index": 14}, {"db_idx": 16934, "episode_idx": 72, "frame_idx": 22, "global_frame_idx": 16934, "task_index": 14}, {"db_idx": 16935, "episode_idx": 72, "frame_idx": 23, "global_frame_idx": 16935, "task_index": 14}, {"db_idx": 16936, "episode_idx": 72, "frame_idx": 24, "global_frame_idx": 16936, "task_index": 14}, {"db_idx": 16937, "episode_idx": 72, "frame_idx": 25, "global_frame_idx": 16937, "task_index": 14}, {"db_idx": 16938, "episode_idx": 72, "frame_idx": 26, "global_frame_idx": 16938, "task_index": 14}, {"db_idx": 16939, "episode_idx": 72, "frame_idx": 27, "global_frame_idx": 16939, "task_index": 14}, {"db_idx": 16940, "episode_idx": 72, "frame_idx": 28, "global_frame_idx": 16940, "task_index": 14}, {"db_idx": 16941, "episode_idx": 72, "frame_idx": 29, "global_frame_idx": 16941, "task_index": 14}, {"db_idx": 16942, "episode_idx": 72, "frame_idx": 30, "global_frame_idx": 16942, "task_index": 14}, {"db_idx": 16943, "episode_idx": 72, "frame_idx": 31, "global_frame_idx": 16943, "task_index": 14}, {"db_idx": 16944, "episode_idx": 72, "frame_idx": 32, "global_frame_idx": 16944, "task_index": 14}, {"db_idx": 16945, "episode_idx": 72, "frame_idx": 33, "global_frame_idx": 16945, "task_index": 14}, {"db_idx": 16946, "episode_idx": 72, "frame_idx": 34, "global_frame_idx": 16946, 
"task_index": 14}, {"db_idx": 16947, "episode_idx": 72, "frame_idx": 35, "global_frame_idx": 16947, "task_index": 14}, {"db_idx": 16948, "episode_idx": 72, "frame_idx": 36, "global_frame_idx": 16948, "task_index": 14}, {"db_idx": 16949, "episode_idx": 72, "frame_idx": 37, "global_frame_idx": 16949, "task_index": 14}, {"db_idx": 16950, "episode_idx": 72, "frame_idx": 38, "global_frame_idx": 16950, "task_index": 14}, {"db_idx": 16951, "episode_idx": 72, "frame_idx": 39, "global_frame_idx": 16951, "task_index": 14}, {"db_idx": 16952, "episode_idx": 72, "frame_idx": 40, "global_frame_idx": 16952, "task_index": 14}, {"db_idx": 16953, "episode_idx": 72, "frame_idx": 41, "global_frame_idx": 16953, "task_index": 14}, {"db_idx": 16954, "episode_idx": 72, "frame_idx": 42, "global_frame_idx": 16954, "task_index": 14}, {"db_idx": 16955, "episode_idx": 72, "frame_idx": 43, "global_frame_idx": 16955, "task_index": 14}, {"db_idx": 16956, "episode_idx": 72, "frame_idx": 44, "global_frame_idx": 16956, "task_index": 14}, {"db_idx": 16957, "episode_idx": 72, "frame_idx": 45, "global_frame_idx": 16957, "task_index": 14}, {"db_idx": 16958, "episode_idx": 72, "frame_idx": 46, "global_frame_idx": 16958, "task_index": 14}, {"db_idx": 16959, "episode_idx": 72, "frame_idx": 47, "global_frame_idx": 16959, "task_index": 14}, {"db_idx": 16960, "episode_idx": 72, "frame_idx": 48, "global_frame_idx": 16960, "task_index": 14}, {"db_idx": 16961, "episode_idx": 72, "frame_idx": 49, "global_frame_idx": 16961, "task_index": 14}, {"db_idx": 16962, "episode_idx": 72, "frame_idx": 50, "global_frame_idx": 16962, "task_index": 14}, {"db_idx": 16963, "episode_idx": 72, "frame_idx": 51, "global_frame_idx": 16963, "task_index": 14}, {"db_idx": 16964, "episode_idx": 72, "frame_idx": 52, "global_frame_idx": 16964, "task_index": 14}, {"db_idx": 16965, "episode_idx": 72, "frame_idx": 53, "global_frame_idx": 16965, "task_index": 14}, {"db_idx": 16966, "episode_idx": 72, "frame_idx": 54, "global_frame_idx": 16966, 
"task_index": 14}, {"db_idx": 16967, "episode_idx": 72, "frame_idx": 55, "global_frame_idx": 16967, "task_index": 14}, {"db_idx": 16968, "episode_idx": 72, "frame_idx": 56, "global_frame_idx": 16968, "task_index": 14}, {"db_idx": 16969, "episode_idx": 72, "frame_idx": 57, "global_frame_idx": 16969, "task_index": 14}, {"db_idx": 16970, "episode_idx": 72, "frame_idx": 58, "global_frame_idx": 16970, "task_index": 14}, {"db_idx": 16971, "episode_idx": 72, "frame_idx": 59, "global_frame_idx": 16971, "task_index": 14}, {"db_idx": 16972, "episode_idx": 72, "frame_idx": 60, "global_frame_idx": 16972, "task_index": 14}, {"db_idx": 16973, "episode_idx": 72, "frame_idx": 61, "global_frame_idx": 16973, "task_index": 14}, {"db_idx": 16974, "episode_idx": 72, "frame_idx": 62, "global_frame_idx": 16974, "task_index": 14}, {"db_idx": 16975, "episode_idx": 72, "frame_idx": 63, "global_frame_idx": 16975, "task_index": 14}, {"db_idx": 16976, "episode_idx": 72, "frame_idx": 64, "global_frame_idx": 16976, "task_index": 14}, {"db_idx": 16977, "episode_idx": 72, "frame_idx": 65, "global_frame_idx": 16977, "task_index": 14}, {"db_idx": 16978, "episode_idx": 72, "frame_idx": 66, "global_frame_idx": 16978, "task_index": 14}, {"db_idx": 16979, "episode_idx": 72, "frame_idx": 67, "global_frame_idx": 16979, "task_index": 14}, {"db_idx": 16980, "episode_idx": 72, "frame_idx": 68, "global_frame_idx": 16980, "task_index": 14}, {"db_idx": 16981, "episode_idx": 72, "frame_idx": 69, "global_frame_idx": 16981, "task_index": 14}, {"db_idx": 16982, "episode_idx": 72, "frame_idx": 70, "global_frame_idx": 16982, "task_index": 14}, {"db_idx": 16983, "episode_idx": 72, "frame_idx": 71, "global_frame_idx": 16983, "task_index": 14}, {"db_idx": 16984, "episode_idx": 72, "frame_idx": 72, "global_frame_idx": 16984, "task_index": 14}, {"db_idx": 16985, "episode_idx": 72, "frame_idx": 73, "global_frame_idx": 16985, "task_index": 14}, {"db_idx": 16986, "episode_idx": 72, "frame_idx": 74, "global_frame_idx": 16986, 
"task_index": 14}, {"db_idx": 16987, "episode_idx": 72, "frame_idx": 75, "global_frame_idx": 16987, "task_index": 14}, {"db_idx": 16988, "episode_idx": 72, "frame_idx": 76, "global_frame_idx": 16988, "task_index": 14}, {"db_idx": 16989, "episode_idx": 72, "frame_idx": 77, "global_frame_idx": 16989, "task_index": 14}, {"db_idx": 16990, "episode_idx": 72, "frame_idx": 78, "global_frame_idx": 16990, "task_index": 14}, {"db_idx": 16991, "episode_idx": 72, "frame_idx": 79, "global_frame_idx": 16991, "task_index": 14}, {"db_idx": 16992, "episode_idx": 72, "frame_idx": 80, "global_frame_idx": 16992, "task_index": 14}, {"db_idx": 16993, "episode_idx": 72, "frame_idx": 81, "global_frame_idx": 16993, "task_index": 14}, {"db_idx": 16994, "episode_idx": 72, "frame_idx": 82, "global_frame_idx": 16994, "task_index": 14}, {"db_idx": 16995, "episode_idx": 72, "frame_idx": 83, "global_frame_idx": 16995, "task_index": 14}, {"db_idx": 16996, "episode_idx": 72, "frame_idx": 84, "global_frame_idx": 16996, "task_index": 14}, {"db_idx": 16997, "episode_idx": 72, "frame_idx": 85, "global_frame_idx": 16997, "task_index": 14}, {"db_idx": 16998, "episode_idx": 72, "frame_idx": 86, "global_frame_idx": 16998, "task_index": 14}, {"db_idx": 16999, "episode_idx": 72, "frame_idx": 87, "global_frame_idx": 16999, "task_index": 14}, {"db_idx": 17000, "episode_idx": 72, "frame_idx": 88, "global_frame_idx": 17000, "task_index": 14}, {"db_idx": 17001, "episode_idx": 72, "frame_idx": 89, "global_frame_idx": 17001, "task_index": 14}, {"db_idx": 17002, "episode_idx": 72, "frame_idx": 90, "global_frame_idx": 17002, "task_index": 14}, {"db_idx": 17003, "episode_idx": 72, "frame_idx": 91, "global_frame_idx": 17003, "task_index": 14}, {"db_idx": 17004, "episode_idx": 72, "frame_idx": 92, "global_frame_idx": 17004, "task_index": 14}, {"db_idx": 17005, "episode_idx": 72, "frame_idx": 93, "global_frame_idx": 17005, "task_index": 14}, {"db_idx": 17006, "episode_idx": 72, "frame_idx": 94, "global_frame_idx": 17006, 
"task_index": 14}, {"db_idx": 17007, "episode_idx": 72, "frame_idx": 95, "global_frame_idx": 17007, "task_index": 14}, {"db_idx": 17008, "episode_idx": 72, "frame_idx": 96, "global_frame_idx": 17008, "task_index": 14}, {"db_idx": 17009, "episode_idx": 72, "frame_idx": 97, "global_frame_idx": 17009, "task_index": 14}, {"db_idx": 17010, "episode_idx": 72, "frame_idx": 98, "global_frame_idx": 17010, "task_index": 14}, {"db_idx": 17011, "episode_idx": 72, "frame_idx": 99, "global_frame_idx": 17011, "task_index": 14}, {"db_idx": 17012, "episode_idx": 72, "frame_idx": 100, "global_frame_idx": 17012, "task_index": 14}, {"db_idx": 17013, "episode_idx": 72, "frame_idx": 101, "global_frame_idx": 17013, "task_index": 14}, {"db_idx": 17014, "episode_idx": 72, "frame_idx": 102, "global_frame_idx": 17014, "task_index": 14}, {"db_idx": 17015, "episode_idx": 72, "frame_idx": 103, "global_frame_idx": 17015, "task_index": 14}, {"db_idx": 17016, "episode_idx": 72, "frame_idx": 104, "global_frame_idx": 17016, "task_index": 14}, {"db_idx": 17017, "episode_idx": 72, "frame_idx": 105, "global_frame_idx": 17017, "task_index": 14}, {"db_idx": 17018, "episode_idx": 72, "frame_idx": 106, "global_frame_idx": 17018, "task_index": 14}, {"db_idx": 17019, "episode_idx": 72, "frame_idx": 107, "global_frame_idx": 17019, "task_index": 14}, {"db_idx": 17020, "episode_idx": 72, "frame_idx": 108, "global_frame_idx": 17020, "task_index": 14}, {"db_idx": 17021, "episode_idx": 72, "frame_idx": 109, "global_frame_idx": 17021, "task_index": 14}, {"db_idx": 17022, "episode_idx": 72, "frame_idx": 110, "global_frame_idx": 17022, "task_index": 14}, {"db_idx": 17023, "episode_idx": 72, "frame_idx": 111, "global_frame_idx": 17023, "task_index": 14}, {"db_idx": 17024, "episode_idx": 72, "frame_idx": 112, "global_frame_idx": 17024, "task_index": 14}, {"db_idx": 17025, "episode_idx": 72, "frame_idx": 113, "global_frame_idx": 17025, "task_index": 14}, {"db_idx": 17026, "episode_idx": 72, "frame_idx": 114, 
"global_frame_idx": 17026, "task_index": 14}, {"db_idx": 17027, "episode_idx": 72, "frame_idx": 115, "global_frame_idx": 17027, "task_index": 14}, {"db_idx": 17028, "episode_idx": 72, "frame_idx": 116, "global_frame_idx": 17028, "task_index": 14}, {"db_idx": 17029, "episode_idx": 72, "frame_idx": 117, "global_frame_idx": 17029, "task_index": 14}, {"db_idx": 17030, "episode_idx": 72, "frame_idx": 118, "global_frame_idx": 17030, "task_index": 14}, {"db_idx": 17031, "episode_idx": 72, "frame_idx": 119, "global_frame_idx": 17031, "task_index": 14}, {"db_idx": 17032, "episode_idx": 72, "frame_idx": 120, "global_frame_idx": 17032, "task_index": 14}, {"db_idx": 17033, "episode_idx": 72, "frame_idx": 121, "global_frame_idx": 17033, "task_index": 14}, {"db_idx": 17034, "episode_idx": 72, "frame_idx": 122, "global_frame_idx": 17034, "task_index": 14}, {"db_idx": 17035, "episode_idx": 72, "frame_idx": 123, "global_frame_idx": 17035, "task_index": 14}, {"db_idx": 17036, "episode_idx": 72, "frame_idx": 124, "global_frame_idx": 17036, "task_index": 14}, {"db_idx": 17037, "episode_idx": 72, "frame_idx": 125, "global_frame_idx": 17037, "task_index": 14}, {"db_idx": 17038, "episode_idx": 72, "frame_idx": 126, "global_frame_idx": 17038, "task_index": 14}, {"db_idx": 17039, "episode_idx": 72, "frame_idx": 127, "global_frame_idx": 17039, "task_index": 14}, {"db_idx": 17040, "episode_idx": 72, "frame_idx": 128, "global_frame_idx": 17040, "task_index": 14}, {"db_idx": 17041, "episode_idx": 72, "frame_idx": 129, "global_frame_idx": 17041, "task_index": 14}, {"db_idx": 17042, "episode_idx": 72, "frame_idx": 130, "global_frame_idx": 17042, "task_index": 14}, {"db_idx": 17043, "episode_idx": 72, "frame_idx": 131, "global_frame_idx": 17043, "task_index": 14}, {"db_idx": 17044, "episode_idx": 72, "frame_idx": 132, "global_frame_idx": 17044, "task_index": 14}, {"db_idx": 17045, "episode_idx": 72, "frame_idx": 133, "global_frame_idx": 17045, "task_index": 14}, {"db_idx": 17046, "episode_idx": 
72, "frame_idx": 134, "global_frame_idx": 17046, "task_index": 14}, {"db_idx": 17047, "episode_idx": 72, "frame_idx": 135, "global_frame_idx": 17047, "task_index": 14}, {"db_idx": 17048, "episode_idx": 72, "frame_idx": 136, "global_frame_idx": 17048, "task_index": 14}, {"db_idx": 17049, "episode_idx": 72, "frame_idx": 137, "global_frame_idx": 17049, "task_index": 14}, {"db_idx": 17050, "episode_idx": 72, "frame_idx": 138, "global_frame_idx": 17050, "task_index": 14}, {"db_idx": 17051, "episode_idx": 72, "frame_idx": 139, "global_frame_idx": 17051, "task_index": 14}, {"db_idx": 17052, "episode_idx": 72, "frame_idx": 140, "global_frame_idx": 17052, "task_index": 14}, {"db_idx": 17053, "episode_idx": 72, "frame_idx": 141, "global_frame_idx": 17053, "task_index": 14}, {"db_idx": 17054, "episode_idx": 72, "frame_idx": 142, "global_frame_idx": 17054, "task_index": 14}, {"db_idx": 17055, "episode_idx": 72, "frame_idx": 143, "global_frame_idx": 17055, "task_index": 14}, {"db_idx": 17056, "episode_idx": 72, "frame_idx": 144, "global_frame_idx": 17056, "task_index": 14}, {"db_idx": 17057, "episode_idx": 72, "frame_idx": 145, "global_frame_idx": 17057, "task_index": 14}, {"db_idx": 17058, "episode_idx": 72, "frame_idx": 146, "global_frame_idx": 17058, "task_index": 14}, {"db_idx": 17059, "episode_idx": 72, "frame_idx": 147, "global_frame_idx": 17059, "task_index": 14}, {"db_idx": 17060, "episode_idx": 72, "frame_idx": 148, "global_frame_idx": 17060, "task_index": 14}, {"db_idx": 17061, "episode_idx": 72, "frame_idx": 149, "global_frame_idx": 17061, "task_index": 14}, {"db_idx": 17062, "episode_idx": 72, "frame_idx": 150, "global_frame_idx": 17062, "task_index": 14}, {"db_idx": 17063, "episode_idx": 72, "frame_idx": 151, "global_frame_idx": 17063, "task_index": 14}, {"db_idx": 17064, "episode_idx": 72, "frame_idx": 152, "global_frame_idx": 17064, "task_index": 14}, {"db_idx": 17065, "episode_idx": 72, "frame_idx": 153, "global_frame_idx": 17065, "task_index": 14}, {"db_idx": 
17066, "episode_idx": 72, "frame_idx": 154, "global_frame_idx": 17066, "task_index": 14}, {"db_idx": 17067, "episode_idx": 73, "frame_idx": 0, "global_frame_idx": 17067, "task_index": 14}, {"db_idx": 17068, "episode_idx": 73, "frame_idx": 1, "global_frame_idx": 17068, "task_index": 14}, {"db_idx": 17069, "episode_idx": 73, "frame_idx": 2, "global_frame_idx": 17069, "task_index": 14}, {"db_idx": 17070, "episode_idx": 73, "frame_idx": 3, "global_frame_idx": 17070, "task_index": 14}, {"db_idx": 17071, "episode_idx": 73, "frame_idx": 4, "global_frame_idx": 17071, "task_index": 14}, {"db_idx": 17072, "episode_idx": 73, "frame_idx": 5, "global_frame_idx": 17072, "task_index": 14}, {"db_idx": 17073, "episode_idx": 73, "frame_idx": 6, "global_frame_idx": 17073, "task_index": 14}, {"db_idx": 17074, "episode_idx": 73, "frame_idx": 7, "global_frame_idx": 17074, "task_index": 14}, {"db_idx": 17075, "episode_idx": 73, "frame_idx": 8, "global_frame_idx": 17075, "task_index": 14}, {"db_idx": 17076, "episode_idx": 73, "frame_idx": 9, "global_frame_idx": 17076, "task_index": 14}, {"db_idx": 17077, "episode_idx": 73, "frame_idx": 10, "global_frame_idx": 17077, "task_index": 14}, {"db_idx": 17078, "episode_idx": 73, "frame_idx": 11, "global_frame_idx": 17078, "task_index": 14}, {"db_idx": 17079, "episode_idx": 73, "frame_idx": 12, "global_frame_idx": 17079, "task_index": 14}, {"db_idx": 17080, "episode_idx": 73, "frame_idx": 13, "global_frame_idx": 17080, "task_index": 14}, {"db_idx": 17081, "episode_idx": 73, "frame_idx": 14, "global_frame_idx": 17081, "task_index": 14}, {"db_idx": 17082, "episode_idx": 73, "frame_idx": 15, "global_frame_idx": 17082, "task_index": 14}, {"db_idx": 17083, "episode_idx": 73, "frame_idx": 16, "global_frame_idx": 17083, "task_index": 14}, {"db_idx": 17084, "episode_idx": 73, "frame_idx": 17, "global_frame_idx": 17084, "task_index": 14}, {"db_idx": 17085, "episode_idx": 73, "frame_idx": 18, "global_frame_idx": 17085, "task_index": 14}, {"db_idx": 17086, 
"episode_idx": 73, "frame_idx": 19, "global_frame_idx": 17086, "task_index": 14}, {"db_idx": 17087, "episode_idx": 73, "frame_idx": 20, "global_frame_idx": 17087, "task_index": 14}, {"db_idx": 17088, "episode_idx": 73, "frame_idx": 21, "global_frame_idx": 17088, "task_index": 14}, {"db_idx": 17089, "episode_idx": 73, "frame_idx": 22, "global_frame_idx": 17089, "task_index": 14}, {"db_idx": 17090, "episode_idx": 73, "frame_idx": 23, "global_frame_idx": 17090, "task_index": 14}, {"db_idx": 17091, "episode_idx": 73, "frame_idx": 24, "global_frame_idx": 17091, "task_index": 14}, {"db_idx": 17092, "episode_idx": 73, "frame_idx": 25, "global_frame_idx": 17092, "task_index": 14}, {"db_idx": 17093, "episode_idx": 73, "frame_idx": 26, "global_frame_idx": 17093, "task_index": 14}, {"db_idx": 17094, "episode_idx": 73, "frame_idx": 27, "global_frame_idx": 17094, "task_index": 14}, {"db_idx": 17095, "episode_idx": 73, "frame_idx": 28, "global_frame_idx": 17095, "task_index": 14}, {"db_idx": 17096, "episode_idx": 73, "frame_idx": 29, "global_frame_idx": 17096, "task_index": 14}, {"db_idx": 17097, "episode_idx": 73, "frame_idx": 30, "global_frame_idx": 17097, "task_index": 14}, {"db_idx": 17098, "episode_idx": 73, "frame_idx": 31, "global_frame_idx": 17098, "task_index": 14}, {"db_idx": 17099, "episode_idx": 73, "frame_idx": 32, "global_frame_idx": 17099, "task_index": 14}, {"db_idx": 17100, "episode_idx": 73, "frame_idx": 33, "global_frame_idx": 17100, "task_index": 14}, {"db_idx": 17101, "episode_idx": 73, "frame_idx": 34, "global_frame_idx": 17101, "task_index": 14}, {"db_idx": 17102, "episode_idx": 73, "frame_idx": 35, "global_frame_idx": 17102, "task_index": 14}, {"db_idx": 17103, "episode_idx": 73, "frame_idx": 36, "global_frame_idx": 17103, "task_index": 14}, {"db_idx": 17104, "episode_idx": 73, "frame_idx": 37, "global_frame_idx": 17104, "task_index": 14}, {"db_idx": 17105, "episode_idx": 73, "frame_idx": 38, "global_frame_idx": 17105, "task_index": 14}, {"db_idx": 17106, 
"episode_idx": 73, "frame_idx": 39, "global_frame_idx": 17106, "task_index": 14}, {"db_idx": 17107, "episode_idx": 73, "frame_idx": 40, "global_frame_idx": 17107, "task_index": 14}, {"db_idx": 17108, "episode_idx": 73, "frame_idx": 41, "global_frame_idx": 17108, "task_index": 14}, {"db_idx": 17109, "episode_idx": 73, "frame_idx": 42, "global_frame_idx": 17109, "task_index": 14}, {"db_idx": 17110, "episode_idx": 73, "frame_idx": 43, "global_frame_idx": 17110, "task_index": 14}, {"db_idx": 17111, "episode_idx": 73, "frame_idx": 44, "global_frame_idx": 17111, "task_index": 14}, {"db_idx": 17112, "episode_idx": 73, "frame_idx": 45, "global_frame_idx": 17112, "task_index": 14}, {"db_idx": 17113, "episode_idx": 73, "frame_idx": 46, "global_frame_idx": 17113, "task_index": 14}, {"db_idx": 17114, "episode_idx": 73, "frame_idx": 47, "global_frame_idx": 17114, "task_index": 14}, {"db_idx": 17115, "episode_idx": 73, "frame_idx": 48, "global_frame_idx": 17115, "task_index": 14}, {"db_idx": 17116, "episode_idx": 73, "frame_idx": 49, "global_frame_idx": 17116, "task_index": 14}, {"db_idx": 17117, "episode_idx": 73, "frame_idx": 50, "global_frame_idx": 17117, "task_index": 14}, {"db_idx": 17118, "episode_idx": 73, "frame_idx": 51, "global_frame_idx": 17118, "task_index": 14}, {"db_idx": 17119, "episode_idx": 73, "frame_idx": 52, "global_frame_idx": 17119, "task_index": 14}, {"db_idx": 17120, "episode_idx": 73, "frame_idx": 53, "global_frame_idx": 17120, "task_index": 14}, {"db_idx": 17121, "episode_idx": 73, "frame_idx": 54, "global_frame_idx": 17121, "task_index": 14}, {"db_idx": 17122, "episode_idx": 73, "frame_idx": 55, "global_frame_idx": 17122, "task_index": 14}, {"db_idx": 17123, "episode_idx": 73, "frame_idx": 56, "global_frame_idx": 17123, "task_index": 14}, {"db_idx": 17124, "episode_idx": 73, "frame_idx": 57, "global_frame_idx": 17124, "task_index": 14}, {"db_idx": 17125, "episode_idx": 73, "frame_idx": 58, "global_frame_idx": 17125, "task_index": 14}, {"db_idx": 17126, 
"episode_idx": 73, "frame_idx": 59, "global_frame_idx": 17126, "task_index": 14}, {"db_idx": 17127, "episode_idx": 73, "frame_idx": 60, "global_frame_idx": 17127, "task_index": 14}, {"db_idx": 17128, "episode_idx": 73, "frame_idx": 61, "global_frame_idx": 17128, "task_index": 14}, {"db_idx": 17129, "episode_idx": 73, "frame_idx": 62, "global_frame_idx": 17129, "task_index": 14}, {"db_idx": 17130, "episode_idx": 73, "frame_idx": 63, "global_frame_idx": 17130, "task_index": 14}, {"db_idx": 17131, "episode_idx": 73, "frame_idx": 64, "global_frame_idx": 17131, "task_index": 14}, {"db_idx": 17132, "episode_idx": 73, "frame_idx": 65, "global_frame_idx": 17132, "task_index": 14}, {"db_idx": 17133, "episode_idx": 73, "frame_idx": 66, "global_frame_idx": 17133, "task_index": 14}, {"db_idx": 17134, "episode_idx": 73, "frame_idx": 67, "global_frame_idx": 17134, "task_index": 14}, {"db_idx": 17135, "episode_idx": 73, "frame_idx": 68, "global_frame_idx": 17135, "task_index": 14}, {"db_idx": 17136, "episode_idx": 73, "frame_idx": 69, "global_frame_idx": 17136, "task_index": 14}, {"db_idx": 17137, "episode_idx": 73, "frame_idx": 70, "global_frame_idx": 17137, "task_index": 14}, {"db_idx": 17138, "episode_idx": 73, "frame_idx": 71, "global_frame_idx": 17138, "task_index": 14}, {"db_idx": 17139, "episode_idx": 73, "frame_idx": 72, "global_frame_idx": 17139, "task_index": 14}, {"db_idx": 17140, "episode_idx": 73, "frame_idx": 73, "global_frame_idx": 17140, "task_index": 14}, {"db_idx": 17141, "episode_idx": 73, "frame_idx": 74, "global_frame_idx": 17141, "task_index": 14}, {"db_idx": 17142, "episode_idx": 73, "frame_idx": 75, "global_frame_idx": 17142, "task_index": 14}, {"db_idx": 17143, "episode_idx": 73, "frame_idx": 76, "global_frame_idx": 17143, "task_index": 14}, {"db_idx": 17144, "episode_idx": 73, "frame_idx": 77, "global_frame_idx": 17144, "task_index": 14}, {"db_idx": 17145, "episode_idx": 73, "frame_idx": 78, "global_frame_idx": 17145, "task_index": 14}, {"db_idx": 17146, 
"episode_idx": 73, "frame_idx": 79, "global_frame_idx": 17146, "task_index": 14}, {"db_idx": 17147, "episode_idx": 73, "frame_idx": 80, "global_frame_idx": 17147, "task_index": 14}, {"db_idx": 17148, "episode_idx": 73, "frame_idx": 81, "global_frame_idx": 17148, "task_index": 14}, {"db_idx": 17149, "episode_idx": 73, "frame_idx": 82, "global_frame_idx": 17149, "task_index": 14}, {"db_idx": 17150, "episode_idx": 73, "frame_idx": 83, "global_frame_idx": 17150, "task_index": 14}, {"db_idx": 17151, "episode_idx": 73, "frame_idx": 84, "global_frame_idx": 17151, "task_index": 14}, {"db_idx": 17152, "episode_idx": 73, "frame_idx": 85, "global_frame_idx": 17152, "task_index": 14}, {"db_idx": 17153, "episode_idx": 73, "frame_idx": 86, "global_frame_idx": 17153, "task_index": 14}, {"db_idx": 17154, "episode_idx": 73, "frame_idx": 87, "global_frame_idx": 17154, "task_index": 14}, {"db_idx": 17155, "episode_idx": 73, "frame_idx": 88, "global_frame_idx": 17155, "task_index": 14}, {"db_idx": 17156, "episode_idx": 73, "frame_idx": 89, "global_frame_idx": 17156, "task_index": 14}, {"db_idx": 17157, "episode_idx": 73, "frame_idx": 90, "global_frame_idx": 17157, "task_index": 14}, {"db_idx": 17158, "episode_idx": 73, "frame_idx": 91, "global_frame_idx": 17158, "task_index": 14}, {"db_idx": 17159, "episode_idx": 73, "frame_idx": 92, "global_frame_idx": 17159, "task_index": 14}, {"db_idx": 17160, "episode_idx": 73, "frame_idx": 93, "global_frame_idx": 17160, "task_index": 14}, {"db_idx": 17161, "episode_idx": 73, "frame_idx": 94, "global_frame_idx": 17161, "task_index": 14}, {"db_idx": 17162, "episode_idx": 73, "frame_idx": 95, "global_frame_idx": 17162, "task_index": 14}, {"db_idx": 17163, "episode_idx": 73, "frame_idx": 96, "global_frame_idx": 17163, "task_index": 14}, {"db_idx": 17164, "episode_idx": 73, "frame_idx": 97, "global_frame_idx": 17164, "task_index": 14}, {"db_idx": 17165, "episode_idx": 73, "frame_idx": 98, "global_frame_idx": 17165, "task_index": 14}, {"db_idx": 17166, 
"episode_idx": 73, "frame_idx": 99, "global_frame_idx": 17166, "task_index": 14}, {"db_idx": 17167, "episode_idx": 73, "frame_idx": 100, "global_frame_idx": 17167, "task_index": 14}, {"db_idx": 17168, "episode_idx": 73, "frame_idx": 101, "global_frame_idx": 17168, "task_index": 14}, {"db_idx": 17169, "episode_idx": 73, "frame_idx": 102, "global_frame_idx": 17169, "task_index": 14}, {"db_idx": 17170, "episode_idx": 73, "frame_idx": 103, "global_frame_idx": 17170, "task_index": 14}, {"db_idx": 17171, "episode_idx": 73, "frame_idx": 104, "global_frame_idx": 17171, "task_index": 14}, {"db_idx": 17172, "episode_idx": 73, "frame_idx": 105, "global_frame_idx": 17172, "task_index": 14}, {"db_idx": 17173, "episode_idx": 73, "frame_idx": 106, "global_frame_idx": 17173, "task_index": 14}, {"db_idx": 17174, "episode_idx": 73, "frame_idx": 107, "global_frame_idx": 17174, "task_index": 14}, {"db_idx": 17175, "episode_idx": 73, "frame_idx": 108, "global_frame_idx": 17175, "task_index": 14}, {"db_idx": 17176, "episode_idx": 73, "frame_idx": 109, "global_frame_idx": 17176, "task_index": 14}, {"db_idx": 17177, "episode_idx": 73, "frame_idx": 110, "global_frame_idx": 17177, "task_index": 14}, {"db_idx": 17178, "episode_idx": 73, "frame_idx": 111, "global_frame_idx": 17178, "task_index": 14}, {"db_idx": 17179, "episode_idx": 73, "frame_idx": 112, "global_frame_idx": 17179, "task_index": 14}, {"db_idx": 17180, "episode_idx": 73, "frame_idx": 113, "global_frame_idx": 17180, "task_index": 14}, {"db_idx": 17181, "episode_idx": 73, "frame_idx": 114, "global_frame_idx": 17181, "task_index": 14}, {"db_idx": 17182, "episode_idx": 73, "frame_idx": 115, "global_frame_idx": 17182, "task_index": 14}, {"db_idx": 17183, "episode_idx": 73, "frame_idx": 116, "global_frame_idx": 17183, "task_index": 14}, {"db_idx": 17184, "episode_idx": 73, "frame_idx": 117, "global_frame_idx": 17184, "task_index": 14}, {"db_idx": 17185, "episode_idx": 73, "frame_idx": 118, "global_frame_idx": 17185, "task_index": 
14}, {"db_idx": 17186, "episode_idx": 73, "frame_idx": 119, "global_frame_idx": 17186, "task_index": 14}, {"db_idx": 17187, "episode_idx": 73, "frame_idx": 120, "global_frame_idx": 17187, "task_index": 14}, {"db_idx": 17188, "episode_idx": 73, "frame_idx": 121, "global_frame_idx": 17188, "task_index": 14}, {"db_idx": 17189, "episode_idx": 73, "frame_idx": 122, "global_frame_idx": 17189, "task_index": 14}, {"db_idx": 17190, "episode_idx": 73, "frame_idx": 123, "global_frame_idx": 17190, "task_index": 14}, {"db_idx": 17191, "episode_idx": 73, "frame_idx": 124, "global_frame_idx": 17191, "task_index": 14}, {"db_idx": 17192, "episode_idx": 73, "frame_idx": 125, "global_frame_idx": 17192, "task_index": 14}, {"db_idx": 17193, "episode_idx": 73, "frame_idx": 126, "global_frame_idx": 17193, "task_index": 14}, {"db_idx": 17194, "episode_idx": 73, "frame_idx": 127, "global_frame_idx": 17194, "task_index": 14}, {"db_idx": 17195, "episode_idx": 73, "frame_idx": 128, "global_frame_idx": 17195, "task_index": 14}, {"db_idx": 17196, "episode_idx": 73, "frame_idx": 129, "global_frame_idx": 17196, "task_index": 14}, {"db_idx": 17197, "episode_idx": 73, "frame_idx": 130, "global_frame_idx": 17197, "task_index": 14}, {"db_idx": 17198, "episode_idx": 73, "frame_idx": 131, "global_frame_idx": 17198, "task_index": 14}, {"db_idx": 17199, "episode_idx": 73, "frame_idx": 132, "global_frame_idx": 17199, "task_index": 14}, {"db_idx": 17200, "episode_idx": 73, "frame_idx": 133, "global_frame_idx": 17200, "task_index": 14}, {"db_idx": 17201, "episode_idx": 73, "frame_idx": 134, "global_frame_idx": 17201, "task_index": 14}, {"db_idx": 17202, "episode_idx": 73, "frame_idx": 135, "global_frame_idx": 17202, "task_index": 14}, {"db_idx": 17203, "episode_idx": 73, "frame_idx": 136, "global_frame_idx": 17203, "task_index": 14}, {"db_idx": 17204, "episode_idx": 73, "frame_idx": 137, "global_frame_idx": 17204, "task_index": 14}, {"db_idx": 17205, "episode_idx": 73, "frame_idx": 138, "global_frame_idx": 
17205, "task_index": 14}, {"db_idx": 17206, "episode_idx": 73, "frame_idx": 139, "global_frame_idx": 17206, "task_index": 14}, {"db_idx": 17207, "episode_idx": 73, "frame_idx": 140, "global_frame_idx": 17207, "task_index": 14}, {"db_idx": 17208, "episode_idx": 73, "frame_idx": 141, "global_frame_idx": 17208, "task_index": 14}, {"db_idx": 17209, "episode_idx": 73, "frame_idx": 142, "global_frame_idx": 17209, "task_index": 14}, {"db_idx": 17210, "episode_idx": 73, "frame_idx": 143, "global_frame_idx": 17210, "task_index": 14}, {"db_idx": 17211, "episode_idx": 73, "frame_idx": 144, "global_frame_idx": 17211, "task_index": 14}, {"db_idx": 17212, "episode_idx": 73, "frame_idx": 145, "global_frame_idx": 17212, "task_index": 14}, {"db_idx": 17213, "episode_idx": 73, "frame_idx": 146, "global_frame_idx": 17213, "task_index": 14}, {"db_idx": 17214, "episode_idx": 73, "frame_idx": 147, "global_frame_idx": 17214, "task_index": 14}, {"db_idx": 17215, "episode_idx": 73, "frame_idx": 148, "global_frame_idx": 17215, "task_index": 14}, {"db_idx": 17216, "episode_idx": 73, "frame_idx": 149, "global_frame_idx": 17216, "task_index": 14}, {"db_idx": 17217, "episode_idx": 73, "frame_idx": 150, "global_frame_idx": 17217, "task_index": 14}, {"db_idx": 17218, "episode_idx": 73, "frame_idx": 151, "global_frame_idx": 17218, "task_index": 14}, {"db_idx": 17219, "episode_idx": 73, "frame_idx": 152, "global_frame_idx": 17219, "task_index": 14}, {"db_idx": 17220, "episode_idx": 73, "frame_idx": 153, "global_frame_idx": 17220, "task_index": 14}, {"db_idx": 17221, "episode_idx": 74, "frame_idx": 0, "global_frame_idx": 17221, "task_index": 14}, {"db_idx": 17222, "episode_idx": 74, "frame_idx": 1, "global_frame_idx": 17222, "task_index": 14}, {"db_idx": 17223, "episode_idx": 74, "frame_idx": 2, "global_frame_idx": 17223, "task_index": 14}, {"db_idx": 17224, "episode_idx": 74, "frame_idx": 3, "global_frame_idx": 17224, "task_index": 14}, {"db_idx": 17225, "episode_idx": 74, "frame_idx": 4, 
"global_frame_idx": 17225, "task_index": 14}, {"db_idx": 17226, "episode_idx": 74, "frame_idx": 5, "global_frame_idx": 17226, "task_index": 14}, {"db_idx": 17227, "episode_idx": 74, "frame_idx": 6, "global_frame_idx": 17227, "task_index": 14}, {"db_idx": 17228, "episode_idx": 74, "frame_idx": 7, "global_frame_idx": 17228, "task_index": 14}, {"db_idx": 17229, "episode_idx": 74, "frame_idx": 8, "global_frame_idx": 17229, "task_index": 14}, {"db_idx": 17230, "episode_idx": 74, "frame_idx": 9, "global_frame_idx": 17230, "task_index": 14}, {"db_idx": 17231, "episode_idx": 74, "frame_idx": 10, "global_frame_idx": 17231, "task_index": 14}, {"db_idx": 17232, "episode_idx": 74, "frame_idx": 11, "global_frame_idx": 17232, "task_index": 14}, {"db_idx": 17233, "episode_idx": 74, "frame_idx": 12, "global_frame_idx": 17233, "task_index": 14}, {"db_idx": 17234, "episode_idx": 74, "frame_idx": 13, "global_frame_idx": 17234, "task_index": 14}, {"db_idx": 17235, "episode_idx": 74, "frame_idx": 14, "global_frame_idx": 17235, "task_index": 14}, {"db_idx": 17236, "episode_idx": 74, "frame_idx": 15, "global_frame_idx": 17236, "task_index": 14}, {"db_idx": 17237, "episode_idx": 74, "frame_idx": 16, "global_frame_idx": 17237, "task_index": 14}, {"db_idx": 17238, "episode_idx": 74, "frame_idx": 17, "global_frame_idx": 17238, "task_index": 14}, {"db_idx": 17239, "episode_idx": 74, "frame_idx": 18, "global_frame_idx": 17239, "task_index": 14}, {"db_idx": 17240, "episode_idx": 74, "frame_idx": 19, "global_frame_idx": 17240, "task_index": 14}, {"db_idx": 17241, "episode_idx": 74, "frame_idx": 20, "global_frame_idx": 17241, "task_index": 14}, {"db_idx": 17242, "episode_idx": 74, "frame_idx": 21, "global_frame_idx": 17242, "task_index": 14}, {"db_idx": 17243, "episode_idx": 74, "frame_idx": 22, "global_frame_idx": 17243, "task_index": 14}, {"db_idx": 17244, "episode_idx": 74, "frame_idx": 23, "global_frame_idx": 17244, "task_index": 14}, {"db_idx": 17245, "episode_idx": 74, "frame_idx": 24, 
"global_frame_idx": 17245, "task_index": 14}, {"db_idx": 17246, "episode_idx": 74, "frame_idx": 25, "global_frame_idx": 17246, "task_index": 14}, {"db_idx": 17247, "episode_idx": 74, "frame_idx": 26, "global_frame_idx": 17247, "task_index": 14}, {"db_idx": 17248, "episode_idx": 74, "frame_idx": 27, "global_frame_idx": 17248, "task_index": 14}, {"db_idx": 17249, "episode_idx": 74, "frame_idx": 28, "global_frame_idx": 17249, "task_index": 14}, {"db_idx": 17250, "episode_idx": 74, "frame_idx": 29, "global_frame_idx": 17250, "task_index": 14}, {"db_idx": 17251, "episode_idx": 74, "frame_idx": 30, "global_frame_idx": 17251, "task_index": 14}, {"db_idx": 17252, "episode_idx": 74, "frame_idx": 31, "global_frame_idx": 17252, "task_index": 14}, {"db_idx": 17253, "episode_idx": 74, "frame_idx": 32, "global_frame_idx": 17253, "task_index": 14}, {"db_idx": 17254, "episode_idx": 74, "frame_idx": 33, "global_frame_idx": 17254, "task_index": 14}, {"db_idx": 17255, "episode_idx": 74, "frame_idx": 34, "global_frame_idx": 17255, "task_index": 14}, {"db_idx": 17256, "episode_idx": 74, "frame_idx": 35, "global_frame_idx": 17256, "task_index": 14}, {"db_idx": 17257, "episode_idx": 74, "frame_idx": 36, "global_frame_idx": 17257, "task_index": 14}, {"db_idx": 17258, "episode_idx": 74, "frame_idx": 37, "global_frame_idx": 17258, "task_index": 14}, {"db_idx": 17259, "episode_idx": 74, "frame_idx": 38, "global_frame_idx": 17259, "task_index": 14}, {"db_idx": 17260, "episode_idx": 74, "frame_idx": 39, "global_frame_idx": 17260, "task_index": 14}, {"db_idx": 17261, "episode_idx": 74, "frame_idx": 40, "global_frame_idx": 17261, "task_index": 14}, {"db_idx": 17262, "episode_idx": 74, "frame_idx": 41, "global_frame_idx": 17262, "task_index": 14}, {"db_idx": 17263, "episode_idx": 74, "frame_idx": 42, "global_frame_idx": 17263, "task_index": 14}, {"db_idx": 17264, "episode_idx": 74, "frame_idx": 43, "global_frame_idx": 17264, "task_index": 14}, {"db_idx": 17265, "episode_idx": 74, "frame_idx": 44, 
"global_frame_idx": 17265, "task_index": 14}, {"db_idx": 17266, "episode_idx": 74, "frame_idx": 45, "global_frame_idx": 17266, "task_index": 14}, {"db_idx": 17267, "episode_idx": 74, "frame_idx": 46, "global_frame_idx": 17267, "task_index": 14}, {"db_idx": 17268, "episode_idx": 74, "frame_idx": 47, "global_frame_idx": 17268, "task_index": 14}, {"db_idx": 17269, "episode_idx": 74, "frame_idx": 48, "global_frame_idx": 17269, "task_index": 14}, {"db_idx": 17270, "episode_idx": 74, "frame_idx": 49, "global_frame_idx": 17270, "task_index": 14}, {"db_idx": 17271, "episode_idx": 74, "frame_idx": 50, "global_frame_idx": 17271, "task_index": 14}, {"db_idx": 17272, "episode_idx": 74, "frame_idx": 51, "global_frame_idx": 17272, "task_index": 14}, {"db_idx": 17273, "episode_idx": 74, "frame_idx": 52, "global_frame_idx": 17273, "task_index": 14}, {"db_idx": 17274, "episode_idx": 74, "frame_idx": 53, "global_frame_idx": 17274, "task_index": 14}, {"db_idx": 17275, "episode_idx": 74, "frame_idx": 54, "global_frame_idx": 17275, "task_index": 14}, {"db_idx": 17276, "episode_idx": 74, "frame_idx": 55, "global_frame_idx": 17276, "task_index": 14}, {"db_idx": 17277, "episode_idx": 74, "frame_idx": 56, "global_frame_idx": 17277, "task_index": 14}, {"db_idx": 17278, "episode_idx": 74, "frame_idx": 57, "global_frame_idx": 17278, "task_index": 14}, {"db_idx": 17279, "episode_idx": 74, "frame_idx": 58, "global_frame_idx": 17279, "task_index": 14}, {"db_idx": 17280, "episode_idx": 74, "frame_idx": 59, "global_frame_idx": 17280, "task_index": 14}, {"db_idx": 17281, "episode_idx": 74, "frame_idx": 60, "global_frame_idx": 17281, "task_index": 14}, {"db_idx": 17282, "episode_idx": 74, "frame_idx": 61, "global_frame_idx": 17282, "task_index": 14}, {"db_idx": 17283, "episode_idx": 74, "frame_idx": 62, "global_frame_idx": 17283, "task_index": 14}, {"db_idx": 17284, "episode_idx": 74, "frame_idx": 63, "global_frame_idx": 17284, "task_index": 14}, {"db_idx": 17285, "episode_idx": 74, "frame_idx": 64, 
"global_frame_idx": 17285, "task_index": 14}, {"db_idx": 17286, "episode_idx": 74, "frame_idx": 65, "global_frame_idx": 17286, "task_index": 14}, {"db_idx": 17287, "episode_idx": 74, "frame_idx": 66, "global_frame_idx": 17287, "task_index": 14}, {"db_idx": 17288, "episode_idx": 74, "frame_idx": 67, "global_frame_idx": 17288, "task_index": 14}, {"db_idx": 17289, "episode_idx": 74, "frame_idx": 68, "global_frame_idx": 17289, "task_index": 14}, {"db_idx": 17290, "episode_idx": 74, "frame_idx": 69, "global_frame_idx": 17290, "task_index": 14}, {"db_idx": 17291, "episode_idx": 74, "frame_idx": 70, "global_frame_idx": 17291, "task_index": 14}, {"db_idx": 17292, "episode_idx": 74, "frame_idx": 71, "global_frame_idx": 17292, "task_index": 14}, {"db_idx": 17293, "episode_idx": 74, "frame_idx": 72, "global_frame_idx": 17293, "task_index": 14}, {"db_idx": 17294, "episode_idx": 74, "frame_idx": 73, "global_frame_idx": 17294, "task_index": 14}, {"db_idx": 17295, "episode_idx": 74, "frame_idx": 74, "global_frame_idx": 17295, "task_index": 14}, {"db_idx": 17296, "episode_idx": 74, "frame_idx": 75, "global_frame_idx": 17296, "task_index": 14}, {"db_idx": 17297, "episode_idx": 74, "frame_idx": 76, "global_frame_idx": 17297, "task_index": 14}, {"db_idx": 17298, "episode_idx": 74, "frame_idx": 77, "global_frame_idx": 17298, "task_index": 14}, {"db_idx": 17299, "episode_idx": 74, "frame_idx": 78, "global_frame_idx": 17299, "task_index": 14}, {"db_idx": 17300, "episode_idx": 74, "frame_idx": 79, "global_frame_idx": 17300, "task_index": 14}, {"db_idx": 17301, "episode_idx": 74, "frame_idx": 80, "global_frame_idx": 17301, "task_index": 14}, {"db_idx": 17302, "episode_idx": 74, "frame_idx": 81, "global_frame_idx": 17302, "task_index": 14}, {"db_idx": 17303, "episode_idx": 74, "frame_idx": 82, "global_frame_idx": 17303, "task_index": 14}, {"db_idx": 17304, "episode_idx": 74, "frame_idx": 83, "global_frame_idx": 17304, "task_index": 14}, {"db_idx": 17305, "episode_idx": 74, "frame_idx": 84, 
"global_frame_idx": 17305, "task_index": 14}, {"db_idx": 17306, "episode_idx": 74, "frame_idx": 85, "global_frame_idx": 17306, "task_index": 14}, {"db_idx": 17307, "episode_idx": 74, "frame_idx": 86, "global_frame_idx": 17307, "task_index": 14}, {"db_idx": 17308, "episode_idx": 74, "frame_idx": 87, "global_frame_idx": 17308, "task_index": 14}, {"db_idx": 17309, "episode_idx": 74, "frame_idx": 88, "global_frame_idx": 17309, "task_index": 14}, {"db_idx": 17310, "episode_idx": 74, "frame_idx": 89, "global_frame_idx": 17310, "task_index": 14}, {"db_idx": 17311, "episode_idx": 74, "frame_idx": 90, "global_frame_idx": 17311, "task_index": 14}, {"db_idx": 17312, "episode_idx": 74, "frame_idx": 91, "global_frame_idx": 17312, "task_index": 14}, {"db_idx": 17313, "episode_idx": 74, "frame_idx": 92, "global_frame_idx": 17313, "task_index": 14}, {"db_idx": 17314, "episode_idx": 74, "frame_idx": 93, "global_frame_idx": 17314, "task_index": 14}, {"db_idx": 17315, "episode_idx": 74, "frame_idx": 94, "global_frame_idx": 17315, "task_index": 14}, {"db_idx": 17316, "episode_idx": 74, "frame_idx": 95, "global_frame_idx": 17316, "task_index": 14}, {"db_idx": 17317, "episode_idx": 74, "frame_idx": 96, "global_frame_idx": 17317, "task_index": 14}, {"db_idx": 17318, "episode_idx": 74, "frame_idx": 97, "global_frame_idx": 17318, "task_index": 14}, {"db_idx": 17319, "episode_idx": 74, "frame_idx": 98, "global_frame_idx": 17319, "task_index": 14}, {"db_idx": 17320, "episode_idx": 74, "frame_idx": 99, "global_frame_idx": 17320, "task_index": 14}, {"db_idx": 17321, "episode_idx": 74, "frame_idx": 100, "global_frame_idx": 17321, "task_index": 14}, {"db_idx": 17322, "episode_idx": 74, "frame_idx": 101, "global_frame_idx": 17322, "task_index": 14}, {"db_idx": 17323, "episode_idx": 74, "frame_idx": 102, "global_frame_idx": 17323, "task_index": 14}, {"db_idx": 17324, "episode_idx": 74, "frame_idx": 103, "global_frame_idx": 17324, "task_index": 14}, {"db_idx": 17325, "episode_idx": 74, "frame_idx": 
104, "global_frame_idx": 17325, "task_index": 14}, {"db_idx": 17326, "episode_idx": 74, "frame_idx": 105, "global_frame_idx": 17326, "task_index": 14}, {"db_idx": 17327, "episode_idx": 74, "frame_idx": 106, "global_frame_idx": 17327, "task_index": 14}, {"db_idx": 17328, "episode_idx": 74, "frame_idx": 107, "global_frame_idx": 17328, "task_index": 14}, {"db_idx": 17329, "episode_idx": 74, "frame_idx": 108, "global_frame_idx": 17329, "task_index": 14}, {"db_idx": 17330, "episode_idx": 74, "frame_idx": 109, "global_frame_idx": 17330, "task_index": 14}, {"db_idx": 17331, "episode_idx": 74, "frame_idx": 110, "global_frame_idx": 17331, "task_index": 14}, {"db_idx": 17332, "episode_idx": 74, "frame_idx": 111, "global_frame_idx": 17332, "task_index": 14}, {"db_idx": 17333, "episode_idx": 74, "frame_idx": 112, "global_frame_idx": 17333, "task_index": 14}, {"db_idx": 17334, "episode_idx": 74, "frame_idx": 113, "global_frame_idx": 17334, "task_index": 14}, {"db_idx": 17335, "episode_idx": 74, "frame_idx": 114, "global_frame_idx": 17335, "task_index": 14}, {"db_idx": 17336, "episode_idx": 74, "frame_idx": 115, "global_frame_idx": 17336, "task_index": 14}, {"db_idx": 17337, "episode_idx": 74, "frame_idx": 116, "global_frame_idx": 17337, "task_index": 14}, {"db_idx": 17338, "episode_idx": 74, "frame_idx": 117, "global_frame_idx": 17338, "task_index": 14}, {"db_idx": 17339, "episode_idx": 74, "frame_idx": 118, "global_frame_idx": 17339, "task_index": 14}, {"db_idx": 17340, "episode_idx": 74, "frame_idx": 119, "global_frame_idx": 17340, "task_index": 14}, {"db_idx": 17341, "episode_idx": 74, "frame_idx": 120, "global_frame_idx": 17341, "task_index": 14}, {"db_idx": 17342, "episode_idx": 74, "frame_idx": 121, "global_frame_idx": 17342, "task_index": 14}, {"db_idx": 17343, "episode_idx": 74, "frame_idx": 122, "global_frame_idx": 17343, "task_index": 14}, {"db_idx": 17344, "episode_idx": 74, "frame_idx": 123, "global_frame_idx": 17344, "task_index": 14}, {"db_idx": 17345, 
"episode_idx": 74, "frame_idx": 124, "global_frame_idx": 17345, "task_index": 14}, {"db_idx": 17346, "episode_idx": 74, "frame_idx": 125, "global_frame_idx": 17346, "task_index": 14}, {"db_idx": 17347, "episode_idx": 74, "frame_idx": 126, "global_frame_idx": 17347, "task_index": 14}, {"db_idx": 17348, "episode_idx": 74, "frame_idx": 127, "global_frame_idx": 17348, "task_index": 14}, {"db_idx": 17349, "episode_idx": 74, "frame_idx": 128, "global_frame_idx": 17349, "task_index": 14}, {"db_idx": 17350, "episode_idx": 74, "frame_idx": 129, "global_frame_idx": 17350, "task_index": 14}, {"db_idx": 17351, "episode_idx": 74, "frame_idx": 130, "global_frame_idx": 17351, "task_index": 14}, {"db_idx": 17352, "episode_idx": 74, "frame_idx": 131, "global_frame_idx": 17352, "task_index": 14}, {"db_idx": 17353, "episode_idx": 74, "frame_idx": 132, "global_frame_idx": 17353, "task_index": 14}, {"db_idx": 17354, "episode_idx": 74, "frame_idx": 133, "global_frame_idx": 17354, "task_index": 14}, {"db_idx": 17355, "episode_idx": 74, "frame_idx": 134, "global_frame_idx": 17355, "task_index": 14}, {"db_idx": 17356, "episode_idx": 74, "frame_idx": 135, "global_frame_idx": 17356, "task_index": 14}, {"db_idx": 17357, "episode_idx": 74, "frame_idx": 136, "global_frame_idx": 17357, "task_index": 14}, {"db_idx": 17358, "episode_idx": 74, "frame_idx": 137, "global_frame_idx": 17358, "task_index": 14}, {"db_idx": 17359, "episode_idx": 74, "frame_idx": 138, "global_frame_idx": 17359, "task_index": 14}, {"db_idx": 17360, "episode_idx": 74, "frame_idx": 139, "global_frame_idx": 17360, "task_index": 14}, {"db_idx": 17361, "episode_idx": 74, "frame_idx": 140, "global_frame_idx": 17361, "task_index": 14}, {"db_idx": 17362, "episode_idx": 74, "frame_idx": 141, "global_frame_idx": 17362, "task_index": 14}, {"db_idx": 17363, "episode_idx": 74, "frame_idx": 142, "global_frame_idx": 17363, "task_index": 14}, {"db_idx": 17364, "episode_idx": 74, "frame_idx": 143, "global_frame_idx": 17364, "task_index": 
14}, {"db_idx": 17365, "episode_idx": 74, "frame_idx": 144, "global_frame_idx": 17365, "task_index": 14}, {"db_idx": 17366, "episode_idx": 74, "frame_idx": 145, "global_frame_idx": 17366, "task_index": 14}, {"db_idx": 17367, "episode_idx": 74, "frame_idx": 146, "global_frame_idx": 17367, "task_index": 14}, {"db_idx": 17368, "episode_idx": 74, "frame_idx": 147, "global_frame_idx": 17368, "task_index": 14}, {"db_idx": 17369, "episode_idx": 74, "frame_idx": 148, "global_frame_idx": 17369, "task_index": 14}, {"db_idx": 17370, "episode_idx": 74, "frame_idx": 149, "global_frame_idx": 17370, "task_index": 14}, {"db_idx": 17371, "episode_idx": 74, "frame_idx": 150, "global_frame_idx": 17371, "task_index": 14}, {"db_idx": 17372, "episode_idx": 74, "frame_idx": 151, "global_frame_idx": 17372, "task_index": 14}, {"db_idx": 17373, "episode_idx": 74, "frame_idx": 152, "global_frame_idx": 17373, "task_index": 14}, {"db_idx": 17374, "episode_idx": 74, "frame_idx": 153, "global_frame_idx": 17374, "task_index": 14}, {"db_idx": 17375, "episode_idx": 74, "frame_idx": 154, "global_frame_idx": 17375, "task_index": 14}, {"db_idx": 17376, "episode_idx": 74, "frame_idx": 155, "global_frame_idx": 17376, "task_index": 14}, {"db_idx": 17377, "episode_idx": 74, "frame_idx": 156, "global_frame_idx": 17377, "task_index": 14}, {"db_idx": 17378, "episode_idx": 74, "frame_idx": 157, "global_frame_idx": 17378, "task_index": 14}, {"db_idx": 17379, "episode_idx": 74, "frame_idx": 158, "global_frame_idx": 17379, "task_index": 14}, {"db_idx": 17380, "episode_idx": 74, "frame_idx": 159, "global_frame_idx": 17380, "task_index": 14}, {"db_idx": 17381, "episode_idx": 75, "frame_idx": 0, "global_frame_idx": 17381, "task_index": 15}, {"db_idx": 17382, "episode_idx": 75, "frame_idx": 1, "global_frame_idx": 17382, "task_index": 15}, {"db_idx": 17383, "episode_idx": 75, "frame_idx": 2, "global_frame_idx": 17383, "task_index": 15}, {"db_idx": 17384, "episode_idx": 75, "frame_idx": 3, "global_frame_idx": 17384, 
"task_index": 15}, {"db_idx": 17385, "episode_idx": 75, "frame_idx": 4, "global_frame_idx": 17385, "task_index": 15}, {"db_idx": 17386, "episode_idx": 75, "frame_idx": 5, "global_frame_idx": 17386, "task_index": 15}, {"db_idx": 17387, "episode_idx": 75, "frame_idx": 6, "global_frame_idx": 17387, "task_index": 15}, {"db_idx": 17388, "episode_idx": 75, "frame_idx": 7, "global_frame_idx": 17388, "task_index": 15}, {"db_idx": 17389, "episode_idx": 75, "frame_idx": 8, "global_frame_idx": 17389, "task_index": 15}, {"db_idx": 17390, "episode_idx": 75, "frame_idx": 9, "global_frame_idx": 17390, "task_index": 15}, {"db_idx": 17391, "episode_idx": 75, "frame_idx": 10, "global_frame_idx": 17391, "task_index": 15}, {"db_idx": 17392, "episode_idx": 75, "frame_idx": 11, "global_frame_idx": 17392, "task_index": 15}, {"db_idx": 17393, "episode_idx": 75, "frame_idx": 12, "global_frame_idx": 17393, "task_index": 15}, {"db_idx": 17394, "episode_idx": 75, "frame_idx": 13, "global_frame_idx": 17394, "task_index": 15}, {"db_idx": 17395, "episode_idx": 75, "frame_idx": 14, "global_frame_idx": 17395, "task_index": 15}, {"db_idx": 17396, "episode_idx": 75, "frame_idx": 15, "global_frame_idx": 17396, "task_index": 15}, {"db_idx": 17397, "episode_idx": 75, "frame_idx": 16, "global_frame_idx": 17397, "task_index": 15}, {"db_idx": 17398, "episode_idx": 75, "frame_idx": 17, "global_frame_idx": 17398, "task_index": 15}, {"db_idx": 17399, "episode_idx": 75, "frame_idx": 18, "global_frame_idx": 17399, "task_index": 15}, {"db_idx": 17400, "episode_idx": 75, "frame_idx": 19, "global_frame_idx": 17400, "task_index": 15}, {"db_idx": 17401, "episode_idx": 75, "frame_idx": 20, "global_frame_idx": 17401, "task_index": 15}, {"db_idx": 17402, "episode_idx": 75, "frame_idx": 21, "global_frame_idx": 17402, "task_index": 15}, {"db_idx": 17403, "episode_idx": 75, "frame_idx": 22, "global_frame_idx": 17403, "task_index": 15}, {"db_idx": 17404, "episode_idx": 75, "frame_idx": 23, "global_frame_idx": 17404, 
"task_index": 15}, {"db_idx": 17405, "episode_idx": 75, "frame_idx": 24, "global_frame_idx": 17405, "task_index": 15}, {"db_idx": 17406, "episode_idx": 75, "frame_idx": 25, "global_frame_idx": 17406, "task_index": 15}, {"db_idx": 17407, "episode_idx": 75, "frame_idx": 26, "global_frame_idx": 17407, "task_index": 15}, {"db_idx": 17408, "episode_idx": 75, "frame_idx": 27, "global_frame_idx": 17408, "task_index": 15}, {"db_idx": 17409, "episode_idx": 75, "frame_idx": 28, "global_frame_idx": 17409, "task_index": 15}, {"db_idx": 17410, "episode_idx": 75, "frame_idx": 29, "global_frame_idx": 17410, "task_index": 15}, {"db_idx": 17411, "episode_idx": 75, "frame_idx": 30, "global_frame_idx": 17411, "task_index": 15}, {"db_idx": 17412, "episode_idx": 75, "frame_idx": 31, "global_frame_idx": 17412, "task_index": 15}, {"db_idx": 17413, "episode_idx": 75, "frame_idx": 32, "global_frame_idx": 17413, "task_index": 15}, {"db_idx": 17414, "episode_idx": 75, "frame_idx": 33, "global_frame_idx": 17414, "task_index": 15}, {"db_idx": 17415, "episode_idx": 75, "frame_idx": 34, "global_frame_idx": 17415, "task_index": 15}, {"db_idx": 17416, "episode_idx": 75, "frame_idx": 35, "global_frame_idx": 17416, "task_index": 15}, {"db_idx": 17417, "episode_idx": 75, "frame_idx": 36, "global_frame_idx": 17417, "task_index": 15}, {"db_idx": 17418, "episode_idx": 75, "frame_idx": 37, "global_frame_idx": 17418, "task_index": 15}, {"db_idx": 17419, "episode_idx": 75, "frame_idx": 38, "global_frame_idx": 17419, "task_index": 15}, {"db_idx": 17420, "episode_idx": 75, "frame_idx": 39, "global_frame_idx": 17420, "task_index": 15}, {"db_idx": 17421, "episode_idx": 75, "frame_idx": 40, "global_frame_idx": 17421, "task_index": 15}, {"db_idx": 17422, "episode_idx": 75, "frame_idx": 41, "global_frame_idx": 17422, "task_index": 15}, {"db_idx": 17423, "episode_idx": 75, "frame_idx": 42, "global_frame_idx": 17423, "task_index": 15}, {"db_idx": 17424, "episode_idx": 75, "frame_idx": 43, "global_frame_idx": 17424, 
"task_index": 15}, {"db_idx": 17425, "episode_idx": 75, "frame_idx": 44, "global_frame_idx": 17425, "task_index": 15}, {"db_idx": 17426, "episode_idx": 75, "frame_idx": 45, "global_frame_idx": 17426, "task_index": 15}, {"db_idx": 17427, "episode_idx": 75, "frame_idx": 46, "global_frame_idx": 17427, "task_index": 15}, {"db_idx": 17428, "episode_idx": 75, "frame_idx": 47, "global_frame_idx": 17428, "task_index": 15}, {"db_idx": 17429, "episode_idx": 75, "frame_idx": 48, "global_frame_idx": 17429, "task_index": 15}, {"db_idx": 17430, "episode_idx": 75, "frame_idx": 49, "global_frame_idx": 17430, "task_index": 15}, {"db_idx": 17431, "episode_idx": 75, "frame_idx": 50, "global_frame_idx": 17431, "task_index": 15}, {"db_idx": 17432, "episode_idx": 75, "frame_idx": 51, "global_frame_idx": 17432, "task_index": 15}, {"db_idx": 17433, "episode_idx": 75, "frame_idx": 52, "global_frame_idx": 17433, "task_index": 15}, {"db_idx": 17434, "episode_idx": 75, "frame_idx": 53, "global_frame_idx": 17434, "task_index": 15}, {"db_idx": 17435, "episode_idx": 75, "frame_idx": 54, "global_frame_idx": 17435, "task_index": 15}, {"db_idx": 17436, "episode_idx": 75, "frame_idx": 55, "global_frame_idx": 17436, "task_index": 15}, {"db_idx": 17437, "episode_idx": 75, "frame_idx": 56, "global_frame_idx": 17437, "task_index": 15}, {"db_idx": 17438, "episode_idx": 75, "frame_idx": 57, "global_frame_idx": 17438, "task_index": 15}, {"db_idx": 17439, "episode_idx": 75, "frame_idx": 58, "global_frame_idx": 17439, "task_index": 15}, {"db_idx": 17440, "episode_idx": 75, "frame_idx": 59, "global_frame_idx": 17440, "task_index": 15}, {"db_idx": 17441, "episode_idx": 75, "frame_idx": 60, "global_frame_idx": 17441, "task_index": 15}, {"db_idx": 17442, "episode_idx": 75, "frame_idx": 61, "global_frame_idx": 17442, "task_index": 15}, {"db_idx": 17443, "episode_idx": 75, "frame_idx": 62, "global_frame_idx": 17443, "task_index": 15}, {"db_idx": 17444, "episode_idx": 75, "frame_idx": 63, "global_frame_idx": 17444, 
"task_index": 15}, {"db_idx": 17445, "episode_idx": 75, "frame_idx": 64, "global_frame_idx": 17445, "task_index": 15}, {"db_idx": 17446, "episode_idx": 75, "frame_idx": 65, "global_frame_idx": 17446, "task_index": 15}, {"db_idx": 17447, "episode_idx": 75, "frame_idx": 66, "global_frame_idx": 17447, "task_index": 15}, {"db_idx": 17448, "episode_idx": 75, "frame_idx": 67, "global_frame_idx": 17448, "task_index": 15}, {"db_idx": 17449, "episode_idx": 75, "frame_idx": 68, "global_frame_idx": 17449, "task_index": 15}, {"db_idx": 17450, "episode_idx": 75, "frame_idx": 69, "global_frame_idx": 17450, "task_index": 15}, {"db_idx": 17451, "episode_idx": 75, "frame_idx": 70, "global_frame_idx": 17451, "task_index": 15}, {"db_idx": 17452, "episode_idx": 75, "frame_idx": 71, "global_frame_idx": 17452, "task_index": 15}, {"db_idx": 17453, "episode_idx": 75, "frame_idx": 72, "global_frame_idx": 17453, "task_index": 15}, {"db_idx": 17454, "episode_idx": 75, "frame_idx": 73, "global_frame_idx": 17454, "task_index": 15}, {"db_idx": 17455, "episode_idx": 75, "frame_idx": 74, "global_frame_idx": 17455, "task_index": 15}, {"db_idx": 17456, "episode_idx": 75, "frame_idx": 75, "global_frame_idx": 17456, "task_index": 15}, {"db_idx": 17457, "episode_idx": 75, "frame_idx": 76, "global_frame_idx": 17457, "task_index": 15}, {"db_idx": 17458, "episode_idx": 75, "frame_idx": 77, "global_frame_idx": 17458, "task_index": 15}, {"db_idx": 17459, "episode_idx": 75, "frame_idx": 78, "global_frame_idx": 17459, "task_index": 15}, {"db_idx": 17460, "episode_idx": 75, "frame_idx": 79, "global_frame_idx": 17460, "task_index": 15}, {"db_idx": 17461, "episode_idx": 75, "frame_idx": 80, "global_frame_idx": 17461, "task_index": 15}, {"db_idx": 17462, "episode_idx": 75, "frame_idx": 81, "global_frame_idx": 17462, "task_index": 15}, {"db_idx": 17463, "episode_idx": 75, "frame_idx": 82, "global_frame_idx": 17463, "task_index": 15}, {"db_idx": 17464, "episode_idx": 75, "frame_idx": 83, "global_frame_idx": 17464, 
"task_index": 15}, {"db_idx": 17465, "episode_idx": 75, "frame_idx": 84, "global_frame_idx": 17465, "task_index": 15}, {"db_idx": 17466, "episode_idx": 75, "frame_idx": 85, "global_frame_idx": 17466, "task_index": 15}, {"db_idx": 17467, "episode_idx": 75, "frame_idx": 86, "global_frame_idx": 17467, "task_index": 15}, {"db_idx": 17468, "episode_idx": 75, "frame_idx": 87, "global_frame_idx": 17468, "task_index": 15}, {"db_idx": 17469, "episode_idx": 75, "frame_idx": 88, "global_frame_idx": 17469, "task_index": 15}, {"db_idx": 17470, "episode_idx": 75, "frame_idx": 89, "global_frame_idx": 17470, "task_index": 15}, {"db_idx": 17471, "episode_idx": 75, "frame_idx": 90, "global_frame_idx": 17471, "task_index": 15}, {"db_idx": 17472, "episode_idx": 75, "frame_idx": 91, "global_frame_idx": 17472, "task_index": 15}, {"db_idx": 17473, "episode_idx": 75, "frame_idx": 92, "global_frame_idx": 17473, "task_index": 15}, {"db_idx": 17474, "episode_idx": 75, "frame_idx": 93, "global_frame_idx": 17474, "task_index": 15}, {"db_idx": 17475, "episode_idx": 75, "frame_idx": 94, "global_frame_idx": 17475, "task_index": 15}, {"db_idx": 17476, "episode_idx": 75, "frame_idx": 95, "global_frame_idx": 17476, "task_index": 15}, {"db_idx": 17477, "episode_idx": 75, "frame_idx": 96, "global_frame_idx": 17477, "task_index": 15}, {"db_idx": 17478, "episode_idx": 75, "frame_idx": 97, "global_frame_idx": 17478, "task_index": 15}, {"db_idx": 17479, "episode_idx": 75, "frame_idx": 98, "global_frame_idx": 17479, "task_index": 15}, {"db_idx": 17480, "episode_idx": 75, "frame_idx": 99, "global_frame_idx": 17480, "task_index": 15}, {"db_idx": 17481, "episode_idx": 75, "frame_idx": 100, "global_frame_idx": 17481, "task_index": 15}, {"db_idx": 17482, "episode_idx": 75, "frame_idx": 101, "global_frame_idx": 17482, "task_index": 15}, {"db_idx": 17483, "episode_idx": 75, "frame_idx": 102, "global_frame_idx": 17483, "task_index": 15}, {"db_idx": 17484, "episode_idx": 75, "frame_idx": 103, "global_frame_idx": 
17484, "task_index": 15}, {"db_idx": 17485, "episode_idx": 75, "frame_idx": 104, "global_frame_idx": 17485, "task_index": 15}, {"db_idx": 17486, "episode_idx": 75, "frame_idx": 105, "global_frame_idx": 17486, "task_index": 15}, {"db_idx": 17487, "episode_idx": 75, "frame_idx": 106, "global_frame_idx": 17487, "task_index": 15}, {"db_idx": 17488, "episode_idx": 75, "frame_idx": 107, "global_frame_idx": 17488, "task_index": 15}, {"db_idx": 17489, "episode_idx": 75, "frame_idx": 108, "global_frame_idx": 17489, "task_index": 15}, {"db_idx": 17490, "episode_idx": 75, "frame_idx": 109, "global_frame_idx": 17490, "task_index": 15}, {"db_idx": 17491, "episode_idx": 75, "frame_idx": 110, "global_frame_idx": 17491, "task_index": 15}, {"db_idx": 17492, "episode_idx": 75, "frame_idx": 111, "global_frame_idx": 17492, "task_index": 15}, {"db_idx": 17493, "episode_idx": 75, "frame_idx": 112, "global_frame_idx": 17493, "task_index": 15}, {"db_idx": 17494, "episode_idx": 75, "frame_idx": 113, "global_frame_idx": 17494, "task_index": 15}, {"db_idx": 17495, "episode_idx": 75, "frame_idx": 114, "global_frame_idx": 17495, "task_index": 15}, {"db_idx": 17496, "episode_idx": 75, "frame_idx": 115, "global_frame_idx": 17496, "task_index": 15}, {"db_idx": 17497, "episode_idx": 75, "frame_idx": 116, "global_frame_idx": 17497, "task_index": 15}, {"db_idx": 17498, "episode_idx": 75, "frame_idx": 117, "global_frame_idx": 17498, "task_index": 15}, {"db_idx": 17499, "episode_idx": 75, "frame_idx": 118, "global_frame_idx": 17499, "task_index": 15}, {"db_idx": 17500, "episode_idx": 75, "frame_idx": 119, "global_frame_idx": 17500, "task_index": 15}, {"db_idx": 17501, "episode_idx": 75, "frame_idx": 120, "global_frame_idx": 17501, "task_index": 15}, {"db_idx": 17502, "episode_idx": 75, "frame_idx": 121, "global_frame_idx": 17502, "task_index": 15}, {"db_idx": 17503, "episode_idx": 75, "frame_idx": 122, "global_frame_idx": 17503, "task_index": 15}, {"db_idx": 17504, "episode_idx": 75, "frame_idx": 123, 
"global_frame_idx": 17504, "task_index": 15}, {"db_idx": 17505, "episode_idx": 75, "frame_idx": 124, "global_frame_idx": 17505, "task_index": 15}, {"db_idx": 17506, "episode_idx": 75, "frame_idx": 125, "global_frame_idx": 17506, "task_index": 15}, {"db_idx": 17507, "episode_idx": 75, "frame_idx": 126, "global_frame_idx": 17507, "task_index": 15}, {"db_idx": 17508, "episode_idx": 75, "frame_idx": 127, "global_frame_idx": 17508, "task_index": 15}, {"db_idx": 17509, "episode_idx": 75, "frame_idx": 128, "global_frame_idx": 17509, "task_index": 15}, {"db_idx": 17510, "episode_idx": 75, "frame_idx": 129, "global_frame_idx": 17510, "task_index": 15}, {"db_idx": 17511, "episode_idx": 75, "frame_idx": 130, "global_frame_idx": 17511, "task_index": 15}, {"db_idx": 17512, "episode_idx": 75, "frame_idx": 131, "global_frame_idx": 17512, "task_index": 15}, {"db_idx": 17513, "episode_idx": 75, "frame_idx": 132, "global_frame_idx": 17513, "task_index": 15}, {"db_idx": 17514, "episode_idx": 75, "frame_idx": 133, "global_frame_idx": 17514, "task_index": 15}, {"db_idx": 17515, "episode_idx": 75, "frame_idx": 134, "global_frame_idx": 17515, "task_index": 15}, {"db_idx": 17516, "episode_idx": 75, "frame_idx": 135, "global_frame_idx": 17516, "task_index": 15}, {"db_idx": 17517, "episode_idx": 75, "frame_idx": 136, "global_frame_idx": 17517, "task_index": 15}, {"db_idx": 17518, "episode_idx": 75, "frame_idx": 137, "global_frame_idx": 17518, "task_index": 15}, {"db_idx": 17519, "episode_idx": 75, "frame_idx": 138, "global_frame_idx": 17519, "task_index": 15}, {"db_idx": 17520, "episode_idx": 76, "frame_idx": 0, "global_frame_idx": 17520, "task_index": 15}, {"db_idx": 17521, "episode_idx": 76, "frame_idx": 1, "global_frame_idx": 17521, "task_index": 15}, {"db_idx": 17522, "episode_idx": 76, "frame_idx": 2, "global_frame_idx": 17522, "task_index": 15}, {"db_idx": 17523, "episode_idx": 76, "frame_idx": 3, "global_frame_idx": 17523, "task_index": 15}, {"db_idx": 17524, "episode_idx": 76, 
"frame_idx": 4, "global_frame_idx": 17524, "task_index": 15}, {"db_idx": 17525, "episode_idx": 76, "frame_idx": 5, "global_frame_idx": 17525, "task_index": 15}, {"db_idx": 17526, "episode_idx": 76, "frame_idx": 6, "global_frame_idx": 17526, "task_index": 15}, {"db_idx": 17527, "episode_idx": 76, "frame_idx": 7, "global_frame_idx": 17527, "task_index": 15}, {"db_idx": 17528, "episode_idx": 76, "frame_idx": 8, "global_frame_idx": 17528, "task_index": 15}, {"db_idx": 17529, "episode_idx": 76, "frame_idx": 9, "global_frame_idx": 17529, "task_index": 15}, {"db_idx": 17530, "episode_idx": 76, "frame_idx": 10, "global_frame_idx": 17530, "task_index": 15}, {"db_idx": 17531, "episode_idx": 76, "frame_idx": 11, "global_frame_idx": 17531, "task_index": 15}, {"db_idx": 17532, "episode_idx": 76, "frame_idx": 12, "global_frame_idx": 17532, "task_index": 15}, {"db_idx": 17533, "episode_idx": 76, "frame_idx": 13, "global_frame_idx": 17533, "task_index": 15}, {"db_idx": 17534, "episode_idx": 76, "frame_idx": 14, "global_frame_idx": 17534, "task_index": 15}, {"db_idx": 17535, "episode_idx": 76, "frame_idx": 15, "global_frame_idx": 17535, "task_index": 15}, {"db_idx": 17536, "episode_idx": 76, "frame_idx": 16, "global_frame_idx": 17536, "task_index": 15}, {"db_idx": 17537, "episode_idx": 76, "frame_idx": 17, "global_frame_idx": 17537, "task_index": 15}, {"db_idx": 17538, "episode_idx": 76, "frame_idx": 18, "global_frame_idx": 17538, "task_index": 15}, {"db_idx": 17539, "episode_idx": 76, "frame_idx": 19, "global_frame_idx": 17539, "task_index": 15}, {"db_idx": 17540, "episode_idx": 76, "frame_idx": 20, "global_frame_idx": 17540, "task_index": 15}, {"db_idx": 17541, "episode_idx": 76, "frame_idx": 21, "global_frame_idx": 17541, "task_index": 15}, {"db_idx": 17542, "episode_idx": 76, "frame_idx": 22, "global_frame_idx": 17542, "task_index": 15}, {"db_idx": 17543, "episode_idx": 76, "frame_idx": 23, "global_frame_idx": 17543, "task_index": 15}, {"db_idx": 17544, "episode_idx": 76, 
"frame_idx": 24, "global_frame_idx": 17544, "task_index": 15}, {"db_idx": 17545, "episode_idx": 76, "frame_idx": 25, "global_frame_idx": 17545, "task_index": 15}, {"db_idx": 17546, "episode_idx": 76, "frame_idx": 26, "global_frame_idx": 17546, "task_index": 15}, {"db_idx": 17547, "episode_idx": 76, "frame_idx": 27, "global_frame_idx": 17547, "task_index": 15}, {"db_idx": 17548, "episode_idx": 76, "frame_idx": 28, "global_frame_idx": 17548, "task_index": 15}, {"db_idx": 17549, "episode_idx": 76, "frame_idx": 29, "global_frame_idx": 17549, "task_index": 15}, {"db_idx": 17550, "episode_idx": 76, "frame_idx": 30, "global_frame_idx": 17550, "task_index": 15}, {"db_idx": 17551, "episode_idx": 76, "frame_idx": 31, "global_frame_idx": 17551, "task_index": 15}, {"db_idx": 17552, "episode_idx": 76, "frame_idx": 32, "global_frame_idx": 17552, "task_index": 15}, {"db_idx": 17553, "episode_idx": 76, "frame_idx": 33, "global_frame_idx": 17553, "task_index": 15}, {"db_idx": 17554, "episode_idx": 76, "frame_idx": 34, "global_frame_idx": 17554, "task_index": 15}, {"db_idx": 17555, "episode_idx": 76, "frame_idx": 35, "global_frame_idx": 17555, "task_index": 15}, {"db_idx": 17556, "episode_idx": 76, "frame_idx": 36, "global_frame_idx": 17556, "task_index": 15}, {"db_idx": 17557, "episode_idx": 76, "frame_idx": 37, "global_frame_idx": 17557, "task_index": 15}, {"db_idx": 17558, "episode_idx": 76, "frame_idx": 38, "global_frame_idx": 17558, "task_index": 15}, {"db_idx": 17559, "episode_idx": 76, "frame_idx": 39, "global_frame_idx": 17559, "task_index": 15}, {"db_idx": 17560, "episode_idx": 76, "frame_idx": 40, "global_frame_idx": 17560, "task_index": 15}, {"db_idx": 17561, "episode_idx": 76, "frame_idx": 41, "global_frame_idx": 17561, "task_index": 15}, {"db_idx": 17562, "episode_idx": 76, "frame_idx": 42, "global_frame_idx": 17562, "task_index": 15}, {"db_idx": 17563, "episode_idx": 76, "frame_idx": 43, "global_frame_idx": 17563, "task_index": 15}, {"db_idx": 17564, "episode_idx": 76, 
"frame_idx": 44, "global_frame_idx": 17564, "task_index": 15}, {"db_idx": 17565, "episode_idx": 76, "frame_idx": 45, "global_frame_idx": 17565, "task_index": 15}, {"db_idx": 17566, "episode_idx": 76, "frame_idx": 46, "global_frame_idx": 17566, "task_index": 15}, {"db_idx": 17567, "episode_idx": 76, "frame_idx": 47, "global_frame_idx": 17567, "task_index": 15}, {"db_idx": 17568, "episode_idx": 76, "frame_idx": 48, "global_frame_idx": 17568, "task_index": 15}, {"db_idx": 17569, "episode_idx": 76, "frame_idx": 49, "global_frame_idx": 17569, "task_index": 15}, {"db_idx": 17570, "episode_idx": 76, "frame_idx": 50, "global_frame_idx": 17570, "task_index": 15}, {"db_idx": 17571, "episode_idx": 76, "frame_idx": 51, "global_frame_idx": 17571, "task_index": 15}, {"db_idx": 17572, "episode_idx": 76, "frame_idx": 52, "global_frame_idx": 17572, "task_index": 15}, {"db_idx": 17573, "episode_idx": 76, "frame_idx": 53, "global_frame_idx": 17573, "task_index": 15}, {"db_idx": 17574, "episode_idx": 76, "frame_idx": 54, "global_frame_idx": 17574, "task_index": 15}, {"db_idx": 17575, "episode_idx": 76, "frame_idx": 55, "global_frame_idx": 17575, "task_index": 15}, {"db_idx": 17576, "episode_idx": 76, "frame_idx": 56, "global_frame_idx": 17576, "task_index": 15}, {"db_idx": 17577, "episode_idx": 76, "frame_idx": 57, "global_frame_idx": 17577, "task_index": 15}, {"db_idx": 17578, "episode_idx": 76, "frame_idx": 58, "global_frame_idx": 17578, "task_index": 15}, {"db_idx": 17579, "episode_idx": 76, "frame_idx": 59, "global_frame_idx": 17579, "task_index": 15}, {"db_idx": 17580, "episode_idx": 76, "frame_idx": 60, "global_frame_idx": 17580, "task_index": 15}, {"db_idx": 17581, "episode_idx": 76, "frame_idx": 61, "global_frame_idx": 17581, "task_index": 15}, {"db_idx": 17582, "episode_idx": 76, "frame_idx": 62, "global_frame_idx": 17582, "task_index": 15}, {"db_idx": 17583, "episode_idx": 76, "frame_idx": 63, "global_frame_idx": 17583, "task_index": 15}, {"db_idx": 17584, "episode_idx": 76, 
"frame_idx": 64, "global_frame_idx": 17584, "task_index": 15}, {"db_idx": 17585, "episode_idx": 76, "frame_idx": 65, "global_frame_idx": 17585, "task_index": 15}, {"db_idx": 17586, "episode_idx": 76, "frame_idx": 66, "global_frame_idx": 17586, "task_index": 15}, {"db_idx": 17587, "episode_idx": 76, "frame_idx": 67, "global_frame_idx": 17587, "task_index": 15}, {"db_idx": 17588, "episode_idx": 76, "frame_idx": 68, "global_frame_idx": 17588, "task_index": 15}, {"db_idx": 17589, "episode_idx": 76, "frame_idx": 69, "global_frame_idx": 17589, "task_index": 15}, {"db_idx": 17590, "episode_idx": 76, "frame_idx": 70, "global_frame_idx": 17590, "task_index": 15}, {"db_idx": 17591, "episode_idx": 76, "frame_idx": 71, "global_frame_idx": 17591, "task_index": 15}, {"db_idx": 17592, "episode_idx": 76, "frame_idx": 72, "global_frame_idx": 17592, "task_index": 15}, {"db_idx": 17593, "episode_idx": 76, "frame_idx": 73, "global_frame_idx": 17593, "task_index": 15}, {"db_idx": 17594, "episode_idx": 76, "frame_idx": 74, "global_frame_idx": 17594, "task_index": 15}, {"db_idx": 17595, "episode_idx": 76, "frame_idx": 75, "global_frame_idx": 17595, "task_index": 15}, {"db_idx": 17596, "episode_idx": 76, "frame_idx": 76, "global_frame_idx": 17596, "task_index": 15}, {"db_idx": 17597, "episode_idx": 76, "frame_idx": 77, "global_frame_idx": 17597, "task_index": 15}, {"db_idx": 17598, "episode_idx": 76, "frame_idx": 78, "global_frame_idx": 17598, "task_index": 15}, {"db_idx": 17599, "episode_idx": 76, "frame_idx": 79, "global_frame_idx": 17599, "task_index": 15}, {"db_idx": 17600, "episode_idx": 76, "frame_idx": 80, "global_frame_idx": 17600, "task_index": 15}, {"db_idx": 17601, "episode_idx": 76, "frame_idx": 81, "global_frame_idx": 17601, "task_index": 15}, {"db_idx": 17602, "episode_idx": 76, "frame_idx": 82, "global_frame_idx": 17602, "task_index": 15}, {"db_idx": 17603, "episode_idx": 76, "frame_idx": 83, "global_frame_idx": 17603, "task_index": 15}, {"db_idx": 17604, "episode_idx": 76, 
"frame_idx": 84, "global_frame_idx": 17604, "task_index": 15}, {"db_idx": 17605, "episode_idx": 76, "frame_idx": 85, "global_frame_idx": 17605, "task_index": 15}, {"db_idx": 17606, "episode_idx": 76, "frame_idx": 86, "global_frame_idx": 17606, "task_index": 15}, {"db_idx": 17607, "episode_idx": 76, "frame_idx": 87, "global_frame_idx": 17607, "task_index": 15}, {"db_idx": 17608, "episode_idx": 76, "frame_idx": 88, "global_frame_idx": 17608, "task_index": 15}, {"db_idx": 17609, "episode_idx": 76, "frame_idx": 89, "global_frame_idx": 17609, "task_index": 15}, {"db_idx": 17610, "episode_idx": 76, "frame_idx": 90, "global_frame_idx": 17610, "task_index": 15}, {"db_idx": 17611, "episode_idx": 76, "frame_idx": 91, "global_frame_idx": 17611, "task_index": 15}, {"db_idx": 17612, "episode_idx": 76, "frame_idx": 92, "global_frame_idx": 17612, "task_index": 15}, {"db_idx": 17613, "episode_idx": 76, "frame_idx": 93, "global_frame_idx": 17613, "task_index": 15}, {"db_idx": 17614, "episode_idx": 76, "frame_idx": 94, "global_frame_idx": 17614, "task_index": 15}, {"db_idx": 17615, "episode_idx": 76, "frame_idx": 95, "global_frame_idx": 17615, "task_index": 15}, {"db_idx": 17616, "episode_idx": 76, "frame_idx": 96, "global_frame_idx": 17616, "task_index": 15}, {"db_idx": 17617, "episode_idx": 76, "frame_idx": 97, "global_frame_idx": 17617, "task_index": 15}, {"db_idx": 17618, "episode_idx": 76, "frame_idx": 98, "global_frame_idx": 17618, "task_index": 15}, {"db_idx": 17619, "episode_idx": 76, "frame_idx": 99, "global_frame_idx": 17619, "task_index": 15}, {"db_idx": 17620, "episode_idx": 76, "frame_idx": 100, "global_frame_idx": 17620, "task_index": 15}, {"db_idx": 17621, "episode_idx": 76, "frame_idx": 101, "global_frame_idx": 17621, "task_index": 15}, {"db_idx": 17622, "episode_idx": 76, "frame_idx": 102, "global_frame_idx": 17622, "task_index": 15}, {"db_idx": 17623, "episode_idx": 76, "frame_idx": 103, "global_frame_idx": 17623, "task_index": 15}, {"db_idx": 17624, "episode_idx": 
76, "frame_idx": 104, "global_frame_idx": 17624, "task_index": 15}, {"db_idx": 17625, "episode_idx": 76, "frame_idx": 105, "global_frame_idx": 17625, "task_index": 15}, {"db_idx": 17626, "episode_idx": 76, "frame_idx": 106, "global_frame_idx": 17626, "task_index": 15}, {"db_idx": 17627, "episode_idx": 76, "frame_idx": 107, "global_frame_idx": 17627, "task_index": 15}, {"db_idx": 17628, "episode_idx": 76, "frame_idx": 108, "global_frame_idx": 17628, "task_index": 15}, {"db_idx": 17629, "episode_idx": 76, "frame_idx": 109, "global_frame_idx": 17629, "task_index": 15}, {"db_idx": 17630, "episode_idx": 76, "frame_idx": 110, "global_frame_idx": 17630, "task_index": 15}, {"db_idx": 17631, "episode_idx": 76, "frame_idx": 111, "global_frame_idx": 17631, "task_index": 15}, {"db_idx": 17632, "episode_idx": 76, "frame_idx": 112, "global_frame_idx": 17632, "task_index": 15}, {"db_idx": 17633, "episode_idx": 76, "frame_idx": 113, "global_frame_idx": 17633, "task_index": 15}, {"db_idx": 17634, "episode_idx": 76, "frame_idx": 114, "global_frame_idx": 17634, "task_index": 15}, {"db_idx": 17635, "episode_idx": 76, "frame_idx": 115, "global_frame_idx": 17635, "task_index": 15}, {"db_idx": 17636, "episode_idx": 76, "frame_idx": 116, "global_frame_idx": 17636, "task_index": 15}, {"db_idx": 17637, "episode_idx": 76, "frame_idx": 117, "global_frame_idx": 17637, "task_index": 15}, {"db_idx": 17638, "episode_idx": 76, "frame_idx": 118, "global_frame_idx": 17638, "task_index": 15}, {"db_idx": 17639, "episode_idx": 76, "frame_idx": 119, "global_frame_idx": 17639, "task_index": 15}, {"db_idx": 17640, "episode_idx": 76, "frame_idx": 120, "global_frame_idx": 17640, "task_index": 15}, {"db_idx": 17641, "episode_idx": 76, "frame_idx": 121, "global_frame_idx": 17641, "task_index": 15}, {"db_idx": 17642, "episode_idx": 76, "frame_idx": 122, "global_frame_idx": 17642, "task_index": 15}, {"db_idx": 17643, "episode_idx": 76, "frame_idx": 123, "global_frame_idx": 17643, "task_index": 15}, {"db_idx": 
17644, "episode_idx": 76, "frame_idx": 124, "global_frame_idx": 17644, "task_index": 15}, {"db_idx": 17645, "episode_idx": 76, "frame_idx": 125, "global_frame_idx": 17645, "task_index": 15}, {"db_idx": 17646, "episode_idx": 76, "frame_idx": 126, "global_frame_idx": 17646, "task_index": 15}, {"db_idx": 17647, "episode_idx": 76, "frame_idx": 127, "global_frame_idx": 17647, "task_index": 15}, {"db_idx": 17648, "episode_idx": 76, "frame_idx": 128, "global_frame_idx": 17648, "task_index": 15}, {"db_idx": 17649, "episode_idx": 76, "frame_idx": 129, "global_frame_idx": 17649, "task_index": 15}, {"db_idx": 17650, "episode_idx": 76, "frame_idx": 130, "global_frame_idx": 17650, "task_index": 15}, {"db_idx": 17651, "episode_idx": 76, "frame_idx": 131, "global_frame_idx": 17651, "task_index": 15}, {"db_idx": 17652, "episode_idx": 76, "frame_idx": 132, "global_frame_idx": 17652, "task_index": 15}, {"db_idx": 17653, "episode_idx": 76, "frame_idx": 133, "global_frame_idx": 17653, "task_index": 15}, {"db_idx": 17654, "episode_idx": 76, "frame_idx": 134, "global_frame_idx": 17654, "task_index": 15}, {"db_idx": 17655, "episode_idx": 76, "frame_idx": 135, "global_frame_idx": 17655, "task_index": 15}, {"db_idx": 17656, "episode_idx": 76, "frame_idx": 136, "global_frame_idx": 17656, "task_index": 15}, {"db_idx": 17657, "episode_idx": 76, "frame_idx": 137, "global_frame_idx": 17657, "task_index": 15}, {"db_idx": 17658, "episode_idx": 76, "frame_idx": 138, "global_frame_idx": 17658, "task_index": 15}, {"db_idx": 17659, "episode_idx": 76, "frame_idx": 139, "global_frame_idx": 17659, "task_index": 15}, {"db_idx": 17660, "episode_idx": 76, "frame_idx": 140, "global_frame_idx": 17660, "task_index": 15}, {"db_idx": 17661, "episode_idx": 76, "frame_idx": 141, "global_frame_idx": 17661, "task_index": 15}, {"db_idx": 17662, "episode_idx": 76, "frame_idx": 142, "global_frame_idx": 17662, "task_index": 15}, {"db_idx": 17663, "episode_idx": 76, "frame_idx": 143, "global_frame_idx": 17663, 
"task_index": 15}, {"db_idx": 17664, "episode_idx": 76, "frame_idx": 144, "global_frame_idx": 17664, "task_index": 15}, {"db_idx": 17665, "episode_idx": 76, "frame_idx": 145, "global_frame_idx": 17665, "task_index": 15}, {"db_idx": 17666, "episode_idx": 76, "frame_idx": 146, "global_frame_idx": 17666, "task_index": 15}, {"db_idx": 17667, "episode_idx": 76, "frame_idx": 147, "global_frame_idx": 17667, "task_index": 15}, {"db_idx": 17668, "episode_idx": 76, "frame_idx": 148, "global_frame_idx": 17668, "task_index": 15}, {"db_idx": 17669, "episode_idx": 76, "frame_idx": 149, "global_frame_idx": 17669, "task_index": 15}, {"db_idx": 17670, "episode_idx": 76, "frame_idx": 150, "global_frame_idx": 17670, "task_index": 15}, {"db_idx": 17671, "episode_idx": 76, "frame_idx": 151, "global_frame_idx": 17671, "task_index": 15}, {"db_idx": 17672, "episode_idx": 76, "frame_idx": 152, "global_frame_idx": 17672, "task_index": 15}, {"db_idx": 17673, "episode_idx": 76, "frame_idx": 153, "global_frame_idx": 17673, "task_index": 15}, {"db_idx": 17674, "episode_idx": 76, "frame_idx": 154, "global_frame_idx": 17674, "task_index": 15}, {"db_idx": 17675, "episode_idx": 76, "frame_idx": 155, "global_frame_idx": 17675, "task_index": 15}, {"db_idx": 17676, "episode_idx": 76, "frame_idx": 156, "global_frame_idx": 17676, "task_index": 15}, {"db_idx": 17677, "episode_idx": 76, "frame_idx": 157, "global_frame_idx": 17677, "task_index": 15}, {"db_idx": 17678, "episode_idx": 76, "frame_idx": 158, "global_frame_idx": 17678, "task_index": 15}, {"db_idx": 17679, "episode_idx": 76, "frame_idx": 159, "global_frame_idx": 17679, "task_index": 15}, {"db_idx": 17680, "episode_idx": 76, "frame_idx": 160, "global_frame_idx": 17680, "task_index": 15}, {"db_idx": 17681, "episode_idx": 76, "frame_idx": 161, "global_frame_idx": 17681, "task_index": 15}, {"db_idx": 17682, "episode_idx": 76, "frame_idx": 162, "global_frame_idx": 17682, "task_index": 15}, {"db_idx": 17683, "episode_idx": 76, "frame_idx": 163, 
"global_frame_idx": 17683, "task_index": 15}, {"db_idx": 17684, "episode_idx": 77, "frame_idx": 0, "global_frame_idx": 17684, "task_index": 15}, {"db_idx": 17685, "episode_idx": 77, "frame_idx": 1, "global_frame_idx": 17685, "task_index": 15}, {"db_idx": 17686, "episode_idx": 77, "frame_idx": 2, "global_frame_idx": 17686, "task_index": 15}, {"db_idx": 17687, "episode_idx": 77, "frame_idx": 3, "global_frame_idx": 17687, "task_index": 15}, {"db_idx": 17688, "episode_idx": 77, "frame_idx": 4, "global_frame_idx": 17688, "task_index": 15}, {"db_idx": 17689, "episode_idx": 77, "frame_idx": 5, "global_frame_idx": 17689, "task_index": 15}, {"db_idx": 17690, "episode_idx": 77, "frame_idx": 6, "global_frame_idx": 17690, "task_index": 15}, {"db_idx": 17691, "episode_idx": 77, "frame_idx": 7, "global_frame_idx": 17691, "task_index": 15}, {"db_idx": 17692, "episode_idx": 77, "frame_idx": 8, "global_frame_idx": 17692, "task_index": 15}, {"db_idx": 17693, "episode_idx": 77, "frame_idx": 9, "global_frame_idx": 17693, "task_index": 15}, {"db_idx": 17694, "episode_idx": 77, "frame_idx": 10, "global_frame_idx": 17694, "task_index": 15}, {"db_idx": 17695, "episode_idx": 77, "frame_idx": 11, "global_frame_idx": 17695, "task_index": 15}, {"db_idx": 17696, "episode_idx": 77, "frame_idx": 12, "global_frame_idx": 17696, "task_index": 15}, {"db_idx": 17697, "episode_idx": 77, "frame_idx": 13, "global_frame_idx": 17697, "task_index": 15}, {"db_idx": 17698, "episode_idx": 77, "frame_idx": 14, "global_frame_idx": 17698, "task_index": 15}, {"db_idx": 17699, "episode_idx": 77, "frame_idx": 15, "global_frame_idx": 17699, "task_index": 15}, {"db_idx": 17700, "episode_idx": 77, "frame_idx": 16, "global_frame_idx": 17700, "task_index": 15}, {"db_idx": 17701, "episode_idx": 77, "frame_idx": 17, "global_frame_idx": 17701, "task_index": 15}, {"db_idx": 17702, "episode_idx": 77, "frame_idx": 18, "global_frame_idx": 17702, "task_index": 15}, {"db_idx": 17703, "episode_idx": 77, "frame_idx": 19, 
"global_frame_idx": 17703, "task_index": 15}, {"db_idx": 17704, "episode_idx": 77, "frame_idx": 20, "global_frame_idx": 17704, "task_index": 15}, {"db_idx": 17705, "episode_idx": 77, "frame_idx": 21, "global_frame_idx": 17705, "task_index": 15}, {"db_idx": 17706, "episode_idx": 77, "frame_idx": 22, "global_frame_idx": 17706, "task_index": 15}, {"db_idx": 17707, "episode_idx": 77, "frame_idx": 23, "global_frame_idx": 17707, "task_index": 15}, {"db_idx": 17708, "episode_idx": 77, "frame_idx": 24, "global_frame_idx": 17708, "task_index": 15}, {"db_idx": 17709, "episode_idx": 77, "frame_idx": 25, "global_frame_idx": 17709, "task_index": 15}, {"db_idx": 17710, "episode_idx": 77, "frame_idx": 26, "global_frame_idx": 17710, "task_index": 15}, {"db_idx": 17711, "episode_idx": 77, "frame_idx": 27, "global_frame_idx": 17711, "task_index": 15}, {"db_idx": 17712, "episode_idx": 77, "frame_idx": 28, "global_frame_idx": 17712, "task_index": 15}, {"db_idx": 17713, "episode_idx": 77, "frame_idx": 29, "global_frame_idx": 17713, "task_index": 15}, {"db_idx": 17714, "episode_idx": 77, "frame_idx": 30, "global_frame_idx": 17714, "task_index": 15}, {"db_idx": 17715, "episode_idx": 77, "frame_idx": 31, "global_frame_idx": 17715, "task_index": 15}, {"db_idx": 17716, "episode_idx": 77, "frame_idx": 32, "global_frame_idx": 17716, "task_index": 15}, {"db_idx": 17717, "episode_idx": 77, "frame_idx": 33, "global_frame_idx": 17717, "task_index": 15}, {"db_idx": 17718, "episode_idx": 77, "frame_idx": 34, "global_frame_idx": 17718, "task_index": 15}, {"db_idx": 17719, "episode_idx": 77, "frame_idx": 35, "global_frame_idx": 17719, "task_index": 15}, {"db_idx": 17720, "episode_idx": 77, "frame_idx": 36, "global_frame_idx": 17720, "task_index": 15}, {"db_idx": 17721, "episode_idx": 77, "frame_idx": 37, "global_frame_idx": 17721, "task_index": 15}, {"db_idx": 17722, "episode_idx": 77, "frame_idx": 38, "global_frame_idx": 17722, "task_index": 15}, {"db_idx": 17723, "episode_idx": 77, "frame_idx": 39, 
"global_frame_idx": 17723, "task_index": 15}, {"db_idx": 17724, "episode_idx": 77, "frame_idx": 40, "global_frame_idx": 17724, "task_index": 15}, {"db_idx": 17725, "episode_idx": 77, "frame_idx": 41, "global_frame_idx": 17725, "task_index": 15}, {"db_idx": 17726, "episode_idx": 77, "frame_idx": 42, "global_frame_idx": 17726, "task_index": 15}, {"db_idx": 17727, "episode_idx": 77, "frame_idx": 43, "global_frame_idx": 17727, "task_index": 15}, {"db_idx": 17728, "episode_idx": 77, "frame_idx": 44, "global_frame_idx": 17728, "task_index": 15}, {"db_idx": 17729, "episode_idx": 77, "frame_idx": 45, "global_frame_idx": 17729, "task_index": 15}, {"db_idx": 17730, "episode_idx": 77, "frame_idx": 46, "global_frame_idx": 17730, "task_index": 15}, {"db_idx": 17731, "episode_idx": 77, "frame_idx": 47, "global_frame_idx": 17731, "task_index": 15}, {"db_idx": 17732, "episode_idx": 77, "frame_idx": 48, "global_frame_idx": 17732, "task_index": 15}, {"db_idx": 17733, "episode_idx": 77, "frame_idx": 49, "global_frame_idx": 17733, "task_index": 15}, {"db_idx": 17734, "episode_idx": 77, "frame_idx": 50, "global_frame_idx": 17734, "task_index": 15}, {"db_idx": 17735, "episode_idx": 77, "frame_idx": 51, "global_frame_idx": 17735, "task_index": 15}, {"db_idx": 17736, "episode_idx": 77, "frame_idx": 52, "global_frame_idx": 17736, "task_index": 15}, {"db_idx": 17737, "episode_idx": 77, "frame_idx": 53, "global_frame_idx": 17737, "task_index": 15}, {"db_idx": 17738, "episode_idx": 77, "frame_idx": 54, "global_frame_idx": 17738, "task_index": 15}, {"db_idx": 17739, "episode_idx": 77, "frame_idx": 55, "global_frame_idx": 17739, "task_index": 15}, {"db_idx": 17740, "episode_idx": 77, "frame_idx": 56, "global_frame_idx": 17740, "task_index": 15}, {"db_idx": 17741, "episode_idx": 77, "frame_idx": 57, "global_frame_idx": 17741, "task_index": 15}, {"db_idx": 17742, "episode_idx": 77, "frame_idx": 58, "global_frame_idx": 17742, "task_index": 15}, {"db_idx": 17743, "episode_idx": 77, "frame_idx": 59, 
"global_frame_idx": 17743, "task_index": 15}, {"db_idx": 17744, "episode_idx": 77, "frame_idx": 60, "global_frame_idx": 17744, "task_index": 15}, {"db_idx": 17745, "episode_idx": 77, "frame_idx": 61, "global_frame_idx": 17745, "task_index": 15}, {"db_idx": 17746, "episode_idx": 77, "frame_idx": 62, "global_frame_idx": 17746, "task_index": 15}, {"db_idx": 17747, "episode_idx": 77, "frame_idx": 63, "global_frame_idx": 17747, "task_index": 15}, {"db_idx": 17748, "episode_idx": 77, "frame_idx": 64, "global_frame_idx": 17748, "task_index": 15}, {"db_idx": 17749, "episode_idx": 77, "frame_idx": 65, "global_frame_idx": 17749, "task_index": 15}, {"db_idx": 17750, "episode_idx": 77, "frame_idx": 66, "global_frame_idx": 17750, "task_index": 15}, {"db_idx": 17751, "episode_idx": 77, "frame_idx": 67, "global_frame_idx": 17751, "task_index": 15}, {"db_idx": 17752, "episode_idx": 77, "frame_idx": 68, "global_frame_idx": 17752, "task_index": 15}, {"db_idx": 17753, "episode_idx": 77, "frame_idx": 69, "global_frame_idx": 17753, "task_index": 15}, {"db_idx": 17754, "episode_idx": 77, "frame_idx": 70, "global_frame_idx": 17754, "task_index": 15}, {"db_idx": 17755, "episode_idx": 77, "frame_idx": 71, "global_frame_idx": 17755, "task_index": 15}, {"db_idx": 17756, "episode_idx": 77, "frame_idx": 72, "global_frame_idx": 17756, "task_index": 15}, {"db_idx": 17757, "episode_idx": 77, "frame_idx": 73, "global_frame_idx": 17757, "task_index": 15}, {"db_idx": 17758, "episode_idx": 77, "frame_idx": 74, "global_frame_idx": 17758, "task_index": 15}, {"db_idx": 17759, "episode_idx": 77, "frame_idx": 75, "global_frame_idx": 17759, "task_index": 15}, {"db_idx": 17760, "episode_idx": 77, "frame_idx": 76, "global_frame_idx": 17760, "task_index": 15}, {"db_idx": 17761, "episode_idx": 77, "frame_idx": 77, "global_frame_idx": 17761, "task_index": 15}, {"db_idx": 17762, "episode_idx": 77, "frame_idx": 78, "global_frame_idx": 17762, "task_index": 15}, {"db_idx": 17763, "episode_idx": 77, "frame_idx": 79, 
"global_frame_idx": 17763, "task_index": 15}, {"db_idx": 17764, "episode_idx": 77, "frame_idx": 80, "global_frame_idx": 17764, "task_index": 15}, {"db_idx": 17765, "episode_idx": 77, "frame_idx": 81, "global_frame_idx": 17765, "task_index": 15}, {"db_idx": 17766, "episode_idx": 77, "frame_idx": 82, "global_frame_idx": 17766, "task_index": 15}, {"db_idx": 17767, "episode_idx": 77, "frame_idx": 83, "global_frame_idx": 17767, "task_index": 15}, {"db_idx": 17768, "episode_idx": 77, "frame_idx": 84, "global_frame_idx": 17768, "task_index": 15}, {"db_idx": 17769, "episode_idx": 77, "frame_idx": 85, "global_frame_idx": 17769, "task_index": 15}, {"db_idx": 17770, "episode_idx": 77, "frame_idx": 86, "global_frame_idx": 17770, "task_index": 15}, {"db_idx": 17771, "episode_idx": 77, "frame_idx": 87, "global_frame_idx": 17771, "task_index": 15}, {"db_idx": 17772, "episode_idx": 77, "frame_idx": 88, "global_frame_idx": 17772, "task_index": 15}, {"db_idx": 17773, "episode_idx": 77, "frame_idx": 89, "global_frame_idx": 17773, "task_index": 15}, {"db_idx": 17774, "episode_idx": 77, "frame_idx": 90, "global_frame_idx": 17774, "task_index": 15}, {"db_idx": 17775, "episode_idx": 77, "frame_idx": 91, "global_frame_idx": 17775, "task_index": 15}, {"db_idx": 17776, "episode_idx": 77, "frame_idx": 92, "global_frame_idx": 17776, "task_index": 15}, {"db_idx": 17777, "episode_idx": 77, "frame_idx": 93, "global_frame_idx": 17777, "task_index": 15}, {"db_idx": 17778, "episode_idx": 77, "frame_idx": 94, "global_frame_idx": 17778, "task_index": 15}, {"db_idx": 17779, "episode_idx": 77, "frame_idx": 95, "global_frame_idx": 17779, "task_index": 15}, {"db_idx": 17780, "episode_idx": 77, "frame_idx": 96, "global_frame_idx": 17780, "task_index": 15}, {"db_idx": 17781, "episode_idx": 77, "frame_idx": 97, "global_frame_idx": 17781, "task_index": 15}, {"db_idx": 17782, "episode_idx": 77, "frame_idx": 98, "global_frame_idx": 17782, "task_index": 15}, {"db_idx": 17783, "episode_idx": 77, "frame_idx": 99, 
"global_frame_idx": 17783, "task_index": 15}, {"db_idx": 17784, "episode_idx": 77, "frame_idx": 100, "global_frame_idx": 17784, "task_index": 15}, {"db_idx": 17785, "episode_idx": 77, "frame_idx": 101, "global_frame_idx": 17785, "task_index": 15}, {"db_idx": 17786, "episode_idx": 77, "frame_idx": 102, "global_frame_idx": 17786, "task_index": 15}, {"db_idx": 17787, "episode_idx": 77, "frame_idx": 103, "global_frame_idx": 17787, "task_index": 15}, {"db_idx": 17788, "episode_idx": 77, "frame_idx": 104, "global_frame_idx": 17788, "task_index": 15}, {"db_idx": 17789, "episode_idx": 77, "frame_idx": 105, "global_frame_idx": 17789, "task_index": 15}, {"db_idx": 17790, "episode_idx": 77, "frame_idx": 106, "global_frame_idx": 17790, "task_index": 15}, {"db_idx": 17791, "episode_idx": 77, "frame_idx": 107, "global_frame_idx": 17791, "task_index": 15}, {"db_idx": 17792, "episode_idx": 77, "frame_idx": 108, "global_frame_idx": 17792, "task_index": 15}, {"db_idx": 17793, "episode_idx": 77, "frame_idx": 109, "global_frame_idx": 17793, "task_index": 15}, {"db_idx": 17794, "episode_idx": 77, "frame_idx": 110, "global_frame_idx": 17794, "task_index": 15}, {"db_idx": 17795, "episode_idx": 77, "frame_idx": 111, "global_frame_idx": 17795, "task_index": 15}, {"db_idx": 17796, "episode_idx": 77, "frame_idx": 112, "global_frame_idx": 17796, "task_index": 15}, {"db_idx": 17797, "episode_idx": 77, "frame_idx": 113, "global_frame_idx": 17797, "task_index": 15}, {"db_idx": 17798, "episode_idx": 77, "frame_idx": 114, "global_frame_idx": 17798, "task_index": 15}, {"db_idx": 17799, "episode_idx": 77, "frame_idx": 115, "global_frame_idx": 17799, "task_index": 15}, {"db_idx": 17800, "episode_idx": 77, "frame_idx": 116, "global_frame_idx": 17800, "task_index": 15}, {"db_idx": 17801, "episode_idx": 77, "frame_idx": 117, "global_frame_idx": 17801, "task_index": 15}, {"db_idx": 17802, "episode_idx": 77, "frame_idx": 118, "global_frame_idx": 17802, "task_index": 15}, {"db_idx": 17803, "episode_idx": 
77, "frame_idx": 119, "global_frame_idx": 17803, "task_index": 15}, {"db_idx": 17804, "episode_idx": 77, "frame_idx": 120, "global_frame_idx": 17804, "task_index": 15}, {"db_idx": 17805, "episode_idx": 77, "frame_idx": 121, "global_frame_idx": 17805, "task_index": 15}, {"db_idx": 17806, "episode_idx": 77, "frame_idx": 122, "global_frame_idx": 17806, "task_index": 15}, {"db_idx": 17807, "episode_idx": 77, "frame_idx": 123, "global_frame_idx": 17807, "task_index": 15}, {"db_idx": 17808, "episode_idx": 77, "frame_idx": 124, "global_frame_idx": 17808, "task_index": 15}, {"db_idx": 17809, "episode_idx": 77, "frame_idx": 125, "global_frame_idx": 17809, "task_index": 15}, {"db_idx": 17810, "episode_idx": 77, "frame_idx": 126, "global_frame_idx": 17810, "task_index": 15}, {"db_idx": 17811, "episode_idx": 77, "frame_idx": 127, "global_frame_idx": 17811, "task_index": 15}, {"db_idx": 17812, "episode_idx": 77, "frame_idx": 128, "global_frame_idx": 17812, "task_index": 15}, {"db_idx": 17813, "episode_idx": 77, "frame_idx": 129, "global_frame_idx": 17813, "task_index": 15}, {"db_idx": 17814, "episode_idx": 77, "frame_idx": 130, "global_frame_idx": 17814, "task_index": 15}, {"db_idx": 17815, "episode_idx": 77, "frame_idx": 131, "global_frame_idx": 17815, "task_index": 15}, {"db_idx": 17816, "episode_idx": 77, "frame_idx": 132, "global_frame_idx": 17816, "task_index": 15}, {"db_idx": 17817, "episode_idx": 78, "frame_idx": 0, "global_frame_idx": 17817, "task_index": 15}, {"db_idx": 17818, "episode_idx": 78, "frame_idx": 1, "global_frame_idx": 17818, "task_index": 15}, {"db_idx": 17819, "episode_idx": 78, "frame_idx": 2, "global_frame_idx": 17819, "task_index": 15}, {"db_idx": 17820, "episode_idx": 78, "frame_idx": 3, "global_frame_idx": 17820, "task_index": 15}, {"db_idx": 17821, "episode_idx": 78, "frame_idx": 4, "global_frame_idx": 17821, "task_index": 15}, {"db_idx": 17822, "episode_idx": 78, "frame_idx": 5, "global_frame_idx": 17822, "task_index": 15}, {"db_idx": 17823, 
"episode_idx": 78, "frame_idx": 6, "global_frame_idx": 17823, "task_index": 15}, {"db_idx": 17824, "episode_idx": 78, "frame_idx": 7, "global_frame_idx": 17824, "task_index": 15}, {"db_idx": 17825, "episode_idx": 78, "frame_idx": 8, "global_frame_idx": 17825, "task_index": 15}, {"db_idx": 17826, "episode_idx": 78, "frame_idx": 9, "global_frame_idx": 17826, "task_index": 15}, {"db_idx": 17827, "episode_idx": 78, "frame_idx": 10, "global_frame_idx": 17827, "task_index": 15}, {"db_idx": 17828, "episode_idx": 78, "frame_idx": 11, "global_frame_idx": 17828, "task_index": 15}, {"db_idx": 17829, "episode_idx": 78, "frame_idx": 12, "global_frame_idx": 17829, "task_index": 15}, {"db_idx": 17830, "episode_idx": 78, "frame_idx": 13, "global_frame_idx": 17830, "task_index": 15}, {"db_idx": 17831, "episode_idx": 78, "frame_idx": 14, "global_frame_idx": 17831, "task_index": 15}, {"db_idx": 17832, "episode_idx": 78, "frame_idx": 15, "global_frame_idx": 17832, "task_index": 15}, {"db_idx": 17833, "episode_idx": 78, "frame_idx": 16, "global_frame_idx": 17833, "task_index": 15}, {"db_idx": 17834, "episode_idx": 78, "frame_idx": 17, "global_frame_idx": 17834, "task_index": 15}, {"db_idx": 17835, "episode_idx": 78, "frame_idx": 18, "global_frame_idx": 17835, "task_index": 15}, {"db_idx": 17836, "episode_idx": 78, "frame_idx": 19, "global_frame_idx": 17836, "task_index": 15}, {"db_idx": 17837, "episode_idx": 78, "frame_idx": 20, "global_frame_idx": 17837, "task_index": 15}, {"db_idx": 17838, "episode_idx": 78, "frame_idx": 21, "global_frame_idx": 17838, "task_index": 15}, {"db_idx": 17839, "episode_idx": 78, "frame_idx": 22, "global_frame_idx": 17839, "task_index": 15}, {"db_idx": 17840, "episode_idx": 78, "frame_idx": 23, "global_frame_idx": 17840, "task_index": 15}, {"db_idx": 17841, "episode_idx": 78, "frame_idx": 24, "global_frame_idx": 17841, "task_index": 15}, {"db_idx": 17842, "episode_idx": 78, "frame_idx": 25, "global_frame_idx": 17842, "task_index": 15}, {"db_idx": 17843, 
"episode_idx": 78, "frame_idx": 26, "global_frame_idx": 17843, "task_index": 15}, {"db_idx": 17844, "episode_idx": 78, "frame_idx": 27, "global_frame_idx": 17844, "task_index": 15}, {"db_idx": 17845, "episode_idx": 78, "frame_idx": 28, "global_frame_idx": 17845, "task_index": 15}, {"db_idx": 17846, "episode_idx": 78, "frame_idx": 29, "global_frame_idx": 17846, "task_index": 15}, {"db_idx": 17847, "episode_idx": 78, "frame_idx": 30, "global_frame_idx": 17847, "task_index": 15}, {"db_idx": 17848, "episode_idx": 78, "frame_idx": 31, "global_frame_idx": 17848, "task_index": 15}, {"db_idx": 17849, "episode_idx": 78, "frame_idx": 32, "global_frame_idx": 17849, "task_index": 15}, {"db_idx": 17850, "episode_idx": 78, "frame_idx": 33, "global_frame_idx": 17850, "task_index": 15}, {"db_idx": 17851, "episode_idx": 78, "frame_idx": 34, "global_frame_idx": 17851, "task_index": 15}, {"db_idx": 17852, "episode_idx": 78, "frame_idx": 35, "global_frame_idx": 17852, "task_index": 15}, {"db_idx": 17853, "episode_idx": 78, "frame_idx": 36, "global_frame_idx": 17853, "task_index": 15}, {"db_idx": 17854, "episode_idx": 78, "frame_idx": 37, "global_frame_idx": 17854, "task_index": 15}, {"db_idx": 17855, "episode_idx": 78, "frame_idx": 38, "global_frame_idx": 17855, "task_index": 15}, {"db_idx": 17856, "episode_idx": 78, "frame_idx": 39, "global_frame_idx": 17856, "task_index": 15}, {"db_idx": 17857, "episode_idx": 78, "frame_idx": 40, "global_frame_idx": 17857, "task_index": 15}, {"db_idx": 17858, "episode_idx": 78, "frame_idx": 41, "global_frame_idx": 17858, "task_index": 15}, {"db_idx": 17859, "episode_idx": 78, "frame_idx": 42, "global_frame_idx": 17859, "task_index": 15}, {"db_idx": 17860, "episode_idx": 78, "frame_idx": 43, "global_frame_idx": 17860, "task_index": 15}, {"db_idx": 17861, "episode_idx": 78, "frame_idx": 44, "global_frame_idx": 17861, "task_index": 15}, {"db_idx": 17862, "episode_idx": 78, "frame_idx": 45, "global_frame_idx": 17862, "task_index": 15}, {"db_idx": 17863, 
"episode_idx": 78, "frame_idx": 46, "global_frame_idx": 17863, "task_index": 15}, {"db_idx": 17864, "episode_idx": 78, "frame_idx": 47, "global_frame_idx": 17864, "task_index": 15}, {"db_idx": 17865, "episode_idx": 78, "frame_idx": 48, "global_frame_idx": 17865, "task_index": 15}, {"db_idx": 17866, "episode_idx": 78, "frame_idx": 49, "global_frame_idx": 17866, "task_index": 15}, {"db_idx": 17867, "episode_idx": 78, "frame_idx": 50, "global_frame_idx": 17867, "task_index": 15}, {"db_idx": 17868, "episode_idx": 78, "frame_idx": 51, "global_frame_idx": 17868, "task_index": 15}, {"db_idx": 17869, "episode_idx": 78, "frame_idx": 52, "global_frame_idx": 17869, "task_index": 15}, {"db_idx": 17870, "episode_idx": 78, "frame_idx": 53, "global_frame_idx": 17870, "task_index": 15}, {"db_idx": 17871, "episode_idx": 78, "frame_idx": 54, "global_frame_idx": 17871, "task_index": 15}, {"db_idx": 17872, "episode_idx": 78, "frame_idx": 55, "global_frame_idx": 17872, "task_index": 15}, {"db_idx": 17873, "episode_idx": 78, "frame_idx": 56, "global_frame_idx": 17873, "task_index": 15}, {"db_idx": 17874, "episode_idx": 78, "frame_idx": 57, "global_frame_idx": 17874, "task_index": 15}, {"db_idx": 17875, "episode_idx": 78, "frame_idx": 58, "global_frame_idx": 17875, "task_index": 15}, {"db_idx": 17876, "episode_idx": 78, "frame_idx": 59, "global_frame_idx": 17876, "task_index": 15}, {"db_idx": 17877, "episode_idx": 78, "frame_idx": 60, "global_frame_idx": 17877, "task_index": 15}, {"db_idx": 17878, "episode_idx": 78, "frame_idx": 61, "global_frame_idx": 17878, "task_index": 15}, {"db_idx": 17879, "episode_idx": 78, "frame_idx": 62, "global_frame_idx": 17879, "task_index": 15}, {"db_idx": 17880, "episode_idx": 78, "frame_idx": 63, "global_frame_idx": 17880, "task_index": 15}, {"db_idx": 17881, "episode_idx": 78, "frame_idx": 64, "global_frame_idx": 17881, "task_index": 15}, {"db_idx": 17882, "episode_idx": 78, "frame_idx": 65, "global_frame_idx": 17882, "task_index": 15}, {"db_idx": 17883, 
"episode_idx": 78, "frame_idx": 66, "global_frame_idx": 17883, "task_index": 15}, {"db_idx": 17884, "episode_idx": 78, "frame_idx": 67, "global_frame_idx": 17884, "task_index": 15}, {"db_idx": 17885, "episode_idx": 78, "frame_idx": 68, "global_frame_idx": 17885, "task_index": 15}, {"db_idx": 17886, "episode_idx": 78, "frame_idx": 69, "global_frame_idx": 17886, "task_index": 15}, {"db_idx": 17887, "episode_idx": 78, "frame_idx": 70, "global_frame_idx": 17887, "task_index": 15}, {"db_idx": 17888, "episode_idx": 78, "frame_idx": 71, "global_frame_idx": 17888, "task_index": 15}, {"db_idx": 17889, "episode_idx": 78, "frame_idx": 72, "global_frame_idx": 17889, "task_index": 15}, {"db_idx": 17890, "episode_idx": 78, "frame_idx": 73, "global_frame_idx": 17890, "task_index": 15}, {"db_idx": 17891, "episode_idx": 78, "frame_idx": 74, "global_frame_idx": 17891, "task_index": 15}, {"db_idx": 17892, "episode_idx": 78, "frame_idx": 75, "global_frame_idx": 17892, "task_index": 15}, {"db_idx": 17893, "episode_idx": 78, "frame_idx": 76, "global_frame_idx": 17893, "task_index": 15}, {"db_idx": 17894, "episode_idx": 78, "frame_idx": 77, "global_frame_idx": 17894, "task_index": 15}, {"db_idx": 17895, "episode_idx": 78, "frame_idx": 78, "global_frame_idx": 17895, "task_index": 15}, {"db_idx": 17896, "episode_idx": 78, "frame_idx": 79, "global_frame_idx": 17896, "task_index": 15}, {"db_idx": 17897, "episode_idx": 78, "frame_idx": 80, "global_frame_idx": 17897, "task_index": 15}, {"db_idx": 17898, "episode_idx": 78, "frame_idx": 81, "global_frame_idx": 17898, "task_index": 15}, {"db_idx": 17899, "episode_idx": 78, "frame_idx": 82, "global_frame_idx": 17899, "task_index": 15}, {"db_idx": 17900, "episode_idx": 78, "frame_idx": 83, "global_frame_idx": 17900, "task_index": 15}, {"db_idx": 17901, "episode_idx": 78, "frame_idx": 84, "global_frame_idx": 17901, "task_index": 15}, {"db_idx": 17902, "episode_idx": 78, "frame_idx": 85, "global_frame_idx": 17902, "task_index": 15}, {"db_idx": 17903, 
"episode_idx": 78, "frame_idx": 86, "global_frame_idx": 17903, "task_index": 15}, {"db_idx": 17904, "episode_idx": 78, "frame_idx": 87, "global_frame_idx": 17904, "task_index": 15}, {"db_idx": 17905, "episode_idx": 78, "frame_idx": 88, "global_frame_idx": 17905, "task_index": 15}, {"db_idx": 17906, "episode_idx": 78, "frame_idx": 89, "global_frame_idx": 17906, "task_index": 15}, {"db_idx": 17907, "episode_idx": 78, "frame_idx": 90, "global_frame_idx": 17907, "task_index": 15}, {"db_idx": 17908, "episode_idx": 78, "frame_idx": 91, "global_frame_idx": 17908, "task_index": 15}, {"db_idx": 17909, "episode_idx": 78, "frame_idx": 92, "global_frame_idx": 17909, "task_index": 15}, {"db_idx": 17910, "episode_idx": 78, "frame_idx": 93, "global_frame_idx": 17910, "task_index": 15}, {"db_idx": 17911, "episode_idx": 78, "frame_idx": 94, "global_frame_idx": 17911, "task_index": 15}, {"db_idx": 17912, "episode_idx": 78, "frame_idx": 95, "global_frame_idx": 17912, "task_index": 15}, {"db_idx": 17913, "episode_idx": 78, "frame_idx": 96, "global_frame_idx": 17913, "task_index": 15}, {"db_idx": 17914, "episode_idx": 78, "frame_idx": 97, "global_frame_idx": 17914, "task_index": 15}, {"db_idx": 17915, "episode_idx": 78, "frame_idx": 98, "global_frame_idx": 17915, "task_index": 15}, {"db_idx": 17916, "episode_idx": 78, "frame_idx": 99, "global_frame_idx": 17916, "task_index": 15}, {"db_idx": 17917, "episode_idx": 78, "frame_idx": 100, "global_frame_idx": 17917, "task_index": 15}, {"db_idx": 17918, "episode_idx": 78, "frame_idx": 101, "global_frame_idx": 17918, "task_index": 15}, {"db_idx": 17919, "episode_idx": 78, "frame_idx": 102, "global_frame_idx": 17919, "task_index": 15}, {"db_idx": 17920, "episode_idx": 78, "frame_idx": 103, "global_frame_idx": 17920, "task_index": 15}, {"db_idx": 17921, "episode_idx": 78, "frame_idx": 104, "global_frame_idx": 17921, "task_index": 15}, {"db_idx": 17922, "episode_idx": 78, "frame_idx": 105, "global_frame_idx": 17922, "task_index": 15}, {"db_idx": 
17923, "episode_idx": 78, "frame_idx": 106, "global_frame_idx": 17923, "task_index": 15}, {"db_idx": 17924, "episode_idx": 78, "frame_idx": 107, "global_frame_idx": 17924, "task_index": 15}, {"db_idx": 17925, "episode_idx": 78, "frame_idx": 108, "global_frame_idx": 17925, "task_index": 15}, {"db_idx": 17926, "episode_idx": 78, "frame_idx": 109, "global_frame_idx": 17926, "task_index": 15}, {"db_idx": 17927, "episode_idx": 78, "frame_idx": 110, "global_frame_idx": 17927, "task_index": 15}, {"db_idx": 17928, "episode_idx": 78, "frame_idx": 111, "global_frame_idx": 17928, "task_index": 15}, {"db_idx": 17929, "episode_idx": 78, "frame_idx": 112, "global_frame_idx": 17929, "task_index": 15}, {"db_idx": 17930, "episode_idx": 78, "frame_idx": 113, "global_frame_idx": 17930, "task_index": 15}, {"db_idx": 17931, "episode_idx": 78, "frame_idx": 114, "global_frame_idx": 17931, "task_index": 15}, {"db_idx": 17932, "episode_idx": 78, "frame_idx": 115, "global_frame_idx": 17932, "task_index": 15}, {"db_idx": 17933, "episode_idx": 79, "frame_idx": 0, "global_frame_idx": 17933, "task_index": 15}, {"db_idx": 17934, "episode_idx": 79, "frame_idx": 1, "global_frame_idx": 17934, "task_index": 15}, {"db_idx": 17935, "episode_idx": 79, "frame_idx": 2, "global_frame_idx": 17935, "task_index": 15}, {"db_idx": 17936, "episode_idx": 79, "frame_idx": 3, "global_frame_idx": 17936, "task_index": 15}, {"db_idx": 17937, "episode_idx": 79, "frame_idx": 4, "global_frame_idx": 17937, "task_index": 15}, {"db_idx": 17938, "episode_idx": 79, "frame_idx": 5, "global_frame_idx": 17938, "task_index": 15}, {"db_idx": 17939, "episode_idx": 79, "frame_idx": 6, "global_frame_idx": 17939, "task_index": 15}, {"db_idx": 17940, "episode_idx": 79, "frame_idx": 7, "global_frame_idx": 17940, "task_index": 15}, {"db_idx": 17941, "episode_idx": 79, "frame_idx": 8, "global_frame_idx": 17941, "task_index": 15}, {"db_idx": 17942, "episode_idx": 79, "frame_idx": 9, "global_frame_idx": 17942, "task_index": 15}, {"db_idx": 
17943, "episode_idx": 79, "frame_idx": 10, "global_frame_idx": 17943, "task_index": 15}, {"db_idx": 17944, "episode_idx": 79, "frame_idx": 11, "global_frame_idx": 17944, "task_index": 15}, {"db_idx": 17945, "episode_idx": 79, "frame_idx": 12, "global_frame_idx": 17945, "task_index": 15}, {"db_idx": 17946, "episode_idx": 79, "frame_idx": 13, "global_frame_idx": 17946, "task_index": 15}, {"db_idx": 17947, "episode_idx": 79, "frame_idx": 14, "global_frame_idx": 17947, "task_index": 15}, {"db_idx": 17948, "episode_idx": 79, "frame_idx": 15, "global_frame_idx": 17948, "task_index": 15}, {"db_idx": 17949, "episode_idx": 79, "frame_idx": 16, "global_frame_idx": 17949, "task_index": 15}, {"db_idx": 17950, "episode_idx": 79, "frame_idx": 17, "global_frame_idx": 17950, "task_index": 15}, {"db_idx": 17951, "episode_idx": 79, "frame_idx": 18, "global_frame_idx": 17951, "task_index": 15}, {"db_idx": 17952, "episode_idx": 79, "frame_idx": 19, "global_frame_idx": 17952, "task_index": 15}, {"db_idx": 17953, "episode_idx": 79, "frame_idx": 20, "global_frame_idx": 17953, "task_index": 15}, {"db_idx": 17954, "episode_idx": 79, "frame_idx": 21, "global_frame_idx": 17954, "task_index": 15}, {"db_idx": 17955, "episode_idx": 79, "frame_idx": 22, "global_frame_idx": 17955, "task_index": 15}, {"db_idx": 17956, "episode_idx": 79, "frame_idx": 23, "global_frame_idx": 17956, "task_index": 15}, {"db_idx": 17957, "episode_idx": 79, "frame_idx": 24, "global_frame_idx": 17957, "task_index": 15}, {"db_idx": 17958, "episode_idx": 79, "frame_idx": 25, "global_frame_idx": 17958, "task_index": 15}, {"db_idx": 17959, "episode_idx": 79, "frame_idx": 26, "global_frame_idx": 17959, "task_index": 15}, {"db_idx": 17960, "episode_idx": 79, "frame_idx": 27, "global_frame_idx": 17960, "task_index": 15}, {"db_idx": 17961, "episode_idx": 79, "frame_idx": 28, "global_frame_idx": 17961, "task_index": 15}, {"db_idx": 17962, "episode_idx": 79, "frame_idx": 29, "global_frame_idx": 17962, "task_index": 15}, {"db_idx": 
17963, "episode_idx": 79, "frame_idx": 30, "global_frame_idx": 17963, "task_index": 15}, {"db_idx": 17964, "episode_idx": 79, "frame_idx": 31, "global_frame_idx": 17964, "task_index": 15}, {"db_idx": 17965, "episode_idx": 79, "frame_idx": 32, "global_frame_idx": 17965, "task_index": 15}, {"db_idx": 17966, "episode_idx": 79, "frame_idx": 33, "global_frame_idx": 17966, "task_index": 15}, {"db_idx": 17967, "episode_idx": 79, "frame_idx": 34, "global_frame_idx": 17967, "task_index": 15}, {"db_idx": 17968, "episode_idx": 79, "frame_idx": 35, "global_frame_idx": 17968, "task_index": 15}, {"db_idx": 17969, "episode_idx": 79, "frame_idx": 36, "global_frame_idx": 17969, "task_index": 15}, {"db_idx": 17970, "episode_idx": 79, "frame_idx": 37, "global_frame_idx": 17970, "task_index": 15}, {"db_idx": 17971, "episode_idx": 79, "frame_idx": 38, "global_frame_idx": 17971, "task_index": 15}, {"db_idx": 17972, "episode_idx": 79, "frame_idx": 39, "global_frame_idx": 17972, "task_index": 15}, {"db_idx": 17973, "episode_idx": 79, "frame_idx": 40, "global_frame_idx": 17973, "task_index": 15}, {"db_idx": 17974, "episode_idx": 79, "frame_idx": 41, "global_frame_idx": 17974, "task_index": 15}, {"db_idx": 17975, "episode_idx": 79, "frame_idx": 42, "global_frame_idx": 17975, "task_index": 15}, {"db_idx": 17976, "episode_idx": 79, "frame_idx": 43, "global_frame_idx": 17976, "task_index": 15}, {"db_idx": 17977, "episode_idx": 79, "frame_idx": 44, "global_frame_idx": 17977, "task_index": 15}, {"db_idx": 17978, "episode_idx": 79, "frame_idx": 45, "global_frame_idx": 17978, "task_index": 15}, {"db_idx": 17979, "episode_idx": 79, "frame_idx": 46, "global_frame_idx": 17979, "task_index": 15}, {"db_idx": 17980, "episode_idx": 79, "frame_idx": 47, "global_frame_idx": 17980, "task_index": 15}, {"db_idx": 17981, "episode_idx": 79, "frame_idx": 48, "global_frame_idx": 17981, "task_index": 15}, {"db_idx": 17982, "episode_idx": 79, "frame_idx": 49, "global_frame_idx": 17982, "task_index": 15}, {"db_idx": 
17983, "episode_idx": 79, "frame_idx": 50, "global_frame_idx": 17983, "task_index": 15}, {"db_idx": 17984, "episode_idx": 79, "frame_idx": 51, "global_frame_idx": 17984, "task_index": 15}, {"db_idx": 17985, "episode_idx": 79, "frame_idx": 52, "global_frame_idx": 17985, "task_index": 15}, {"db_idx": 17986, "episode_idx": 79, "frame_idx": 53, "global_frame_idx": 17986, "task_index": 15}, {"db_idx": 17987, "episode_idx": 79, "frame_idx": 54, "global_frame_idx": 17987, "task_index": 15}, {"db_idx": 17988, "episode_idx": 79, "frame_idx": 55, "global_frame_idx": 17988, "task_index": 15}, {"db_idx": 17989, "episode_idx": 79, "frame_idx": 56, "global_frame_idx": 17989, "task_index": 15}, {"db_idx": 17990, "episode_idx": 79, "frame_idx": 57, "global_frame_idx": 17990, "task_index": 15}, {"db_idx": 17991, "episode_idx": 79, "frame_idx": 58, "global_frame_idx": 17991, "task_index": 15}, {"db_idx": 17992, "episode_idx": 79, "frame_idx": 59, "global_frame_idx": 17992, "task_index": 15}, {"db_idx": 17993, "episode_idx": 79, "frame_idx": 60, "global_frame_idx": 17993, "task_index": 15}, {"db_idx": 17994, "episode_idx": 79, "frame_idx": 61, "global_frame_idx": 17994, "task_index": 15}, {"db_idx": 17995, "episode_idx": 79, "frame_idx": 62, "global_frame_idx": 17995, "task_index": 15}, {"db_idx": 17996, "episode_idx": 79, "frame_idx": 63, "global_frame_idx": 17996, "task_index": 15}, {"db_idx": 17997, "episode_idx": 79, "frame_idx": 64, "global_frame_idx": 17997, "task_index": 15}, {"db_idx": 17998, "episode_idx": 79, "frame_idx": 65, "global_frame_idx": 17998, "task_index": 15}, {"db_idx": 17999, "episode_idx": 79, "frame_idx": 66, "global_frame_idx": 17999, "task_index": 15}, {"db_idx": 18000, "episode_idx": 79, "frame_idx": 67, "global_frame_idx": 18000, "task_index": 15}, {"db_idx": 18001, "episode_idx": 79, "frame_idx": 68, "global_frame_idx": 18001, "task_index": 15}, {"db_idx": 18002, "episode_idx": 79, "frame_idx": 69, "global_frame_idx": 18002, "task_index": 15}, {"db_idx": 
18003, "episode_idx": 79, "frame_idx": 70, "global_frame_idx": 18003, "task_index": 15}, {"db_idx": 18004, "episode_idx": 79, "frame_idx": 71, "global_frame_idx": 18004, "task_index": 15}, {"db_idx": 18005, "episode_idx": 79, "frame_idx": 72, "global_frame_idx": 18005, "task_index": 15}, {"db_idx": 18006, "episode_idx": 79, "frame_idx": 73, "global_frame_idx": 18006, "task_index": 15}, {"db_idx": 18007, "episode_idx": 79, "frame_idx": 74, "global_frame_idx": 18007, "task_index": 15}, {"db_idx": 18008, "episode_idx": 79, "frame_idx": 75, "global_frame_idx": 18008, "task_index": 15}, {"db_idx": 18009, "episode_idx": 79, "frame_idx": 76, "global_frame_idx": 18009, "task_index": 15}, {"db_idx": 18010, "episode_idx": 79, "frame_idx": 77, "global_frame_idx": 18010, "task_index": 15}, {"db_idx": 18011, "episode_idx": 79, "frame_idx": 78, "global_frame_idx": 18011, "task_index": 15}, {"db_idx": 18012, "episode_idx": 79, "frame_idx": 79, "global_frame_idx": 18012, "task_index": 15}, {"db_idx": 18013, "episode_idx": 79, "frame_idx": 80, "global_frame_idx": 18013, "task_index": 15}, {"db_idx": 18014, "episode_idx": 79, "frame_idx": 81, "global_frame_idx": 18014, "task_index": 15}, {"db_idx": 18015, "episode_idx": 79, "frame_idx": 82, "global_frame_idx": 18015, "task_index": 15}, {"db_idx": 18016, "episode_idx": 79, "frame_idx": 83, "global_frame_idx": 18016, "task_index": 15}, {"db_idx": 18017, "episode_idx": 79, "frame_idx": 84, "global_frame_idx": 18017, "task_index": 15}, {"db_idx": 18018, "episode_idx": 79, "frame_idx": 85, "global_frame_idx": 18018, "task_index": 15}, {"db_idx": 18019, "episode_idx": 79, "frame_idx": 86, "global_frame_idx": 18019, "task_index": 15}, {"db_idx": 18020, "episode_idx": 79, "frame_idx": 87, "global_frame_idx": 18020, "task_index": 15}, {"db_idx": 18021, "episode_idx": 79, "frame_idx": 88, "global_frame_idx": 18021, "task_index": 15}, {"db_idx": 18022, "episode_idx": 79, "frame_idx": 89, "global_frame_idx": 18022, "task_index": 15}, {"db_idx": 
18023, "episode_idx": 79, "frame_idx": 90, "global_frame_idx": 18023, "task_index": 15}, {"db_idx": 18024, "episode_idx": 79, "frame_idx": 91, "global_frame_idx": 18024, "task_index": 15}, {"db_idx": 18025, "episode_idx": 79, "frame_idx": 92, "global_frame_idx": 18025, "task_index": 15}, {"db_idx": 18026, "episode_idx": 79, "frame_idx": 93, "global_frame_idx": 18026, "task_index": 15}, {"db_idx": 18027, "episode_idx": 79, "frame_idx": 94, "global_frame_idx": 18027, "task_index": 15}, {"db_idx": 18028, "episode_idx": 79, "frame_idx": 95, "global_frame_idx": 18028, "task_index": 15}, {"db_idx": 18029, "episode_idx": 79, "frame_idx": 96, "global_frame_idx": 18029, "task_index": 15}, {"db_idx": 18030, "episode_idx": 79, "frame_idx": 97, "global_frame_idx": 18030, "task_index": 15}, {"db_idx": 18031, "episode_idx": 79, "frame_idx": 98, "global_frame_idx": 18031, "task_index": 15}, {"db_idx": 18032, "episode_idx": 79, "frame_idx": 99, "global_frame_idx": 18032, "task_index": 15}, {"db_idx": 18033, "episode_idx": 79, "frame_idx": 100, "global_frame_idx": 18033, "task_index": 15}, {"db_idx": 18034, "episode_idx": 79, "frame_idx": 101, "global_frame_idx": 18034, "task_index": 15}, {"db_idx": 18035, "episode_idx": 79, "frame_idx": 102, "global_frame_idx": 18035, "task_index": 15}, {"db_idx": 18036, "episode_idx": 79, "frame_idx": 103, "global_frame_idx": 18036, "task_index": 15}, {"db_idx": 18037, "episode_idx": 79, "frame_idx": 104, "global_frame_idx": 18037, "task_index": 15}, {"db_idx": 18038, "episode_idx": 79, "frame_idx": 105, "global_frame_idx": 18038, "task_index": 15}, {"db_idx": 18039, "episode_idx": 79, "frame_idx": 106, "global_frame_idx": 18039, "task_index": 15}, {"db_idx": 18040, "episode_idx": 79, "frame_idx": 107, "global_frame_idx": 18040, "task_index": 15}, {"db_idx": 18041, "episode_idx": 79, "frame_idx": 108, "global_frame_idx": 18041, "task_index": 15}, {"db_idx": 18042, "episode_idx": 79, "frame_idx": 109, "global_frame_idx": 18042, "task_index": 15}, 
{"db_idx": 18043, "episode_idx": 79, "frame_idx": 110, "global_frame_idx": 18043, "task_index": 15}, {"db_idx": 18044, "episode_idx": 79, "frame_idx": 111, "global_frame_idx": 18044, "task_index": 15}, {"db_idx": 18045, "episode_idx": 79, "frame_idx": 112, "global_frame_idx": 18045, "task_index": 15}, {"db_idx": 18046, "episode_idx": 79, "frame_idx": 113, "global_frame_idx": 18046, "task_index": 15}, {"db_idx": 18047, "episode_idx": 79, "frame_idx": 114, "global_frame_idx": 18047, "task_index": 15}, {"db_idx": 18048, "episode_idx": 79, "frame_idx": 115, "global_frame_idx": 18048, "task_index": 15}, {"db_idx": 18049, "episode_idx": 79, "frame_idx": 116, "global_frame_idx": 18049, "task_index": 15}, {"db_idx": 18050, "episode_idx": 79, "frame_idx": 117, "global_frame_idx": 18050, "task_index": 15}, {"db_idx": 18051, "episode_idx": 79, "frame_idx": 118, "global_frame_idx": 18051, "task_index": 15}, {"db_idx": 18052, "episode_idx": 79, "frame_idx": 119, "global_frame_idx": 18052, "task_index": 15}, {"db_idx": 18053, "episode_idx": 79, "frame_idx": 120, "global_frame_idx": 18053, "task_index": 15}, {"db_idx": 18054, "episode_idx": 79, "frame_idx": 121, "global_frame_idx": 18054, "task_index": 15}, {"db_idx": 18055, "episode_idx": 79, "frame_idx": 122, "global_frame_idx": 18055, "task_index": 15}, {"db_idx": 18056, "episode_idx": 79, "frame_idx": 123, "global_frame_idx": 18056, "task_index": 15}, {"db_idx": 18057, "episode_idx": 79, "frame_idx": 124, "global_frame_idx": 18057, "task_index": 15}, {"db_idx": 18058, "episode_idx": 79, "frame_idx": 125, "global_frame_idx": 18058, "task_index": 15}, {"db_idx": 18059, "episode_idx": 79, "frame_idx": 126, "global_frame_idx": 18059, "task_index": 15}, {"db_idx": 18060, "episode_idx": 79, "frame_idx": 127, "global_frame_idx": 18060, "task_index": 15}, {"db_idx": 18061, "episode_idx": 79, "frame_idx": 128, "global_frame_idx": 18061, "task_index": 15}, {"db_idx": 18062, "episode_idx": 79, "frame_idx": 129, "global_frame_idx": 
18062, "task_index": 15}, {"db_idx": 18063, "episode_idx": 79, "frame_idx": 130, "global_frame_idx": 18063, "task_index": 15}, {"db_idx": 18064, "episode_idx": 79, "frame_idx": 131, "global_frame_idx": 18064, "task_index": 15}, {"db_idx": 18065, "episode_idx": 79, "frame_idx": 132, "global_frame_idx": 18065, "task_index": 15}, {"db_idx": 18066, "episode_idx": 79, "frame_idx": 133, "global_frame_idx": 18066, "task_index": 15}, {"db_idx": 18067, "episode_idx": 79, "frame_idx": 134, "global_frame_idx": 18067, "task_index": 15}, {"db_idx": 18068, "episode_idx": 79, "frame_idx": 135, "global_frame_idx": 18068, "task_index": 15}, {"db_idx": 18069, "episode_idx": 79, "frame_idx": 136, "global_frame_idx": 18069, "task_index": 15}, {"db_idx": 18070, "episode_idx": 79, "frame_idx": 137, "global_frame_idx": 18070, "task_index": 15}, {"db_idx": 18071, "episode_idx": 79, "frame_idx": 138, "global_frame_idx": 18071, "task_index": 15}, {"db_idx": 18072, "episode_idx": 79, "frame_idx": 139, "global_frame_idx": 18072, "task_index": 15}, {"db_idx": 18073, "episode_idx": 79, "frame_idx": 140, "global_frame_idx": 18073, "task_index": 15}, {"db_idx": 18074, "episode_idx": 79, "frame_idx": 141, "global_frame_idx": 18074, "task_index": 15}, {"db_idx": 18075, "episode_idx": 79, "frame_idx": 142, "global_frame_idx": 18075, "task_index": 15}, {"db_idx": 18076, "episode_idx": 79, "frame_idx": 143, "global_frame_idx": 18076, "task_index": 15}, {"db_idx": 18077, "episode_idx": 79, "frame_idx": 144, "global_frame_idx": 18077, "task_index": 15}, {"db_idx": 18078, "episode_idx": 79, "frame_idx": 145, "global_frame_idx": 18078, "task_index": 15}, {"db_idx": 18079, "episode_idx": 79, "frame_idx": 146, "global_frame_idx": 18079, "task_index": 15}, {"db_idx": 18080, "episode_idx": 79, "frame_idx": 147, "global_frame_idx": 18080, "task_index": 15}, {"db_idx": 18081, "episode_idx": 79, "frame_idx": 148, "global_frame_idx": 18081, "task_index": 15}, {"db_idx": 18082, "episode_idx": 79, "frame_idx": 149, 
"global_frame_idx": 18082, "task_index": 15}, {"db_idx": 18083, "episode_idx": 79, "frame_idx": 150, "global_frame_idx": 18083, "task_index": 15}, {"db_idx": 18084, "episode_idx": 79, "frame_idx": 151, "global_frame_idx": 18084, "task_index": 15}, {"db_idx": 18085, "episode_idx": 79, "frame_idx": 152, "global_frame_idx": 18085, "task_index": 15}, {"db_idx": 18086, "episode_idx": 80, "frame_idx": 0, "global_frame_idx": 18086, "task_index": 16}, {"db_idx": 18087, "episode_idx": 80, "frame_idx": 1, "global_frame_idx": 18087, "task_index": 16}, {"db_idx": 18088, "episode_idx": 80, "frame_idx": 2, "global_frame_idx": 18088, "task_index": 16}, {"db_idx": 18089, "episode_idx": 80, "frame_idx": 3, "global_frame_idx": 18089, "task_index": 16}, {"db_idx": 18090, "episode_idx": 80, "frame_idx": 4, "global_frame_idx": 18090, "task_index": 16}, {"db_idx": 18091, "episode_idx": 80, "frame_idx": 5, "global_frame_idx": 18091, "task_index": 16}, {"db_idx": 18092, "episode_idx": 80, "frame_idx": 6, "global_frame_idx": 18092, "task_index": 16}, {"db_idx": 18093, "episode_idx": 80, "frame_idx": 7, "global_frame_idx": 18093, "task_index": 16}, {"db_idx": 18094, "episode_idx": 80, "frame_idx": 8, "global_frame_idx": 18094, "task_index": 16}, {"db_idx": 18095, "episode_idx": 80, "frame_idx": 9, "global_frame_idx": 18095, "task_index": 16}, {"db_idx": 18096, "episode_idx": 80, "frame_idx": 10, "global_frame_idx": 18096, "task_index": 16}, {"db_idx": 18097, "episode_idx": 80, "frame_idx": 11, "global_frame_idx": 18097, "task_index": 16}, {"db_idx": 18098, "episode_idx": 80, "frame_idx": 12, "global_frame_idx": 18098, "task_index": 16}, {"db_idx": 18099, "episode_idx": 80, "frame_idx": 13, "global_frame_idx": 18099, "task_index": 16}, {"db_idx": 18100, "episode_idx": 80, "frame_idx": 14, "global_frame_idx": 18100, "task_index": 16}, {"db_idx": 18101, "episode_idx": 80, "frame_idx": 15, "global_frame_idx": 18101, "task_index": 16}, {"db_idx": 18102, "episode_idx": 80, "frame_idx": 16, 
"global_frame_idx": 18102, "task_index": 16}, {"db_idx": 18103, "episode_idx": 80, "frame_idx": 17, "global_frame_idx": 18103, "task_index": 16}, {"db_idx": 18104, "episode_idx": 80, "frame_idx": 18, "global_frame_idx": 18104, "task_index": 16}, {"db_idx": 18105, "episode_idx": 80, "frame_idx": 19, "global_frame_idx": 18105, "task_index": 16}, {"db_idx": 18106, "episode_idx": 80, "frame_idx": 20, "global_frame_idx": 18106, "task_index": 16}, {"db_idx": 18107, "episode_idx": 80, "frame_idx": 21, "global_frame_idx": 18107, "task_index": 16}, {"db_idx": 18108, "episode_idx": 80, "frame_idx": 22, "global_frame_idx": 18108, "task_index": 16}, {"db_idx": 18109, "episode_idx": 80, "frame_idx": 23, "global_frame_idx": 18109, "task_index": 16}, {"db_idx": 18110, "episode_idx": 80, "frame_idx": 24, "global_frame_idx": 18110, "task_index": 16}, {"db_idx": 18111, "episode_idx": 80, "frame_idx": 25, "global_frame_idx": 18111, "task_index": 16}, {"db_idx": 18112, "episode_idx": 80, "frame_idx": 26, "global_frame_idx": 18112, "task_index": 16}, {"db_idx": 18113, "episode_idx": 80, "frame_idx": 27, "global_frame_idx": 18113, "task_index": 16}, {"db_idx": 18114, "episode_idx": 80, "frame_idx": 28, "global_frame_idx": 18114, "task_index": 16}, {"db_idx": 18115, "episode_idx": 80, "frame_idx": 29, "global_frame_idx": 18115, "task_index": 16}, {"db_idx": 18116, "episode_idx": 80, "frame_idx": 30, "global_frame_idx": 18116, "task_index": 16}, {"db_idx": 18117, "episode_idx": 80, "frame_idx": 31, "global_frame_idx": 18117, "task_index": 16}, {"db_idx": 18118, "episode_idx": 80, "frame_idx": 32, "global_frame_idx": 18118, "task_index": 16}, {"db_idx": 18119, "episode_idx": 80, "frame_idx": 33, "global_frame_idx": 18119, "task_index": 16}, {"db_idx": 18120, "episode_idx": 80, "frame_idx": 34, "global_frame_idx": 18120, "task_index": 16}, {"db_idx": 18121, "episode_idx": 80, "frame_idx": 35, "global_frame_idx": 18121, "task_index": 16}, {"db_idx": 18122, "episode_idx": 80, "frame_idx": 36, 
"global_frame_idx": 18122, "task_index": 16}, {"db_idx": 18123, "episode_idx": 80, "frame_idx": 37, "global_frame_idx": 18123, "task_index": 16}, {"db_idx": 18124, "episode_idx": 80, "frame_idx": 38, "global_frame_idx": 18124, "task_index": 16}, {"db_idx": 18125, "episode_idx": 80, "frame_idx": 39, "global_frame_idx": 18125, "task_index": 16}, {"db_idx": 18126, "episode_idx": 80, "frame_idx": 40, "global_frame_idx": 18126, "task_index": 16}, {"db_idx": 18127, "episode_idx": 80, "frame_idx": 41, "global_frame_idx": 18127, "task_index": 16}, {"db_idx": 18128, "episode_idx": 80, "frame_idx": 42, "global_frame_idx": 18128, "task_index": 16}, {"db_idx": 18129, "episode_idx": 80, "frame_idx": 43, "global_frame_idx": 18129, "task_index": 16}, {"db_idx": 18130, "episode_idx": 80, "frame_idx": 44, "global_frame_idx": 18130, "task_index": 16}, {"db_idx": 18131, "episode_idx": 80, "frame_idx": 45, "global_frame_idx": 18131, "task_index": 16}, {"db_idx": 18132, "episode_idx": 80, "frame_idx": 46, "global_frame_idx": 18132, "task_index": 16}, {"db_idx": 18133, "episode_idx": 80, "frame_idx": 47, "global_frame_idx": 18133, "task_index": 16}, {"db_idx": 18134, "episode_idx": 80, "frame_idx": 48, "global_frame_idx": 18134, "task_index": 16}, {"db_idx": 18135, "episode_idx": 80, "frame_idx": 49, "global_frame_idx": 18135, "task_index": 16}, {"db_idx": 18136, "episode_idx": 80, "frame_idx": 50, "global_frame_idx": 18136, "task_index": 16}, {"db_idx": 18137, "episode_idx": 80, "frame_idx": 51, "global_frame_idx": 18137, "task_index": 16}, {"db_idx": 18138, "episode_idx": 80, "frame_idx": 52, "global_frame_idx": 18138, "task_index": 16}, {"db_idx": 18139, "episode_idx": 80, "frame_idx": 53, "global_frame_idx": 18139, "task_index": 16}, {"db_idx": 18140, "episode_idx": 80, "frame_idx": 54, "global_frame_idx": 18140, "task_index": 16}, {"db_idx": 18141, "episode_idx": 80, "frame_idx": 55, "global_frame_idx": 18141, "task_index": 16}, {"db_idx": 18142, "episode_idx": 80, "frame_idx": 56, 
"global_frame_idx": 18142, "task_index": 16}, {"db_idx": 18143, "episode_idx": 80, "frame_idx": 57, "global_frame_idx": 18143, "task_index": 16}, {"db_idx": 18144, "episode_idx": 80, "frame_idx": 58, "global_frame_idx": 18144, "task_index": 16}, {"db_idx": 18145, "episode_idx": 80, "frame_idx": 59, "global_frame_idx": 18145, "task_index": 16}, {"db_idx": 18146, "episode_idx": 80, "frame_idx": 60, "global_frame_idx": 18146, "task_index": 16}, {"db_idx": 18147, "episode_idx": 80, "frame_idx": 61, "global_frame_idx": 18147, "task_index": 16}, {"db_idx": 18148, "episode_idx": 80, "frame_idx": 62, "global_frame_idx": 18148, "task_index": 16}, {"db_idx": 18149, "episode_idx": 80, "frame_idx": 63, "global_frame_idx": 18149, "task_index": 16}, {"db_idx": 18150, "episode_idx": 80, "frame_idx": 64, "global_frame_idx": 18150, "task_index": 16}, {"db_idx": 18151, "episode_idx": 80, "frame_idx": 65, "global_frame_idx": 18151, "task_index": 16}, {"db_idx": 18152, "episode_idx": 80, "frame_idx": 66, "global_frame_idx": 18152, "task_index": 16}, {"db_idx": 18153, "episode_idx": 80, "frame_idx": 67, "global_frame_idx": 18153, "task_index": 16}, {"db_idx": 18154, "episode_idx": 80, "frame_idx": 68, "global_frame_idx": 18154, "task_index": 16}, {"db_idx": 18155, "episode_idx": 80, "frame_idx": 69, "global_frame_idx": 18155, "task_index": 16}, {"db_idx": 18156, "episode_idx": 80, "frame_idx": 70, "global_frame_idx": 18156, "task_index": 16}, {"db_idx": 18157, "episode_idx": 80, "frame_idx": 71, "global_frame_idx": 18157, "task_index": 16}, {"db_idx": 18158, "episode_idx": 80, "frame_idx": 72, "global_frame_idx": 18158, "task_index": 16}, {"db_idx": 18159, "episode_idx": 80, "frame_idx": 73, "global_frame_idx": 18159, "task_index": 16}, {"db_idx": 18160, "episode_idx": 80, "frame_idx": 74, "global_frame_idx": 18160, "task_index": 16}, {"db_idx": 18161, "episode_idx": 80, "frame_idx": 75, "global_frame_idx": 18161, "task_index": 16}, {"db_idx": 18162, "episode_idx": 80, "frame_idx": 76, 
"global_frame_idx": 18162, "task_index": 16}, {"db_idx": 18163, "episode_idx": 80, "frame_idx": 77, "global_frame_idx": 18163, "task_index": 16}, {"db_idx": 18164, "episode_idx": 80, "frame_idx": 78, "global_frame_idx": 18164, "task_index": 16}, {"db_idx": 18165, "episode_idx": 80, "frame_idx": 79, "global_frame_idx": 18165, "task_index": 16}, {"db_idx": 18166, "episode_idx": 80, "frame_idx": 80, "global_frame_idx": 18166, "task_index": 16}, {"db_idx": 18167, "episode_idx": 80, "frame_idx": 81, "global_frame_idx": 18167, "task_index": 16}, {"db_idx": 18168, "episode_idx": 80, "frame_idx": 82, "global_frame_idx": 18168, "task_index": 16}, {"db_idx": 18169, "episode_idx": 80, "frame_idx": 83, "global_frame_idx": 18169, "task_index": 16}, {"db_idx": 18170, "episode_idx": 80, "frame_idx": 84, "global_frame_idx": 18170, "task_index": 16}, {"db_idx": 18171, "episode_idx": 80, "frame_idx": 85, "global_frame_idx": 18171, "task_index": 16}, {"db_idx": 18172, "episode_idx": 80, "frame_idx": 86, "global_frame_idx": 18172, "task_index": 16}, {"db_idx": 18173, "episode_idx": 80, "frame_idx": 87, "global_frame_idx": 18173, "task_index": 16}, {"db_idx": 18174, "episode_idx": 80, "frame_idx": 88, "global_frame_idx": 18174, "task_index": 16}, {"db_idx": 18175, "episode_idx": 80, "frame_idx": 89, "global_frame_idx": 18175, "task_index": 16}, {"db_idx": 18176, "episode_idx": 80, "frame_idx": 90, "global_frame_idx": 18176, "task_index": 16}, {"db_idx": 18177, "episode_idx": 80, "frame_idx": 91, "global_frame_idx": 18177, "task_index": 16}, {"db_idx": 18178, "episode_idx": 80, "frame_idx": 92, "global_frame_idx": 18178, "task_index": 16}, {"db_idx": 18179, "episode_idx": 80, "frame_idx": 93, "global_frame_idx": 18179, "task_index": 16}, {"db_idx": 18180, "episode_idx": 80, "frame_idx": 94, "global_frame_idx": 18180, "task_index": 16}, {"db_idx": 18181, "episode_idx": 80, "frame_idx": 95, "global_frame_idx": 18181, "task_index": 16}, {"db_idx": 18182, "episode_idx": 80, "frame_idx": 96, 
"global_frame_idx": 18182, "task_index": 16}, {"db_idx": 18183, "episode_idx": 80, "frame_idx": 97, "global_frame_idx": 18183, "task_index": 16}, {"db_idx": 18184, "episode_idx": 80, "frame_idx": 98, "global_frame_idx": 18184, "task_index": 16}, {"db_idx": 18185, "episode_idx": 80, "frame_idx": 99, "global_frame_idx": 18185, "task_index": 16}, {"db_idx": 18186, "episode_idx": 80, "frame_idx": 100, "global_frame_idx": 18186, "task_index": 16}, {"db_idx": 18187, "episode_idx": 80, "frame_idx": 101, "global_frame_idx": 18187, "task_index": 16}, {"db_idx": 18188, "episode_idx": 80, "frame_idx": 102, "global_frame_idx": 18188, "task_index": 16}, {"db_idx": 18189, "episode_idx": 80, "frame_idx": 103, "global_frame_idx": 18189, "task_index": 16}, {"db_idx": 18190, "episode_idx": 80, "frame_idx": 104, "global_frame_idx": 18190, "task_index": 16}, {"db_idx": 18191, "episode_idx": 80, "frame_idx": 105, "global_frame_idx": 18191, "task_index": 16}, {"db_idx": 18192, "episode_idx": 80, "frame_idx": 106, "global_frame_idx": 18192, "task_index": 16}, {"db_idx": 18193, "episode_idx": 80, "frame_idx": 107, "global_frame_idx": 18193, "task_index": 16}, {"db_idx": 18194, "episode_idx": 80, "frame_idx": 108, "global_frame_idx": 18194, "task_index": 16}, {"db_idx": 18195, "episode_idx": 80, "frame_idx": 109, "global_frame_idx": 18195, "task_index": 16}, {"db_idx": 18196, "episode_idx": 80, "frame_idx": 110, "global_frame_idx": 18196, "task_index": 16}, {"db_idx": 18197, "episode_idx": 80, "frame_idx": 111, "global_frame_idx": 18197, "task_index": 16}, {"db_idx": 18198, "episode_idx": 80, "frame_idx": 112, "global_frame_idx": 18198, "task_index": 16}, {"db_idx": 18199, "episode_idx": 80, "frame_idx": 113, "global_frame_idx": 18199, "task_index": 16}, {"db_idx": 18200, "episode_idx": 80, "frame_idx": 114, "global_frame_idx": 18200, "task_index": 16}, {"db_idx": 18201, "episode_idx": 80, "frame_idx": 115, "global_frame_idx": 18201, "task_index": 16}, {"db_idx": 18202, "episode_idx": 80, 
"frame_idx": 116, "global_frame_idx": 18202, "task_index": 16}, {"db_idx": 18203, "episode_idx": 80, "frame_idx": 117, "global_frame_idx": 18203, "task_index": 16}, {"db_idx": 18204, "episode_idx": 80, "frame_idx": 118, "global_frame_idx": 18204, "task_index": 16}, {"db_idx": 18205, "episode_idx": 80, "frame_idx": 119, "global_frame_idx": 18205, "task_index": 16}, {"db_idx": 18206, "episode_idx": 80, "frame_idx": 120, "global_frame_idx": 18206, "task_index": 16}, {"db_idx": 18207, "episode_idx": 80, "frame_idx": 121, "global_frame_idx": 18207, "task_index": 16}, {"db_idx": 18208, "episode_idx": 80, "frame_idx": 122, "global_frame_idx": 18208, "task_index": 16}, {"db_idx": 18209, "episode_idx": 80, "frame_idx": 123, "global_frame_idx": 18209, "task_index": 16}, {"db_idx": 18210, "episode_idx": 80, "frame_idx": 124, "global_frame_idx": 18210, "task_index": 16}, {"db_idx": 18211, "episode_idx": 80, "frame_idx": 125, "global_frame_idx": 18211, "task_index": 16}, {"db_idx": 18212, "episode_idx": 80, "frame_idx": 126, "global_frame_idx": 18212, "task_index": 16}, {"db_idx": 18213, "episode_idx": 80, "frame_idx": 127, "global_frame_idx": 18213, "task_index": 16}, {"db_idx": 18214, "episode_idx": 80, "frame_idx": 128, "global_frame_idx": 18214, "task_index": 16}, {"db_idx": 18215, "episode_idx": 80, "frame_idx": 129, "global_frame_idx": 18215, "task_index": 16}, {"db_idx": 18216, "episode_idx": 80, "frame_idx": 130, "global_frame_idx": 18216, "task_index": 16}, {"db_idx": 18217, "episode_idx": 80, "frame_idx": 131, "global_frame_idx": 18217, "task_index": 16}, {"db_idx": 18218, "episode_idx": 80, "frame_idx": 132, "global_frame_idx": 18218, "task_index": 16}, {"db_idx": 18219, "episode_idx": 80, "frame_idx": 133, "global_frame_idx": 18219, "task_index": 16}, {"db_idx": 18220, "episode_idx": 80, "frame_idx": 134, "global_frame_idx": 18220, "task_index": 16}, {"db_idx": 18221, "episode_idx": 80, "frame_idx": 135, "global_frame_idx": 18221, "task_index": 16}, {"db_idx": 
18222, "episode_idx": 80, "frame_idx": 136, "global_frame_idx": 18222, "task_index": 16}, {"db_idx": 18223, "episode_idx": 80, "frame_idx": 137, "global_frame_idx": 18223, "task_index": 16}, {"db_idx": 18224, "episode_idx": 80, "frame_idx": 138, "global_frame_idx": 18224, "task_index": 16}, {"db_idx": 18225, "episode_idx": 80, "frame_idx": 139, "global_frame_idx": 18225, "task_index": 16}, {"db_idx": 18226, "episode_idx": 80, "frame_idx": 140, "global_frame_idx": 18226, "task_index": 16}, {"db_idx": 18227, "episode_idx": 80, "frame_idx": 141, "global_frame_idx": 18227, "task_index": 16}, {"db_idx": 18228, "episode_idx": 80, "frame_idx": 142, "global_frame_idx": 18228, "task_index": 16}, {"db_idx": 18229, "episode_idx": 80, "frame_idx": 143, "global_frame_idx": 18229, "task_index": 16}, {"db_idx": 18230, "episode_idx": 80, "frame_idx": 144, "global_frame_idx": 18230, "task_index": 16}, {"db_idx": 18231, "episode_idx": 80, "frame_idx": 145, "global_frame_idx": 18231, "task_index": 16}, {"db_idx": 18232, "episode_idx": 80, "frame_idx": 146, "global_frame_idx": 18232, "task_index": 16}, {"db_idx": 18233, "episode_idx": 80, "frame_idx": 147, "global_frame_idx": 18233, "task_index": 16}, {"db_idx": 18234, "episode_idx": 80, "frame_idx": 148, "global_frame_idx": 18234, "task_index": 16}, {"db_idx": 18235, "episode_idx": 80, "frame_idx": 149, "global_frame_idx": 18235, "task_index": 16}, {"db_idx": 18236, "episode_idx": 80, "frame_idx": 150, "global_frame_idx": 18236, "task_index": 16}, {"db_idx": 18237, "episode_idx": 80, "frame_idx": 151, "global_frame_idx": 18237, "task_index": 16}, {"db_idx": 18238, "episode_idx": 80, "frame_idx": 152, "global_frame_idx": 18238, "task_index": 16}, {"db_idx": 18239, "episode_idx": 80, "frame_idx": 153, "global_frame_idx": 18239, "task_index": 16}, {"db_idx": 18240, "episode_idx": 80, "frame_idx": 154, "global_frame_idx": 18240, "task_index": 16}, {"db_idx": 18241, "episode_idx": 80, "frame_idx": 155, "global_frame_idx": 18241, 
"task_index": 16}, {"db_idx": 18242, "episode_idx": 80, "frame_idx": 156, "global_frame_idx": 18242, "task_index": 16}, {"db_idx": 18243, "episode_idx": 80, "frame_idx": 157, "global_frame_idx": 18243, "task_index": 16}, {"db_idx": 18244, "episode_idx": 81, "frame_idx": 0, "global_frame_idx": 18244, "task_index": 16}, {"db_idx": 18245, "episode_idx": 81, "frame_idx": 1, "global_frame_idx": 18245, "task_index": 16}, {"db_idx": 18246, "episode_idx": 81, "frame_idx": 2, "global_frame_idx": 18246, "task_index": 16}, {"db_idx": 18247, "episode_idx": 81, "frame_idx": 3, "global_frame_idx": 18247, "task_index": 16}, {"db_idx": 18248, "episode_idx": 81, "frame_idx": 4, "global_frame_idx": 18248, "task_index": 16}, {"db_idx": 18249, "episode_idx": 81, "frame_idx": 5, "global_frame_idx": 18249, "task_index": 16}, {"db_idx": 18250, "episode_idx": 81, "frame_idx": 6, "global_frame_idx": 18250, "task_index": 16}, {"db_idx": 18251, "episode_idx": 81, "frame_idx": 7, "global_frame_idx": 18251, "task_index": 16}, {"db_idx": 18252, "episode_idx": 81, "frame_idx": 8, "global_frame_idx": 18252, "task_index": 16}, {"db_idx": 18253, "episode_idx": 81, "frame_idx": 9, "global_frame_idx": 18253, "task_index": 16}, {"db_idx": 18254, "episode_idx": 81, "frame_idx": 10, "global_frame_idx": 18254, "task_index": 16}, {"db_idx": 18255, "episode_idx": 81, "frame_idx": 11, "global_frame_idx": 18255, "task_index": 16}, {"db_idx": 18256, "episode_idx": 81, "frame_idx": 12, "global_frame_idx": 18256, "task_index": 16}, {"db_idx": 18257, "episode_idx": 81, "frame_idx": 13, "global_frame_idx": 18257, "task_index": 16}, {"db_idx": 18258, "episode_idx": 81, "frame_idx": 14, "global_frame_idx": 18258, "task_index": 16}, {"db_idx": 18259, "episode_idx": 81, "frame_idx": 15, "global_frame_idx": 18259, "task_index": 16}, {"db_idx": 18260, "episode_idx": 81, "frame_idx": 16, "global_frame_idx": 18260, "task_index": 16}, {"db_idx": 18261, "episode_idx": 81, "frame_idx": 17, "global_frame_idx": 18261, 
"task_index": 16}, {"db_idx": 18262, "episode_idx": 81, "frame_idx": 18, "global_frame_idx": 18262, "task_index": 16}, {"db_idx": 18263, "episode_idx": 81, "frame_idx": 19, "global_frame_idx": 18263, "task_index": 16}, {"db_idx": 18264, "episode_idx": 81, "frame_idx": 20, "global_frame_idx": 18264, "task_index": 16}, {"db_idx": 18265, "episode_idx": 81, "frame_idx": 21, "global_frame_idx": 18265, "task_index": 16}, {"db_idx": 18266, "episode_idx": 81, "frame_idx": 22, "global_frame_idx": 18266, "task_index": 16}, {"db_idx": 18267, "episode_idx": 81, "frame_idx": 23, "global_frame_idx": 18267, "task_index": 16}, {"db_idx": 18268, "episode_idx": 81, "frame_idx": 24, "global_frame_idx": 18268, "task_index": 16}, {"db_idx": 18269, "episode_idx": 81, "frame_idx": 25, "global_frame_idx": 18269, "task_index": 16}, {"db_idx": 18270, "episode_idx": 81, "frame_idx": 26, "global_frame_idx": 18270, "task_index": 16}, {"db_idx": 18271, "episode_idx": 81, "frame_idx": 27, "global_frame_idx": 18271, "task_index": 16}, {"db_idx": 18272, "episode_idx": 81, "frame_idx": 28, "global_frame_idx": 18272, "task_index": 16}, {"db_idx": 18273, "episode_idx": 81, "frame_idx": 29, "global_frame_idx": 18273, "task_index": 16}, {"db_idx": 18274, "episode_idx": 81, "frame_idx": 30, "global_frame_idx": 18274, "task_index": 16}, {"db_idx": 18275, "episode_idx": 81, "frame_idx": 31, "global_frame_idx": 18275, "task_index": 16}, {"db_idx": 18276, "episode_idx": 81, "frame_idx": 32, "global_frame_idx": 18276, "task_index": 16}, {"db_idx": 18277, "episode_idx": 81, "frame_idx": 33, "global_frame_idx": 18277, "task_index": 16}, {"db_idx": 18278, "episode_idx": 81, "frame_idx": 34, "global_frame_idx": 18278, "task_index": 16}, {"db_idx": 18279, "episode_idx": 81, "frame_idx": 35, "global_frame_idx": 18279, "task_index": 16}, {"db_idx": 18280, "episode_idx": 81, "frame_idx": 36, "global_frame_idx": 18280, "task_index": 16}, {"db_idx": 18281, "episode_idx": 81, "frame_idx": 37, "global_frame_idx": 18281, 
"task_index": 16}, {"db_idx": 18282, "episode_idx": 81, "frame_idx": 38, "global_frame_idx": 18282, "task_index": 16}, {"db_idx": 18283, "episode_idx": 81, "frame_idx": 39, "global_frame_idx": 18283, "task_index": 16}, {"db_idx": 18284, "episode_idx": 81, "frame_idx": 40, "global_frame_idx": 18284, "task_index": 16}, {"db_idx": 18285, "episode_idx": 81, "frame_idx": 41, "global_frame_idx": 18285, "task_index": 16}, {"db_idx": 18286, "episode_idx": 81, "frame_idx": 42, "global_frame_idx": 18286, "task_index": 16}, {"db_idx": 18287, "episode_idx": 81, "frame_idx": 43, "global_frame_idx": 18287, "task_index": 16}, {"db_idx": 18288, "episode_idx": 81, "frame_idx": 44, "global_frame_idx": 18288, "task_index": 16}, {"db_idx": 18289, "episode_idx": 81, "frame_idx": 45, "global_frame_idx": 18289, "task_index": 16}, {"db_idx": 18290, "episode_idx": 81, "frame_idx": 46, "global_frame_idx": 18290, "task_index": 16}, {"db_idx": 18291, "episode_idx": 81, "frame_idx": 47, "global_frame_idx": 18291, "task_index": 16}, {"db_idx": 18292, "episode_idx": 81, "frame_idx": 48, "global_frame_idx": 18292, "task_index": 16}, {"db_idx": 18293, "episode_idx": 81, "frame_idx": 49, "global_frame_idx": 18293, "task_index": 16}, {"db_idx": 18294, "episode_idx": 81, "frame_idx": 50, "global_frame_idx": 18294, "task_index": 16}, {"db_idx": 18295, "episode_idx": 81, "frame_idx": 51, "global_frame_idx": 18295, "task_index": 16}, {"db_idx": 18296, "episode_idx": 81, "frame_idx": 52, "global_frame_idx": 18296, "task_index": 16}, {"db_idx": 18297, "episode_idx": 81, "frame_idx": 53, "global_frame_idx": 18297, "task_index": 16}, {"db_idx": 18298, "episode_idx": 81, "frame_idx": 54, "global_frame_idx": 18298, "task_index": 16}, {"db_idx": 18299, "episode_idx": 81, "frame_idx": 55, "global_frame_idx": 18299, "task_index": 16}, {"db_idx": 18300, "episode_idx": 81, "frame_idx": 56, "global_frame_idx": 18300, "task_index": 16}, {"db_idx": 18301, "episode_idx": 81, "frame_idx": 57, "global_frame_idx": 18301, 
"task_index": 16}, {"db_idx": 18302, "episode_idx": 81, "frame_idx": 58, "global_frame_idx": 18302, "task_index": 16}, {"db_idx": 18303, "episode_idx": 81, "frame_idx": 59, "global_frame_idx": 18303, "task_index": 16}, {"db_idx": 18304, "episode_idx": 81, "frame_idx": 60, "global_frame_idx": 18304, "task_index": 16}, {"db_idx": 18305, "episode_idx": 81, "frame_idx": 61, "global_frame_idx": 18305, "task_index": 16}, {"db_idx": 18306, "episode_idx": 81, "frame_idx": 62, "global_frame_idx": 18306, "task_index": 16}, {"db_idx": 18307, "episode_idx": 81, "frame_idx": 63, "global_frame_idx": 18307, "task_index": 16}, {"db_idx": 18308, "episode_idx": 81, "frame_idx": 64, "global_frame_idx": 18308, "task_index": 16}, {"db_idx": 18309, "episode_idx": 81, "frame_idx": 65, "global_frame_idx": 18309, "task_index": 16}, {"db_idx": 18310, "episode_idx": 81, "frame_idx": 66, "global_frame_idx": 18310, "task_index": 16}, {"db_idx": 18311, "episode_idx": 81, "frame_idx": 67, "global_frame_idx": 18311, "task_index": 16}, {"db_idx": 18312, "episode_idx": 81, "frame_idx": 68, "global_frame_idx": 18312, "task_index": 16}, {"db_idx": 18313, "episode_idx": 81, "frame_idx": 69, "global_frame_idx": 18313, "task_index": 16}, {"db_idx": 18314, "episode_idx": 81, "frame_idx": 70, "global_frame_idx": 18314, "task_index": 16}, {"db_idx": 18315, "episode_idx": 81, "frame_idx": 71, "global_frame_idx": 18315, "task_index": 16}, {"db_idx": 18316, "episode_idx": 81, "frame_idx": 72, "global_frame_idx": 18316, "task_index": 16}, {"db_idx": 18317, "episode_idx": 81, "frame_idx": 73, "global_frame_idx": 18317, "task_index": 16}, {"db_idx": 18318, "episode_idx": 81, "frame_idx": 74, "global_frame_idx": 18318, "task_index": 16}, {"db_idx": 18319, "episode_idx": 81, "frame_idx": 75, "global_frame_idx": 18319, "task_index": 16}, {"db_idx": 18320, "episode_idx": 81, "frame_idx": 76, "global_frame_idx": 18320, "task_index": 16}, {"db_idx": 18321, "episode_idx": 81, "frame_idx": 77, "global_frame_idx": 18321, 
"task_index": 16}, {"db_idx": 18322, "episode_idx": 81, "frame_idx": 78, "global_frame_idx": 18322, "task_index": 16}, {"db_idx": 18323, "episode_idx": 81, "frame_idx": 79, "global_frame_idx": 18323, "task_index": 16}, {"db_idx": 18324, "episode_idx": 81, "frame_idx": 80, "global_frame_idx": 18324, "task_index": 16}, {"db_idx": 18325, "episode_idx": 81, "frame_idx": 81, "global_frame_idx": 18325, "task_index": 16}, {"db_idx": 18326, "episode_idx": 81, "frame_idx": 82, "global_frame_idx": 18326, "task_index": 16}, {"db_idx": 18327, "episode_idx": 81, "frame_idx": 83, "global_frame_idx": 18327, "task_index": 16}, {"db_idx": 18328, "episode_idx": 81, "frame_idx": 84, "global_frame_idx": 18328, "task_index": 16}, {"db_idx": 18329, "episode_idx": 81, "frame_idx": 85, "global_frame_idx": 18329, "task_index": 16}, {"db_idx": 18330, "episode_idx": 81, "frame_idx": 86, "global_frame_idx": 18330, "task_index": 16}, {"db_idx": 18331, "episode_idx": 81, "frame_idx": 87, "global_frame_idx": 18331, "task_index": 16}, {"db_idx": 18332, "episode_idx": 81, "frame_idx": 88, "global_frame_idx": 18332, "task_index": 16}, {"db_idx": 18333, "episode_idx": 81, "frame_idx": 89, "global_frame_idx": 18333, "task_index": 16}, {"db_idx": 18334, "episode_idx": 81, "frame_idx": 90, "global_frame_idx": 18334, "task_index": 16}, {"db_idx": 18335, "episode_idx": 81, "frame_idx": 91, "global_frame_idx": 18335, "task_index": 16}, {"db_idx": 18336, "episode_idx": 81, "frame_idx": 92, "global_frame_idx": 18336, "task_index": 16}, {"db_idx": 18337, "episode_idx": 81, "frame_idx": 93, "global_frame_idx": 18337, "task_index": 16}, {"db_idx": 18338, "episode_idx": 81, "frame_idx": 94, "global_frame_idx": 18338, "task_index": 16}, {"db_idx": 18339, "episode_idx": 81, "frame_idx": 95, "global_frame_idx": 18339, "task_index": 16}, {"db_idx": 18340, "episode_idx": 81, "frame_idx": 96, "global_frame_idx": 18340, "task_index": 16}, {"db_idx": 18341, "episode_idx": 81, "frame_idx": 97, "global_frame_idx": 18341, 
"task_index": 16}, {"db_idx": 18342, "episode_idx": 81, "frame_idx": 98, "global_frame_idx": 18342, "task_index": 16}, {"db_idx": 18343, "episode_idx": 81, "frame_idx": 99, "global_frame_idx": 18343, "task_index": 16}, {"db_idx": 18344, "episode_idx": 81, "frame_idx": 100, "global_frame_idx": 18344, "task_index": 16}, {"db_idx": 18345, "episode_idx": 81, "frame_idx": 101, "global_frame_idx": 18345, "task_index": 16}, {"db_idx": 18346, "episode_idx": 81, "frame_idx": 102, "global_frame_idx": 18346, "task_index": 16}, {"db_idx": 18347, "episode_idx": 81, "frame_idx": 103, "global_frame_idx": 18347, "task_index": 16}, {"db_idx": 18348, "episode_idx": 81, "frame_idx": 104, "global_frame_idx": 18348, "task_index": 16}, {"db_idx": 18349, "episode_idx": 81, "frame_idx": 105, "global_frame_idx": 18349, "task_index": 16}, {"db_idx": 18350, "episode_idx": 81, "frame_idx": 106, "global_frame_idx": 18350, "task_index": 16}, {"db_idx": 18351, "episode_idx": 81, "frame_idx": 107, "global_frame_idx": 18351, "task_index": 16}, {"db_idx": 18352, "episode_idx": 81, "frame_idx": 108, "global_frame_idx": 18352, "task_index": 16}, {"db_idx": 18353, "episode_idx": 81, "frame_idx": 109, "global_frame_idx": 18353, "task_index": 16}, {"db_idx": 18354, "episode_idx": 81, "frame_idx": 110, "global_frame_idx": 18354, "task_index": 16}, {"db_idx": 18355, "episode_idx": 81, "frame_idx": 111, "global_frame_idx": 18355, "task_index": 16}, {"db_idx": 18356, "episode_idx": 81, "frame_idx": 112, "global_frame_idx": 18356, "task_index": 16}, {"db_idx": 18357, "episode_idx": 81, "frame_idx": 113, "global_frame_idx": 18357, "task_index": 16}, {"db_idx": 18358, "episode_idx": 81, "frame_idx": 114, "global_frame_idx": 18358, "task_index": 16}, {"db_idx": 18359, "episode_idx": 81, "frame_idx": 115, "global_frame_idx": 18359, "task_index": 16}, {"db_idx": 18360, "episode_idx": 81, "frame_idx": 116, "global_frame_idx": 18360, "task_index": 16}, {"db_idx": 18361, "episode_idx": 81, "frame_idx": 117, 
"global_frame_idx": 18361, "task_index": 16}, {"db_idx": 18362, "episode_idx": 81, "frame_idx": 118, "global_frame_idx": 18362, "task_index": 16}, {"db_idx": 18363, "episode_idx": 81, "frame_idx": 119, "global_frame_idx": 18363, "task_index": 16}, {"db_idx": 18364, "episode_idx": 81, "frame_idx": 120, "global_frame_idx": 18364, "task_index": 16}, {"db_idx": 18365, "episode_idx": 81, "frame_idx": 121, "global_frame_idx": 18365, "task_index": 16}, {"db_idx": 18366, "episode_idx": 81, "frame_idx": 122, "global_frame_idx": 18366, "task_index": 16}, {"db_idx": 18367, "episode_idx": 81, "frame_idx": 123, "global_frame_idx": 18367, "task_index": 16}, {"db_idx": 18368, "episode_idx": 81, "frame_idx": 124, "global_frame_idx": 18368, "task_index": 16}, {"db_idx": 18369, "episode_idx": 81, "frame_idx": 125, "global_frame_idx": 18369, "task_index": 16}, {"db_idx": 18370, "episode_idx": 81, "frame_idx": 126, "global_frame_idx": 18370, "task_index": 16}, {"db_idx": 18371, "episode_idx": 81, "frame_idx": 127, "global_frame_idx": 18371, "task_index": 16}, {"db_idx": 18372, "episode_idx": 81, "frame_idx": 128, "global_frame_idx": 18372, "task_index": 16}, {"db_idx": 18373, "episode_idx": 81, "frame_idx": 129, "global_frame_idx": 18373, "task_index": 16}, {"db_idx": 18374, "episode_idx": 81, "frame_idx": 130, "global_frame_idx": 18374, "task_index": 16}, {"db_idx": 18375, "episode_idx": 81, "frame_idx": 131, "global_frame_idx": 18375, "task_index": 16}, {"db_idx": 18376, "episode_idx": 81, "frame_idx": 132, "global_frame_idx": 18376, "task_index": 16}, {"db_idx": 18377, "episode_idx": 81, "frame_idx": 133, "global_frame_idx": 18377, "task_index": 16}, {"db_idx": 18378, "episode_idx": 81, "frame_idx": 134, "global_frame_idx": 18378, "task_index": 16}, {"db_idx": 18379, "episode_idx": 81, "frame_idx": 135, "global_frame_idx": 18379, "task_index": 16}, {"db_idx": 18380, "episode_idx": 81, "frame_idx": 136, "global_frame_idx": 18380, "task_index": 16}, {"db_idx": 18381, "episode_idx": 
81, "frame_idx": 137, "global_frame_idx": 18381, "task_index": 16}, {"db_idx": 18382, "episode_idx": 81, "frame_idx": 138, "global_frame_idx": 18382, "task_index": 16}, {"db_idx": 18383, "episode_idx": 81, "frame_idx": 139, "global_frame_idx": 18383, "task_index": 16}, {"db_idx": 18384, "episode_idx": 81, "frame_idx": 140, "global_frame_idx": 18384, "task_index": 16}, {"db_idx": 18385, "episode_idx": 81, "frame_idx": 141, "global_frame_idx": 18385, "task_index": 16}, {"db_idx": 18386, "episode_idx": 81, "frame_idx": 142, "global_frame_idx": 18386, "task_index": 16}, {"db_idx": 18387, "episode_idx": 81, "frame_idx": 143, "global_frame_idx": 18387, "task_index": 16}, {"db_idx": 18388, "episode_idx": 81, "frame_idx": 144, "global_frame_idx": 18388, "task_index": 16}, {"db_idx": 18389, "episode_idx": 81, "frame_idx": 145, "global_frame_idx": 18389, "task_index": 16}, {"db_idx": 18390, "episode_idx": 81, "frame_idx": 146, "global_frame_idx": 18390, "task_index": 16}, {"db_idx": 18391, "episode_idx": 81, "frame_idx": 147, "global_frame_idx": 18391, "task_index": 16}, {"db_idx": 18392, "episode_idx": 81, "frame_idx": 148, "global_frame_idx": 18392, "task_index": 16}, {"db_idx": 18393, "episode_idx": 81, "frame_idx": 149, "global_frame_idx": 18393, "task_index": 16}, {"db_idx": 18394, "episode_idx": 81, "frame_idx": 150, "global_frame_idx": 18394, "task_index": 16}, {"db_idx": 18395, "episode_idx": 81, "frame_idx": 151, "global_frame_idx": 18395, "task_index": 16}, {"db_idx": 18396, "episode_idx": 81, "frame_idx": 152, "global_frame_idx": 18396, "task_index": 16}, {"db_idx": 18397, "episode_idx": 81, "frame_idx": 153, "global_frame_idx": 18397, "task_index": 16}, {"db_idx": 18398, "episode_idx": 81, "frame_idx": 154, "global_frame_idx": 18398, "task_index": 16}, {"db_idx": 18399, "episode_idx": 81, "frame_idx": 155, "global_frame_idx": 18399, "task_index": 16}, {"db_idx": 18400, "episode_idx": 81, "frame_idx": 156, "global_frame_idx": 18400, "task_index": 16}, {"db_idx": 
18401, "episode_idx": 81, "frame_idx": 157, "global_frame_idx": 18401, "task_index": 16}, {"db_idx": 18402, "episode_idx": 81, "frame_idx": 158, "global_frame_idx": 18402, "task_index": 16}, {"db_idx": 18403, "episode_idx": 81, "frame_idx": 159, "global_frame_idx": 18403, "task_index": 16}, {"db_idx": 18404, "episode_idx": 81, "frame_idx": 160, "global_frame_idx": 18404, "task_index": 16}, {"db_idx": 18405, "episode_idx": 81, "frame_idx": 161, "global_frame_idx": 18405, "task_index": 16}, {"db_idx": 18406, "episode_idx": 82, "frame_idx": 0, "global_frame_idx": 18406, "task_index": 16}, {"db_idx": 18407, "episode_idx": 82, "frame_idx": 1, "global_frame_idx": 18407, "task_index": 16}, {"db_idx": 18408, "episode_idx": 82, "frame_idx": 2, "global_frame_idx": 18408, "task_index": 16}, {"db_idx": 18409, "episode_idx": 82, "frame_idx": 3, "global_frame_idx": 18409, "task_index": 16}, {"db_idx": 18410, "episode_idx": 82, "frame_idx": 4, "global_frame_idx": 18410, "task_index": 16}, {"db_idx": 18411, "episode_idx": 82, "frame_idx": 5, "global_frame_idx": 18411, "task_index": 16}, {"db_idx": 18412, "episode_idx": 82, "frame_idx": 6, "global_frame_idx": 18412, "task_index": 16}, {"db_idx": 18413, "episode_idx": 82, "frame_idx": 7, "global_frame_idx": 18413, "task_index": 16}, {"db_idx": 18414, "episode_idx": 82, "frame_idx": 8, "global_frame_idx": 18414, "task_index": 16}, {"db_idx": 18415, "episode_idx": 82, "frame_idx": 9, "global_frame_idx": 18415, "task_index": 16}, {"db_idx": 18416, "episode_idx": 82, "frame_idx": 10, "global_frame_idx": 18416, "task_index": 16}, {"db_idx": 18417, "episode_idx": 82, "frame_idx": 11, "global_frame_idx": 18417, "task_index": 16}, {"db_idx": 18418, "episode_idx": 82, "frame_idx": 12, "global_frame_idx": 18418, "task_index": 16}, {"db_idx": 18419, "episode_idx": 82, "frame_idx": 13, "global_frame_idx": 18419, "task_index": 16}, {"db_idx": 18420, "episode_idx": 82, "frame_idx": 14, "global_frame_idx": 18420, "task_index": 16}, {"db_idx": 
18421, "episode_idx": 82, "frame_idx": 15, "global_frame_idx": 18421, "task_index": 16}, {"db_idx": 18422, "episode_idx": 82, "frame_idx": 16, "global_frame_idx": 18422, "task_index": 16}, {"db_idx": 18423, "episode_idx": 82, "frame_idx": 17, "global_frame_idx": 18423, "task_index": 16}, {"db_idx": 18424, "episode_idx": 82, "frame_idx": 18, "global_frame_idx": 18424, "task_index": 16}, {"db_idx": 18425, "episode_idx": 82, "frame_idx": 19, "global_frame_idx": 18425, "task_index": 16}, {"db_idx": 18426, "episode_idx": 82, "frame_idx": 20, "global_frame_idx": 18426, "task_index": 16}, {"db_idx": 18427, "episode_idx": 82, "frame_idx": 21, "global_frame_idx": 18427, "task_index": 16}, {"db_idx": 18428, "episode_idx": 82, "frame_idx": 22, "global_frame_idx": 18428, "task_index": 16}, {"db_idx": 18429, "episode_idx": 82, "frame_idx": 23, "global_frame_idx": 18429, "task_index": 16}, {"db_idx": 18430, "episode_idx": 82, "frame_idx": 24, "global_frame_idx": 18430, "task_index": 16}, {"db_idx": 18431, "episode_idx": 82, "frame_idx": 25, "global_frame_idx": 18431, "task_index": 16}, {"db_idx": 18432, "episode_idx": 82, "frame_idx": 26, "global_frame_idx": 18432, "task_index": 16}, {"db_idx": 18433, "episode_idx": 82, "frame_idx": 27, "global_frame_idx": 18433, "task_index": 16}, {"db_idx": 18434, "episode_idx": 82, "frame_idx": 28, "global_frame_idx": 18434, "task_index": 16}, {"db_idx": 18435, "episode_idx": 82, "frame_idx": 29, "global_frame_idx": 18435, "task_index": 16}, {"db_idx": 18436, "episode_idx": 82, "frame_idx": 30, "global_frame_idx": 18436, "task_index": 16}, {"db_idx": 18437, "episode_idx": 82, "frame_idx": 31, "global_frame_idx": 18437, "task_index": 16}, {"db_idx": 18438, "episode_idx": 82, "frame_idx": 32, "global_frame_idx": 18438, "task_index": 16}, {"db_idx": 18439, "episode_idx": 82, "frame_idx": 33, "global_frame_idx": 18439, "task_index": 16}, {"db_idx": 18440, "episode_idx": 82, "frame_idx": 34, "global_frame_idx": 18440, "task_index": 16}, {"db_idx": 
18441, "episode_idx": 82, "frame_idx": 35, "global_frame_idx": 18441, "task_index": 16}, {"db_idx": 18442, "episode_idx": 82, "frame_idx": 36, "global_frame_idx": 18442, "task_index": 16}, {"db_idx": 18443, "episode_idx": 82, "frame_idx": 37, "global_frame_idx": 18443, "task_index": 16}, {"db_idx": 18444, "episode_idx": 82, "frame_idx": 38, "global_frame_idx": 18444, "task_index": 16}, {"db_idx": 18445, "episode_idx": 82, "frame_idx": 39, "global_frame_idx": 18445, "task_index": 16}, {"db_idx": 18446, "episode_idx": 82, "frame_idx": 40, "global_frame_idx": 18446, "task_index": 16}, {"db_idx": 18447, "episode_idx": 82, "frame_idx": 41, "global_frame_idx": 18447, "task_index": 16}, {"db_idx": 18448, "episode_idx": 82, "frame_idx": 42, "global_frame_idx": 18448, "task_index": 16}, {"db_idx": 18449, "episode_idx": 82, "frame_idx": 43, "global_frame_idx": 18449, "task_index": 16}, {"db_idx": 18450, "episode_idx": 82, "frame_idx": 44, "global_frame_idx": 18450, "task_index": 16}, {"db_idx": 18451, "episode_idx": 82, "frame_idx": 45, "global_frame_idx": 18451, "task_index": 16}, {"db_idx": 18452, "episode_idx": 82, "frame_idx": 46, "global_frame_idx": 18452, "task_index": 16}, {"db_idx": 18453, "episode_idx": 82, "frame_idx": 47, "global_frame_idx": 18453, "task_index": 16}, {"db_idx": 18454, "episode_idx": 82, "frame_idx": 48, "global_frame_idx": 18454, "task_index": 16}, {"db_idx": 18455, "episode_idx": 82, "frame_idx": 49, "global_frame_idx": 18455, "task_index": 16}, {"db_idx": 18456, "episode_idx": 82, "frame_idx": 50, "global_frame_idx": 18456, "task_index": 16}, {"db_idx": 18457, "episode_idx": 82, "frame_idx": 51, "global_frame_idx": 18457, "task_index": 16}, {"db_idx": 18458, "episode_idx": 82, "frame_idx": 52, "global_frame_idx": 18458, "task_index": 16}, {"db_idx": 18459, "episode_idx": 82, "frame_idx": 53, "global_frame_idx": 18459, "task_index": 16}, {"db_idx": 18460, "episode_idx": 82, "frame_idx": 54, "global_frame_idx": 18460, "task_index": 16}, {"db_idx": 
18461, "episode_idx": 82, "frame_idx": 55, "global_frame_idx": 18461, "task_index": 16}, {"db_idx": 18462, "episode_idx": 82, "frame_idx": 56, "global_frame_idx": 18462, "task_index": 16}, {"db_idx": 18463, "episode_idx": 82, "frame_idx": 57, "global_frame_idx": 18463, "task_index": 16}, {"db_idx": 18464, "episode_idx": 82, "frame_idx": 58, "global_frame_idx": 18464, "task_index": 16}, {"db_idx": 18465, "episode_idx": 82, "frame_idx": 59, "global_frame_idx": 18465, "task_index": 16}, {"db_idx": 18466, "episode_idx": 82, "frame_idx": 60, "global_frame_idx": 18466, "task_index": 16}, {"db_idx": 18467, "episode_idx": 82, "frame_idx": 61, "global_frame_idx": 18467, "task_index": 16}, {"db_idx": 18468, "episode_idx": 82, "frame_idx": 62, "global_frame_idx": 18468, "task_index": 16}, {"db_idx": 18469, "episode_idx": 82, "frame_idx": 63, "global_frame_idx": 18469, "task_index": 16}, {"db_idx": 18470, "episode_idx": 82, "frame_idx": 64, "global_frame_idx": 18470, "task_index": 16}, {"db_idx": 18471, "episode_idx": 82, "frame_idx": 65, "global_frame_idx": 18471, "task_index": 16}, {"db_idx": 18472, "episode_idx": 82, "frame_idx": 66, "global_frame_idx": 18472, "task_index": 16}, {"db_idx": 18473, "episode_idx": 82, "frame_idx": 67, "global_frame_idx": 18473, "task_index": 16}, {"db_idx": 18474, "episode_idx": 82, "frame_idx": 68, "global_frame_idx": 18474, "task_index": 16}, {"db_idx": 18475, "episode_idx": 82, "frame_idx": 69, "global_frame_idx": 18475, "task_index": 16}, {"db_idx": 18476, "episode_idx": 82, "frame_idx": 70, "global_frame_idx": 18476, "task_index": 16}, {"db_idx": 18477, "episode_idx": 82, "frame_idx": 71, "global_frame_idx": 18477, "task_index": 16}, {"db_idx": 18478, "episode_idx": 82, "frame_idx": 72, "global_frame_idx": 18478, "task_index": 16}, {"db_idx": 18479, "episode_idx": 82, "frame_idx": 73, "global_frame_idx": 18479, "task_index": 16}, {"db_idx": 18480, "episode_idx": 82, "frame_idx": 74, "global_frame_idx": 18480, "task_index": 16}, {"db_idx": 
18481, "episode_idx": 82, "frame_idx": 75, "global_frame_idx": 18481, "task_index": 16}, {"db_idx": 18482, "episode_idx": 82, "frame_idx": 76, "global_frame_idx": 18482, "task_index": 16}, {"db_idx": 18483, "episode_idx": 82, "frame_idx": 77, "global_frame_idx": 18483, "task_index": 16}, {"db_idx": 18484, "episode_idx": 82, "frame_idx": 78, "global_frame_idx": 18484, "task_index": 16}, {"db_idx": 18485, "episode_idx": 82, "frame_idx": 79, "global_frame_idx": 18485, "task_index": 16}, {"db_idx": 18486, "episode_idx": 82, "frame_idx": 80, "global_frame_idx": 18486, "task_index": 16}, {"db_idx": 18487, "episode_idx": 82, "frame_idx": 81, "global_frame_idx": 18487, "task_index": 16}, {"db_idx": 18488, "episode_idx": 82, "frame_idx": 82, "global_frame_idx": 18488, "task_index": 16}, {"db_idx": 18489, "episode_idx": 82, "frame_idx": 83, "global_frame_idx": 18489, "task_index": 16}, {"db_idx": 18490, "episode_idx": 82, "frame_idx": 84, "global_frame_idx": 18490, "task_index": 16}, {"db_idx": 18491, "episode_idx": 82, "frame_idx": 85, "global_frame_idx": 18491, "task_index": 16}, {"db_idx": 18492, "episode_idx": 82, "frame_idx": 86, "global_frame_idx": 18492, "task_index": 16}, {"db_idx": 18493, "episode_idx": 82, "frame_idx": 87, "global_frame_idx": 18493, "task_index": 16}, {"db_idx": 18494, "episode_idx": 82, "frame_idx": 88, "global_frame_idx": 18494, "task_index": 16}, {"db_idx": 18495, "episode_idx": 82, "frame_idx": 89, "global_frame_idx": 18495, "task_index": 16}, {"db_idx": 18496, "episode_idx": 82, "frame_idx": 90, "global_frame_idx": 18496, "task_index": 16}, {"db_idx": 18497, "episode_idx": 82, "frame_idx": 91, "global_frame_idx": 18497, "task_index": 16}, {"db_idx": 18498, "episode_idx": 82, "frame_idx": 92, "global_frame_idx": 18498, "task_index": 16}, {"db_idx": 18499, "episode_idx": 82, "frame_idx": 93, "global_frame_idx": 18499, "task_index": 16}, {"db_idx": 18500, "episode_idx": 82, "frame_idx": 94, "global_frame_idx": 18500, "task_index": 16}, {"db_idx": 
18501, "episode_idx": 82, "frame_idx": 95, "global_frame_idx": 18501, "task_index": 16}, {"db_idx": 18502, "episode_idx": 82, "frame_idx": 96, "global_frame_idx": 18502, "task_index": 16}, {"db_idx": 18503, "episode_idx": 82, "frame_idx": 97, "global_frame_idx": 18503, "task_index": 16}, {"db_idx": 18504, "episode_idx": 82, "frame_idx": 98, "global_frame_idx": 18504, "task_index": 16}, {"db_idx": 18505, "episode_idx": 82, "frame_idx": 99, "global_frame_idx": 18505, "task_index": 16}, {"db_idx": 18506, "episode_idx": 82, "frame_idx": 100, "global_frame_idx": 18506, "task_index": 16}, {"db_idx": 18507, "episode_idx": 82, "frame_idx": 101, "global_frame_idx": 18507, "task_index": 16}, {"db_idx": 18508, "episode_idx": 82, "frame_idx": 102, "global_frame_idx": 18508, "task_index": 16}, {"db_idx": 18509, "episode_idx": 82, "frame_idx": 103, "global_frame_idx": 18509, "task_index": 16}, {"db_idx": 18510, "episode_idx": 82, "frame_idx": 104, "global_frame_idx": 18510, "task_index": 16}, {"db_idx": 18511, "episode_idx": 82, "frame_idx": 105, "global_frame_idx": 18511, "task_index": 16}, {"db_idx": 18512, "episode_idx": 82, "frame_idx": 106, "global_frame_idx": 18512, "task_index": 16}, {"db_idx": 18513, "episode_idx": 82, "frame_idx": 107, "global_frame_idx": 18513, "task_index": 16}, {"db_idx": 18514, "episode_idx": 82, "frame_idx": 108, "global_frame_idx": 18514, "task_index": 16}, {"db_idx": 18515, "episode_idx": 82, "frame_idx": 109, "global_frame_idx": 18515, "task_index": 16}, {"db_idx": 18516, "episode_idx": 82, "frame_idx": 110, "global_frame_idx": 18516, "task_index": 16}, {"db_idx": 18517, "episode_idx": 82, "frame_idx": 111, "global_frame_idx": 18517, "task_index": 16}, {"db_idx": 18518, "episode_idx": 82, "frame_idx": 112, "global_frame_idx": 18518, "task_index": 16}, {"db_idx": 18519, "episode_idx": 82, "frame_idx": 113, "global_frame_idx": 18519, "task_index": 16}, {"db_idx": 18520, "episode_idx": 82, "frame_idx": 114, "global_frame_idx": 18520, "task_index": 
16}, {"db_idx": 18521, "episode_idx": 82, "frame_idx": 115, "global_frame_idx": 18521, "task_index": 16}, {"db_idx": 18522, "episode_idx": 82, "frame_idx": 116, "global_frame_idx": 18522, "task_index": 16}, {"db_idx": 18523, "episode_idx": 82, "frame_idx": 117, "global_frame_idx": 18523, "task_index": 16}, {"db_idx": 18524, "episode_idx": 82, "frame_idx": 118, "global_frame_idx": 18524, "task_index": 16}, {"db_idx": 18525, "episode_idx": 82, "frame_idx": 119, "global_frame_idx": 18525, "task_index": 16}, {"db_idx": 18526, "episode_idx": 82, "frame_idx": 120, "global_frame_idx": 18526, "task_index": 16}, {"db_idx": 18527, "episode_idx": 82, "frame_idx": 121, "global_frame_idx": 18527, "task_index": 16}, {"db_idx": 18528, "episode_idx": 82, "frame_idx": 122, "global_frame_idx": 18528, "task_index": 16}, {"db_idx": 18529, "episode_idx": 82, "frame_idx": 123, "global_frame_idx": 18529, "task_index": 16}, {"db_idx": 18530, "episode_idx": 82, "frame_idx": 124, "global_frame_idx": 18530, "task_index": 16}, {"db_idx": 18531, "episode_idx": 82, "frame_idx": 125, "global_frame_idx": 18531, "task_index": 16}, {"db_idx": 18532, "episode_idx": 82, "frame_idx": 126, "global_frame_idx": 18532, "task_index": 16}, {"db_idx": 18533, "episode_idx": 82, "frame_idx": 127, "global_frame_idx": 18533, "task_index": 16}, {"db_idx": 18534, "episode_idx": 82, "frame_idx": 128, "global_frame_idx": 18534, "task_index": 16}, {"db_idx": 18535, "episode_idx": 82, "frame_idx": 129, "global_frame_idx": 18535, "task_index": 16}, {"db_idx": 18536, "episode_idx": 82, "frame_idx": 130, "global_frame_idx": 18536, "task_index": 16}, {"db_idx": 18537, "episode_idx": 82, "frame_idx": 131, "global_frame_idx": 18537, "task_index": 16}, {"db_idx": 18538, "episode_idx": 82, "frame_idx": 132, "global_frame_idx": 18538, "task_index": 16}, {"db_idx": 18539, "episode_idx": 82, "frame_idx": 133, "global_frame_idx": 18539, "task_index": 16}, {"db_idx": 18540, "episode_idx": 82, "frame_idx": 134, "global_frame_idx": 
18540, "task_index": 16}, {"db_idx": 18541, "episode_idx": 82, "frame_idx": 135, "global_frame_idx": 18541, "task_index": 16}, {"db_idx": 18542, "episode_idx": 82, "frame_idx": 136, "global_frame_idx": 18542, "task_index": 16}, {"db_idx": 18543, "episode_idx": 82, "frame_idx": 137, "global_frame_idx": 18543, "task_index": 16}, {"db_idx": 18544, "episode_idx": 82, "frame_idx": 138, "global_frame_idx": 18544, "task_index": 16}, {"db_idx": 18545, "episode_idx": 82, "frame_idx": 139, "global_frame_idx": 18545, "task_index": 16}, {"db_idx": 18546, "episode_idx": 82, "frame_idx": 140, "global_frame_idx": 18546, "task_index": 16}, {"db_idx": 18547, "episode_idx": 82, "frame_idx": 141, "global_frame_idx": 18547, "task_index": 16}, {"db_idx": 18548, "episode_idx": 82, "frame_idx": 142, "global_frame_idx": 18548, "task_index": 16}, {"db_idx": 18549, "episode_idx": 82, "frame_idx": 143, "global_frame_idx": 18549, "task_index": 16}, {"db_idx": 18550, "episode_idx": 82, "frame_idx": 144, "global_frame_idx": 18550, "task_index": 16}, {"db_idx": 18551, "episode_idx": 82, "frame_idx": 145, "global_frame_idx": 18551, "task_index": 16}, {"db_idx": 18552, "episode_idx": 82, "frame_idx": 146, "global_frame_idx": 18552, "task_index": 16}, {"db_idx": 18553, "episode_idx": 82, "frame_idx": 147, "global_frame_idx": 18553, "task_index": 16}, {"db_idx": 18554, "episode_idx": 82, "frame_idx": 148, "global_frame_idx": 18554, "task_index": 16}, {"db_idx": 18555, "episode_idx": 82, "frame_idx": 149, "global_frame_idx": 18555, "task_index": 16}, {"db_idx": 18556, "episode_idx": 82, "frame_idx": 150, "global_frame_idx": 18556, "task_index": 16}, {"db_idx": 18557, "episode_idx": 82, "frame_idx": 151, "global_frame_idx": 18557, "task_index": 16}, {"db_idx": 18558, "episode_idx": 82, "frame_idx": 152, "global_frame_idx": 18558, "task_index": 16}, {"db_idx": 18559, "episode_idx": 82, "frame_idx": 153, "global_frame_idx": 18559, "task_index": 16}, {"db_idx": 18560, "episode_idx": 82, "frame_idx": 154, 
"global_frame_idx": 18560, "task_index": 16}, {"db_idx": 18561, "episode_idx": 82, "frame_idx": 155, "global_frame_idx": 18561, "task_index": 16}, {"db_idx": 18562, "episode_idx": 82, "frame_idx": 156, "global_frame_idx": 18562, "task_index": 16}, {"db_idx": 18563, "episode_idx": 82, "frame_idx": 157, "global_frame_idx": 18563, "task_index": 16}, {"db_idx": 18564, "episode_idx": 82, "frame_idx": 158, "global_frame_idx": 18564, "task_index": 16}, {"db_idx": 18565, "episode_idx": 82, "frame_idx": 159, "global_frame_idx": 18565, "task_index": 16}, {"db_idx": 18566, "episode_idx": 82, "frame_idx": 160, "global_frame_idx": 18566, "task_index": 16}, {"db_idx": 18567, "episode_idx": 82, "frame_idx": 161, "global_frame_idx": 18567, "task_index": 16}, {"db_idx": 18568, "episode_idx": 82, "frame_idx": 162, "global_frame_idx": 18568, "task_index": 16}, {"db_idx": 18569, "episode_idx": 82, "frame_idx": 163, "global_frame_idx": 18569, "task_index": 16}, {"db_idx": 18570, "episode_idx": 83, "frame_idx": 0, "global_frame_idx": 18570, "task_index": 16}, {"db_idx": 18571, "episode_idx": 83, "frame_idx": 1, "global_frame_idx": 18571, "task_index": 16}, {"db_idx": 18572, "episode_idx": 83, "frame_idx": 2, "global_frame_idx": 18572, "task_index": 16}, {"db_idx": 18573, "episode_idx": 83, "frame_idx": 3, "global_frame_idx": 18573, "task_index": 16}, {"db_idx": 18574, "episode_idx": 83, "frame_idx": 4, "global_frame_idx": 18574, "task_index": 16}, {"db_idx": 18575, "episode_idx": 83, "frame_idx": 5, "global_frame_idx": 18575, "task_index": 16}, {"db_idx": 18576, "episode_idx": 83, "frame_idx": 6, "global_frame_idx": 18576, "task_index": 16}, {"db_idx": 18577, "episode_idx": 83, "frame_idx": 7, "global_frame_idx": 18577, "task_index": 16}, {"db_idx": 18578, "episode_idx": 83, "frame_idx": 8, "global_frame_idx": 18578, "task_index": 16}, {"db_idx": 18579, "episode_idx": 83, "frame_idx": 9, "global_frame_idx": 18579, "task_index": 16}, {"db_idx": 18580, "episode_idx": 83, "frame_idx": 10, 
"global_frame_idx": 18580, "task_index": 16}, {"db_idx": 18581, "episode_idx": 83, "frame_idx": 11, "global_frame_idx": 18581, "task_index": 16}, {"db_idx": 18582, "episode_idx": 83, "frame_idx": 12, "global_frame_idx": 18582, "task_index": 16}, {"db_idx": 18583, "episode_idx": 83, "frame_idx": 13, "global_frame_idx": 18583, "task_index": 16}, {"db_idx": 18584, "episode_idx": 83, "frame_idx": 14, "global_frame_idx": 18584, "task_index": 16}, {"db_idx": 18585, "episode_idx": 83, "frame_idx": 15, "global_frame_idx": 18585, "task_index": 16}, {"db_idx": 18586, "episode_idx": 83, "frame_idx": 16, "global_frame_idx": 18586, "task_index": 16}, {"db_idx": 18587, "episode_idx": 83, "frame_idx": 17, "global_frame_idx": 18587, "task_index": 16}, {"db_idx": 18588, "episode_idx": 83, "frame_idx": 18, "global_frame_idx": 18588, "task_index": 16}, {"db_idx": 18589, "episode_idx": 83, "frame_idx": 19, "global_frame_idx": 18589, "task_index": 16}, {"db_idx": 18590, "episode_idx": 83, "frame_idx": 20, "global_frame_idx": 18590, "task_index": 16}, {"db_idx": 18591, "episode_idx": 83, "frame_idx": 21, "global_frame_idx": 18591, "task_index": 16}, {"db_idx": 18592, "episode_idx": 83, "frame_idx": 22, "global_frame_idx": 18592, "task_index": 16}, {"db_idx": 18593, "episode_idx": 83, "frame_idx": 23, "global_frame_idx": 18593, "task_index": 16}, {"db_idx": 18594, "episode_idx": 83, "frame_idx": 24, "global_frame_idx": 18594, "task_index": 16}, {"db_idx": 18595, "episode_idx": 83, "frame_idx": 25, "global_frame_idx": 18595, "task_index": 16}, {"db_idx": 18596, "episode_idx": 83, "frame_idx": 26, "global_frame_idx": 18596, "task_index": 16}, {"db_idx": 18597, "episode_idx": 83, "frame_idx": 27, "global_frame_idx": 18597, "task_index": 16}, {"db_idx": 18598, "episode_idx": 83, "frame_idx": 28, "global_frame_idx": 18598, "task_index": 16}, {"db_idx": 18599, "episode_idx": 83, "frame_idx": 29, "global_frame_idx": 18599, "task_index": 16}, {"db_idx": 18600, "episode_idx": 83, "frame_idx": 30, 
"global_frame_idx": 18600, "task_index": 16}, {"db_idx": 18601, "episode_idx": 83, "frame_idx": 31, "global_frame_idx": 18601, "task_index": 16}, {"db_idx": 18602, "episode_idx": 83, "frame_idx": 32, "global_frame_idx": 18602, "task_index": 16}, {"db_idx": 18603, "episode_idx": 83, "frame_idx": 33, "global_frame_idx": 18603, "task_index": 16}, {"db_idx": 18604, "episode_idx": 83, "frame_idx": 34, "global_frame_idx": 18604, "task_index": 16}, {"db_idx": 18605, "episode_idx": 83, "frame_idx": 35, "global_frame_idx": 18605, "task_index": 16}, {"db_idx": 18606, "episode_idx": 83, "frame_idx": 36, "global_frame_idx": 18606, "task_index": 16}, {"db_idx": 18607, "episode_idx": 83, "frame_idx": 37, "global_frame_idx": 18607, "task_index": 16}, {"db_idx": 18608, "episode_idx": 83, "frame_idx": 38, "global_frame_idx": 18608, "task_index": 16}, {"db_idx": 18609, "episode_idx": 83, "frame_idx": 39, "global_frame_idx": 18609, "task_index": 16}, {"db_idx": 18610, "episode_idx": 83, "frame_idx": 40, "global_frame_idx": 18610, "task_index": 16}, {"db_idx": 18611, "episode_idx": 83, "frame_idx": 41, "global_frame_idx": 18611, "task_index": 16}, {"db_idx": 18612, "episode_idx": 83, "frame_idx": 42, "global_frame_idx": 18612, "task_index": 16}, {"db_idx": 18613, "episode_idx": 83, "frame_idx": 43, "global_frame_idx": 18613, "task_index": 16}, {"db_idx": 18614, "episode_idx": 83, "frame_idx": 44, "global_frame_idx": 18614, "task_index": 16}, {"db_idx": 18615, "episode_idx": 83, "frame_idx": 45, "global_frame_idx": 18615, "task_index": 16}, {"db_idx": 18616, "episode_idx": 83, "frame_idx": 46, "global_frame_idx": 18616, "task_index": 16}, {"db_idx": 18617, "episode_idx": 83, "frame_idx": 47, "global_frame_idx": 18617, "task_index": 16}, {"db_idx": 18618, "episode_idx": 83, "frame_idx": 48, "global_frame_idx": 18618, "task_index": 16}, {"db_idx": 18619, "episode_idx": 83, "frame_idx": 49, "global_frame_idx": 18619, "task_index": 16}, {"db_idx": 18620, "episode_idx": 83, "frame_idx": 50, 
"global_frame_idx": 18620, "task_index": 16}, {"db_idx": 18621, "episode_idx": 83, "frame_idx": 51, "global_frame_idx": 18621, "task_index": 16}, {"db_idx": 18622, "episode_idx": 83, "frame_idx": 52, "global_frame_idx": 18622, "task_index": 16}, {"db_idx": 18623, "episode_idx": 83, "frame_idx": 53, "global_frame_idx": 18623, "task_index": 16}, {"db_idx": 18624, "episode_idx": 83, "frame_idx": 54, "global_frame_idx": 18624, "task_index": 16}, {"db_idx": 18625, "episode_idx": 83, "frame_idx": 55, "global_frame_idx": 18625, "task_index": 16}, {"db_idx": 18626, "episode_idx": 83, "frame_idx": 56, "global_frame_idx": 18626, "task_index": 16}, {"db_idx": 18627, "episode_idx": 83, "frame_idx": 57, "global_frame_idx": 18627, "task_index": 16}, {"db_idx": 18628, "episode_idx": 83, "frame_idx": 58, "global_frame_idx": 18628, "task_index": 16}, {"db_idx": 18629, "episode_idx": 83, "frame_idx": 59, "global_frame_idx": 18629, "task_index": 16}, {"db_idx": 18630, "episode_idx": 83, "frame_idx": 60, "global_frame_idx": 18630, "task_index": 16}, {"db_idx": 18631, "episode_idx": 83, "frame_idx": 61, "global_frame_idx": 18631, "task_index": 16}, {"db_idx": 18632, "episode_idx": 83, "frame_idx": 62, "global_frame_idx": 18632, "task_index": 16}, {"db_idx": 18633, "episode_idx": 83, "frame_idx": 63, "global_frame_idx": 18633, "task_index": 16}, {"db_idx": 18634, "episode_idx": 83, "frame_idx": 64, "global_frame_idx": 18634, "task_index": 16}, {"db_idx": 18635, "episode_idx": 83, "frame_idx": 65, "global_frame_idx": 18635, "task_index": 16}, {"db_idx": 18636, "episode_idx": 83, "frame_idx": 66, "global_frame_idx": 18636, "task_index": 16}, {"db_idx": 18637, "episode_idx": 83, "frame_idx": 67, "global_frame_idx": 18637, "task_index": 16}, {"db_idx": 18638, "episode_idx": 83, "frame_idx": 68, "global_frame_idx": 18638, "task_index": 16}, {"db_idx": 18639, "episode_idx": 83, "frame_idx": 69, "global_frame_idx": 18639, "task_index": 16}, {"db_idx": 18640, "episode_idx": 83, "frame_idx": 70, 
"global_frame_idx": 18640, "task_index": 16}, {"db_idx": 18641, "episode_idx": 83, "frame_idx": 71, "global_frame_idx": 18641, "task_index": 16}, {"db_idx": 18642, "episode_idx": 83, "frame_idx": 72, "global_frame_idx": 18642, "task_index": 16}, {"db_idx": 18643, "episode_idx": 83, "frame_idx": 73, "global_frame_idx": 18643, "task_index": 16}, {"db_idx": 18644, "episode_idx": 83, "frame_idx": 74, "global_frame_idx": 18644, "task_index": 16}, {"db_idx": 18645, "episode_idx": 83, "frame_idx": 75, "global_frame_idx": 18645, "task_index": 16}, {"db_idx": 18646, "episode_idx": 83, "frame_idx": 76, "global_frame_idx": 18646, "task_index": 16}, {"db_idx": 18647, "episode_idx": 83, "frame_idx": 77, "global_frame_idx": 18647, "task_index": 16}, {"db_idx": 18648, "episode_idx": 83, "frame_idx": 78, "global_frame_idx": 18648, "task_index": 16}, {"db_idx": 18649, "episode_idx": 83, "frame_idx": 79, "global_frame_idx": 18649, "task_index": 16}, {"db_idx": 18650, "episode_idx": 83, "frame_idx": 80, "global_frame_idx": 18650, "task_index": 16}, {"db_idx": 18651, "episode_idx": 83, "frame_idx": 81, "global_frame_idx": 18651, "task_index": 16}, {"db_idx": 18652, "episode_idx": 83, "frame_idx": 82, "global_frame_idx": 18652, "task_index": 16}, {"db_idx": 18653, "episode_idx": 83, "frame_idx": 83, "global_frame_idx": 18653, "task_index": 16}, {"db_idx": 18654, "episode_idx": 83, "frame_idx": 84, "global_frame_idx": 18654, "task_index": 16}, {"db_idx": 18655, "episode_idx": 83, "frame_idx": 85, "global_frame_idx": 18655, "task_index": 16}, {"db_idx": 18656, "episode_idx": 83, "frame_idx": 86, "global_frame_idx": 18656, "task_index": 16}, {"db_idx": 18657, "episode_idx": 83, "frame_idx": 87, "global_frame_idx": 18657, "task_index": 16}, {"db_idx": 18658, "episode_idx": 83, "frame_idx": 88, "global_frame_idx": 18658, "task_index": 16}, {"db_idx": 18659, "episode_idx": 83, "frame_idx": 89, "global_frame_idx": 18659, "task_index": 16}, {"db_idx": 18660, "episode_idx": 83, "frame_idx": 90, 
"global_frame_idx": 18660, "task_index": 16}, {"db_idx": 18661, "episode_idx": 83, "frame_idx": 91, "global_frame_idx": 18661, "task_index": 16}, {"db_idx": 18662, "episode_idx": 83, "frame_idx": 92, "global_frame_idx": 18662, "task_index": 16}, {"db_idx": 18663, "episode_idx": 83, "frame_idx": 93, "global_frame_idx": 18663, "task_index": 16}, {"db_idx": 18664, "episode_idx": 83, "frame_idx": 94, "global_frame_idx": 18664, "task_index": 16}, {"db_idx": 18665, "episode_idx": 83, "frame_idx": 95, "global_frame_idx": 18665, "task_index": 16}, {"db_idx": 18666, "episode_idx": 83, "frame_idx": 96, "global_frame_idx": 18666, "task_index": 16}, {"db_idx": 18667, "episode_idx": 83, "frame_idx": 97, "global_frame_idx": 18667, "task_index": 16}, {"db_idx": 18668, "episode_idx": 83, "frame_idx": 98, "global_frame_idx": 18668, "task_index": 16}, {"db_idx": 18669, "episode_idx": 83, "frame_idx": 99, "global_frame_idx": 18669, "task_index": 16}, {"db_idx": 18670, "episode_idx": 83, "frame_idx": 100, "global_frame_idx": 18670, "task_index": 16}, {"db_idx": 18671, "episode_idx": 83, "frame_idx": 101, "global_frame_idx": 18671, "task_index": 16}, {"db_idx": 18672, "episode_idx": 83, "frame_idx": 102, "global_frame_idx": 18672, "task_index": 16}, {"db_idx": 18673, "episode_idx": 83, "frame_idx": 103, "global_frame_idx": 18673, "task_index": 16}, {"db_idx": 18674, "episode_idx": 83, "frame_idx": 104, "global_frame_idx": 18674, "task_index": 16}, {"db_idx": 18675, "episode_idx": 83, "frame_idx": 105, "global_frame_idx": 18675, "task_index": 16}, {"db_idx": 18676, "episode_idx": 83, "frame_idx": 106, "global_frame_idx": 18676, "task_index": 16}, {"db_idx": 18677, "episode_idx": 83, "frame_idx": 107, "global_frame_idx": 18677, "task_index": 16}, {"db_idx": 18678, "episode_idx": 83, "frame_idx": 108, "global_frame_idx": 18678, "task_index": 16}, {"db_idx": 18679, "episode_idx": 83, "frame_idx": 109, "global_frame_idx": 18679, "task_index": 16}, {"db_idx": 18680, "episode_idx": 83, 
"frame_idx": 110, "global_frame_idx": 18680, "task_index": 16}, {"db_idx": 18681, "episode_idx": 83, "frame_idx": 111, "global_frame_idx": 18681, "task_index": 16}, {"db_idx": 18682, "episode_idx": 83, "frame_idx": 112, "global_frame_idx": 18682, "task_index": 16}, {"db_idx": 18683, "episode_idx": 83, "frame_idx": 113, "global_frame_idx": 18683, "task_index": 16}, {"db_idx": 18684, "episode_idx": 83, "frame_idx": 114, "global_frame_idx": 18684, "task_index": 16}, {"db_idx": 18685, "episode_idx": 83, "frame_idx": 115, "global_frame_idx": 18685, "task_index": 16}, {"db_idx": 18686, "episode_idx": 83, "frame_idx": 116, "global_frame_idx": 18686, "task_index": 16}, {"db_idx": 18687, "episode_idx": 83, "frame_idx": 117, "global_frame_idx": 18687, "task_index": 16}, {"db_idx": 18688, "episode_idx": 83, "frame_idx": 118, "global_frame_idx": 18688, "task_index": 16}, {"db_idx": 18689, "episode_idx": 83, "frame_idx": 119, "global_frame_idx": 18689, "task_index": 16}, {"db_idx": 18690, "episode_idx": 83, "frame_idx": 120, "global_frame_idx": 18690, "task_index": 16}, {"db_idx": 18691, "episode_idx": 83, "frame_idx": 121, "global_frame_idx": 18691, "task_index": 16}, {"db_idx": 18692, "episode_idx": 83, "frame_idx": 122, "global_frame_idx": 18692, "task_index": 16}, {"db_idx": 18693, "episode_idx": 83, "frame_idx": 123, "global_frame_idx": 18693, "task_index": 16}, {"db_idx": 18694, "episode_idx": 83, "frame_idx": 124, "global_frame_idx": 18694, "task_index": 16}, {"db_idx": 18695, "episode_idx": 83, "frame_idx": 125, "global_frame_idx": 18695, "task_index": 16}, {"db_idx": 18696, "episode_idx": 83, "frame_idx": 126, "global_frame_idx": 18696, "task_index": 16}, {"db_idx": 18697, "episode_idx": 83, "frame_idx": 127, "global_frame_idx": 18697, "task_index": 16}, {"db_idx": 18698, "episode_idx": 83, "frame_idx": 128, "global_frame_idx": 18698, "task_index": 16}, {"db_idx": 18699, "episode_idx": 83, "frame_idx": 129, "global_frame_idx": 18699, "task_index": 16}, {"db_idx": 
18700, "episode_idx": 83, "frame_idx": 130, "global_frame_idx": 18700, "task_index": 16}, {"db_idx": 18701, "episode_idx": 83, "frame_idx": 131, "global_frame_idx": 18701, "task_index": 16}, {"db_idx": 18702, "episode_idx": 83, "frame_idx": 132, "global_frame_idx": 18702, "task_index": 16}, {"db_idx": 18703, "episode_idx": 83, "frame_idx": 133, "global_frame_idx": 18703, "task_index": 16}, {"db_idx": 18704, "episode_idx": 83, "frame_idx": 134, "global_frame_idx": 18704, "task_index": 16}, {"db_idx": 18705, "episode_idx": 83, "frame_idx": 135, "global_frame_idx": 18705, "task_index": 16}, {"db_idx": 18706, "episode_idx": 83, "frame_idx": 136, "global_frame_idx": 18706, "task_index": 16}, {"db_idx": 18707, "episode_idx": 83, "frame_idx": 137, "global_frame_idx": 18707, "task_index": 16}, {"db_idx": 18708, "episode_idx": 83, "frame_idx": 138, "global_frame_idx": 18708, "task_index": 16}, {"db_idx": 18709, "episode_idx": 83, "frame_idx": 139, "global_frame_idx": 18709, "task_index": 16}, {"db_idx": 18710, "episode_idx": 83, "frame_idx": 140, "global_frame_idx": 18710, "task_index": 16}, {"db_idx": 18711, "episode_idx": 83, "frame_idx": 141, "global_frame_idx": 18711, "task_index": 16}, {"db_idx": 18712, "episode_idx": 83, "frame_idx": 142, "global_frame_idx": 18712, "task_index": 16}, {"db_idx": 18713, "episode_idx": 83, "frame_idx": 143, "global_frame_idx": 18713, "task_index": 16}, {"db_idx": 18714, "episode_idx": 83, "frame_idx": 144, "global_frame_idx": 18714, "task_index": 16}, {"db_idx": 18715, "episode_idx": 83, "frame_idx": 145, "global_frame_idx": 18715, "task_index": 16}, {"db_idx": 18716, "episode_idx": 83, "frame_idx": 146, "global_frame_idx": 18716, "task_index": 16}, {"db_idx": 18717, "episode_idx": 83, "frame_idx": 147, "global_frame_idx": 18717, "task_index": 16}, {"db_idx": 18718, "episode_idx": 83, "frame_idx": 148, "global_frame_idx": 18718, "task_index": 16}, {"db_idx": 18719, "episode_idx": 83, "frame_idx": 149, "global_frame_idx": 18719, 
"task_index": 16}, {"db_idx": 18720, "episode_idx": 83, "frame_idx": 150, "global_frame_idx": 18720, "task_index": 16}, {"db_idx": 18721, "episode_idx": 83, "frame_idx": 151, "global_frame_idx": 18721, "task_index": 16}, {"db_idx": 18722, "episode_idx": 83, "frame_idx": 152, "global_frame_idx": 18722, "task_index": 16}, {"db_idx": 18723, "episode_idx": 83, "frame_idx": 153, "global_frame_idx": 18723, "task_index": 16}, {"db_idx": 18724, "episode_idx": 84, "frame_idx": 0, "global_frame_idx": 18724, "task_index": 16}, {"db_idx": 18725, "episode_idx": 84, "frame_idx": 1, "global_frame_idx": 18725, "task_index": 16}, {"db_idx": 18726, "episode_idx": 84, "frame_idx": 2, "global_frame_idx": 18726, "task_index": 16}, {"db_idx": 18727, "episode_idx": 84, "frame_idx": 3, "global_frame_idx": 18727, "task_index": 16}, {"db_idx": 18728, "episode_idx": 84, "frame_idx": 4, "global_frame_idx": 18728, "task_index": 16}, {"db_idx": 18729, "episode_idx": 84, "frame_idx": 5, "global_frame_idx": 18729, "task_index": 16}, {"db_idx": 18730, "episode_idx": 84, "frame_idx": 6, "global_frame_idx": 18730, "task_index": 16}, {"db_idx": 18731, "episode_idx": 84, "frame_idx": 7, "global_frame_idx": 18731, "task_index": 16}, {"db_idx": 18732, "episode_idx": 84, "frame_idx": 8, "global_frame_idx": 18732, "task_index": 16}, {"db_idx": 18733, "episode_idx": 84, "frame_idx": 9, "global_frame_idx": 18733, "task_index": 16}, {"db_idx": 18734, "episode_idx": 84, "frame_idx": 10, "global_frame_idx": 18734, "task_index": 16}, {"db_idx": 18735, "episode_idx": 84, "frame_idx": 11, "global_frame_idx": 18735, "task_index": 16}, {"db_idx": 18736, "episode_idx": 84, "frame_idx": 12, "global_frame_idx": 18736, "task_index": 16}, {"db_idx": 18737, "episode_idx": 84, "frame_idx": 13, "global_frame_idx": 18737, "task_index": 16}, {"db_idx": 18738, "episode_idx": 84, "frame_idx": 14, "global_frame_idx": 18738, "task_index": 16}, {"db_idx": 18739, "episode_idx": 84, "frame_idx": 15, "global_frame_idx": 18739, 
"task_index": 16}, {"db_idx": 18740, "episode_idx": 84, "frame_idx": 16, "global_frame_idx": 18740, "task_index": 16}, {"db_idx": 18741, "episode_idx": 84, "frame_idx": 17, "global_frame_idx": 18741, "task_index": 16}, {"db_idx": 18742, "episode_idx": 84, "frame_idx": 18, "global_frame_idx": 18742, "task_index": 16}, {"db_idx": 18743, "episode_idx": 84, "frame_idx": 19, "global_frame_idx": 18743, "task_index": 16}, {"db_idx": 18744, "episode_idx": 84, "frame_idx": 20, "global_frame_idx": 18744, "task_index": 16}, {"db_idx": 18745, "episode_idx": 84, "frame_idx": 21, "global_frame_idx": 18745, "task_index": 16}, {"db_idx": 18746, "episode_idx": 84, "frame_idx": 22, "global_frame_idx": 18746, "task_index": 16}, {"db_idx": 18747, "episode_idx": 84, "frame_idx": 23, "global_frame_idx": 18747, "task_index": 16}, {"db_idx": 18748, "episode_idx": 84, "frame_idx": 24, "global_frame_idx": 18748, "task_index": 16}, {"db_idx": 18749, "episode_idx": 84, "frame_idx": 25, "global_frame_idx": 18749, "task_index": 16}, {"db_idx": 18750, "episode_idx": 84, "frame_idx": 26, "global_frame_idx": 18750, "task_index": 16}, {"db_idx": 18751, "episode_idx": 84, "frame_idx": 27, "global_frame_idx": 18751, "task_index": 16}, {"db_idx": 18752, "episode_idx": 84, "frame_idx": 28, "global_frame_idx": 18752, "task_index": 16}, {"db_idx": 18753, "episode_idx": 84, "frame_idx": 29, "global_frame_idx": 18753, "task_index": 16}, {"db_idx": 18754, "episode_idx": 84, "frame_idx": 30, "global_frame_idx": 18754, "task_index": 16}, {"db_idx": 18755, "episode_idx": 84, "frame_idx": 31, "global_frame_idx": 18755, "task_index": 16}, {"db_idx": 18756, "episode_idx": 84, "frame_idx": 32, "global_frame_idx": 18756, "task_index": 16}, {"db_idx": 18757, "episode_idx": 84, "frame_idx": 33, "global_frame_idx": 18757, "task_index": 16}, {"db_idx": 18758, "episode_idx": 84, "frame_idx": 34, "global_frame_idx": 18758, "task_index": 16}, {"db_idx": 18759, "episode_idx": 84, "frame_idx": 35, "global_frame_idx": 18759, 
"task_index": 16}, {"db_idx": 18760, "episode_idx": 84, "frame_idx": 36, "global_frame_idx": 18760, "task_index": 16}, {"db_idx": 18761, "episode_idx": 84, "frame_idx": 37, "global_frame_idx": 18761, "task_index": 16}, {"db_idx": 18762, "episode_idx": 84, "frame_idx": 38, "global_frame_idx": 18762, "task_index": 16}, {"db_idx": 18763, "episode_idx": 84, "frame_idx": 39, "global_frame_idx": 18763, "task_index": 16}, {"db_idx": 18764, "episode_idx": 84, "frame_idx": 40, "global_frame_idx": 18764, "task_index": 16}, {"db_idx": 18765, "episode_idx": 84, "frame_idx": 41, "global_frame_idx": 18765, "task_index": 16}, {"db_idx": 18766, "episode_idx": 84, "frame_idx": 42, "global_frame_idx": 18766, "task_index": 16}, {"db_idx": 18767, "episode_idx": 84, "frame_idx": 43, "global_frame_idx": 18767, "task_index": 16}, {"db_idx": 18768, "episode_idx": 84, "frame_idx": 44, "global_frame_idx": 18768, "task_index": 16}, {"db_idx": 18769, "episode_idx": 84, "frame_idx": 45, "global_frame_idx": 18769, "task_index": 16}, {"db_idx": 18770, "episode_idx": 84, "frame_idx": 46, "global_frame_idx": 18770, "task_index": 16}, {"db_idx": 18771, "episode_idx": 84, "frame_idx": 47, "global_frame_idx": 18771, "task_index": 16}, {"db_idx": 18772, "episode_idx": 84, "frame_idx": 48, "global_frame_idx": 18772, "task_index": 16}, {"db_idx": 18773, "episode_idx": 84, "frame_idx": 49, "global_frame_idx": 18773, "task_index": 16}, {"db_idx": 18774, "episode_idx": 84, "frame_idx": 50, "global_frame_idx": 18774, "task_index": 16}, {"db_idx": 18775, "episode_idx": 84, "frame_idx": 51, "global_frame_idx": 18775, "task_index": 16}, {"db_idx": 18776, "episode_idx": 84, "frame_idx": 52, "global_frame_idx": 18776, "task_index": 16}, {"db_idx": 18777, "episode_idx": 84, "frame_idx": 53, "global_frame_idx": 18777, "task_index": 16}, {"db_idx": 18778, "episode_idx": 84, "frame_idx": 54, "global_frame_idx": 18778, "task_index": 16}, {"db_idx": 18779, "episode_idx": 84, "frame_idx": 55, "global_frame_idx": 18779, 
"task_index": 16}, {"db_idx": 18780, "episode_idx": 84, "frame_idx": 56, "global_frame_idx": 18780, "task_index": 16}, {"db_idx": 18781, "episode_idx": 84, "frame_idx": 57, "global_frame_idx": 18781, "task_index": 16}, {"db_idx": 18782, "episode_idx": 84, "frame_idx": 58, "global_frame_idx": 18782, "task_index": 16}, {"db_idx": 18783, "episode_idx": 84, "frame_idx": 59, "global_frame_idx": 18783, "task_index": 16}, {"db_idx": 18784, "episode_idx": 84, "frame_idx": 60, "global_frame_idx": 18784, "task_index": 16}, {"db_idx": 18785, "episode_idx": 84, "frame_idx": 61, "global_frame_idx": 18785, "task_index": 16}, {"db_idx": 18786, "episode_idx": 84, "frame_idx": 62, "global_frame_idx": 18786, "task_index": 16}, {"db_idx": 18787, "episode_idx": 84, "frame_idx": 63, "global_frame_idx": 18787, "task_index": 16}, {"db_idx": 18788, "episode_idx": 84, "frame_idx": 64, "global_frame_idx": 18788, "task_index": 16}, {"db_idx": 18789, "episode_idx": 84, "frame_idx": 65, "global_frame_idx": 18789, "task_index": 16}, {"db_idx": 18790, "episode_idx": 84, "frame_idx": 66, "global_frame_idx": 18790, "task_index": 16}, {"db_idx": 18791, "episode_idx": 84, "frame_idx": 67, "global_frame_idx": 18791, "task_index": 16}, {"db_idx": 18792, "episode_idx": 84, "frame_idx": 68, "global_frame_idx": 18792, "task_index": 16}, {"db_idx": 18793, "episode_idx": 84, "frame_idx": 69, "global_frame_idx": 18793, "task_index": 16}, {"db_idx": 18794, "episode_idx": 84, "frame_idx": 70, "global_frame_idx": 18794, "task_index": 16}, {"db_idx": 18795, "episode_idx": 84, "frame_idx": 71, "global_frame_idx": 18795, "task_index": 16}, {"db_idx": 18796, "episode_idx": 84, "frame_idx": 72, "global_frame_idx": 18796, "task_index": 16}, {"db_idx": 18797, "episode_idx": 84, "frame_idx": 73, "global_frame_idx": 18797, "task_index": 16}, {"db_idx": 18798, "episode_idx": 84, "frame_idx": 74, "global_frame_idx": 18798, "task_index": 16}, {"db_idx": 18799, "episode_idx": 84, "frame_idx": 75, "global_frame_idx": 18799, 
"task_index": 16}, {"db_idx": 18800, "episode_idx": 84, "frame_idx": 76, "global_frame_idx": 18800, "task_index": 16}, {"db_idx": 18801, "episode_idx": 84, "frame_idx": 77, "global_frame_idx": 18801, "task_index": 16}, {"db_idx": 18802, "episode_idx": 84, "frame_idx": 78, "global_frame_idx": 18802, "task_index": 16}, {"db_idx": 18803, "episode_idx": 84, "frame_idx": 79, "global_frame_idx": 18803, "task_index": 16}, {"db_idx": 18804, "episode_idx": 84, "frame_idx": 80, "global_frame_idx": 18804, "task_index": 16}, {"db_idx": 18805, "episode_idx": 84, "frame_idx": 81, "global_frame_idx": 18805, "task_index": 16}, {"db_idx": 18806, "episode_idx": 84, "frame_idx": 82, "global_frame_idx": 18806, "task_index": 16}, {"db_idx": 18807, "episode_idx": 84, "frame_idx": 83, "global_frame_idx": 18807, "task_index": 16}, {"db_idx": 18808, "episode_idx": 84, "frame_idx": 84, "global_frame_idx": 18808, "task_index": 16}, {"db_idx": 18809, "episode_idx": 84, "frame_idx": 85, "global_frame_idx": 18809, "task_index": 16}, {"db_idx": 18810, "episode_idx": 84, "frame_idx": 86, "global_frame_idx": 18810, "task_index": 16}, {"db_idx": 18811, "episode_idx": 84, "frame_idx": 87, "global_frame_idx": 18811, "task_index": 16}, {"db_idx": 18812, "episode_idx": 84, "frame_idx": 88, "global_frame_idx": 18812, "task_index": 16}, {"db_idx": 18813, "episode_idx": 84, "frame_idx": 89, "global_frame_idx": 18813, "task_index": 16}, {"db_idx": 18814, "episode_idx": 84, "frame_idx": 90, "global_frame_idx": 18814, "task_index": 16}, {"db_idx": 18815, "episode_idx": 84, "frame_idx": 91, "global_frame_idx": 18815, "task_index": 16}, {"db_idx": 18816, "episode_idx": 84, "frame_idx": 92, "global_frame_idx": 18816, "task_index": 16}, {"db_idx": 18817, "episode_idx": 84, "frame_idx": 93, "global_frame_idx": 18817, "task_index": 16}, {"db_idx": 18818, "episode_idx": 84, "frame_idx": 94, "global_frame_idx": 18818, "task_index": 16}, {"db_idx": 18819, "episode_idx": 84, "frame_idx": 95, "global_frame_idx": 18819, 
"task_index": 16}, {"db_idx": 18820, "episode_idx": 84, "frame_idx": 96, "global_frame_idx": 18820, "task_index": 16}, {"db_idx": 18821, "episode_idx": 84, "frame_idx": 97, "global_frame_idx": 18821, "task_index": 16}, {"db_idx": 18822, "episode_idx": 84, "frame_idx": 98, "global_frame_idx": 18822, "task_index": 16}, {"db_idx": 18823, "episode_idx": 84, "frame_idx": 99, "global_frame_idx": 18823, "task_index": 16}, {"db_idx": 18824, "episode_idx": 84, "frame_idx": 100, "global_frame_idx": 18824, "task_index": 16}, {"db_idx": 18825, "episode_idx": 84, "frame_idx": 101, "global_frame_idx": 18825, "task_index": 16}, {"db_idx": 18826, "episode_idx": 84, "frame_idx": 102, "global_frame_idx": 18826, "task_index": 16}, {"db_idx": 18827, "episode_idx": 84, "frame_idx": 103, "global_frame_idx": 18827, "task_index": 16}, {"db_idx": 18828, "episode_idx": 84, "frame_idx": 104, "global_frame_idx": 18828, "task_index": 16}, {"db_idx": 18829, "episode_idx": 84, "frame_idx": 105, "global_frame_idx": 18829, "task_index": 16}, {"db_idx": 18830, "episode_idx": 84, "frame_idx": 106, "global_frame_idx": 18830, "task_index": 16}, {"db_idx": 18831, "episode_idx": 84, "frame_idx": 107, "global_frame_idx": 18831, "task_index": 16}, {"db_idx": 18832, "episode_idx": 84, "frame_idx": 108, "global_frame_idx": 18832, "task_index": 16}, {"db_idx": 18833, "episode_idx": 84, "frame_idx": 109, "global_frame_idx": 18833, "task_index": 16}, {"db_idx": 18834, "episode_idx": 84, "frame_idx": 110, "global_frame_idx": 18834, "task_index": 16}, {"db_idx": 18835, "episode_idx": 84, "frame_idx": 111, "global_frame_idx": 18835, "task_index": 16}, {"db_idx": 18836, "episode_idx": 84, "frame_idx": 112, "global_frame_idx": 18836, "task_index": 16}, {"db_idx": 18837, "episode_idx": 84, "frame_idx": 113, "global_frame_idx": 18837, "task_index": 16}, {"db_idx": 18838, "episode_idx": 84, "frame_idx": 114, "global_frame_idx": 18838, "task_index": 16}, {"db_idx": 18839, "episode_idx": 84, "frame_idx": 115, 
"global_frame_idx": 18839, "task_index": 16}, {"db_idx": 18840, "episode_idx": 84, "frame_idx": 116, "global_frame_idx": 18840, "task_index": 16}, {"db_idx": 18841, "episode_idx": 84, "frame_idx": 117, "global_frame_idx": 18841, "task_index": 16}, {"db_idx": 18842, "episode_idx": 84, "frame_idx": 118, "global_frame_idx": 18842, "task_index": 16}, {"db_idx": 18843, "episode_idx": 84, "frame_idx": 119, "global_frame_idx": 18843, "task_index": 16}, {"db_idx": 18844, "episode_idx": 84, "frame_idx": 120, "global_frame_idx": 18844, "task_index": 16}, {"db_idx": 18845, "episode_idx": 84, "frame_idx": 121, "global_frame_idx": 18845, "task_index": 16}, {"db_idx": 18846, "episode_idx": 84, "frame_idx": 122, "global_frame_idx": 18846, "task_index": 16}, {"db_idx": 18847, "episode_idx": 84, "frame_idx": 123, "global_frame_idx": 18847, "task_index": 16}, {"db_idx": 18848, "episode_idx": 84, "frame_idx": 124, "global_frame_idx": 18848, "task_index": 16}, {"db_idx": 18849, "episode_idx": 84, "frame_idx": 125, "global_frame_idx": 18849, "task_index": 16}, {"db_idx": 18850, "episode_idx": 84, "frame_idx": 126, "global_frame_idx": 18850, "task_index": 16}, {"db_idx": 18851, "episode_idx": 84, "frame_idx": 127, "global_frame_idx": 18851, "task_index": 16}, {"db_idx": 18852, "episode_idx": 84, "frame_idx": 128, "global_frame_idx": 18852, "task_index": 16}, {"db_idx": 18853, "episode_idx": 84, "frame_idx": 129, "global_frame_idx": 18853, "task_index": 16}, {"db_idx": 18854, "episode_idx": 84, "frame_idx": 130, "global_frame_idx": 18854, "task_index": 16}, {"db_idx": 18855, "episode_idx": 84, "frame_idx": 131, "global_frame_idx": 18855, "task_index": 16}, {"db_idx": 18856, "episode_idx": 84, "frame_idx": 132, "global_frame_idx": 18856, "task_index": 16}, {"db_idx": 18857, "episode_idx": 84, "frame_idx": 133, "global_frame_idx": 18857, "task_index": 16}, {"db_idx": 18858, "episode_idx": 84, "frame_idx": 134, "global_frame_idx": 18858, "task_index": 16}, {"db_idx": 18859, "episode_idx": 
84, "frame_idx": 135, "global_frame_idx": 18859, "task_index": 16}, {"db_idx": 18860, "episode_idx": 84, "frame_idx": 136, "global_frame_idx": 18860, "task_index": 16}, {"db_idx": 18861, "episode_idx": 84, "frame_idx": 137, "global_frame_idx": 18861, "task_index": 16}, {"db_idx": 18862, "episode_idx": 84, "frame_idx": 138, "global_frame_idx": 18862, "task_index": 16}, {"db_idx": 18863, "episode_idx": 84, "frame_idx": 139, "global_frame_idx": 18863, "task_index": 16}, {"db_idx": 18864, "episode_idx": 84, "frame_idx": 140, "global_frame_idx": 18864, "task_index": 16}, {"db_idx": 18865, "episode_idx": 84, "frame_idx": 141, "global_frame_idx": 18865, "task_index": 16}, {"db_idx": 18866, "episode_idx": 84, "frame_idx": 142, "global_frame_idx": 18866, "task_index": 16}, {"db_idx": 18867, "episode_idx": 84, "frame_idx": 143, "global_frame_idx": 18867, "task_index": 16}, {"db_idx": 18868, "episode_idx": 84, "frame_idx": 144, "global_frame_idx": 18868, "task_index": 16}, {"db_idx": 18869, "episode_idx": 84, "frame_idx": 145, "global_frame_idx": 18869, "task_index": 16}, {"db_idx": 18870, "episode_idx": 84, "frame_idx": 146, "global_frame_idx": 18870, "task_index": 16}, {"db_idx": 18871, "episode_idx": 84, "frame_idx": 147, "global_frame_idx": 18871, "task_index": 16}, {"db_idx": 18872, "episode_idx": 84, "frame_idx": 148, "global_frame_idx": 18872, "task_index": 16}, {"db_idx": 18873, "episode_idx": 84, "frame_idx": 149, "global_frame_idx": 18873, "task_index": 16}, {"db_idx": 18874, "episode_idx": 84, "frame_idx": 150, "global_frame_idx": 18874, "task_index": 16}, {"db_idx": 18875, "episode_idx": 84, "frame_idx": 151, "global_frame_idx": 18875, "task_index": 16}, {"db_idx": 18876, "episode_idx": 84, "frame_idx": 152, "global_frame_idx": 18876, "task_index": 16}, {"db_idx": 18877, "episode_idx": 84, "frame_idx": 153, "global_frame_idx": 18877, "task_index": 16}, {"db_idx": 18878, "episode_idx": 84, "frame_idx": 154, "global_frame_idx": 18878, "task_index": 16}, {"db_idx": 
18879, "episode_idx": 84, "frame_idx": 155, "global_frame_idx": 18879, "task_index": 16}, {"db_idx": 18880, "episode_idx": 84, "frame_idx": 156, "global_frame_idx": 18880, "task_index": 16}, {"db_idx": 18881, "episode_idx": 84, "frame_idx": 157, "global_frame_idx": 18881, "task_index": 16}, {"db_idx": 18882, "episode_idx": 84, "frame_idx": 158, "global_frame_idx": 18882, "task_index": 16}, {"db_idx": 18883, "episode_idx": 84, "frame_idx": 159, "global_frame_idx": 18883, "task_index": 16}, {"db_idx": 18884, "episode_idx": 84, "frame_idx": 160, "global_frame_idx": 18884, "task_index": 16}, {"db_idx": 18885, "episode_idx": 84, "frame_idx": 161, "global_frame_idx": 18885, "task_index": 16}, {"db_idx": 18886, "episode_idx": 84, "frame_idx": 162, "global_frame_idx": 18886, "task_index": 16}, {"db_idx": 18887, "episode_idx": 84, "frame_idx": 163, "global_frame_idx": 18887, "task_index": 16}, {"db_idx": 18888, "episode_idx": 84, "frame_idx": 164, "global_frame_idx": 18888, "task_index": 16}, {"db_idx": 18889, "episode_idx": 84, "frame_idx": 165, "global_frame_idx": 18889, "task_index": 16}, {"db_idx": 18890, "episode_idx": 84, "frame_idx": 166, "global_frame_idx": 18890, "task_index": 16}, {"db_idx": 18891, "episode_idx": 84, "frame_idx": 167, "global_frame_idx": 18891, "task_index": 16}, {"db_idx": 18892, "episode_idx": 84, "frame_idx": 168, "global_frame_idx": 18892, "task_index": 16}, {"db_idx": 18893, "episode_idx": 84, "frame_idx": 169, "global_frame_idx": 18893, "task_index": 16}, {"db_idx": 18894, "episode_idx": 84, "frame_idx": 170, "global_frame_idx": 18894, "task_index": 16}, {"db_idx": 18895, "episode_idx": 84, "frame_idx": 171, "global_frame_idx": 18895, "task_index": 16}, {"db_idx": 18896, "episode_idx": 84, "frame_idx": 172, "global_frame_idx": 18896, "task_index": 16}, {"db_idx": 18897, "episode_idx": 84, "frame_idx": 173, "global_frame_idx": 18897, "task_index": 16}, {"db_idx": 18898, "episode_idx": 84, "frame_idx": 174, "global_frame_idx": 18898, 
"task_index": 16}, {"db_idx": 18899, "episode_idx": 84, "frame_idx": 175, "global_frame_idx": 18899, "task_index": 16}, {"db_idx": 18900, "episode_idx": 84, "frame_idx": 176, "global_frame_idx": 18900, "task_index": 16}, {"db_idx": 18901, "episode_idx": 84, "frame_idx": 177, "global_frame_idx": 18901, "task_index": 16}, {"db_idx": 18902, "episode_idx": 84, "frame_idx": 178, "global_frame_idx": 18902, "task_index": 16}, {"db_idx": 18903, "episode_idx": 84, "frame_idx": 179, "global_frame_idx": 18903, "task_index": 16}, {"db_idx": 18904, "episode_idx": 84, "frame_idx": 180, "global_frame_idx": 18904, "task_index": 16}, {"db_idx": 18905, "episode_idx": 84, "frame_idx": 181, "global_frame_idx": 18905, "task_index": 16}, {"db_idx": 18906, "episode_idx": 84, "frame_idx": 182, "global_frame_idx": 18906, "task_index": 16}, {"db_idx": 18907, "episode_idx": 84, "frame_idx": 183, "global_frame_idx": 18907, "task_index": 16}, {"db_idx": 18908, "episode_idx": 84, "frame_idx": 184, "global_frame_idx": 18908, "task_index": 16}, {"db_idx": 18909, "episode_idx": 84, "frame_idx": 185, "global_frame_idx": 18909, "task_index": 16}, {"db_idx": 18910, "episode_idx": 84, "frame_idx": 186, "global_frame_idx": 18910, "task_index": 16}, {"db_idx": 18911, "episode_idx": 84, "frame_idx": 187, "global_frame_idx": 18911, "task_index": 16}, {"db_idx": 18912, "episode_idx": 84, "frame_idx": 188, "global_frame_idx": 18912, "task_index": 16}, {"db_idx": 18913, "episode_idx": 84, "frame_idx": 189, "global_frame_idx": 18913, "task_index": 16}, {"db_idx": 18914, "episode_idx": 84, "frame_idx": 190, "global_frame_idx": 18914, "task_index": 16}, {"db_idx": 18915, "episode_idx": 84, "frame_idx": 191, "global_frame_idx": 18915, "task_index": 16}, {"db_idx": 18916, "episode_idx": 84, "frame_idx": 192, "global_frame_idx": 18916, "task_index": 16}, {"db_idx": 18917, "episode_idx": 84, "frame_idx": 193, "global_frame_idx": 18917, "task_index": 16}, {"db_idx": 18918, "episode_idx": 84, "frame_idx": 194, 
"global_frame_idx": 18918, "task_index": 16}, {"db_idx": 18919, "episode_idx": 84, "frame_idx": 195, "global_frame_idx": 18919, "task_index": 16}, {"db_idx": 18920, "episode_idx": 84, "frame_idx": 196, "global_frame_idx": 18920, "task_index": 16}, {"db_idx": 18921, "episode_idx": 84, "frame_idx": 197, "global_frame_idx": 18921, "task_index": 16}, {"db_idx": 18922, "episode_idx": 84, "frame_idx": 198, "global_frame_idx": 18922, "task_index": 16}, {"db_idx": 18923, "episode_idx": 84, "frame_idx": 199, "global_frame_idx": 18923, "task_index": 16}, {"db_idx": 18924, "episode_idx": 84, "frame_idx": 200, "global_frame_idx": 18924, "task_index": 16}, {"db_idx": 18925, "episode_idx": 84, "frame_idx": 201, "global_frame_idx": 18925, "task_index": 16}, {"db_idx": 18926, "episode_idx": 84, "frame_idx": 202, "global_frame_idx": 18926, "task_index": 16}, {"db_idx": 18927, "episode_idx": 84, "frame_idx": 203, "global_frame_idx": 18927, "task_index": 16}, {"db_idx": 18928, "episode_idx": 84, "frame_idx": 204, "global_frame_idx": 18928, "task_index": 16}, {"db_idx": 18929, "episode_idx": 84, "frame_idx": 205, "global_frame_idx": 18929, "task_index": 16}, {"db_idx": 18930, "episode_idx": 84, "frame_idx": 206, "global_frame_idx": 18930, "task_index": 16}, {"db_idx": 18931, "episode_idx": 84, "frame_idx": 207, "global_frame_idx": 18931, "task_index": 16}, {"db_idx": 18932, "episode_idx": 84, "frame_idx": 208, "global_frame_idx": 18932, "task_index": 16}, {"db_idx": 18933, "episode_idx": 84, "frame_idx": 209, "global_frame_idx": 18933, "task_index": 16}, {"db_idx": 18934, "episode_idx": 84, "frame_idx": 210, "global_frame_idx": 18934, "task_index": 16}, {"db_idx": 18935, "episode_idx": 84, "frame_idx": 211, "global_frame_idx": 18935, "task_index": 16}, {"db_idx": 18936, "episode_idx": 84, "frame_idx": 212, "global_frame_idx": 18936, "task_index": 16}, {"db_idx": 18937, "episode_idx": 84, "frame_idx": 213, "global_frame_idx": 18937, "task_index": 16}, {"db_idx": 18938, "episode_idx": 
84, "frame_idx": 214, "global_frame_idx": 18938, "task_index": 16}, {"db_idx": 18939, "episode_idx": 84, "frame_idx": 215, "global_frame_idx": 18939, "task_index": 16}, {"db_idx": 18940, "episode_idx": 84, "frame_idx": 216, "global_frame_idx": 18940, "task_index": 16}, {"db_idx": 18941, "episode_idx": 84, "frame_idx": 217, "global_frame_idx": 18941, "task_index": 16}, {"db_idx": 18942, "episode_idx": 84, "frame_idx": 218, "global_frame_idx": 18942, "task_index": 16}, {"db_idx": 18943, "episode_idx": 84, "frame_idx": 219, "global_frame_idx": 18943, "task_index": 16}, {"db_idx": 18944, "episode_idx": 84, "frame_idx": 220, "global_frame_idx": 18944, "task_index": 16}, {"db_idx": 18945, "episode_idx": 84, "frame_idx": 221, "global_frame_idx": 18945, "task_index": 16}, {"db_idx": 18946, "episode_idx": 84, "frame_idx": 222, "global_frame_idx": 18946, "task_index": 16}, {"db_idx": 18947, "episode_idx": 84, "frame_idx": 223, "global_frame_idx": 18947, "task_index": 16}, {"db_idx": 18948, "episode_idx": 85, "frame_idx": 0, "global_frame_idx": 18948, "task_index": 17}, {"db_idx": 18949, "episode_idx": 85, "frame_idx": 1, "global_frame_idx": 18949, "task_index": 17}, {"db_idx": 18950, "episode_idx": 85, "frame_idx": 2, "global_frame_idx": 18950, "task_index": 17}, {"db_idx": 18951, "episode_idx": 85, "frame_idx": 3, "global_frame_idx": 18951, "task_index": 17}, {"db_idx": 18952, "episode_idx": 85, "frame_idx": 4, "global_frame_idx": 18952, "task_index": 17}, {"db_idx": 18953, "episode_idx": 85, "frame_idx": 5, "global_frame_idx": 18953, "task_index": 17}, {"db_idx": 18954, "episode_idx": 85, "frame_idx": 6, "global_frame_idx": 18954, "task_index": 17}, {"db_idx": 18955, "episode_idx": 85, "frame_idx": 7, "global_frame_idx": 18955, "task_index": 17}, {"db_idx": 18956, "episode_idx": 85, "frame_idx": 8, "global_frame_idx": 18956, "task_index": 17}, {"db_idx": 18957, "episode_idx": 85, "frame_idx": 9, "global_frame_idx": 18957, "task_index": 17}, {"db_idx": 18958, "episode_idx": 
85, "frame_idx": 10, "global_frame_idx": 18958, "task_index": 17}, {"db_idx": 18959, "episode_idx": 85, "frame_idx": 11, "global_frame_idx": 18959, "task_index": 17}, {"db_idx": 18960, "episode_idx": 85, "frame_idx": 12, "global_frame_idx": 18960, "task_index": 17}, {"db_idx": 18961, "episode_idx": 85, "frame_idx": 13, "global_frame_idx": 18961, "task_index": 17}, {"db_idx": 18962, "episode_idx": 85, "frame_idx": 14, "global_frame_idx": 18962, "task_index": 17}, {"db_idx": 18963, "episode_idx": 85, "frame_idx": 15, "global_frame_idx": 18963, "task_index": 17}, {"db_idx": 18964, "episode_idx": 85, "frame_idx": 16, "global_frame_idx": 18964, "task_index": 17}, {"db_idx": 18965, "episode_idx": 85, "frame_idx": 17, "global_frame_idx": 18965, "task_index": 17}, {"db_idx": 18966, "episode_idx": 85, "frame_idx": 18, "global_frame_idx": 18966, "task_index": 17}, {"db_idx": 18967, "episode_idx": 85, "frame_idx": 19, "global_frame_idx": 18967, "task_index": 17}, {"db_idx": 18968, "episode_idx": 85, "frame_idx": 20, "global_frame_idx": 18968, "task_index": 17}, {"db_idx": 18969, "episode_idx": 85, "frame_idx": 21, "global_frame_idx": 18969, "task_index": 17}, {"db_idx": 18970, "episode_idx": 85, "frame_idx": 22, "global_frame_idx": 18970, "task_index": 17}, {"db_idx": 18971, "episode_idx": 85, "frame_idx": 23, "global_frame_idx": 18971, "task_index": 17}, {"db_idx": 18972, "episode_idx": 85, "frame_idx": 24, "global_frame_idx": 18972, "task_index": 17}, {"db_idx": 18973, "episode_idx": 85, "frame_idx": 25, "global_frame_idx": 18973, "task_index": 17}, {"db_idx": 18974, "episode_idx": 85, "frame_idx": 26, "global_frame_idx": 18974, "task_index": 17}, {"db_idx": 18975, "episode_idx": 85, "frame_idx": 27, "global_frame_idx": 18975, "task_index": 17}, {"db_idx": 18976, "episode_idx": 85, "frame_idx": 28, "global_frame_idx": 18976, "task_index": 17}, {"db_idx": 18977, "episode_idx": 85, "frame_idx": 29, "global_frame_idx": 18977, "task_index": 17}, {"db_idx": 18978, "episode_idx": 
85, "frame_idx": 30, "global_frame_idx": 18978, "task_index": 17}, {"db_idx": 18979, "episode_idx": 85, "frame_idx": 31, "global_frame_idx": 18979, "task_index": 17}, {"db_idx": 18980, "episode_idx": 85, "frame_idx": 32, "global_frame_idx": 18980, "task_index": 17}, {"db_idx": 18981, "episode_idx": 85, "frame_idx": 33, "global_frame_idx": 18981, "task_index": 17}, {"db_idx": 18982, "episode_idx": 85, "frame_idx": 34, "global_frame_idx": 18982, "task_index": 17}, {"db_idx": 18983, "episode_idx": 85, "frame_idx": 35, "global_frame_idx": 18983, "task_index": 17}, {"db_idx": 18984, "episode_idx": 85, "frame_idx": 36, "global_frame_idx": 18984, "task_index": 17}, {"db_idx": 18985, "episode_idx": 85, "frame_idx": 37, "global_frame_idx": 18985, "task_index": 17}, {"db_idx": 18986, "episode_idx": 85, "frame_idx": 38, "global_frame_idx": 18986, "task_index": 17}, {"db_idx": 18987, "episode_idx": 85, "frame_idx": 39, "global_frame_idx": 18987, "task_index": 17}, {"db_idx": 18988, "episode_idx": 85, "frame_idx": 40, "global_frame_idx": 18988, "task_index": 17}, {"db_idx": 18989, "episode_idx": 85, "frame_idx": 41, "global_frame_idx": 18989, "task_index": 17}, {"db_idx": 18990, "episode_idx": 85, "frame_idx": 42, "global_frame_idx": 18990, "task_index": 17}, {"db_idx": 18991, "episode_idx": 85, "frame_idx": 43, "global_frame_idx": 18991, "task_index": 17}, {"db_idx": 18992, "episode_idx": 85, "frame_idx": 44, "global_frame_idx": 18992, "task_index": 17}, {"db_idx": 18993, "episode_idx": 85, "frame_idx": 45, "global_frame_idx": 18993, "task_index": 17}, {"db_idx": 18994, "episode_idx": 85, "frame_idx": 46, "global_frame_idx": 18994, "task_index": 17}, {"db_idx": 18995, "episode_idx": 85, "frame_idx": 47, "global_frame_idx": 18995, "task_index": 17}, {"db_idx": 18996, "episode_idx": 85, "frame_idx": 48, "global_frame_idx": 18996, "task_index": 17}, {"db_idx": 18997, "episode_idx": 85, "frame_idx": 49, "global_frame_idx": 18997, "task_index": 17}, {"db_idx": 18998, "episode_idx": 
85, "frame_idx": 50, "global_frame_idx": 18998, "task_index": 17}, {"db_idx": 18999, "episode_idx": 85, "frame_idx": 51, "global_frame_idx": 18999, "task_index": 17}, {"db_idx": 19000, "episode_idx": 85, "frame_idx": 52, "global_frame_idx": 19000, "task_index": 17}, {"db_idx": 19001, "episode_idx": 85, "frame_idx": 53, "global_frame_idx": 19001, "task_index": 17}, {"db_idx": 19002, "episode_idx": 85, "frame_idx": 54, "global_frame_idx": 19002, "task_index": 17}, {"db_idx": 19003, "episode_idx": 85, "frame_idx": 55, "global_frame_idx": 19003, "task_index": 17}, {"db_idx": 19004, "episode_idx": 85, "frame_idx": 56, "global_frame_idx": 19004, "task_index": 17}, {"db_idx": 19005, "episode_idx": 85, "frame_idx": 57, "global_frame_idx": 19005, "task_index": 17}, {"db_idx": 19006, "episode_idx": 85, "frame_idx": 58, "global_frame_idx": 19006, "task_index": 17}, {"db_idx": 19007, "episode_idx": 85, "frame_idx": 59, "global_frame_idx": 19007, "task_index": 17}, {"db_idx": 19008, "episode_idx": 85, "frame_idx": 60, "global_frame_idx": 19008, "task_index": 17}, {"db_idx": 19009, "episode_idx": 85, "frame_idx": 61, "global_frame_idx": 19009, "task_index": 17}, {"db_idx": 19010, "episode_idx": 85, "frame_idx": 62, "global_frame_idx": 19010, "task_index": 17}, {"db_idx": 19011, "episode_idx": 85, "frame_idx": 63, "global_frame_idx": 19011, "task_index": 17}, {"db_idx": 19012, "episode_idx": 85, "frame_idx": 64, "global_frame_idx": 19012, "task_index": 17}, {"db_idx": 19013, "episode_idx": 85, "frame_idx": 65, "global_frame_idx": 19013, "task_index": 17}, {"db_idx": 19014, "episode_idx": 85, "frame_idx": 66, "global_frame_idx": 19014, "task_index": 17}, {"db_idx": 19015, "episode_idx": 85, "frame_idx": 67, "global_frame_idx": 19015, "task_index": 17}, {"db_idx": 19016, "episode_idx": 85, "frame_idx": 68, "global_frame_idx": 19016, "task_index": 17}, {"db_idx": 19017, "episode_idx": 85, "frame_idx": 69, "global_frame_idx": 19017, "task_index": 17}, {"db_idx": 19018, "episode_idx": 
85, "frame_idx": 70, "global_frame_idx": 19018, "task_index": 17}, {"db_idx": 19019, "episode_idx": 85, "frame_idx": 71, "global_frame_idx": 19019, "task_index": 17}, {"db_idx": 19020, "episode_idx": 85, "frame_idx": 72, "global_frame_idx": 19020, "task_index": 17}, {"db_idx": 19021, "episode_idx": 85, "frame_idx": 73, "global_frame_idx": 19021, "task_index": 17}, {"db_idx": 19022, "episode_idx": 85, "frame_idx": 74, "global_frame_idx": 19022, "task_index": 17}, {"db_idx": 19023, "episode_idx": 85, "frame_idx": 75, "global_frame_idx": 19023, "task_index": 17}, {"db_idx": 19024, "episode_idx": 85, "frame_idx": 76, "global_frame_idx": 19024, "task_index": 17}, {"db_idx": 19025, "episode_idx": 85, "frame_idx": 77, "global_frame_idx": 19025, "task_index": 17}, {"db_idx": 19026, "episode_idx": 85, "frame_idx": 78, "global_frame_idx": 19026, "task_index": 17}, {"db_idx": 19027, "episode_idx": 85, "frame_idx": 79, "global_frame_idx": 19027, "task_index": 17}, {"db_idx": 19028, "episode_idx": 85, "frame_idx": 80, "global_frame_idx": 19028, "task_index": 17}, {"db_idx": 19029, "episode_idx": 85, "frame_idx": 81, "global_frame_idx": 19029, "task_index": 17}, {"db_idx": 19030, "episode_idx": 85, "frame_idx": 82, "global_frame_idx": 19030, "task_index": 17}, {"db_idx": 19031, "episode_idx": 85, "frame_idx": 83, "global_frame_idx": 19031, "task_index": 17}, {"db_idx": 19032, "episode_idx": 85, "frame_idx": 84, "global_frame_idx": 19032, "task_index": 17}, {"db_idx": 19033, "episode_idx": 85, "frame_idx": 85, "global_frame_idx": 19033, "task_index": 17}, {"db_idx": 19034, "episode_idx": 85, "frame_idx": 86, "global_frame_idx": 19034, "task_index": 17}, {"db_idx": 19035, "episode_idx": 85, "frame_idx": 87, "global_frame_idx": 19035, "task_index": 17}, {"db_idx": 19036, "episode_idx": 85, "frame_idx": 88, "global_frame_idx": 19036, "task_index": 17}, {"db_idx": 19037, "episode_idx": 85, "frame_idx": 89, "global_frame_idx": 19037, "task_index": 17}, {"db_idx": 19038, "episode_idx": 
85, "frame_idx": 90, "global_frame_idx": 19038, "task_index": 17}, {"db_idx": 19039, "episode_idx": 85, "frame_idx": 91, "global_frame_idx": 19039, "task_index": 17}, {"db_idx": 19040, "episode_idx": 85, "frame_idx": 92, "global_frame_idx": 19040, "task_index": 17}, {"db_idx": 19041, "episode_idx": 85, "frame_idx": 93, "global_frame_idx": 19041, "task_index": 17}, {"db_idx": 19042, "episode_idx": 85, "frame_idx": 94, "global_frame_idx": 19042, "task_index": 17}, {"db_idx": 19043, "episode_idx": 85, "frame_idx": 95, "global_frame_idx": 19043, "task_index": 17}, {"db_idx": 19044, "episode_idx": 85, "frame_idx": 96, "global_frame_idx": 19044, "task_index": 17}, {"db_idx": 19045, "episode_idx": 85, "frame_idx": 97, "global_frame_idx": 19045, "task_index": 17}, {"db_idx": 19046, "episode_idx": 85, "frame_idx": 98, "global_frame_idx": 19046, "task_index": 17}, {"db_idx": 19047, "episode_idx": 85, "frame_idx": 99, "global_frame_idx": 19047, "task_index": 17}, {"db_idx": 19048, "episode_idx": 85, "frame_idx": 100, "global_frame_idx": 19048, "task_index": 17}, {"db_idx": 19049, "episode_idx": 85, "frame_idx": 101, "global_frame_idx": 19049, "task_index": 17}, {"db_idx": 19050, "episode_idx": 85, "frame_idx": 102, "global_frame_idx": 19050, "task_index": 17}, {"db_idx": 19051, "episode_idx": 85, "frame_idx": 103, "global_frame_idx": 19051, "task_index": 17}, {"db_idx": 19052, "episode_idx": 85, "frame_idx": 104, "global_frame_idx": 19052, "task_index": 17}, {"db_idx": 19053, "episode_idx": 85, "frame_idx": 105, "global_frame_idx": 19053, "task_index": 17}, {"db_idx": 19054, "episode_idx": 85, "frame_idx": 106, "global_frame_idx": 19054, "task_index": 17}, {"db_idx": 19055, "episode_idx": 85, "frame_idx": 107, "global_frame_idx": 19055, "task_index": 17}, {"db_idx": 19056, "episode_idx": 85, "frame_idx": 108, "global_frame_idx": 19056, "task_index": 17}, {"db_idx": 19057, "episode_idx": 85, "frame_idx": 109, "global_frame_idx": 19057, "task_index": 17}, {"db_idx": 19058, 
"episode_idx": 85, "frame_idx": 110, "global_frame_idx": 19058, "task_index": 17}, {"db_idx": 19059, "episode_idx": 85, "frame_idx": 111, "global_frame_idx": 19059, "task_index": 17}, {"db_idx": 19060, "episode_idx": 85, "frame_idx": 112, "global_frame_idx": 19060, "task_index": 17}, {"db_idx": 19061, "episode_idx": 85, "frame_idx": 113, "global_frame_idx": 19061, "task_index": 17}, {"db_idx": 19062, "episode_idx": 85, "frame_idx": 114, "global_frame_idx": 19062, "task_index": 17}, {"db_idx": 19063, "episode_idx": 85, "frame_idx": 115, "global_frame_idx": 19063, "task_index": 17}, {"db_idx": 19064, "episode_idx": 85, "frame_idx": 116, "global_frame_idx": 19064, "task_index": 17}, {"db_idx": 19065, "episode_idx": 85, "frame_idx": 117, "global_frame_idx": 19065, "task_index": 17}, {"db_idx": 19066, "episode_idx": 85, "frame_idx": 118, "global_frame_idx": 19066, "task_index": 17}, {"db_idx": 19067, "episode_idx": 85, "frame_idx": 119, "global_frame_idx": 19067, "task_index": 17}, {"db_idx": 19068, "episode_idx": 85, "frame_idx": 120, "global_frame_idx": 19068, "task_index": 17}, {"db_idx": 19069, "episode_idx": 85, "frame_idx": 121, "global_frame_idx": 19069, "task_index": 17}, {"db_idx": 19070, "episode_idx": 85, "frame_idx": 122, "global_frame_idx": 19070, "task_index": 17}, {"db_idx": 19071, "episode_idx": 85, "frame_idx": 123, "global_frame_idx": 19071, "task_index": 17}, {"db_idx": 19072, "episode_idx": 85, "frame_idx": 124, "global_frame_idx": 19072, "task_index": 17}, {"db_idx": 19073, "episode_idx": 85, "frame_idx": 125, "global_frame_idx": 19073, "task_index": 17}, {"db_idx": 19074, "episode_idx": 85, "frame_idx": 126, "global_frame_idx": 19074, "task_index": 17}, {"db_idx": 19075, "episode_idx": 85, "frame_idx": 127, "global_frame_idx": 19075, "task_index": 17}, {"db_idx": 19076, "episode_idx": 85, "frame_idx": 128, "global_frame_idx": 19076, "task_index": 17}, {"db_idx": 19077, "episode_idx": 85, "frame_idx": 129, "global_frame_idx": 19077, "task_index": 
17}, {"db_idx": 19078, "episode_idx": 85, "frame_idx": 130, "global_frame_idx": 19078, "task_index": 17}, {"db_idx": 19079, "episode_idx": 85, "frame_idx": 131, "global_frame_idx": 19079, "task_index": 17}, {"db_idx": 19080, "episode_idx": 85, "frame_idx": 132, "global_frame_idx": 19080, "task_index": 17}, {"db_idx": 19081, "episode_idx": 85, "frame_idx": 133, "global_frame_idx": 19081, "task_index": 17}, {"db_idx": 19082, "episode_idx": 85, "frame_idx": 134, "global_frame_idx": 19082, "task_index": 17}, {"db_idx": 19083, "episode_idx": 85, "frame_idx": 135, "global_frame_idx": 19083, "task_index": 17}, {"db_idx": 19084, "episode_idx": 85, "frame_idx": 136, "global_frame_idx": 19084, "task_index": 17}, {"db_idx": 19085, "episode_idx": 85, "frame_idx": 137, "global_frame_idx": 19085, "task_index": 17}, {"db_idx": 19086, "episode_idx": 85, "frame_idx": 138, "global_frame_idx": 19086, "task_index": 17}, {"db_idx": 19087, "episode_idx": 85, "frame_idx": 139, "global_frame_idx": 19087, "task_index": 17}, {"db_idx": 19088, "episode_idx": 85, "frame_idx": 140, "global_frame_idx": 19088, "task_index": 17}, {"db_idx": 19089, "episode_idx": 85, "frame_idx": 141, "global_frame_idx": 19089, "task_index": 17}, {"db_idx": 19090, "episode_idx": 85, "frame_idx": 142, "global_frame_idx": 19090, "task_index": 17}, {"db_idx": 19091, "episode_idx": 85, "frame_idx": 143, "global_frame_idx": 19091, "task_index": 17}, {"db_idx": 19092, "episode_idx": 85, "frame_idx": 144, "global_frame_idx": 19092, "task_index": 17}, {"db_idx": 19093, "episode_idx": 85, "frame_idx": 145, "global_frame_idx": 19093, "task_index": 17}, {"db_idx": 19094, "episode_idx": 85, "frame_idx": 146, "global_frame_idx": 19094, "task_index": 17}, {"db_idx": 19095, "episode_idx": 85, "frame_idx": 147, "global_frame_idx": 19095, "task_index": 17}, {"db_idx": 19096, "episode_idx": 85, "frame_idx": 148, "global_frame_idx": 19096, "task_index": 17}, {"db_idx": 19097, "episode_idx": 85, "frame_idx": 149, "global_frame_idx": 
19097, "task_index": 17}, {"db_idx": 19098, "episode_idx": 85, "frame_idx": 150, "global_frame_idx": 19098, "task_index": 17}, {"db_idx": 19099, "episode_idx": 85, "frame_idx": 151, "global_frame_idx": 19099, "task_index": 17}, {"db_idx": 19100, "episode_idx": 86, "frame_idx": 0, "global_frame_idx": 19100, "task_index": 17}, {"db_idx": 19101, "episode_idx": 86, "frame_idx": 1, "global_frame_idx": 19101, "task_index": 17}, {"db_idx": 19102, "episode_idx": 86, "frame_idx": 2, "global_frame_idx": 19102, "task_index": 17}, {"db_idx": 19103, "episode_idx": 86, "frame_idx": 3, "global_frame_idx": 19103, "task_index": 17}, {"db_idx": 19104, "episode_idx": 86, "frame_idx": 4, "global_frame_idx": 19104, "task_index": 17}, {"db_idx": 19105, "episode_idx": 86, "frame_idx": 5, "global_frame_idx": 19105, "task_index": 17}, {"db_idx": 19106, "episode_idx": 86, "frame_idx": 6, "global_frame_idx": 19106, "task_index": 17}, {"db_idx": 19107, "episode_idx": 86, "frame_idx": 7, "global_frame_idx": 19107, "task_index": 17}, {"db_idx": 19108, "episode_idx": 86, "frame_idx": 8, "global_frame_idx": 19108, "task_index": 17}, {"db_idx": 19109, "episode_idx": 86, "frame_idx": 9, "global_frame_idx": 19109, "task_index": 17}, {"db_idx": 19110, "episode_idx": 86, "frame_idx": 10, "global_frame_idx": 19110, "task_index": 17}, {"db_idx": 19111, "episode_idx": 86, "frame_idx": 11, "global_frame_idx": 19111, "task_index": 17}, {"db_idx": 19112, "episode_idx": 86, "frame_idx": 12, "global_frame_idx": 19112, "task_index": 17}, {"db_idx": 19113, "episode_idx": 86, "frame_idx": 13, "global_frame_idx": 19113, "task_index": 17}, {"db_idx": 19114, "episode_idx": 86, "frame_idx": 14, "global_frame_idx": 19114, "task_index": 17}, {"db_idx": 19115, "episode_idx": 86, "frame_idx": 15, "global_frame_idx": 19115, "task_index": 17}, {"db_idx": 19116, "episode_idx": 86, "frame_idx": 16, "global_frame_idx": 19116, "task_index": 17}, {"db_idx": 19117, "episode_idx": 86, "frame_idx": 17, "global_frame_idx": 19117, 
"task_index": 17}, {"db_idx": 19118, "episode_idx": 86, "frame_idx": 18, "global_frame_idx": 19118, "task_index": 17}, {"db_idx": 19119, "episode_idx": 86, "frame_idx": 19, "global_frame_idx": 19119, "task_index": 17}, {"db_idx": 19120, "episode_idx": 86, "frame_idx": 20, "global_frame_idx": 19120, "task_index": 17}, {"db_idx": 19121, "episode_idx": 86, "frame_idx": 21, "global_frame_idx": 19121, "task_index": 17}, {"db_idx": 19122, "episode_idx": 86, "frame_idx": 22, "global_frame_idx": 19122, "task_index": 17}, {"db_idx": 19123, "episode_idx": 86, "frame_idx": 23, "global_frame_idx": 19123, "task_index": 17}, {"db_idx": 19124, "episode_idx": 86, "frame_idx": 24, "global_frame_idx": 19124, "task_index": 17}, {"db_idx": 19125, "episode_idx": 86, "frame_idx": 25, "global_frame_idx": 19125, "task_index": 17}, {"db_idx": 19126, "episode_idx": 86, "frame_idx": 26, "global_frame_idx": 19126, "task_index": 17}, {"db_idx": 19127, "episode_idx": 86, "frame_idx": 27, "global_frame_idx": 19127, "task_index": 17}, {"db_idx": 19128, "episode_idx": 86, "frame_idx": 28, "global_frame_idx": 19128, "task_index": 17}, {"db_idx": 19129, "episode_idx": 86, "frame_idx": 29, "global_frame_idx": 19129, "task_index": 17}, {"db_idx": 19130, "episode_idx": 86, "frame_idx": 30, "global_frame_idx": 19130, "task_index": 17}, {"db_idx": 19131, "episode_idx": 86, "frame_idx": 31, "global_frame_idx": 19131, "task_index": 17}, {"db_idx": 19132, "episode_idx": 86, "frame_idx": 32, "global_frame_idx": 19132, "task_index": 17}, {"db_idx": 19133, "episode_idx": 86, "frame_idx": 33, "global_frame_idx": 19133, "task_index": 17}, {"db_idx": 19134, "episode_idx": 86, "frame_idx": 34, "global_frame_idx": 19134, "task_index": 17}, {"db_idx": 19135, "episode_idx": 86, "frame_idx": 35, "global_frame_idx": 19135, "task_index": 17}, {"db_idx": 19136, "episode_idx": 86, "frame_idx": 36, "global_frame_idx": 19136, "task_index": 17}, {"db_idx": 19137, "episode_idx": 86, "frame_idx": 37, "global_frame_idx": 19137, 
"task_index": 17}, {"db_idx": 19138, "episode_idx": 86, "frame_idx": 38, "global_frame_idx": 19138, "task_index": 17}, {"db_idx": 19139, "episode_idx": 86, "frame_idx": 39, "global_frame_idx": 19139, "task_index": 17}, {"db_idx": 19140, "episode_idx": 86, "frame_idx": 40, "global_frame_idx": 19140, "task_index": 17}, {"db_idx": 19141, "episode_idx": 86, "frame_idx": 41, "global_frame_idx": 19141, "task_index": 17}, {"db_idx": 19142, "episode_idx": 86, "frame_idx": 42, "global_frame_idx": 19142, "task_index": 17}, {"db_idx": 19143, "episode_idx": 86, "frame_idx": 43, "global_frame_idx": 19143, "task_index": 17}, {"db_idx": 19144, "episode_idx": 86, "frame_idx": 44, "global_frame_idx": 19144, "task_index": 17}, {"db_idx": 19145, "episode_idx": 86, "frame_idx": 45, "global_frame_idx": 19145, "task_index": 17}, {"db_idx": 19146, "episode_idx": 86, "frame_idx": 46, "global_frame_idx": 19146, "task_index": 17}, {"db_idx": 19147, "episode_idx": 86, "frame_idx": 47, "global_frame_idx": 19147, "task_index": 17}, {"db_idx": 19148, "episode_idx": 86, "frame_idx": 48, "global_frame_idx": 19148, "task_index": 17}, {"db_idx": 19149, "episode_idx": 86, "frame_idx": 49, "global_frame_idx": 19149, "task_index": 17}, {"db_idx": 19150, "episode_idx": 86, "frame_idx": 50, "global_frame_idx": 19150, "task_index": 17}, {"db_idx": 19151, "episode_idx": 86, "frame_idx": 51, "global_frame_idx": 19151, "task_index": 17}, {"db_idx": 19152, "episode_idx": 86, "frame_idx": 52, "global_frame_idx": 19152, "task_index": 17}, {"db_idx": 19153, "episode_idx": 86, "frame_idx": 53, "global_frame_idx": 19153, "task_index": 17}, {"db_idx": 19154, "episode_idx": 86, "frame_idx": 54, "global_frame_idx": 19154, "task_index": 17}, {"db_idx": 19155, "episode_idx": 86, "frame_idx": 55, "global_frame_idx": 19155, "task_index": 17}, {"db_idx": 19156, "episode_idx": 86, "frame_idx": 56, "global_frame_idx": 19156, "task_index": 17}, {"db_idx": 19157, "episode_idx": 86, "frame_idx": 57, "global_frame_idx": 19157, 
"task_index": 17}, {"db_idx": 19158, "episode_idx": 86, "frame_idx": 58, "global_frame_idx": 19158, "task_index": 17}, {"db_idx": 19159, "episode_idx": 86, "frame_idx": 59, "global_frame_idx": 19159, "task_index": 17}, {"db_idx": 19160, "episode_idx": 86, "frame_idx": 60, "global_frame_idx": 19160, "task_index": 17}, {"db_idx": 19161, "episode_idx": 86, "frame_idx": 61, "global_frame_idx": 19161, "task_index": 17}, {"db_idx": 19162, "episode_idx": 86, "frame_idx": 62, "global_frame_idx": 19162, "task_index": 17}, {"db_idx": 19163, "episode_idx": 86, "frame_idx": 63, "global_frame_idx": 19163, "task_index": 17}, {"db_idx": 19164, "episode_idx": 86, "frame_idx": 64, "global_frame_idx": 19164, "task_index": 17}, {"db_idx": 19165, "episode_idx": 86, "frame_idx": 65, "global_frame_idx": 19165, "task_index": 17}, {"db_idx": 19166, "episode_idx": 86, "frame_idx": 66, "global_frame_idx": 19166, "task_index": 17}, {"db_idx": 19167, "episode_idx": 86, "frame_idx": 67, "global_frame_idx": 19167, "task_index": 17}, {"db_idx": 19168, "episode_idx": 86, "frame_idx": 68, "global_frame_idx": 19168, "task_index": 17}, {"db_idx": 19169, "episode_idx": 86, "frame_idx": 69, "global_frame_idx": 19169, "task_index": 17}, {"db_idx": 19170, "episode_idx": 86, "frame_idx": 70, "global_frame_idx": 19170, "task_index": 17}, {"db_idx": 19171, "episode_idx": 86, "frame_idx": 71, "global_frame_idx": 19171, "task_index": 17}, {"db_idx": 19172, "episode_idx": 86, "frame_idx": 72, "global_frame_idx": 19172, "task_index": 17}, {"db_idx": 19173, "episode_idx": 86, "frame_idx": 73, "global_frame_idx": 19173, "task_index": 17}, {"db_idx": 19174, "episode_idx": 86, "frame_idx": 74, "global_frame_idx": 19174, "task_index": 17}, {"db_idx": 19175, "episode_idx": 86, "frame_idx": 75, "global_frame_idx": 19175, "task_index": 17}, {"db_idx": 19176, "episode_idx": 86, "frame_idx": 76, "global_frame_idx": 19176, "task_index": 17}, {"db_idx": 19177, "episode_idx": 86, "frame_idx": 77, "global_frame_idx": 19177, 
"task_index": 17}, {"db_idx": 19178, "episode_idx": 86, "frame_idx": 78, "global_frame_idx": 19178, "task_index": 17}, {"db_idx": 19179, "episode_idx": 86, "frame_idx": 79, "global_frame_idx": 19179, "task_index": 17}, {"db_idx": 19180, "episode_idx": 86, "frame_idx": 80, "global_frame_idx": 19180, "task_index": 17}, {"db_idx": 19181, "episode_idx": 86, "frame_idx": 81, "global_frame_idx": 19181, "task_index": 17}, {"db_idx": 19182, "episode_idx": 86, "frame_idx": 82, "global_frame_idx": 19182, "task_index": 17}, {"db_idx": 19183, "episode_idx": 86, "frame_idx": 83, "global_frame_idx": 19183, "task_index": 17}, {"db_idx": 19184, "episode_idx": 86, "frame_idx": 84, "global_frame_idx": 19184, "task_index": 17}, {"db_idx": 19185, "episode_idx": 86, "frame_idx": 85, "global_frame_idx": 19185, "task_index": 17}, {"db_idx": 19186, "episode_idx": 86, "frame_idx": 86, "global_frame_idx": 19186, "task_index": 17}, {"db_idx": 19187, "episode_idx": 86, "frame_idx": 87, "global_frame_idx": 19187, "task_index": 17}, {"db_idx": 19188, "episode_idx": 86, "frame_idx": 88, "global_frame_idx": 19188, "task_index": 17}, {"db_idx": 19189, "episode_idx": 86, "frame_idx": 89, "global_frame_idx": 19189, "task_index": 17}, {"db_idx": 19190, "episode_idx": 86, "frame_idx": 90, "global_frame_idx": 19190, "task_index": 17}, {"db_idx": 19191, "episode_idx": 86, "frame_idx": 91, "global_frame_idx": 19191, "task_index": 17}, {"db_idx": 19192, "episode_idx": 86, "frame_idx": 92, "global_frame_idx": 19192, "task_index": 17}, {"db_idx": 19193, "episode_idx": 86, "frame_idx": 93, "global_frame_idx": 19193, "task_index": 17}, {"db_idx": 19194, "episode_idx": 86, "frame_idx": 94, "global_frame_idx": 19194, "task_index": 17}, {"db_idx": 19195, "episode_idx": 86, "frame_idx": 95, "global_frame_idx": 19195, "task_index": 17}, {"db_idx": 19196, "episode_idx": 86, "frame_idx": 96, "global_frame_idx": 19196, "task_index": 17}, {"db_idx": 19197, "episode_idx": 86, "frame_idx": 97, "global_frame_idx": 19197, 
"task_index": 17}, {"db_idx": 19198, "episode_idx": 86, "frame_idx": 98, "global_frame_idx": 19198, "task_index": 17}, {"db_idx": 19199, "episode_idx": 86, "frame_idx": 99, "global_frame_idx": 19199, "task_index": 17}, {"db_idx": 19200, "episode_idx": 86, "frame_idx": 100, "global_frame_idx": 19200, "task_index": 17}, {"db_idx": 19201, "episode_idx": 86, "frame_idx": 101, "global_frame_idx": 19201, "task_index": 17}, {"db_idx": 19202, "episode_idx": 86, "frame_idx": 102, "global_frame_idx": 19202, "task_index": 17}, {"db_idx": 19203, "episode_idx": 86, "frame_idx": 103, "global_frame_idx": 19203, "task_index": 17}, {"db_idx": 19204, "episode_idx": 86, "frame_idx": 104, "global_frame_idx": 19204, "task_index": 17}, {"db_idx": 19205, "episode_idx": 86, "frame_idx": 105, "global_frame_idx": 19205, "task_index": 17}, {"db_idx": 19206, "episode_idx": 86, "frame_idx": 106, "global_frame_idx": 19206, "task_index": 17}, {"db_idx": 19207, "episode_idx": 86, "frame_idx": 107, "global_frame_idx": 19207, "task_index": 17}, {"db_idx": 19208, "episode_idx": 86, "frame_idx": 108, "global_frame_idx": 19208, "task_index": 17}, {"db_idx": 19209, "episode_idx": 86, "frame_idx": 109, "global_frame_idx": 19209, "task_index": 17}, {"db_idx": 19210, "episode_idx": 86, "frame_idx": 110, "global_frame_idx": 19210, "task_index": 17}, {"db_idx": 19211, "episode_idx": 86, "frame_idx": 111, "global_frame_idx": 19211, "task_index": 17}, {"db_idx": 19212, "episode_idx": 86, "frame_idx": 112, "global_frame_idx": 19212, "task_index": 17}, {"db_idx": 19213, "episode_idx": 86, "frame_idx": 113, "global_frame_idx": 19213, "task_index": 17}, {"db_idx": 19214, "episode_idx": 86, "frame_idx": 114, "global_frame_idx": 19214, "task_index": 17}, {"db_idx": 19215, "episode_idx": 86, "frame_idx": 115, "global_frame_idx": 19215, "task_index": 17}, {"db_idx": 19216, "episode_idx": 86, "frame_idx": 116, "global_frame_idx": 19216, "task_index": 17}, {"db_idx": 19217, "episode_idx": 86, "frame_idx": 117, 
"global_frame_idx": 19217, "task_index": 17}, {"db_idx": 19218, "episode_idx": 86, "frame_idx": 118, "global_frame_idx": 19218, "task_index": 17}, {"db_idx": 19219, "episode_idx": 86, "frame_idx": 119, "global_frame_idx": 19219, "task_index": 17}, {"db_idx": 19220, "episode_idx": 86, "frame_idx": 120, "global_frame_idx": 19220, "task_index": 17}, {"db_idx": 19221, "episode_idx": 86, "frame_idx": 121, "global_frame_idx": 19221, "task_index": 17}, {"db_idx": 19222, "episode_idx": 86, "frame_idx": 122, "global_frame_idx": 19222, "task_index": 17}, {"db_idx": 19223, "episode_idx": 86, "frame_idx": 123, "global_frame_idx": 19223, "task_index": 17}, {"db_idx": 19224, "episode_idx": 86, "frame_idx": 124, "global_frame_idx": 19224, "task_index": 17}, {"db_idx": 19225, "episode_idx": 86, "frame_idx": 125, "global_frame_idx": 19225, "task_index": 17}, {"db_idx": 19226, "episode_idx": 86, "frame_idx": 126, "global_frame_idx": 19226, "task_index": 17}, {"db_idx": 19227, "episode_idx": 86, "frame_idx": 127, "global_frame_idx": 19227, "task_index": 17}, {"db_idx": 19228, "episode_idx": 86, "frame_idx": 128, "global_frame_idx": 19228, "task_index": 17}, {"db_idx": 19229, "episode_idx": 86, "frame_idx": 129, "global_frame_idx": 19229, "task_index": 17}, {"db_idx": 19230, "episode_idx": 86, "frame_idx": 130, "global_frame_idx": 19230, "task_index": 17}, {"db_idx": 19231, "episode_idx": 86, "frame_idx": 131, "global_frame_idx": 19231, "task_index": 17}, {"db_idx": 19232, "episode_idx": 86, "frame_idx": 132, "global_frame_idx": 19232, "task_index": 17}, {"db_idx": 19233, "episode_idx": 86, "frame_idx": 133, "global_frame_idx": 19233, "task_index": 17}, {"db_idx": 19234, "episode_idx": 86, "frame_idx": 134, "global_frame_idx": 19234, "task_index": 17}, {"db_idx": 19235, "episode_idx": 86, "frame_idx": 135, "global_frame_idx": 19235, "task_index": 17}, {"db_idx": 19236, "episode_idx": 86, "frame_idx": 136, "global_frame_idx": 19236, "task_index": 17}, {"db_idx": 19237, "episode_idx": 
86, "frame_idx": 137, "global_frame_idx": 19237, "task_index": 17}, {"db_idx": 19238, "episode_idx": 86, "frame_idx": 138, "global_frame_idx": 19238, "task_index": 17}, {"db_idx": 19239, "episode_idx": 86, "frame_idx": 139, "global_frame_idx": 19239, "task_index": 17}, {"db_idx": 19240, "episode_idx": 86, "frame_idx": 140, "global_frame_idx": 19240, "task_index": 17}, {"db_idx": 19241, "episode_idx": 86, "frame_idx": 141, "global_frame_idx": 19241, "task_index": 17}, {"db_idx": 19242, "episode_idx": 86, "frame_idx": 142, "global_frame_idx": 19242, "task_index": 17}, {"db_idx": 19243, "episode_idx": 86, "frame_idx": 143, "global_frame_idx": 19243, "task_index": 17}, {"db_idx": 19244, "episode_idx": 86, "frame_idx": 144, "global_frame_idx": 19244, "task_index": 17}, {"db_idx": 19245, "episode_idx": 86, "frame_idx": 145, "global_frame_idx": 19245, "task_index": 17}, {"db_idx": 19246, "episode_idx": 87, "frame_idx": 0, "global_frame_idx": 19246, "task_index": 17}, {"db_idx": 19247, "episode_idx": 87, "frame_idx": 1, "global_frame_idx": 19247, "task_index": 17}, {"db_idx": 19248, "episode_idx": 87, "frame_idx": 2, "global_frame_idx": 19248, "task_index": 17}, {"db_idx": 19249, "episode_idx": 87, "frame_idx": 3, "global_frame_idx": 19249, "task_index": 17}, {"db_idx": 19250, "episode_idx": 87, "frame_idx": 4, "global_frame_idx": 19250, "task_index": 17}, {"db_idx": 19251, "episode_idx": 87, "frame_idx": 5, "global_frame_idx": 19251, "task_index": 17}, {"db_idx": 19252, "episode_idx": 87, "frame_idx": 6, "global_frame_idx": 19252, "task_index": 17}, {"db_idx": 19253, "episode_idx": 87, "frame_idx": 7, "global_frame_idx": 19253, "task_index": 17}, {"db_idx": 19254, "episode_idx": 87, "frame_idx": 8, "global_frame_idx": 19254, "task_index": 17}, {"db_idx": 19255, "episode_idx": 87, "frame_idx": 9, "global_frame_idx": 19255, "task_index": 17}, {"db_idx": 19256, "episode_idx": 87, "frame_idx": 10, "global_frame_idx": 19256, "task_index": 17}, {"db_idx": 19257, "episode_idx": 
87, "frame_idx": 11, "global_frame_idx": 19257, "task_index": 17}, {"db_idx": 19258, "episode_idx": 87, "frame_idx": 12, "global_frame_idx": 19258, "task_index": 17}, {"db_idx": 19259, "episode_idx": 87, "frame_idx": 13, "global_frame_idx": 19259, "task_index": 17}, {"db_idx": 19260, "episode_idx": 87, "frame_idx": 14, "global_frame_idx": 19260, "task_index": 17}, {"db_idx": 19261, "episode_idx": 87, "frame_idx": 15, "global_frame_idx": 19261, "task_index": 17}, {"db_idx": 19262, "episode_idx": 87, "frame_idx": 16, "global_frame_idx": 19262, "task_index": 17}, {"db_idx": 19263, "episode_idx": 87, "frame_idx": 17, "global_frame_idx": 19263, "task_index": 17}, {"db_idx": 19264, "episode_idx": 87, "frame_idx": 18, "global_frame_idx": 19264, "task_index": 17}, {"db_idx": 19265, "episode_idx": 87, "frame_idx": 19, "global_frame_idx": 19265, "task_index": 17}, {"db_idx": 19266, "episode_idx": 87, "frame_idx": 20, "global_frame_idx": 19266, "task_index": 17}, {"db_idx": 19267, "episode_idx": 87, "frame_idx": 21, "global_frame_idx": 19267, "task_index": 17}, {"db_idx": 19268, "episode_idx": 87, "frame_idx": 22, "global_frame_idx": 19268, "task_index": 17}, {"db_idx": 19269, "episode_idx": 87, "frame_idx": 23, "global_frame_idx": 19269, "task_index": 17}, {"db_idx": 19270, "episode_idx": 87, "frame_idx": 24, "global_frame_idx": 19270, "task_index": 17}, {"db_idx": 19271, "episode_idx": 87, "frame_idx": 25, "global_frame_idx": 19271, "task_index": 17}, {"db_idx": 19272, "episode_idx": 87, "frame_idx": 26, "global_frame_idx": 19272, "task_index": 17}, {"db_idx": 19273, "episode_idx": 87, "frame_idx": 27, "global_frame_idx": 19273, "task_index": 17}, {"db_idx": 19274, "episode_idx": 87, "frame_idx": 28, "global_frame_idx": 19274, "task_index": 17}, {"db_idx": 19275, "episode_idx": 87, "frame_idx": 29, "global_frame_idx": 19275, "task_index": 17}, {"db_idx": 19276, "episode_idx": 87, "frame_idx": 30, "global_frame_idx": 19276, "task_index": 17}, {"db_idx": 19277, "episode_idx": 
87, "frame_idx": 31, "global_frame_idx": 19277, "task_index": 17}, {"db_idx": 19278, "episode_idx": 87, "frame_idx": 32, "global_frame_idx": 19278, "task_index": 17}, {"db_idx": 19279, "episode_idx": 87, "frame_idx": 33, "global_frame_idx": 19279, "task_index": 17}, {"db_idx": 19280, "episode_idx": 87, "frame_idx": 34, "global_frame_idx": 19280, "task_index": 17}, {"db_idx": 19281, "episode_idx": 87, "frame_idx": 35, "global_frame_idx": 19281, "task_index": 17}, {"db_idx": 19282, "episode_idx": 87, "frame_idx": 36, "global_frame_idx": 19282, "task_index": 17}, {"db_idx": 19283, "episode_idx": 87, "frame_idx": 37, "global_frame_idx": 19283, "task_index": 17}, {"db_idx": 19284, "episode_idx": 87, "frame_idx": 38, "global_frame_idx": 19284, "task_index": 17}, {"db_idx": 19285, "episode_idx": 87, "frame_idx": 39, "global_frame_idx": 19285, "task_index": 17}, {"db_idx": 19286, "episode_idx": 87, "frame_idx": 40, "global_frame_idx": 19286, "task_index": 17}, {"db_idx": 19287, "episode_idx": 87, "frame_idx": 41, "global_frame_idx": 19287, "task_index": 17}, {"db_idx": 19288, "episode_idx": 87, "frame_idx": 42, "global_frame_idx": 19288, "task_index": 17}, {"db_idx": 19289, "episode_idx": 87, "frame_idx": 43, "global_frame_idx": 19289, "task_index": 17}, {"db_idx": 19290, "episode_idx": 87, "frame_idx": 44, "global_frame_idx": 19290, "task_index": 17}, {"db_idx": 19291, "episode_idx": 87, "frame_idx": 45, "global_frame_idx": 19291, "task_index": 17}, {"db_idx": 19292, "episode_idx": 87, "frame_idx": 46, "global_frame_idx": 19292, "task_index": 17}, {"db_idx": 19293, "episode_idx": 87, "frame_idx": 47, "global_frame_idx": 19293, "task_index": 17}, {"db_idx": 19294, "episode_idx": 87, "frame_idx": 48, "global_frame_idx": 19294, "task_index": 17}, {"db_idx": 19295, "episode_idx": 87, "frame_idx": 49, "global_frame_idx": 19295, "task_index": 17}, {"db_idx": 19296, "episode_idx": 87, "frame_idx": 50, "global_frame_idx": 19296, "task_index": 17}, {"db_idx": 19297, "episode_idx": 
87, "frame_idx": 51, "global_frame_idx": 19297, "task_index": 17}, {"db_idx": 19298, "episode_idx": 87, "frame_idx": 52, "global_frame_idx": 19298, "task_index": 17}, {"db_idx": 19299, "episode_idx": 87, "frame_idx": 53, "global_frame_idx": 19299, "task_index": 17}, {"db_idx": 19300, "episode_idx": 87, "frame_idx": 54, "global_frame_idx": 19300, "task_index": 17}, {"db_idx": 19301, "episode_idx": 87, "frame_idx": 55, "global_frame_idx": 19301, "task_index": 17}, {"db_idx": 19302, "episode_idx": 87, "frame_idx": 56, "global_frame_idx": 19302, "task_index": 17}, {"db_idx": 19303, "episode_idx": 87, "frame_idx": 57, "global_frame_idx": 19303, "task_index": 17}, {"db_idx": 19304, "episode_idx": 87, "frame_idx": 58, "global_frame_idx": 19304, "task_index": 17}, {"db_idx": 19305, "episode_idx": 87, "frame_idx": 59, "global_frame_idx": 19305, "task_index": 17}, {"db_idx": 19306, "episode_idx": 87, "frame_idx": 60, "global_frame_idx": 19306, "task_index": 17}, {"db_idx": 19307, "episode_idx": 87, "frame_idx": 61, "global_frame_idx": 19307, "task_index": 17}, {"db_idx": 19308, "episode_idx": 87, "frame_idx": 62, "global_frame_idx": 19308, "task_index": 17}, {"db_idx": 19309, "episode_idx": 87, "frame_idx": 63, "global_frame_idx": 19309, "task_index": 17}, {"db_idx": 19310, "episode_idx": 87, "frame_idx": 64, "global_frame_idx": 19310, "task_index": 17}, {"db_idx": 19311, "episode_idx": 87, "frame_idx": 65, "global_frame_idx": 19311, "task_index": 17}, {"db_idx": 19312, "episode_idx": 87, "frame_idx": 66, "global_frame_idx": 19312, "task_index": 17}, {"db_idx": 19313, "episode_idx": 87, "frame_idx": 67, "global_frame_idx": 19313, "task_index": 17}, {"db_idx": 19314, "episode_idx": 87, "frame_idx": 68, "global_frame_idx": 19314, "task_index": 17}, {"db_idx": 19315, "episode_idx": 87, "frame_idx": 69, "global_frame_idx": 19315, "task_index": 17}, {"db_idx": 19316, "episode_idx": 87, "frame_idx": 70, "global_frame_idx": 19316, "task_index": 17}, {"db_idx": 19317, "episode_idx": 
87, "frame_idx": 71, "global_frame_idx": 19317, "task_index": 17}, {"db_idx": 19318, "episode_idx": 87, "frame_idx": 72, "global_frame_idx": 19318, "task_index": 17}, {"db_idx": 19319, "episode_idx": 87, "frame_idx": 73, "global_frame_idx": 19319, "task_index": 17}, {"db_idx": 19320, "episode_idx": 87, "frame_idx": 74, "global_frame_idx": 19320, "task_index": 17}, {"db_idx": 19321, "episode_idx": 87, "frame_idx": 75, "global_frame_idx": 19321, "task_index": 17}, {"db_idx": 19322, "episode_idx": 87, "frame_idx": 76, "global_frame_idx": 19322, "task_index": 17}, {"db_idx": 19323, "episode_idx": 87, "frame_idx": 77, "global_frame_idx": 19323, "task_index": 17}, {"db_idx": 19324, "episode_idx": 87, "frame_idx": 78, "global_frame_idx": 19324, "task_index": 17}, {"db_idx": 19325, "episode_idx": 87, "frame_idx": 79, "global_frame_idx": 19325, "task_index": 17}, {"db_idx": 19326, "episode_idx": 87, "frame_idx": 80, "global_frame_idx": 19326, "task_index": 17}, {"db_idx": 19327, "episode_idx": 87, "frame_idx": 81, "global_frame_idx": 19327, "task_index": 17}, {"db_idx": 19328, "episode_idx": 87, "frame_idx": 82, "global_frame_idx": 19328, "task_index": 17}, {"db_idx": 19329, "episode_idx": 87, "frame_idx": 83, "global_frame_idx": 19329, "task_index": 17}, {"db_idx": 19330, "episode_idx": 87, "frame_idx": 84, "global_frame_idx": 19330, "task_index": 17}, {"db_idx": 19331, "episode_idx": 87, "frame_idx": 85, "global_frame_idx": 19331, "task_index": 17}, {"db_idx": 19332, "episode_idx": 87, "frame_idx": 86, "global_frame_idx": 19332, "task_index": 17}, {"db_idx": 19333, "episode_idx": 87, "frame_idx": 87, "global_frame_idx": 19333, "task_index": 17}, {"db_idx": 19334, "episode_idx": 87, "frame_idx": 88, "global_frame_idx": 19334, "task_index": 17}, {"db_idx": 19335, "episode_idx": 87, "frame_idx": 89, "global_frame_idx": 19335, "task_index": 17}, {"db_idx": 19336, "episode_idx": 87, "frame_idx": 90, "global_frame_idx": 19336, "task_index": 17}, {"db_idx": 19337, "episode_idx": 
87, "frame_idx": 91, "global_frame_idx": 19337, "task_index": 17}, {"db_idx": 19338, "episode_idx": 87, "frame_idx": 92, "global_frame_idx": 19338, "task_index": 17}, {"db_idx": 19339, "episode_idx": 87, "frame_idx": 93, "global_frame_idx": 19339, "task_index": 17}, {"db_idx": 19340, "episode_idx": 87, "frame_idx": 94, "global_frame_idx": 19340, "task_index": 17}, {"db_idx": 19341, "episode_idx": 87, "frame_idx": 95, "global_frame_idx": 19341, "task_index": 17}, {"db_idx": 19342, "episode_idx": 87, "frame_idx": 96, "global_frame_idx": 19342, "task_index": 17}, {"db_idx": 19343, "episode_idx": 87, "frame_idx": 97, "global_frame_idx": 19343, "task_index": 17}, {"db_idx": 19344, "episode_idx": 87, "frame_idx": 98, "global_frame_idx": 19344, "task_index": 17}, {"db_idx": 19345, "episode_idx": 87, "frame_idx": 99, "global_frame_idx": 19345, "task_index": 17}, {"db_idx": 19346, "episode_idx": 87, "frame_idx": 100, "global_frame_idx": 19346, "task_index": 17}, {"db_idx": 19347, "episode_idx": 87, "frame_idx": 101, "global_frame_idx": 19347, "task_index": 17}, {"db_idx": 19348, "episode_idx": 87, "frame_idx": 102, "global_frame_idx": 19348, "task_index": 17}, {"db_idx": 19349, "episode_idx": 87, "frame_idx": 103, "global_frame_idx": 19349, "task_index": 17}, {"db_idx": 19350, "episode_idx": 87, "frame_idx": 104, "global_frame_idx": 19350, "task_index": 17}, {"db_idx": 19351, "episode_idx": 87, "frame_idx": 105, "global_frame_idx": 19351, "task_index": 17}, {"db_idx": 19352, "episode_idx": 87, "frame_idx": 106, "global_frame_idx": 19352, "task_index": 17}, {"db_idx": 19353, "episode_idx": 87, "frame_idx": 107, "global_frame_idx": 19353, "task_index": 17}, {"db_idx": 19354, "episode_idx": 87, "frame_idx": 108, "global_frame_idx": 19354, "task_index": 17}, {"db_idx": 19355, "episode_idx": 87, "frame_idx": 109, "global_frame_idx": 19355, "task_index": 17}, {"db_idx": 19356, "episode_idx": 87, "frame_idx": 110, "global_frame_idx": 19356, "task_index": 17}, {"db_idx": 19357, 
"episode_idx": 87, "frame_idx": 111, "global_frame_idx": 19357, "task_index": 17}, {"db_idx": 19358, "episode_idx": 87, "frame_idx": 112, "global_frame_idx": 19358, "task_index": 17}, {"db_idx": 19359, "episode_idx": 87, "frame_idx": 113, "global_frame_idx": 19359, "task_index": 17}, {"db_idx": 19360, "episode_idx": 87, "frame_idx": 114, "global_frame_idx": 19360, "task_index": 17}, {"db_idx": 19361, "episode_idx": 87, "frame_idx": 115, "global_frame_idx": 19361, "task_index": 17}, {"db_idx": 19362, "episode_idx": 87, "frame_idx": 116, "global_frame_idx": 19362, "task_index": 17}, {"db_idx": 19363, "episode_idx": 87, "frame_idx": 117, "global_frame_idx": 19363, "task_index": 17}, {"db_idx": 19364, "episode_idx": 87, "frame_idx": 118, "global_frame_idx": 19364, "task_index": 17}, {"db_idx": 19365, "episode_idx": 87, "frame_idx": 119, "global_frame_idx": 19365, "task_index": 17}, {"db_idx": 19366, "episode_idx": 87, "frame_idx": 120, "global_frame_idx": 19366, "task_index": 17}, {"db_idx": 19367, "episode_idx": 87, "frame_idx": 121, "global_frame_idx": 19367, "task_index": 17}, {"db_idx": 19368, "episode_idx": 87, "frame_idx": 122, "global_frame_idx": 19368, "task_index": 17}, {"db_idx": 19369, "episode_idx": 87, "frame_idx": 123, "global_frame_idx": 19369, "task_index": 17}, {"db_idx": 19370, "episode_idx": 87, "frame_idx": 124, "global_frame_idx": 19370, "task_index": 17}, {"db_idx": 19371, "episode_idx": 87, "frame_idx": 125, "global_frame_idx": 19371, "task_index": 17}, {"db_idx": 19372, "episode_idx": 87, "frame_idx": 126, "global_frame_idx": 19372, "task_index": 17}, {"db_idx": 19373, "episode_idx": 87, "frame_idx": 127, "global_frame_idx": 19373, "task_index": 17}, {"db_idx": 19374, "episode_idx": 87, "frame_idx": 128, "global_frame_idx": 19374, "task_index": 17}, {"db_idx": 19375, "episode_idx": 87, "frame_idx": 129, "global_frame_idx": 19375, "task_index": 17}, {"db_idx": 19376, "episode_idx": 87, "frame_idx": 130, "global_frame_idx": 19376, "task_index": 
17}, {"db_idx": 19377, "episode_idx": 87, "frame_idx": 131, "global_frame_idx": 19377, "task_index": 17}, {"db_idx": 19378, "episode_idx": 87, "frame_idx": 132, "global_frame_idx": 19378, "task_index": 17}, {"db_idx": 19379, "episode_idx": 87, "frame_idx": 133, "global_frame_idx": 19379, "task_index": 17}, {"db_idx": 19380, "episode_idx": 87, "frame_idx": 134, "global_frame_idx": 19380, "task_index": 17}, {"db_idx": 19381, "episode_idx": 87, "frame_idx": 135, "global_frame_idx": 19381, "task_index": 17}, {"db_idx": 19382, "episode_idx": 87, "frame_idx": 136, "global_frame_idx": 19382, "task_index": 17}, {"db_idx": 19383, "episode_idx": 87, "frame_idx": 137, "global_frame_idx": 19383, "task_index": 17}, {"db_idx": 19384, "episode_idx": 87, "frame_idx": 138, "global_frame_idx": 19384, "task_index": 17}, {"db_idx": 19385, "episode_idx": 87, "frame_idx": 139, "global_frame_idx": 19385, "task_index": 17}, {"db_idx": 19386, "episode_idx": 87, "frame_idx": 140, "global_frame_idx": 19386, "task_index": 17}, {"db_idx": 19387, "episode_idx": 87, "frame_idx": 141, "global_frame_idx": 19387, "task_index": 17}, {"db_idx": 19388, "episode_idx": 87, "frame_idx": 142, "global_frame_idx": 19388, "task_index": 17}, {"db_idx": 19389, "episode_idx": 87, "frame_idx": 143, "global_frame_idx": 19389, "task_index": 17}, {"db_idx": 19390, "episode_idx": 87, "frame_idx": 144, "global_frame_idx": 19390, "task_index": 17}, {"db_idx": 19391, "episode_idx": 87, "frame_idx": 145, "global_frame_idx": 19391, "task_index": 17}, {"db_idx": 19392, "episode_idx": 87, "frame_idx": 146, "global_frame_idx": 19392, "task_index": 17}, {"db_idx": 19393, "episode_idx": 87, "frame_idx": 147, "global_frame_idx": 19393, "task_index": 17}, {"db_idx": 19394, "episode_idx": 87, "frame_idx": 148, "global_frame_idx": 19394, "task_index": 17}, {"db_idx": 19395, "episode_idx": 87, "frame_idx": 149, "global_frame_idx": 19395, "task_index": 17}, {"db_idx": 19396, "episode_idx": 87, "frame_idx": 150, "global_frame_idx": 
19396, "task_index": 17}, {"db_idx": 19397, "episode_idx": 87, "frame_idx": 151, "global_frame_idx": 19397, "task_index": 17}, {"db_idx": 19398, "episode_idx": 87, "frame_idx": 152, "global_frame_idx": 19398, "task_index": 17}, {"db_idx": 19399, "episode_idx": 87, "frame_idx": 153, "global_frame_idx": 19399, "task_index": 17}, {"db_idx": 19400, "episode_idx": 87, "frame_idx": 154, "global_frame_idx": 19400, "task_index": 17}, {"db_idx": 19401, "episode_idx": 87, "frame_idx": 155, "global_frame_idx": 19401, "task_index": 17}, {"db_idx": 19402, "episode_idx": 87, "frame_idx": 156, "global_frame_idx": 19402, "task_index": 17}, {"db_idx": 19403, "episode_idx": 87, "frame_idx": 157, "global_frame_idx": 19403, "task_index": 17}, {"db_idx": 19404, "episode_idx": 87, "frame_idx": 158, "global_frame_idx": 19404, "task_index": 17}, {"db_idx": 19405, "episode_idx": 87, "frame_idx": 159, "global_frame_idx": 19405, "task_index": 17}, {"db_idx": 19406, "episode_idx": 87, "frame_idx": 160, "global_frame_idx": 19406, "task_index": 17}, {"db_idx": 19407, "episode_idx": 88, "frame_idx": 0, "global_frame_idx": 19407, "task_index": 17}, {"db_idx": 19408, "episode_idx": 88, "frame_idx": 1, "global_frame_idx": 19408, "task_index": 17}, {"db_idx": 19409, "episode_idx": 88, "frame_idx": 2, "global_frame_idx": 19409, "task_index": 17}, {"db_idx": 19410, "episode_idx": 88, "frame_idx": 3, "global_frame_idx": 19410, "task_index": 17}, {"db_idx": 19411, "episode_idx": 88, "frame_idx": 4, "global_frame_idx": 19411, "task_index": 17}, {"db_idx": 19412, "episode_idx": 88, "frame_idx": 5, "global_frame_idx": 19412, "task_index": 17}, {"db_idx": 19413, "episode_idx": 88, "frame_idx": 6, "global_frame_idx": 19413, "task_index": 17}, {"db_idx": 19414, "episode_idx": 88, "frame_idx": 7, "global_frame_idx": 19414, "task_index": 17}, {"db_idx": 19415, "episode_idx": 88, "frame_idx": 8, "global_frame_idx": 19415, "task_index": 17}, {"db_idx": 19416, "episode_idx": 88, "frame_idx": 9, "global_frame_idx": 
19416, "task_index": 17}, {"db_idx": 19417, "episode_idx": 88, "frame_idx": 10, "global_frame_idx": 19417, "task_index": 17}, {"db_idx": 19418, "episode_idx": 88, "frame_idx": 11, "global_frame_idx": 19418, "task_index": 17}, {"db_idx": 19419, "episode_idx": 88, "frame_idx": 12, "global_frame_idx": 19419, "task_index": 17}, {"db_idx": 19420, "episode_idx": 88, "frame_idx": 13, "global_frame_idx": 19420, "task_index": 17}, {"db_idx": 19421, "episode_idx": 88, "frame_idx": 14, "global_frame_idx": 19421, "task_index": 17}, {"db_idx": 19422, "episode_idx": 88, "frame_idx": 15, "global_frame_idx": 19422, "task_index": 17}, {"db_idx": 19423, "episode_idx": 88, "frame_idx": 16, "global_frame_idx": 19423, "task_index": 17}, {"db_idx": 19424, "episode_idx": 88, "frame_idx": 17, "global_frame_idx": 19424, "task_index": 17}, {"db_idx": 19425, "episode_idx": 88, "frame_idx": 18, "global_frame_idx": 19425, "task_index": 17}, {"db_idx": 19426, "episode_idx": 88, "frame_idx": 19, "global_frame_idx": 19426, "task_index": 17}, {"db_idx": 19427, "episode_idx": 88, "frame_idx": 20, "global_frame_idx": 19427, "task_index": 17}, {"db_idx": 19428, "episode_idx": 88, "frame_idx": 21, "global_frame_idx": 19428, "task_index": 17}, {"db_idx": 19429, "episode_idx": 88, "frame_idx": 22, "global_frame_idx": 19429, "task_index": 17}, {"db_idx": 19430, "episode_idx": 88, "frame_idx": 23, "global_frame_idx": 19430, "task_index": 17}, {"db_idx": 19431, "episode_idx": 88, "frame_idx": 24, "global_frame_idx": 19431, "task_index": 17}, {"db_idx": 19432, "episode_idx": 88, "frame_idx": 25, "global_frame_idx": 19432, "task_index": 17}, {"db_idx": 19433, "episode_idx": 88, "frame_idx": 26, "global_frame_idx": 19433, "task_index": 17}, {"db_idx": 19434, "episode_idx": 88, "frame_idx": 27, "global_frame_idx": 19434, "task_index": 17}, {"db_idx": 19435, "episode_idx": 88, "frame_idx": 28, "global_frame_idx": 19435, "task_index": 17}, {"db_idx": 19436, "episode_idx": 88, "frame_idx": 29, "global_frame_idx": 
19436, "task_index": 17}, {"db_idx": 19437, "episode_idx": 88, "frame_idx": 30, "global_frame_idx": 19437, "task_index": 17}, {"db_idx": 19438, "episode_idx": 88, "frame_idx": 31, "global_frame_idx": 19438, "task_index": 17}, {"db_idx": 19439, "episode_idx": 88, "frame_idx": 32, "global_frame_idx": 19439, "task_index": 17}, {"db_idx": 19440, "episode_idx": 88, "frame_idx": 33, "global_frame_idx": 19440, "task_index": 17}, {"db_idx": 19441, "episode_idx": 88, "frame_idx": 34, "global_frame_idx": 19441, "task_index": 17}, {"db_idx": 19442, "episode_idx": 88, "frame_idx": 35, "global_frame_idx": 19442, "task_index": 17}, {"db_idx": 19443, "episode_idx": 88, "frame_idx": 36, "global_frame_idx": 19443, "task_index": 17}, {"db_idx": 19444, "episode_idx": 88, "frame_idx": 37, "global_frame_idx": 19444, "task_index": 17}, {"db_idx": 19445, "episode_idx": 88, "frame_idx": 38, "global_frame_idx": 19445, "task_index": 17}, {"db_idx": 19446, "episode_idx": 88, "frame_idx": 39, "global_frame_idx": 19446, "task_index": 17}, {"db_idx": 19447, "episode_idx": 88, "frame_idx": 40, "global_frame_idx": 19447, "task_index": 17}, {"db_idx": 19448, "episode_idx": 88, "frame_idx": 41, "global_frame_idx": 19448, "task_index": 17}, {"db_idx": 19449, "episode_idx": 88, "frame_idx": 42, "global_frame_idx": 19449, "task_index": 17}, {"db_idx": 19450, "episode_idx": 88, "frame_idx": 43, "global_frame_idx": 19450, "task_index": 17}, {"db_idx": 19451, "episode_idx": 88, "frame_idx": 44, "global_frame_idx": 19451, "task_index": 17}, {"db_idx": 19452, "episode_idx": 88, "frame_idx": 45, "global_frame_idx": 19452, "task_index": 17}, {"db_idx": 19453, "episode_idx": 88, "frame_idx": 46, "global_frame_idx": 19453, "task_index": 17}, {"db_idx": 19454, "episode_idx": 88, "frame_idx": 47, "global_frame_idx": 19454, "task_index": 17}, {"db_idx": 19455, "episode_idx": 88, "frame_idx": 48, "global_frame_idx": 19455, "task_index": 17}, {"db_idx": 19456, "episode_idx": 88, "frame_idx": 49, "global_frame_idx": 
19456, "task_index": 17}, {"db_idx": 19457, "episode_idx": 88, "frame_idx": 50, "global_frame_idx": 19457, "task_index": 17}, {"db_idx": 19458, "episode_idx": 88, "frame_idx": 51, "global_frame_idx": 19458, "task_index": 17}, {"db_idx": 19459, "episode_idx": 88, "frame_idx": 52, "global_frame_idx": 19459, "task_index": 17}, {"db_idx": 19460, "episode_idx": 88, "frame_idx": 53, "global_frame_idx": 19460, "task_index": 17}, {"db_idx": 19461, "episode_idx": 88, "frame_idx": 54, "global_frame_idx": 19461, "task_index": 17}, {"db_idx": 19462, "episode_idx": 88, "frame_idx": 55, "global_frame_idx": 19462, "task_index": 17}, {"db_idx": 19463, "episode_idx": 88, "frame_idx": 56, "global_frame_idx": 19463, "task_index": 17}, {"db_idx": 19464, "episode_idx": 88, "frame_idx": 57, "global_frame_idx": 19464, "task_index": 17}, {"db_idx": 19465, "episode_idx": 88, "frame_idx": 58, "global_frame_idx": 19465, "task_index": 17}, {"db_idx": 19466, "episode_idx": 88, "frame_idx": 59, "global_frame_idx": 19466, "task_index": 17}, {"db_idx": 19467, "episode_idx": 88, "frame_idx": 60, "global_frame_idx": 19467, "task_index": 17}, {"db_idx": 19468, "episode_idx": 88, "frame_idx": 61, "global_frame_idx": 19468, "task_index": 17}, {"db_idx": 19469, "episode_idx": 88, "frame_idx": 62, "global_frame_idx": 19469, "task_index": 17}, {"db_idx": 19470, "episode_idx": 88, "frame_idx": 63, "global_frame_idx": 19470, "task_index": 17}, {"db_idx": 19471, "episode_idx": 88, "frame_idx": 64, "global_frame_idx": 19471, "task_index": 17}, {"db_idx": 19472, "episode_idx": 88, "frame_idx": 65, "global_frame_idx": 19472, "task_index": 17}, {"db_idx": 19473, "episode_idx": 88, "frame_idx": 66, "global_frame_idx": 19473, "task_index": 17}, {"db_idx": 19474, "episode_idx": 88, "frame_idx": 67, "global_frame_idx": 19474, "task_index": 17}, {"db_idx": 19475, "episode_idx": 88, "frame_idx": 68, "global_frame_idx": 19475, "task_index": 17}, {"db_idx": 19476, "episode_idx": 88, "frame_idx": 69, "global_frame_idx": 
19476, "task_index": 17}, {"db_idx": 19477, "episode_idx": 88, "frame_idx": 70, "global_frame_idx": 19477, "task_index": 17}, {"db_idx": 19478, "episode_idx": 88, "frame_idx": 71, "global_frame_idx": 19478, "task_index": 17}, {"db_idx": 19479, "episode_idx": 88, "frame_idx": 72, "global_frame_idx": 19479, "task_index": 17}, {"db_idx": 19480, "episode_idx": 88, "frame_idx": 73, "global_frame_idx": 19480, "task_index": 17}, {"db_idx": 19481, "episode_idx": 88, "frame_idx": 74, "global_frame_idx": 19481, "task_index": 17}, {"db_idx": 19482, "episode_idx": 88, "frame_idx": 75, "global_frame_idx": 19482, "task_index": 17}, {"db_idx": 19483, "episode_idx": 88, "frame_idx": 76, "global_frame_idx": 19483, "task_index": 17}, {"db_idx": 19484, "episode_idx": 88, "frame_idx": 77, "global_frame_idx": 19484, "task_index": 17}, {"db_idx": 19485, "episode_idx": 88, "frame_idx": 78, "global_frame_idx": 19485, "task_index": 17}, {"db_idx": 19486, "episode_idx": 88, "frame_idx": 79, "global_frame_idx": 19486, "task_index": 17}, {"db_idx": 19487, "episode_idx": 88, "frame_idx": 80, "global_frame_idx": 19487, "task_index": 17}, {"db_idx": 19488, "episode_idx": 88, "frame_idx": 81, "global_frame_idx": 19488, "task_index": 17}, {"db_idx": 19489, "episode_idx": 88, "frame_idx": 82, "global_frame_idx": 19489, "task_index": 17}, {"db_idx": 19490, "episode_idx": 88, "frame_idx": 83, "global_frame_idx": 19490, "task_index": 17}, {"db_idx": 19491, "episode_idx": 88, "frame_idx": 84, "global_frame_idx": 19491, "task_index": 17}, {"db_idx": 19492, "episode_idx": 88, "frame_idx": 85, "global_frame_idx": 19492, "task_index": 17}, {"db_idx": 19493, "episode_idx": 88, "frame_idx": 86, "global_frame_idx": 19493, "task_index": 17}, {"db_idx": 19494, "episode_idx": 88, "frame_idx": 87, "global_frame_idx": 19494, "task_index": 17}, {"db_idx": 19495, "episode_idx": 88, "frame_idx": 88, "global_frame_idx": 19495, "task_index": 17}, {"db_idx": 19496, "episode_idx": 88, "frame_idx": 89, "global_frame_idx": 
19496, "task_index": 17}, {"db_idx": 19497, "episode_idx": 88, "frame_idx": 90, "global_frame_idx": 19497, "task_index": 17}, {"db_idx": 19498, "episode_idx": 88, "frame_idx": 91, "global_frame_idx": 19498, "task_index": 17}, {"db_idx": 19499, "episode_idx": 88, "frame_idx": 92, "global_frame_idx": 19499, "task_index": 17}, {"db_idx": 19500, "episode_idx": 88, "frame_idx": 93, "global_frame_idx": 19500, "task_index": 17}, {"db_idx": 19501, "episode_idx": 88, "frame_idx": 94, "global_frame_idx": 19501, "task_index": 17}, {"db_idx": 19502, "episode_idx": 88, "frame_idx": 95, "global_frame_idx": 19502, "task_index": 17}, {"db_idx": 19503, "episode_idx": 88, "frame_idx": 96, "global_frame_idx": 19503, "task_index": 17}, {"db_idx": 19504, "episode_idx": 88, "frame_idx": 97, "global_frame_idx": 19504, "task_index": 17}, {"db_idx": 19505, "episode_idx": 88, "frame_idx": 98, "global_frame_idx": 19505, "task_index": 17}, {"db_idx": 19506, "episode_idx": 88, "frame_idx": 99, "global_frame_idx": 19506, "task_index": 17}, {"db_idx": 19507, "episode_idx": 88, "frame_idx": 100, "global_frame_idx": 19507, "task_index": 17}, {"db_idx": 19508, "episode_idx": 88, "frame_idx": 101, "global_frame_idx": 19508, "task_index": 17}, {"db_idx": 19509, "episode_idx": 88, "frame_idx": 102, "global_frame_idx": 19509, "task_index": 17}, {"db_idx": 19510, "episode_idx": 88, "frame_idx": 103, "global_frame_idx": 19510, "task_index": 17}, {"db_idx": 19511, "episode_idx": 88, "frame_idx": 104, "global_frame_idx": 19511, "task_index": 17}, {"db_idx": 19512, "episode_idx": 88, "frame_idx": 105, "global_frame_idx": 19512, "task_index": 17}, {"db_idx": 19513, "episode_idx": 88, "frame_idx": 106, "global_frame_idx": 19513, "task_index": 17}, {"db_idx": 19514, "episode_idx": 88, "frame_idx": 107, "global_frame_idx": 19514, "task_index": 17}, {"db_idx": 19515, "episode_idx": 88, "frame_idx": 108, "global_frame_idx": 19515, "task_index": 17}, {"db_idx": 19516, "episode_idx": 88, "frame_idx": 109, 
"global_frame_idx": 19516, "task_index": 17}, {"db_idx": 19517, "episode_idx": 88, "frame_idx": 110, "global_frame_idx": 19517, "task_index": 17}, {"db_idx": 19518, "episode_idx": 88, "frame_idx": 111, "global_frame_idx": 19518, "task_index": 17}, {"db_idx": 19519, "episode_idx": 88, "frame_idx": 112, "global_frame_idx": 19519, "task_index": 17}, {"db_idx": 19520, "episode_idx": 88, "frame_idx": 113, "global_frame_idx": 19520, "task_index": 17}, {"db_idx": 19521, "episode_idx": 88, "frame_idx": 114, "global_frame_idx": 19521, "task_index": 17}, {"db_idx": 19522, "episode_idx": 88, "frame_idx": 115, "global_frame_idx": 19522, "task_index": 17}, {"db_idx": 19523, "episode_idx": 88, "frame_idx": 116, "global_frame_idx": 19523, "task_index": 17}, {"db_idx": 19524, "episode_idx": 88, "frame_idx": 117, "global_frame_idx": 19524, "task_index": 17}, {"db_idx": 19525, "episode_idx": 88, "frame_idx": 118, "global_frame_idx": 19525, "task_index": 17}, {"db_idx": 19526, "episode_idx": 88, "frame_idx": 119, "global_frame_idx": 19526, "task_index": 17}, {"db_idx": 19527, "episode_idx": 88, "frame_idx": 120, "global_frame_idx": 19527, "task_index": 17}, {"db_idx": 19528, "episode_idx": 88, "frame_idx": 121, "global_frame_idx": 19528, "task_index": 17}, {"db_idx": 19529, "episode_idx": 88, "frame_idx": 122, "global_frame_idx": 19529, "task_index": 17}, {"db_idx": 19530, "episode_idx": 88, "frame_idx": 123, "global_frame_idx": 19530, "task_index": 17}, {"db_idx": 19531, "episode_idx": 88, "frame_idx": 124, "global_frame_idx": 19531, "task_index": 17}, {"db_idx": 19532, "episode_idx": 88, "frame_idx": 125, "global_frame_idx": 19532, "task_index": 17}, {"db_idx": 19533, "episode_idx": 88, "frame_idx": 126, "global_frame_idx": 19533, "task_index": 17}, {"db_idx": 19534, "episode_idx": 88, "frame_idx": 127, "global_frame_idx": 19534, "task_index": 17}, {"db_idx": 19535, "episode_idx": 88, "frame_idx": 128, "global_frame_idx": 19535, "task_index": 17}, {"db_idx": 19536, "episode_idx": 
88, "frame_idx": 129, "global_frame_idx": 19536, "task_index": 17}, {"db_idx": 19537, "episode_idx": 88, "frame_idx": 130, "global_frame_idx": 19537, "task_index": 17}, {"db_idx": 19538, "episode_idx": 88, "frame_idx": 131, "global_frame_idx": 19538, "task_index": 17}, {"db_idx": 19539, "episode_idx": 88, "frame_idx": 132, "global_frame_idx": 19539, "task_index": 17}, {"db_idx": 19540, "episode_idx": 88, "frame_idx": 133, "global_frame_idx": 19540, "task_index": 17}, {"db_idx": 19541, "episode_idx": 88, "frame_idx": 134, "global_frame_idx": 19541, "task_index": 17}, {"db_idx": 19542, "episode_idx": 88, "frame_idx": 135, "global_frame_idx": 19542, "task_index": 17}, {"db_idx": 19543, "episode_idx": 88, "frame_idx": 136, "global_frame_idx": 19543, "task_index": 17}, {"db_idx": 19544, "episode_idx": 88, "frame_idx": 137, "global_frame_idx": 19544, "task_index": 17}, {"db_idx": 19545, "episode_idx": 88, "frame_idx": 138, "global_frame_idx": 19545, "task_index": 17}, {"db_idx": 19546, "episode_idx": 88, "frame_idx": 139, "global_frame_idx": 19546, "task_index": 17}, {"db_idx": 19547, "episode_idx": 88, "frame_idx": 140, "global_frame_idx": 19547, "task_index": 17}, {"db_idx": 19548, "episode_idx": 88, "frame_idx": 141, "global_frame_idx": 19548, "task_index": 17}, {"db_idx": 19549, "episode_idx": 88, "frame_idx": 142, "global_frame_idx": 19549, "task_index": 17}, {"db_idx": 19550, "episode_idx": 88, "frame_idx": 143, "global_frame_idx": 19550, "task_index": 17}, {"db_idx": 19551, "episode_idx": 88, "frame_idx": 144, "global_frame_idx": 19551, "task_index": 17}, {"db_idx": 19552, "episode_idx": 88, "frame_idx": 145, "global_frame_idx": 19552, "task_index": 17}, {"db_idx": 19553, "episode_idx": 88, "frame_idx": 146, "global_frame_idx": 19553, "task_index": 17}, {"db_idx": 19554, "episode_idx": 88, "frame_idx": 147, "global_frame_idx": 19554, "task_index": 17}, {"db_idx": 19555, "episode_idx": 88, "frame_idx": 148, "global_frame_idx": 19555, "task_index": 17}, {"db_idx": 
19556, "episode_idx": 88, "frame_idx": 149, "global_frame_idx": 19556, "task_index": 17}, {"db_idx": 19557, "episode_idx": 88, "frame_idx": 150, "global_frame_idx": 19557, "task_index": 17}, {"db_idx": 19558, "episode_idx": 88, "frame_idx": 151, "global_frame_idx": 19558, "task_index": 17}, {"db_idx": 19559, "episode_idx": 88, "frame_idx": 152, "global_frame_idx": 19559, "task_index": 17}, {"db_idx": 19560, "episode_idx": 88, "frame_idx": 153, "global_frame_idx": 19560, "task_index": 17}, {"db_idx": 19561, "episode_idx": 88, "frame_idx": 154, "global_frame_idx": 19561, "task_index": 17}, {"db_idx": 19562, "episode_idx": 88, "frame_idx": 155, "global_frame_idx": 19562, "task_index": 17}, {"db_idx": 19563, "episode_idx": 88, "frame_idx": 156, "global_frame_idx": 19563, "task_index": 17}, {"db_idx": 19564, "episode_idx": 88, "frame_idx": 157, "global_frame_idx": 19564, "task_index": 17}, {"db_idx": 19565, "episode_idx": 88, "frame_idx": 158, "global_frame_idx": 19565, "task_index": 17}, {"db_idx": 19566, "episode_idx": 88, "frame_idx": 159, "global_frame_idx": 19566, "task_index": 17}, {"db_idx": 19567, "episode_idx": 88, "frame_idx": 160, "global_frame_idx": 19567, "task_index": 17}, {"db_idx": 19568, "episode_idx": 88, "frame_idx": 161, "global_frame_idx": 19568, "task_index": 17}, {"db_idx": 19569, "episode_idx": 88, "frame_idx": 162, "global_frame_idx": 19569, "task_index": 17}, {"db_idx": 19570, "episode_idx": 88, "frame_idx": 163, "global_frame_idx": 19570, "task_index": 17}, {"db_idx": 19571, "episode_idx": 88, "frame_idx": 164, "global_frame_idx": 19571, "task_index": 17}, {"db_idx": 19572, "episode_idx": 89, "frame_idx": 0, "global_frame_idx": 19572, "task_index": 17}, {"db_idx": 19573, "episode_idx": 89, "frame_idx": 1, "global_frame_idx": 19573, "task_index": 17}, {"db_idx": 19574, "episode_idx": 89, "frame_idx": 2, "global_frame_idx": 19574, "task_index": 17}, {"db_idx": 19575, "episode_idx": 89, "frame_idx": 3, "global_frame_idx": 19575, "task_index": 
17}, {"db_idx": 19576, "episode_idx": 89, "frame_idx": 4, "global_frame_idx": 19576, "task_index": 17}, {"db_idx": 19577, "episode_idx": 89, "frame_idx": 5, "global_frame_idx": 19577, "task_index": 17}, {"db_idx": 19578, "episode_idx": 89, "frame_idx": 6, "global_frame_idx": 19578, "task_index": 17}, {"db_idx": 19579, "episode_idx": 89, "frame_idx": 7, "global_frame_idx": 19579, "task_index": 17}, {"db_idx": 19580, "episode_idx": 89, "frame_idx": 8, "global_frame_idx": 19580, "task_index": 17}, {"db_idx": 19581, "episode_idx": 89, "frame_idx": 9, "global_frame_idx": 19581, "task_index": 17}, {"db_idx": 19582, "episode_idx": 89, "frame_idx": 10, "global_frame_idx": 19582, "task_index": 17}, {"db_idx": 19583, "episode_idx": 89, "frame_idx": 11, "global_frame_idx": 19583, "task_index": 17}, {"db_idx": 19584, "episode_idx": 89, "frame_idx": 12, "global_frame_idx": 19584, "task_index": 17}, {"db_idx": 19585, "episode_idx": 89, "frame_idx": 13, "global_frame_idx": 19585, "task_index": 17}, {"db_idx": 19586, "episode_idx": 89, "frame_idx": 14, "global_frame_idx": 19586, "task_index": 17}, {"db_idx": 19587, "episode_idx": 89, "frame_idx": 15, "global_frame_idx": 19587, "task_index": 17}, {"db_idx": 19588, "episode_idx": 89, "frame_idx": 16, "global_frame_idx": 19588, "task_index": 17}, {"db_idx": 19589, "episode_idx": 89, "frame_idx": 17, "global_frame_idx": 19589, "task_index": 17}, {"db_idx": 19590, "episode_idx": 89, "frame_idx": 18, "global_frame_idx": 19590, "task_index": 17}, {"db_idx": 19591, "episode_idx": 89, "frame_idx": 19, "global_frame_idx": 19591, "task_index": 17}, {"db_idx": 19592, "episode_idx": 89, "frame_idx": 20, "global_frame_idx": 19592, "task_index": 17}, {"db_idx": 19593, "episode_idx": 89, "frame_idx": 21, "global_frame_idx": 19593, "task_index": 17}, {"db_idx": 19594, "episode_idx": 89, "frame_idx": 22, "global_frame_idx": 19594, "task_index": 17}, {"db_idx": 19595, "episode_idx": 89, "frame_idx": 23, "global_frame_idx": 19595, "task_index": 17}, 
{"db_idx": 19596, "episode_idx": 89, "frame_idx": 24, "global_frame_idx": 19596, "task_index": 17}, {"db_idx": 19597, "episode_idx": 89, "frame_idx": 25, "global_frame_idx": 19597, "task_index": 17}, {"db_idx": 19598, "episode_idx": 89, "frame_idx": 26, "global_frame_idx": 19598, "task_index": 17}, {"db_idx": 19599, "episode_idx": 89, "frame_idx": 27, "global_frame_idx": 19599, "task_index": 17}, {"db_idx": 19600, "episode_idx": 89, "frame_idx": 28, "global_frame_idx": 19600, "task_index": 17}, {"db_idx": 19601, "episode_idx": 89, "frame_idx": 29, "global_frame_idx": 19601, "task_index": 17}, {"db_idx": 19602, "episode_idx": 89, "frame_idx": 30, "global_frame_idx": 19602, "task_index": 17}, {"db_idx": 19603, "episode_idx": 89, "frame_idx": 31, "global_frame_idx": 19603, "task_index": 17}, {"db_idx": 19604, "episode_idx": 89, "frame_idx": 32, "global_frame_idx": 19604, "task_index": 17}, {"db_idx": 19605, "episode_idx": 89, "frame_idx": 33, "global_frame_idx": 19605, "task_index": 17}, {"db_idx": 19606, "episode_idx": 89, "frame_idx": 34, "global_frame_idx": 19606, "task_index": 17}, {"db_idx": 19607, "episode_idx": 89, "frame_idx": 35, "global_frame_idx": 19607, "task_index": 17}, {"db_idx": 19608, "episode_idx": 89, "frame_idx": 36, "global_frame_idx": 19608, "task_index": 17}, {"db_idx": 19609, "episode_idx": 89, "frame_idx": 37, "global_frame_idx": 19609, "task_index": 17}, {"db_idx": 19610, "episode_idx": 89, "frame_idx": 38, "global_frame_idx": 19610, "task_index": 17}, {"db_idx": 19611, "episode_idx": 89, "frame_idx": 39, "global_frame_idx": 19611, "task_index": 17}, {"db_idx": 19612, "episode_idx": 89, "frame_idx": 40, "global_frame_idx": 19612, "task_index": 17}, {"db_idx": 19613, "episode_idx": 89, "frame_idx": 41, "global_frame_idx": 19613, "task_index": 17}, {"db_idx": 19614, "episode_idx": 89, "frame_idx": 42, "global_frame_idx": 19614, "task_index": 17}, {"db_idx": 19615, "episode_idx": 89, "frame_idx": 43, "global_frame_idx": 19615, "task_index": 17}, 
{"db_idx": 19616, "episode_idx": 89, "frame_idx": 44, "global_frame_idx": 19616, "task_index": 17}, {"db_idx": 19617, "episode_idx": 89, "frame_idx": 45, "global_frame_idx": 19617, "task_index": 17}, {"db_idx": 19618, "episode_idx": 89, "frame_idx": 46, "global_frame_idx": 19618, "task_index": 17}, {"db_idx": 19619, "episode_idx": 89, "frame_idx": 47, "global_frame_idx": 19619, "task_index": 17}, {"db_idx": 19620, "episode_idx": 89, "frame_idx": 48, "global_frame_idx": 19620, "task_index": 17}, {"db_idx": 19621, "episode_idx": 89, "frame_idx": 49, "global_frame_idx": 19621, "task_index": 17}, {"db_idx": 19622, "episode_idx": 89, "frame_idx": 50, "global_frame_idx": 19622, "task_index": 17}, {"db_idx": 19623, "episode_idx": 89, "frame_idx": 51, "global_frame_idx": 19623, "task_index": 17}, {"db_idx": 19624, "episode_idx": 89, "frame_idx": 52, "global_frame_idx": 19624, "task_index": 17}, {"db_idx": 19625, "episode_idx": 89, "frame_idx": 53, "global_frame_idx": 19625, "task_index": 17}, {"db_idx": 19626, "episode_idx": 89, "frame_idx": 54, "global_frame_idx": 19626, "task_index": 17}, {"db_idx": 19627, "episode_idx": 89, "frame_idx": 55, "global_frame_idx": 19627, "task_index": 17}, {"db_idx": 19628, "episode_idx": 89, "frame_idx": 56, "global_frame_idx": 19628, "task_index": 17}, {"db_idx": 19629, "episode_idx": 89, "frame_idx": 57, "global_frame_idx": 19629, "task_index": 17}, {"db_idx": 19630, "episode_idx": 89, "frame_idx": 58, "global_frame_idx": 19630, "task_index": 17}, {"db_idx": 19631, "episode_idx": 89, "frame_idx": 59, "global_frame_idx": 19631, "task_index": 17}, {"db_idx": 19632, "episode_idx": 89, "frame_idx": 60, "global_frame_idx": 19632, "task_index": 17}, {"db_idx": 19633, "episode_idx": 89, "frame_idx": 61, "global_frame_idx": 19633, "task_index": 17}, {"db_idx": 19634, "episode_idx": 89, "frame_idx": 62, "global_frame_idx": 19634, "task_index": 17}, {"db_idx": 19635, "episode_idx": 89, "frame_idx": 63, "global_frame_idx": 19635, "task_index": 17}, 
{"db_idx": 19636, "episode_idx": 89, "frame_idx": 64, "global_frame_idx": 19636, "task_index": 17}, {"db_idx": 19637, "episode_idx": 89, "frame_idx": 65, "global_frame_idx": 19637, "task_index": 17}, {"db_idx": 19638, "episode_idx": 89, "frame_idx": 66, "global_frame_idx": 19638, "task_index": 17}, {"db_idx": 19639, "episode_idx": 89, "frame_idx": 67, "global_frame_idx": 19639, "task_index": 17}, {"db_idx": 19640, "episode_idx": 89, "frame_idx": 68, "global_frame_idx": 19640, "task_index": 17}, {"db_idx": 19641, "episode_idx": 89, "frame_idx": 69, "global_frame_idx": 19641, "task_index": 17}, {"db_idx": 19642, "episode_idx": 89, "frame_idx": 70, "global_frame_idx": 19642, "task_index": 17}, {"db_idx": 19643, "episode_idx": 89, "frame_idx": 71, "global_frame_idx": 19643, "task_index": 17}, {"db_idx": 19644, "episode_idx": 89, "frame_idx": 72, "global_frame_idx": 19644, "task_index": 17}, {"db_idx": 19645, "episode_idx": 89, "frame_idx": 73, "global_frame_idx": 19645, "task_index": 17}, {"db_idx": 19646, "episode_idx": 89, "frame_idx": 74, "global_frame_idx": 19646, "task_index": 17}, {"db_idx": 19647, "episode_idx": 89, "frame_idx": 75, "global_frame_idx": 19647, "task_index": 17}, {"db_idx": 19648, "episode_idx": 89, "frame_idx": 76, "global_frame_idx": 19648, "task_index": 17}, {"db_idx": 19649, "episode_idx": 89, "frame_idx": 77, "global_frame_idx": 19649, "task_index": 17}, {"db_idx": 19650, "episode_idx": 89, "frame_idx": 78, "global_frame_idx": 19650, "task_index": 17}, {"db_idx": 19651, "episode_idx": 89, "frame_idx": 79, "global_frame_idx": 19651, "task_index": 17}, {"db_idx": 19652, "episode_idx": 89, "frame_idx": 80, "global_frame_idx": 19652, "task_index": 17}, {"db_idx": 19653, "episode_idx": 89, "frame_idx": 81, "global_frame_idx": 19653, "task_index": 17}, {"db_idx": 19654, "episode_idx": 89, "frame_idx": 82, "global_frame_idx": 19654, "task_index": 17}, {"db_idx": 19655, "episode_idx": 89, "frame_idx": 83, "global_frame_idx": 19655, "task_index": 17}, 
{"db_idx": 19656, "episode_idx": 89, "frame_idx": 84, "global_frame_idx": 19656, "task_index": 17}, {"db_idx": 19657, "episode_idx": 89, "frame_idx": 85, "global_frame_idx": 19657, "task_index": 17}, {"db_idx": 19658, "episode_idx": 89, "frame_idx": 86, "global_frame_idx": 19658, "task_index": 17}, {"db_idx": 19659, "episode_idx": 89, "frame_idx": 87, "global_frame_idx": 19659, "task_index": 17}, {"db_idx": 19660, "episode_idx": 89, "frame_idx": 88, "global_frame_idx": 19660, "task_index": 17}, {"db_idx": 19661, "episode_idx": 89, "frame_idx": 89, "global_frame_idx": 19661, "task_index": 17}, {"db_idx": 19662, "episode_idx": 89, "frame_idx": 90, "global_frame_idx": 19662, "task_index": 17}, {"db_idx": 19663, "episode_idx": 89, "frame_idx": 91, "global_frame_idx": 19663, "task_index": 17}, {"db_idx": 19664, "episode_idx": 89, "frame_idx": 92, "global_frame_idx": 19664, "task_index": 17}, {"db_idx": 19665, "episode_idx": 89, "frame_idx": 93, "global_frame_idx": 19665, "task_index": 17}, {"db_idx": 19666, "episode_idx": 89, "frame_idx": 94, "global_frame_idx": 19666, "task_index": 17}, {"db_idx": 19667, "episode_idx": 89, "frame_idx": 95, "global_frame_idx": 19667, "task_index": 17}, {"db_idx": 19668, "episode_idx": 89, "frame_idx": 96, "global_frame_idx": 19668, "task_index": 17}, {"db_idx": 19669, "episode_idx": 89, "frame_idx": 97, "global_frame_idx": 19669, "task_index": 17}, {"db_idx": 19670, "episode_idx": 89, "frame_idx": 98, "global_frame_idx": 19670, "task_index": 17}, {"db_idx": 19671, "episode_idx": 89, "frame_idx": 99, "global_frame_idx": 19671, "task_index": 17}, {"db_idx": 19672, "episode_idx": 89, "frame_idx": 100, "global_frame_idx": 19672, "task_index": 17}, {"db_idx": 19673, "episode_idx": 89, "frame_idx": 101, "global_frame_idx": 19673, "task_index": 17}, {"db_idx": 19674, "episode_idx": 89, "frame_idx": 102, "global_frame_idx": 19674, "task_index": 17}, {"db_idx": 19675, "episode_idx": 89, "frame_idx": 103, "global_frame_idx": 19675, "task_index": 
17}, {"db_idx": 19676, "episode_idx": 89, "frame_idx": 104, "global_frame_idx": 19676, "task_index": 17}, {"db_idx": 19677, "episode_idx": 89, "frame_idx": 105, "global_frame_idx": 19677, "task_index": 17}, {"db_idx": 19678, "episode_idx": 89, "frame_idx": 106, "global_frame_idx": 19678, "task_index": 17}, {"db_idx": 19679, "episode_idx": 89, "frame_idx": 107, "global_frame_idx": 19679, "task_index": 17}, {"db_idx": 19680, "episode_idx": 89, "frame_idx": 108, "global_frame_idx": 19680, "task_index": 17}, {"db_idx": 19681, "episode_idx": 89, "frame_idx": 109, "global_frame_idx": 19681, "task_index": 17}, {"db_idx": 19682, "episode_idx": 89, "frame_idx": 110, "global_frame_idx": 19682, "task_index": 17}, {"db_idx": 19683, "episode_idx": 89, "frame_idx": 111, "global_frame_idx": 19683, "task_index": 17}, {"db_idx": 19684, "episode_idx": 89, "frame_idx": 112, "global_frame_idx": 19684, "task_index": 17}, {"db_idx": 19685, "episode_idx": 89, "frame_idx": 113, "global_frame_idx": 19685, "task_index": 17}, {"db_idx": 19686, "episode_idx": 89, "frame_idx": 114, "global_frame_idx": 19686, "task_index": 17}, {"db_idx": 19687, "episode_idx": 89, "frame_idx": 115, "global_frame_idx": 19687, "task_index": 17}, {"db_idx": 19688, "episode_idx": 89, "frame_idx": 116, "global_frame_idx": 19688, "task_index": 17}, {"db_idx": 19689, "episode_idx": 89, "frame_idx": 117, "global_frame_idx": 19689, "task_index": 17}, {"db_idx": 19690, "episode_idx": 89, "frame_idx": 118, "global_frame_idx": 19690, "task_index": 17}, {"db_idx": 19691, "episode_idx": 89, "frame_idx": 119, "global_frame_idx": 19691, "task_index": 17}, {"db_idx": 19692, "episode_idx": 89, "frame_idx": 120, "global_frame_idx": 19692, "task_index": 17}, {"db_idx": 19693, "episode_idx": 89, "frame_idx": 121, "global_frame_idx": 19693, "task_index": 17}, {"db_idx": 19694, "episode_idx": 89, "frame_idx": 122, "global_frame_idx": 19694, "task_index": 17}, {"db_idx": 19695, "episode_idx": 89, "frame_idx": 123, "global_frame_idx": 
19695, "task_index": 17}, {"db_idx": 19696, "episode_idx": 89, "frame_idx": 124, "global_frame_idx": 19696, "task_index": 17}, {"db_idx": 19697, "episode_idx": 89, "frame_idx": 125, "global_frame_idx": 19697, "task_index": 17}, {"db_idx": 19698, "episode_idx": 89, "frame_idx": 126, "global_frame_idx": 19698, "task_index": 17}, {"db_idx": 19699, "episode_idx": 89, "frame_idx": 127, "global_frame_idx": 19699, "task_index": 17}, {"db_idx": 19700, "episode_idx": 89, "frame_idx": 128, "global_frame_idx": 19700, "task_index": 17}, {"db_idx": 19701, "episode_idx": 89, "frame_idx": 129, "global_frame_idx": 19701, "task_index": 17}, {"db_idx": 19702, "episode_idx": 89, "frame_idx": 130, "global_frame_idx": 19702, "task_index": 17}, {"db_idx": 19703, "episode_idx": 89, "frame_idx": 131, "global_frame_idx": 19703, "task_index": 17}, {"db_idx": 19704, "episode_idx": 89, "frame_idx": 132, "global_frame_idx": 19704, "task_index": 17}, {"db_idx": 19705, "episode_idx": 89, "frame_idx": 133, "global_frame_idx": 19705, "task_index": 17}, {"db_idx": 19706, "episode_idx": 89, "frame_idx": 134, "global_frame_idx": 19706, "task_index": 17}, {"db_idx": 19707, "episode_idx": 89, "frame_idx": 135, "global_frame_idx": 19707, "task_index": 17}, {"db_idx": 19708, "episode_idx": 89, "frame_idx": 136, "global_frame_idx": 19708, "task_index": 17}, {"db_idx": 19709, "episode_idx": 89, "frame_idx": 137, "global_frame_idx": 19709, "task_index": 17}, {"db_idx": 19710, "episode_idx": 89, "frame_idx": 138, "global_frame_idx": 19710, "task_index": 17}, {"db_idx": 19711, "episode_idx": 89, "frame_idx": 139, "global_frame_idx": 19711, "task_index": 17}, {"db_idx": 19712, "episode_idx": 89, "frame_idx": 140, "global_frame_idx": 19712, "task_index": 17}, {"db_idx": 19713, "episode_idx": 89, "frame_idx": 141, "global_frame_idx": 19713, "task_index": 17}, {"db_idx": 19714, "episode_idx": 89, "frame_idx": 142, "global_frame_idx": 19714, "task_index": 17}, {"db_idx": 19715, "episode_idx": 89, "frame_idx": 143, 
"global_frame_idx": 19715, "task_index": 17}, {"db_idx": 19716, "episode_idx": 90, "frame_idx": 0, "global_frame_idx": 19716, "task_index": 18}, {"db_idx": 19717, "episode_idx": 90, "frame_idx": 1, "global_frame_idx": 19717, "task_index": 18}, {"db_idx": 19718, "episode_idx": 90, "frame_idx": 2, "global_frame_idx": 19718, "task_index": 18}, {"db_idx": 19719, "episode_idx": 90, "frame_idx": 3, "global_frame_idx": 19719, "task_index": 18}, {"db_idx": 19720, "episode_idx": 90, "frame_idx": 4, "global_frame_idx": 19720, "task_index": 18}, {"db_idx": 19721, "episode_idx": 90, "frame_idx": 5, "global_frame_idx": 19721, "task_index": 18}, {"db_idx": 19722, "episode_idx": 90, "frame_idx": 6, "global_frame_idx": 19722, "task_index": 18}, {"db_idx": 19723, "episode_idx": 90, "frame_idx": 7, "global_frame_idx": 19723, "task_index": 18}, {"db_idx": 19724, "episode_idx": 90, "frame_idx": 8, "global_frame_idx": 19724, "task_index": 18}, {"db_idx": 19725, "episode_idx": 90, "frame_idx": 9, "global_frame_idx": 19725, "task_index": 18}, {"db_idx": 19726, "episode_idx": 90, "frame_idx": 10, "global_frame_idx": 19726, "task_index": 18}, {"db_idx": 19727, "episode_idx": 90, "frame_idx": 11, "global_frame_idx": 19727, "task_index": 18}, {"db_idx": 19728, "episode_idx": 90, "frame_idx": 12, "global_frame_idx": 19728, "task_index": 18}, {"db_idx": 19729, "episode_idx": 90, "frame_idx": 13, "global_frame_idx": 19729, "task_index": 18}, {"db_idx": 19730, "episode_idx": 90, "frame_idx": 14, "global_frame_idx": 19730, "task_index": 18}, {"db_idx": 19731, "episode_idx": 90, "frame_idx": 15, "global_frame_idx": 19731, "task_index": 18}, {"db_idx": 19732, "episode_idx": 90, "frame_idx": 16, "global_frame_idx": 19732, "task_index": 18}, {"db_idx": 19733, "episode_idx": 90, "frame_idx": 17, "global_frame_idx": 19733, "task_index": 18}, {"db_idx": 19734, "episode_idx": 90, "frame_idx": 18, "global_frame_idx": 19734, "task_index": 18}, {"db_idx": 19735, "episode_idx": 90, "frame_idx": 19, 
"global_frame_idx": 19735, "task_index": 18}, {"db_idx": 19736, "episode_idx": 90, "frame_idx": 20, "global_frame_idx": 19736, "task_index": 18}, {"db_idx": 19737, "episode_idx": 90, "frame_idx": 21, "global_frame_idx": 19737, "task_index": 18}, {"db_idx": 19738, "episode_idx": 90, "frame_idx": 22, "global_frame_idx": 19738, "task_index": 18}, {"db_idx": 19739, "episode_idx": 90, "frame_idx": 23, "global_frame_idx": 19739, "task_index": 18}, {"db_idx": 19740, "episode_idx": 90, "frame_idx": 24, "global_frame_idx": 19740, "task_index": 18}, {"db_idx": 19741, "episode_idx": 90, "frame_idx": 25, "global_frame_idx": 19741, "task_index": 18}, {"db_idx": 19742, "episode_idx": 90, "frame_idx": 26, "global_frame_idx": 19742, "task_index": 18}, {"db_idx": 19743, "episode_idx": 90, "frame_idx": 27, "global_frame_idx": 19743, "task_index": 18}, {"db_idx": 19744, "episode_idx": 90, "frame_idx": 28, "global_frame_idx": 19744, "task_index": 18}, {"db_idx": 19745, "episode_idx": 90, "frame_idx": 29, "global_frame_idx": 19745, "task_index": 18}, {"db_idx": 19746, "episode_idx": 90, "frame_idx": 30, "global_frame_idx": 19746, "task_index": 18}, {"db_idx": 19747, "episode_idx": 90, "frame_idx": 31, "global_frame_idx": 19747, "task_index": 18}, {"db_idx": 19748, "episode_idx": 90, "frame_idx": 32, "global_frame_idx": 19748, "task_index": 18}, {"db_idx": 19749, "episode_idx": 90, "frame_idx": 33, "global_frame_idx": 19749, "task_index": 18}, {"db_idx": 19750, "episode_idx": 90, "frame_idx": 34, "global_frame_idx": 19750, "task_index": 18}, {"db_idx": 19751, "episode_idx": 90, "frame_idx": 35, "global_frame_idx": 19751, "task_index": 18}, {"db_idx": 19752, "episode_idx": 90, "frame_idx": 36, "global_frame_idx": 19752, "task_index": 18}, {"db_idx": 19753, "episode_idx": 90, "frame_idx": 37, "global_frame_idx": 19753, "task_index": 18}, {"db_idx": 19754, "episode_idx": 90, "frame_idx": 38, "global_frame_idx": 19754, "task_index": 18}, {"db_idx": 19755, "episode_idx": 90, "frame_idx": 39, 
"global_frame_idx": 19755, "task_index": 18}, {"db_idx": 19756, "episode_idx": 90, "frame_idx": 40, "global_frame_idx": 19756, "task_index": 18}, {"db_idx": 19757, "episode_idx": 90, "frame_idx": 41, "global_frame_idx": 19757, "task_index": 18}, {"db_idx": 19758, "episode_idx": 90, "frame_idx": 42, "global_frame_idx": 19758, "task_index": 18}, {"db_idx": 19759, "episode_idx": 90, "frame_idx": 43, "global_frame_idx": 19759, "task_index": 18}, {"db_idx": 19760, "episode_idx": 90, "frame_idx": 44, "global_frame_idx": 19760, "task_index": 18}, {"db_idx": 19761, "episode_idx": 90, "frame_idx": 45, "global_frame_idx": 19761, "task_index": 18}, {"db_idx": 19762, "episode_idx": 90, "frame_idx": 46, "global_frame_idx": 19762, "task_index": 18}, {"db_idx": 19763, "episode_idx": 90, "frame_idx": 47, "global_frame_idx": 19763, "task_index": 18}, {"db_idx": 19764, "episode_idx": 90, "frame_idx": 48, "global_frame_idx": 19764, "task_index": 18}, {"db_idx": 19765, "episode_idx": 90, "frame_idx": 49, "global_frame_idx": 19765, "task_index": 18}, {"db_idx": 19766, "episode_idx": 90, "frame_idx": 50, "global_frame_idx": 19766, "task_index": 18}, {"db_idx": 19767, "episode_idx": 90, "frame_idx": 51, "global_frame_idx": 19767, "task_index": 18}, {"db_idx": 19768, "episode_idx": 90, "frame_idx": 52, "global_frame_idx": 19768, "task_index": 18}, {"db_idx": 19769, "episode_idx": 90, "frame_idx": 53, "global_frame_idx": 19769, "task_index": 18}, {"db_idx": 19770, "episode_idx": 90, "frame_idx": 54, "global_frame_idx": 19770, "task_index": 18}, {"db_idx": 19771, "episode_idx": 90, "frame_idx": 55, "global_frame_idx": 19771, "task_index": 18}, {"db_idx": 19772, "episode_idx": 90, "frame_idx": 56, "global_frame_idx": 19772, "task_index": 18}, {"db_idx": 19773, "episode_idx": 90, "frame_idx": 57, "global_frame_idx": 19773, "task_index": 18}, {"db_idx": 19774, "episode_idx": 90, "frame_idx": 58, "global_frame_idx": 19774, "task_index": 18}, {"db_idx": 19775, "episode_idx": 90, "frame_idx": 59, 
"global_frame_idx": 19775, "task_index": 18}, {"db_idx": 19776, "episode_idx": 90, "frame_idx": 60, "global_frame_idx": 19776, "task_index": 18}, {"db_idx": 19777, "episode_idx": 90, "frame_idx": 61, "global_frame_idx": 19777, "task_index": 18}, {"db_idx": 19778, "episode_idx": 90, "frame_idx": 62, "global_frame_idx": 19778, "task_index": 18}, {"db_idx": 19779, "episode_idx": 90, "frame_idx": 63, "global_frame_idx": 19779, "task_index": 18}, {"db_idx": 19780, "episode_idx": 90, "frame_idx": 64, "global_frame_idx": 19780, "task_index": 18}, {"db_idx": 19781, "episode_idx": 90, "frame_idx": 65, "global_frame_idx": 19781, "task_index": 18}, {"db_idx": 19782, "episode_idx": 90, "frame_idx": 66, "global_frame_idx": 19782, "task_index": 18}, {"db_idx": 19783, "episode_idx": 90, "frame_idx": 67, "global_frame_idx": 19783, "task_index": 18}, {"db_idx": 19784, "episode_idx": 90, "frame_idx": 68, "global_frame_idx": 19784, "task_index": 18}, {"db_idx": 19785, "episode_idx": 90, "frame_idx": 69, "global_frame_idx": 19785, "task_index": 18}, {"db_idx": 19786, "episode_idx": 90, "frame_idx": 70, "global_frame_idx": 19786, "task_index": 18}, {"db_idx": 19787, "episode_idx": 90, "frame_idx": 71, "global_frame_idx": 19787, "task_index": 18}, {"db_idx": 19788, "episode_idx": 90, "frame_idx": 72, "global_frame_idx": 19788, "task_index": 18}, {"db_idx": 19789, "episode_idx": 90, "frame_idx": 73, "global_frame_idx": 19789, "task_index": 18}, {"db_idx": 19790, "episode_idx": 90, "frame_idx": 74, "global_frame_idx": 19790, "task_index": 18}, {"db_idx": 19791, "episode_idx": 90, "frame_idx": 75, "global_frame_idx": 19791, "task_index": 18}, {"db_idx": 19792, "episode_idx": 90, "frame_idx": 76, "global_frame_idx": 19792, "task_index": 18}, {"db_idx": 19793, "episode_idx": 90, "frame_idx": 77, "global_frame_idx": 19793, "task_index": 18}, {"db_idx": 19794, "episode_idx": 90, "frame_idx": 78, "global_frame_idx": 19794, "task_index": 18}, {"db_idx": 19795, "episode_idx": 90, "frame_idx": 79, 
"global_frame_idx": 19795, "task_index": 18}, {"db_idx": 19796, "episode_idx": 90, "frame_idx": 80, "global_frame_idx": 19796, "task_index": 18}, {"db_idx": 19797, "episode_idx": 90, "frame_idx": 81, "global_frame_idx": 19797, "task_index": 18}, {"db_idx": 19798, "episode_idx": 90, "frame_idx": 82, "global_frame_idx": 19798, "task_index": 18}, {"db_idx": 19799, "episode_idx": 90, "frame_idx": 83, "global_frame_idx": 19799, "task_index": 18}, {"db_idx": 19800, "episode_idx": 90, "frame_idx": 84, "global_frame_idx": 19800, "task_index": 18}, {"db_idx": 19801, "episode_idx": 90, "frame_idx": 85, "global_frame_idx": 19801, "task_index": 18}, {"db_idx": 19802, "episode_idx": 90, "frame_idx": 86, "global_frame_idx": 19802, "task_index": 18}, {"db_idx": 19803, "episode_idx": 90, "frame_idx": 87, "global_frame_idx": 19803, "task_index": 18}, {"db_idx": 19804, "episode_idx": 90, "frame_idx": 88, "global_frame_idx": 19804, "task_index": 18}, {"db_idx": 19805, "episode_idx": 90, "frame_idx": 89, "global_frame_idx": 19805, "task_index": 18}, {"db_idx": 19806, "episode_idx": 90, "frame_idx": 90, "global_frame_idx": 19806, "task_index": 18}, {"db_idx": 19807, "episode_idx": 90, "frame_idx": 91, "global_frame_idx": 19807, "task_index": 18}, {"db_idx": 19808, "episode_idx": 90, "frame_idx": 92, "global_frame_idx": 19808, "task_index": 18}, {"db_idx": 19809, "episode_idx": 90, "frame_idx": 93, "global_frame_idx": 19809, "task_index": 18}, {"db_idx": 19810, "episode_idx": 90, "frame_idx": 94, "global_frame_idx": 19810, "task_index": 18}, {"db_idx": 19811, "episode_idx": 90, "frame_idx": 95, "global_frame_idx": 19811, "task_index": 18}, {"db_idx": 19812, "episode_idx": 90, "frame_idx": 96, "global_frame_idx": 19812, "task_index": 18}, {"db_idx": 19813, "episode_idx": 90, "frame_idx": 97, "global_frame_idx": 19813, "task_index": 18}, {"db_idx": 19814, "episode_idx": 90, "frame_idx": 98, "global_frame_idx": 19814, "task_index": 18}, {"db_idx": 19815, "episode_idx": 90, "frame_idx": 99, 
"global_frame_idx": 19815, "task_index": 18}, {"db_idx": 19816, "episode_idx": 90, "frame_idx": 100, "global_frame_idx": 19816, "task_index": 18}, {"db_idx": 19817, "episode_idx": 90, "frame_idx": 101, "global_frame_idx": 19817, "task_index": 18}, {"db_idx": 19818, "episode_idx": 90, "frame_idx": 102, "global_frame_idx": 19818, "task_index": 18}, {"db_idx": 19819, "episode_idx": 90, "frame_idx": 103, "global_frame_idx": 19819, "task_index": 18}, {"db_idx": 19820, "episode_idx": 90, "frame_idx": 104, "global_frame_idx": 19820, "task_index": 18}, {"db_idx": 19821, "episode_idx": 90, "frame_idx": 105, "global_frame_idx": 19821, "task_index": 18}, {"db_idx": 19822, "episode_idx": 90, "frame_idx": 106, "global_frame_idx": 19822, "task_index": 18}, {"db_idx": 19823, "episode_idx": 90, "frame_idx": 107, "global_frame_idx": 19823, "task_index": 18}, {"db_idx": 19824, "episode_idx": 90, "frame_idx": 108, "global_frame_idx": 19824, "task_index": 18}, {"db_idx": 19825, "episode_idx": 90, "frame_idx": 109, "global_frame_idx": 19825, "task_index": 18}, {"db_idx": 19826, "episode_idx": 90, "frame_idx": 110, "global_frame_idx": 19826, "task_index": 18}, {"db_idx": 19827, "episode_idx": 90, "frame_idx": 111, "global_frame_idx": 19827, "task_index": 18}, {"db_idx": 19828, "episode_idx": 90, "frame_idx": 112, "global_frame_idx": 19828, "task_index": 18}, {"db_idx": 19829, "episode_idx": 90, "frame_idx": 113, "global_frame_idx": 19829, "task_index": 18}, {"db_idx": 19830, "episode_idx": 90, "frame_idx": 114, "global_frame_idx": 19830, "task_index": 18}, {"db_idx": 19831, "episode_idx": 90, "frame_idx": 115, "global_frame_idx": 19831, "task_index": 18}, {"db_idx": 19832, "episode_idx": 90, "frame_idx": 116, "global_frame_idx": 19832, "task_index": 18}, {"db_idx": 19833, "episode_idx": 90, "frame_idx": 117, "global_frame_idx": 19833, "task_index": 18}, {"db_idx": 19834, "episode_idx": 90, "frame_idx": 118, "global_frame_idx": 19834, "task_index": 18}, {"db_idx": 19835, "episode_idx": 
90, "frame_idx": 119, "global_frame_idx": 19835, "task_index": 18}, {"db_idx": 19836, "episode_idx": 90, "frame_idx": 120, "global_frame_idx": 19836, "task_index": 18}, {"db_idx": 19837, "episode_idx": 90, "frame_idx": 121, "global_frame_idx": 19837, "task_index": 18}, {"db_idx": 19838, "episode_idx": 90, "frame_idx": 122, "global_frame_idx": 19838, "task_index": 18}, {"db_idx": 19839, "episode_idx": 90, "frame_idx": 123, "global_frame_idx": 19839, "task_index": 18}, {"db_idx": 19840, "episode_idx": 90, "frame_idx": 124, "global_frame_idx": 19840, "task_index": 18}, {"db_idx": 19841, "episode_idx": 90, "frame_idx": 125, "global_frame_idx": 19841, "task_index": 18}, {"db_idx": 19842, "episode_idx": 90, "frame_idx": 126, "global_frame_idx": 19842, "task_index": 18}, {"db_idx": 19843, "episode_idx": 90, "frame_idx": 127, "global_frame_idx": 19843, "task_index": 18}, {"db_idx": 19844, "episode_idx": 90, "frame_idx": 128, "global_frame_idx": 19844, "task_index": 18}, {"db_idx": 19845, "episode_idx": 90, "frame_idx": 129, "global_frame_idx": 19845, "task_index": 18}, {"db_idx": 19846, "episode_idx": 90, "frame_idx": 130, "global_frame_idx": 19846, "task_index": 18}, {"db_idx": 19847, "episode_idx": 90, "frame_idx": 131, "global_frame_idx": 19847, "task_index": 18}, {"db_idx": 19848, "episode_idx": 90, "frame_idx": 132, "global_frame_idx": 19848, "task_index": 18}, {"db_idx": 19849, "episode_idx": 90, "frame_idx": 133, "global_frame_idx": 19849, "task_index": 18}, {"db_idx": 19850, "episode_idx": 90, "frame_idx": 134, "global_frame_idx": 19850, "task_index": 18}, {"db_idx": 19851, "episode_idx": 90, "frame_idx": 135, "global_frame_idx": 19851, "task_index": 18}, {"db_idx": 19852, "episode_idx": 90, "frame_idx": 136, "global_frame_idx": 19852, "task_index": 18}, {"db_idx": 19853, "episode_idx": 90, "frame_idx": 137, "global_frame_idx": 19853, "task_index": 18}, {"db_idx": 19854, "episode_idx": 90, "frame_idx": 138, "global_frame_idx": 19854, "task_index": 18}, {"db_idx": 
19855, "episode_idx": 90, "frame_idx": 139, "global_frame_idx": 19855, "task_index": 18}, {"db_idx": 19856, "episode_idx": 90, "frame_idx": 140, "global_frame_idx": 19856, "task_index": 18}, {"db_idx": 19857, "episode_idx": 90, "frame_idx": 141, "global_frame_idx": 19857, "task_index": 18}, {"db_idx": 19858, "episode_idx": 91, "frame_idx": 0, "global_frame_idx": 19858, "task_index": 18}, {"db_idx": 19859, "episode_idx": 91, "frame_idx": 1, "global_frame_idx": 19859, "task_index": 18}, {"db_idx": 19860, "episode_idx": 91, "frame_idx": 2, "global_frame_idx": 19860, "task_index": 18}, {"db_idx": 19861, "episode_idx": 91, "frame_idx": 3, "global_frame_idx": 19861, "task_index": 18}, {"db_idx": 19862, "episode_idx": 91, "frame_idx": 4, "global_frame_idx": 19862, "task_index": 18}, {"db_idx": 19863, "episode_idx": 91, "frame_idx": 5, "global_frame_idx": 19863, "task_index": 18}, {"db_idx": 19864, "episode_idx": 91, "frame_idx": 6, "global_frame_idx": 19864, "task_index": 18}, {"db_idx": 19865, "episode_idx": 91, "frame_idx": 7, "global_frame_idx": 19865, "task_index": 18}, {"db_idx": 19866, "episode_idx": 91, "frame_idx": 8, "global_frame_idx": 19866, "task_index": 18}, {"db_idx": 19867, "episode_idx": 91, "frame_idx": 9, "global_frame_idx": 19867, "task_index": 18}, {"db_idx": 19868, "episode_idx": 91, "frame_idx": 10, "global_frame_idx": 19868, "task_index": 18}, {"db_idx": 19869, "episode_idx": 91, "frame_idx": 11, "global_frame_idx": 19869, "task_index": 18}, {"db_idx": 19870, "episode_idx": 91, "frame_idx": 12, "global_frame_idx": 19870, "task_index": 18}, {"db_idx": 19871, "episode_idx": 91, "frame_idx": 13, "global_frame_idx": 19871, "task_index": 18}, {"db_idx": 19872, "episode_idx": 91, "frame_idx": 14, "global_frame_idx": 19872, "task_index": 18}, {"db_idx": 19873, "episode_idx": 91, "frame_idx": 15, "global_frame_idx": 19873, "task_index": 18}, {"db_idx": 19874, "episode_idx": 91, "frame_idx": 16, "global_frame_idx": 19874, "task_index": 18}, {"db_idx": 19875, 
"episode_idx": 91, "frame_idx": 17, "global_frame_idx": 19875, "task_index": 18}, {"db_idx": 19876, "episode_idx": 91, "frame_idx": 18, "global_frame_idx": 19876, "task_index": 18}, {"db_idx": 19877, "episode_idx": 91, "frame_idx": 19, "global_frame_idx": 19877, "task_index": 18}, {"db_idx": 19878, "episode_idx": 91, "frame_idx": 20, "global_frame_idx": 19878, "task_index": 18}, {"db_idx": 19879, "episode_idx": 91, "frame_idx": 21, "global_frame_idx": 19879, "task_index": 18}, {"db_idx": 19880, "episode_idx": 91, "frame_idx": 22, "global_frame_idx": 19880, "task_index": 18}, {"db_idx": 19881, "episode_idx": 91, "frame_idx": 23, "global_frame_idx": 19881, "task_index": 18}, {"db_idx": 19882, "episode_idx": 91, "frame_idx": 24, "global_frame_idx": 19882, "task_index": 18}, {"db_idx": 19883, "episode_idx": 91, "frame_idx": 25, "global_frame_idx": 19883, "task_index": 18}, {"db_idx": 19884, "episode_idx": 91, "frame_idx": 26, "global_frame_idx": 19884, "task_index": 18}, {"db_idx": 19885, "episode_idx": 91, "frame_idx": 27, "global_frame_idx": 19885, "task_index": 18}, {"db_idx": 19886, "episode_idx": 91, "frame_idx": 28, "global_frame_idx": 19886, "task_index": 18}, {"db_idx": 19887, "episode_idx": 91, "frame_idx": 29, "global_frame_idx": 19887, "task_index": 18}, {"db_idx": 19888, "episode_idx": 91, "frame_idx": 30, "global_frame_idx": 19888, "task_index": 18}, {"db_idx": 19889, "episode_idx": 91, "frame_idx": 31, "global_frame_idx": 19889, "task_index": 18}, {"db_idx": 19890, "episode_idx": 91, "frame_idx": 32, "global_frame_idx": 19890, "task_index": 18}, {"db_idx": 19891, "episode_idx": 91, "frame_idx": 33, "global_frame_idx": 19891, "task_index": 18}, {"db_idx": 19892, "episode_idx": 91, "frame_idx": 34, "global_frame_idx": 19892, "task_index": 18}, {"db_idx": 19893, "episode_idx": 91, "frame_idx": 35, "global_frame_idx": 19893, "task_index": 18}, {"db_idx": 19894, "episode_idx": 91, "frame_idx": 36, "global_frame_idx": 19894, "task_index": 18}, {"db_idx": 19895, 
"episode_idx": 91, "frame_idx": 37, "global_frame_idx": 19895, "task_index": 18}, {"db_idx": 19896, "episode_idx": 91, "frame_idx": 38, "global_frame_idx": 19896, "task_index": 18}, {"db_idx": 19897, "episode_idx": 91, "frame_idx": 39, "global_frame_idx": 19897, "task_index": 18}, {"db_idx": 19898, "episode_idx": 91, "frame_idx": 40, "global_frame_idx": 19898, "task_index": 18}, {"db_idx": 19899, "episode_idx": 91, "frame_idx": 41, "global_frame_idx": 19899, "task_index": 18}, {"db_idx": 19900, "episode_idx": 91, "frame_idx": 42, "global_frame_idx": 19900, "task_index": 18}, {"db_idx": 19901, "episode_idx": 91, "frame_idx": 43, "global_frame_idx": 19901, "task_index": 18}, {"db_idx": 19902, "episode_idx": 91, "frame_idx": 44, "global_frame_idx": 19902, "task_index": 18}, {"db_idx": 19903, "episode_idx": 91, "frame_idx": 45, "global_frame_idx": 19903, "task_index": 18}, {"db_idx": 19904, "episode_idx": 91, "frame_idx": 46, "global_frame_idx": 19904, "task_index": 18}, {"db_idx": 19905, "episode_idx": 91, "frame_idx": 47, "global_frame_idx": 19905, "task_index": 18}, {"db_idx": 19906, "episode_idx": 91, "frame_idx": 48, "global_frame_idx": 19906, "task_index": 18}, {"db_idx": 19907, "episode_idx": 91, "frame_idx": 49, "global_frame_idx": 19907, "task_index": 18}, {"db_idx": 19908, "episode_idx": 91, "frame_idx": 50, "global_frame_idx": 19908, "task_index": 18}, {"db_idx": 19909, "episode_idx": 91, "frame_idx": 51, "global_frame_idx": 19909, "task_index": 18}, {"db_idx": 19910, "episode_idx": 91, "frame_idx": 52, "global_frame_idx": 19910, "task_index": 18}, {"db_idx": 19911, "episode_idx": 91, "frame_idx": 53, "global_frame_idx": 19911, "task_index": 18}, {"db_idx": 19912, "episode_idx": 91, "frame_idx": 54, "global_frame_idx": 19912, "task_index": 18}, {"db_idx": 19913, "episode_idx": 91, "frame_idx": 55, "global_frame_idx": 19913, "task_index": 18}, {"db_idx": 19914, "episode_idx": 91, "frame_idx": 56, "global_frame_idx": 19914, "task_index": 18}, {"db_idx": 19915, 
"episode_idx": 91, "frame_idx": 57, "global_frame_idx": 19915, "task_index": 18}, {"db_idx": 19916, "episode_idx": 91, "frame_idx": 58, "global_frame_idx": 19916, "task_index": 18}, {"db_idx": 19917, "episode_idx": 91, "frame_idx": 59, "global_frame_idx": 19917, "task_index": 18}, {"db_idx": 19918, "episode_idx": 91, "frame_idx": 60, "global_frame_idx": 19918, "task_index": 18}, {"db_idx": 19919, "episode_idx": 91, "frame_idx": 61, "global_frame_idx": 19919, "task_index": 18}, {"db_idx": 19920, "episode_idx": 91, "frame_idx": 62, "global_frame_idx": 19920, "task_index": 18}, {"db_idx": 19921, "episode_idx": 91, "frame_idx": 63, "global_frame_idx": 19921, "task_index": 18}, {"db_idx": 19922, "episode_idx": 91, "frame_idx": 64, "global_frame_idx": 19922, "task_index": 18}, {"db_idx": 19923, "episode_idx": 91, "frame_idx": 65, "global_frame_idx": 19923, "task_index": 18}, {"db_idx": 19924, "episode_idx": 91, "frame_idx": 66, "global_frame_idx": 19924, "task_index": 18}, {"db_idx": 19925, "episode_idx": 91, "frame_idx": 67, "global_frame_idx": 19925, "task_index": 18}, {"db_idx": 19926, "episode_idx": 91, "frame_idx": 68, "global_frame_idx": 19926, "task_index": 18}, {"db_idx": 19927, "episode_idx": 91, "frame_idx": 69, "global_frame_idx": 19927, "task_index": 18}, {"db_idx": 19928, "episode_idx": 91, "frame_idx": 70, "global_frame_idx": 19928, "task_index": 18}, {"db_idx": 19929, "episode_idx": 91, "frame_idx": 71, "global_frame_idx": 19929, "task_index": 18}, {"db_idx": 19930, "episode_idx": 91, "frame_idx": 72, "global_frame_idx": 19930, "task_index": 18}, {"db_idx": 19931, "episode_idx": 91, "frame_idx": 73, "global_frame_idx": 19931, "task_index": 18}, {"db_idx": 19932, "episode_idx": 91, "frame_idx": 74, "global_frame_idx": 19932, "task_index": 18}, {"db_idx": 19933, "episode_idx": 91, "frame_idx": 75, "global_frame_idx": 19933, "task_index": 18}, {"db_idx": 19934, "episode_idx": 91, "frame_idx": 76, "global_frame_idx": 19934, "task_index": 18}, {"db_idx": 19935, 
"episode_idx": 91, "frame_idx": 77, "global_frame_idx": 19935, "task_index": 18}, {"db_idx": 19936, "episode_idx": 91, "frame_idx": 78, "global_frame_idx": 19936, "task_index": 18}, {"db_idx": 19937, "episode_idx": 91, "frame_idx": 79, "global_frame_idx": 19937, "task_index": 18}, {"db_idx": 19938, "episode_idx": 91, "frame_idx": 80, "global_frame_idx": 19938, "task_index": 18}, {"db_idx": 19939, "episode_idx": 91, "frame_idx": 81, "global_frame_idx": 19939, "task_index": 18}, {"db_idx": 19940, "episode_idx": 91, "frame_idx": 82, "global_frame_idx": 19940, "task_index": 18}, {"db_idx": 19941, "episode_idx": 91, "frame_idx": 83, "global_frame_idx": 19941, "task_index": 18}, {"db_idx": 19942, "episode_idx": 91, "frame_idx": 84, "global_frame_idx": 19942, "task_index": 18}, {"db_idx": 19943, "episode_idx": 91, "frame_idx": 85, "global_frame_idx": 19943, "task_index": 18}, {"db_idx": 19944, "episode_idx": 91, "frame_idx": 86, "global_frame_idx": 19944, "task_index": 18}, {"db_idx": 19945, "episode_idx": 91, "frame_idx": 87, "global_frame_idx": 19945, "task_index": 18}, {"db_idx": 19946, "episode_idx": 91, "frame_idx": 88, "global_frame_idx": 19946, "task_index": 18}, {"db_idx": 19947, "episode_idx": 91, "frame_idx": 89, "global_frame_idx": 19947, "task_index": 18}, {"db_idx": 19948, "episode_idx": 91, "frame_idx": 90, "global_frame_idx": 19948, "task_index": 18}, {"db_idx": 19949, "episode_idx": 91, "frame_idx": 91, "global_frame_idx": 19949, "task_index": 18}, {"db_idx": 19950, "episode_idx": 91, "frame_idx": 92, "global_frame_idx": 19950, "task_index": 18}, {"db_idx": 19951, "episode_idx": 91, "frame_idx": 93, "global_frame_idx": 19951, "task_index": 18}, {"db_idx": 19952, "episode_idx": 91, "frame_idx": 94, "global_frame_idx": 19952, "task_index": 18}, {"db_idx": 19953, "episode_idx": 91, "frame_idx": 95, "global_frame_idx": 19953, "task_index": 18}, {"db_idx": 19954, "episode_idx": 91, "frame_idx": 96, "global_frame_idx": 19954, "task_index": 18}, {"db_idx": 19955, 
"episode_idx": 91, "frame_idx": 97, "global_frame_idx": 19955, "task_index": 18}, {"db_idx": 19956, "episode_idx": 91, "frame_idx": 98, "global_frame_idx": 19956, "task_index": 18}, {"db_idx": 19957, "episode_idx": 91, "frame_idx": 99, "global_frame_idx": 19957, "task_index": 18}, {"db_idx": 19958, "episode_idx": 91, "frame_idx": 100, "global_frame_idx": 19958, "task_index": 18}, {"db_idx": 19959, "episode_idx": 91, "frame_idx": 101, "global_frame_idx": 19959, "task_index": 18}, {"db_idx": 19960, "episode_idx": 91, "frame_idx": 102, "global_frame_idx": 19960, "task_index": 18}, {"db_idx": 19961, "episode_idx": 91, "frame_idx": 103, "global_frame_idx": 19961, "task_index": 18}, {"db_idx": 19962, "episode_idx": 91, "frame_idx": 104, "global_frame_idx": 19962, "task_index": 18}, {"db_idx": 19963, "episode_idx": 91, "frame_idx": 105, "global_frame_idx": 19963, "task_index": 18}, {"db_idx": 19964, "episode_idx": 91, "frame_idx": 106, "global_frame_idx": 19964, "task_index": 18}, {"db_idx": 19965, "episode_idx": 91, "frame_idx": 107, "global_frame_idx": 19965, "task_index": 18}, {"db_idx": 19966, "episode_idx": 91, "frame_idx": 108, "global_frame_idx": 19966, "task_index": 18}, {"db_idx": 19967, "episode_idx": 91, "frame_idx": 109, "global_frame_idx": 19967, "task_index": 18}, {"db_idx": 19968, "episode_idx": 91, "frame_idx": 110, "global_frame_idx": 19968, "task_index": 18}, {"db_idx": 19969, "episode_idx": 91, "frame_idx": 111, "global_frame_idx": 19969, "task_index": 18}, {"db_idx": 19970, "episode_idx": 91, "frame_idx": 112, "global_frame_idx": 19970, "task_index": 18}, {"db_idx": 19971, "episode_idx": 91, "frame_idx": 113, "global_frame_idx": 19971, "task_index": 18}, {"db_idx": 19972, "episode_idx": 91, "frame_idx": 114, "global_frame_idx": 19972, "task_index": 18}, {"db_idx": 19973, "episode_idx": 91, "frame_idx": 115, "global_frame_idx": 19973, "task_index": 18}, {"db_idx": 19974, "episode_idx": 91, "frame_idx": 116, "global_frame_idx": 19974, "task_index": 18}, 
{"db_idx": 19975, "episode_idx": 91, "frame_idx": 117, "global_frame_idx": 19975, "task_index": 18}, {"db_idx": 19976, "episode_idx": 91, "frame_idx": 118, "global_frame_idx": 19976, "task_index": 18}, {"db_idx": 19977, "episode_idx": 91, "frame_idx": 119, "global_frame_idx": 19977, "task_index": 18}, {"db_idx": 19978, "episode_idx": 91, "frame_idx": 120, "global_frame_idx": 19978, "task_index": 18}, {"db_idx": 19979, "episode_idx": 91, "frame_idx": 121, "global_frame_idx": 19979, "task_index": 18}, {"db_idx": 19980, "episode_idx": 91, "frame_idx": 122, "global_frame_idx": 19980, "task_index": 18}, {"db_idx": 19981, "episode_idx": 91, "frame_idx": 123, "global_frame_idx": 19981, "task_index": 18}, {"db_idx": 19982, "episode_idx": 91, "frame_idx": 124, "global_frame_idx": 19982, "task_index": 18}, {"db_idx": 19983, "episode_idx": 92, "frame_idx": 0, "global_frame_idx": 19983, "task_index": 18}, {"db_idx": 19984, "episode_idx": 92, "frame_idx": 1, "global_frame_idx": 19984, "task_index": 18}, {"db_idx": 19985, "episode_idx": 92, "frame_idx": 2, "global_frame_idx": 19985, "task_index": 18}, {"db_idx": 19986, "episode_idx": 92, "frame_idx": 3, "global_frame_idx": 19986, "task_index": 18}, {"db_idx": 19987, "episode_idx": 92, "frame_idx": 4, "global_frame_idx": 19987, "task_index": 18}, {"db_idx": 19988, "episode_idx": 92, "frame_idx": 5, "global_frame_idx": 19988, "task_index": 18}, {"db_idx": 19989, "episode_idx": 92, "frame_idx": 6, "global_frame_idx": 19989, "task_index": 18}, {"db_idx": 19990, "episode_idx": 92, "frame_idx": 7, "global_frame_idx": 19990, "task_index": 18}, {"db_idx": 19991, "episode_idx": 92, "frame_idx": 8, "global_frame_idx": 19991, "task_index": 18}, {"db_idx": 19992, "episode_idx": 92, "frame_idx": 9, "global_frame_idx": 19992, "task_index": 18}, {"db_idx": 19993, "episode_idx": 92, "frame_idx": 10, "global_frame_idx": 19993, "task_index": 18}, {"db_idx": 19994, "episode_idx": 92, "frame_idx": 11, "global_frame_idx": 19994, "task_index": 18}, 
{"db_idx": 19995, "episode_idx": 92, "frame_idx": 12, "global_frame_idx": 19995, "task_index": 18}, {"db_idx": 19996, "episode_idx": 92, "frame_idx": 13, "global_frame_idx": 19996, "task_index": 18}, {"db_idx": 19997, "episode_idx": 92, "frame_idx": 14, "global_frame_idx": 19997, "task_index": 18}, {"db_idx": 19998, "episode_idx": 92, "frame_idx": 15, "global_frame_idx": 19998, "task_index": 18}, {"db_idx": 19999, "episode_idx": 92, "frame_idx": 16, "global_frame_idx": 19999, "task_index": 18}, {"db_idx": 20000, "episode_idx": 92, "frame_idx": 17, "global_frame_idx": 20000, "task_index": 18}, {"db_idx": 20001, "episode_idx": 92, "frame_idx": 18, "global_frame_idx": 20001, "task_index": 18}, {"db_idx": 20002, "episode_idx": 92, "frame_idx": 19, "global_frame_idx": 20002, "task_index": 18}, {"db_idx": 20003, "episode_idx": 92, "frame_idx": 20, "global_frame_idx": 20003, "task_index": 18}, {"db_idx": 20004, "episode_idx": 92, "frame_idx": 21, "global_frame_idx": 20004, "task_index": 18}, {"db_idx": 20005, "episode_idx": 92, "frame_idx": 22, "global_frame_idx": 20005, "task_index": 18}, {"db_idx": 20006, "episode_idx": 92, "frame_idx": 23, "global_frame_idx": 20006, "task_index": 18}, {"db_idx": 20007, "episode_idx": 92, "frame_idx": 24, "global_frame_idx": 20007, "task_index": 18}, {"db_idx": 20008, "episode_idx": 92, "frame_idx": 25, "global_frame_idx": 20008, "task_index": 18}, {"db_idx": 20009, "episode_idx": 92, "frame_idx": 26, "global_frame_idx": 20009, "task_index": 18}, {"db_idx": 20010, "episode_idx": 92, "frame_idx": 27, "global_frame_idx": 20010, "task_index": 18}, {"db_idx": 20011, "episode_idx": 92, "frame_idx": 28, "global_frame_idx": 20011, "task_index": 18}, {"db_idx": 20012, "episode_idx": 92, "frame_idx": 29, "global_frame_idx": 20012, "task_index": 18}, {"db_idx": 20013, "episode_idx": 92, "frame_idx": 30, "global_frame_idx": 20013, "task_index": 18}, {"db_idx": 20014, "episode_idx": 92, "frame_idx": 31, "global_frame_idx": 20014, "task_index": 18}, 
{"db_idx": 20015, "episode_idx": 92, "frame_idx": 32, "global_frame_idx": 20015, "task_index": 18}, {"db_idx": 20016, "episode_idx": 92, "frame_idx": 33, "global_frame_idx": 20016, "task_index": 18}, {"db_idx": 20017, "episode_idx": 92, "frame_idx": 34, "global_frame_idx": 20017, "task_index": 18}, {"db_idx": 20018, "episode_idx": 92, "frame_idx": 35, "global_frame_idx": 20018, "task_index": 18}, {"db_idx": 20019, "episode_idx": 92, "frame_idx": 36, "global_frame_idx": 20019, "task_index": 18}, {"db_idx": 20020, "episode_idx": 92, "frame_idx": 37, "global_frame_idx": 20020, "task_index": 18}, {"db_idx": 20021, "episode_idx": 92, "frame_idx": 38, "global_frame_idx": 20021, "task_index": 18}, {"db_idx": 20022, "episode_idx": 92, "frame_idx": 39, "global_frame_idx": 20022, "task_index": 18}, {"db_idx": 20023, "episode_idx": 92, "frame_idx": 40, "global_frame_idx": 20023, "task_index": 18}, {"db_idx": 20024, "episode_idx": 92, "frame_idx": 41, "global_frame_idx": 20024, "task_index": 18}, {"db_idx": 20025, "episode_idx": 92, "frame_idx": 42, "global_frame_idx": 20025, "task_index": 18}, {"db_idx": 20026, "episode_idx": 92, "frame_idx": 43, "global_frame_idx": 20026, "task_index": 18}, {"db_idx": 20027, "episode_idx": 92, "frame_idx": 44, "global_frame_idx": 20027, "task_index": 18}, {"db_idx": 20028, "episode_idx": 92, "frame_idx": 45, "global_frame_idx": 20028, "task_index": 18}, {"db_idx": 20029, "episode_idx": 92, "frame_idx": 46, "global_frame_idx": 20029, "task_index": 18}, {"db_idx": 20030, "episode_idx": 92, "frame_idx": 47, "global_frame_idx": 20030, "task_index": 18}, {"db_idx": 20031, "episode_idx": 92, "frame_idx": 48, "global_frame_idx": 20031, "task_index": 18}, {"db_idx": 20032, "episode_idx": 92, "frame_idx": 49, "global_frame_idx": 20032, "task_index": 18}, {"db_idx": 20033, "episode_idx": 92, "frame_idx": 50, "global_frame_idx": 20033, "task_index": 18}, {"db_idx": 20034, "episode_idx": 92, "frame_idx": 51, "global_frame_idx": 20034, "task_index": 18}, 
{"db_idx": 20035, "episode_idx": 92, "frame_idx": 52, "global_frame_idx": 20035, "task_index": 18}, {"db_idx": 20036, "episode_idx": 92, "frame_idx": 53, "global_frame_idx": 20036, "task_index": 18}, {"db_idx": 20037, "episode_idx": 92, "frame_idx": 54, "global_frame_idx": 20037, "task_index": 18}, {"db_idx": 20038, "episode_idx": 92, "frame_idx": 55, "global_frame_idx": 20038, "task_index": 18}, {"db_idx": 20039, "episode_idx": 92, "frame_idx": 56, "global_frame_idx": 20039, "task_index": 18}, {"db_idx": 20040, "episode_idx": 92, "frame_idx": 57, "global_frame_idx": 20040, "task_index": 18}, {"db_idx": 20041, "episode_idx": 92, "frame_idx": 58, "global_frame_idx": 20041, "task_index": 18}, {"db_idx": 20042, "episode_idx": 92, "frame_idx": 59, "global_frame_idx": 20042, "task_index": 18}, {"db_idx": 20043, "episode_idx": 92, "frame_idx": 60, "global_frame_idx": 20043, "task_index": 18}, {"db_idx": 20044, "episode_idx": 92, "frame_idx": 61, "global_frame_idx": 20044, "task_index": 18}, {"db_idx": 20045, "episode_idx": 92, "frame_idx": 62, "global_frame_idx": 20045, "task_index": 18}, {"db_idx": 20046, "episode_idx": 92, "frame_idx": 63, "global_frame_idx": 20046, "task_index": 18}, {"db_idx": 20047, "episode_idx": 92, "frame_idx": 64, "global_frame_idx": 20047, "task_index": 18}, {"db_idx": 20048, "episode_idx": 92, "frame_idx": 65, "global_frame_idx": 20048, "task_index": 18}, {"db_idx": 20049, "episode_idx": 92, "frame_idx": 66, "global_frame_idx": 20049, "task_index": 18}, {"db_idx": 20050, "episode_idx": 92, "frame_idx": 67, "global_frame_idx": 20050, "task_index": 18}, {"db_idx": 20051, "episode_idx": 92, "frame_idx": 68, "global_frame_idx": 20051, "task_index": 18}, {"db_idx": 20052, "episode_idx": 92, "frame_idx": 69, "global_frame_idx": 20052, "task_index": 18}, {"db_idx": 20053, "episode_idx": 92, "frame_idx": 70, "global_frame_idx": 20053, "task_index": 18}, {"db_idx": 20054, "episode_idx": 92, "frame_idx": 71, "global_frame_idx": 20054, "task_index": 18}, 
{"db_idx": 20055, "episode_idx": 92, "frame_idx": 72, "global_frame_idx": 20055, "task_index": 18}, {"db_idx": 20056, "episode_idx": 92, "frame_idx": 73, "global_frame_idx": 20056, "task_index": 18}, {"db_idx": 20057, "episode_idx": 92, "frame_idx": 74, "global_frame_idx": 20057, "task_index": 18}, {"db_idx": 20058, "episode_idx": 92, "frame_idx": 75, "global_frame_idx": 20058, "task_index": 18}, {"db_idx": 20059, "episode_idx": 92, "frame_idx": 76, "global_frame_idx": 20059, "task_index": 18}, {"db_idx": 20060, "episode_idx": 92, "frame_idx": 77, "global_frame_idx": 20060, "task_index": 18}, {"db_idx": 20061, "episode_idx": 92, "frame_idx": 78, "global_frame_idx": 20061, "task_index": 18}, {"db_idx": 20062, "episode_idx": 92, "frame_idx": 79, "global_frame_idx": 20062, "task_index": 18}, {"db_idx": 20063, "episode_idx": 92, "frame_idx": 80, "global_frame_idx": 20063, "task_index": 18}, {"db_idx": 20064, "episode_idx": 92, "frame_idx": 81, "global_frame_idx": 20064, "task_index": 18}, {"db_idx": 20065, "episode_idx": 92, "frame_idx": 82, "global_frame_idx": 20065, "task_index": 18}, {"db_idx": 20066, "episode_idx": 92, "frame_idx": 83, "global_frame_idx": 20066, "task_index": 18}, {"db_idx": 20067, "episode_idx": 92, "frame_idx": 84, "global_frame_idx": 20067, "task_index": 18}, {"db_idx": 20068, "episode_idx": 92, "frame_idx": 85, "global_frame_idx": 20068, "task_index": 18}, {"db_idx": 20069, "episode_idx": 92, "frame_idx": 86, "global_frame_idx": 20069, "task_index": 18}, {"db_idx": 20070, "episode_idx": 92, "frame_idx": 87, "global_frame_idx": 20070, "task_index": 18}, {"db_idx": 20071, "episode_idx": 92, "frame_idx": 88, "global_frame_idx": 20071, "task_index": 18}, {"db_idx": 20072, "episode_idx": 92, "frame_idx": 89, "global_frame_idx": 20072, "task_index": 18}, {"db_idx": 20073, "episode_idx": 92, "frame_idx": 90, "global_frame_idx": 20073, "task_index": 18}, {"db_idx": 20074, "episode_idx": 92, "frame_idx": 91, "global_frame_idx": 20074, "task_index": 18}, 
{"db_idx": 20075, "episode_idx": 92, "frame_idx": 92, "global_frame_idx": 20075, "task_index": 18}, {"db_idx": 20076, "episode_idx": 92, "frame_idx": 93, "global_frame_idx": 20076, "task_index": 18}, {"db_idx": 20077, "episode_idx": 92, "frame_idx": 94, "global_frame_idx": 20077, "task_index": 18}, {"db_idx": 20078, "episode_idx": 92, "frame_idx": 95, "global_frame_idx": 20078, "task_index": 18}, {"db_idx": 20079, "episode_idx": 92, "frame_idx": 96, "global_frame_idx": 20079, "task_index": 18}, {"db_idx": 20080, "episode_idx": 92, "frame_idx": 97, "global_frame_idx": 20080, "task_index": 18}, {"db_idx": 20081, "episode_idx": 92, "frame_idx": 98, "global_frame_idx": 20081, "task_index": 18}, {"db_idx": 20082, "episode_idx": 92, "frame_idx": 99, "global_frame_idx": 20082, "task_index": 18}, {"db_idx": 20083, "episode_idx": 92, "frame_idx": 100, "global_frame_idx": 20083, "task_index": 18}, {"db_idx": 20084, "episode_idx": 92, "frame_idx": 101, "global_frame_idx": 20084, "task_index": 18}, {"db_idx": 20085, "episode_idx": 92, "frame_idx": 102, "global_frame_idx": 20085, "task_index": 18}, {"db_idx": 20086, "episode_idx": 92, "frame_idx": 103, "global_frame_idx": 20086, "task_index": 18}, {"db_idx": 20087, "episode_idx": 92, "frame_idx": 104, "global_frame_idx": 20087, "task_index": 18}, {"db_idx": 20088, "episode_idx": 92, "frame_idx": 105, "global_frame_idx": 20088, "task_index": 18}, {"db_idx": 20089, "episode_idx": 92, "frame_idx": 106, "global_frame_idx": 20089, "task_index": 18}, {"db_idx": 20090, "episode_idx": 92, "frame_idx": 107, "global_frame_idx": 20090, "task_index": 18}, {"db_idx": 20091, "episode_idx": 92, "frame_idx": 108, "global_frame_idx": 20091, "task_index": 18}, {"db_idx": 20092, "episode_idx": 92, "frame_idx": 109, "global_frame_idx": 20092, "task_index": 18}, {"db_idx": 20093, "episode_idx": 92, "frame_idx": 110, "global_frame_idx": 20093, "task_index": 18}, {"db_idx": 20094, "episode_idx": 92, "frame_idx": 111, "global_frame_idx": 20094, 
"task_index": 18}, {"db_idx": 20095, "episode_idx": 92, "frame_idx": 112, "global_frame_idx": 20095, "task_index": 18}, {"db_idx": 20096, "episode_idx": 92, "frame_idx": 113, "global_frame_idx": 20096, "task_index": 18}, {"db_idx": 20097, "episode_idx": 92, "frame_idx": 114, "global_frame_idx": 20097, "task_index": 18}, {"db_idx": 20098, "episode_idx": 92, "frame_idx": 115, "global_frame_idx": 20098, "task_index": 18}, {"db_idx": 20099, "episode_idx": 92, "frame_idx": 116, "global_frame_idx": 20099, "task_index": 18}, {"db_idx": 20100, "episode_idx": 92, "frame_idx": 117, "global_frame_idx": 20100, "task_index": 18}, {"db_idx": 20101, "episode_idx": 92, "frame_idx": 118, "global_frame_idx": 20101, "task_index": 18}, {"db_idx": 20102, "episode_idx": 92, "frame_idx": 119, "global_frame_idx": 20102, "task_index": 18}, {"db_idx": 20103, "episode_idx": 92, "frame_idx": 120, "global_frame_idx": 20103, "task_index": 18}, {"db_idx": 20104, "episode_idx": 92, "frame_idx": 121, "global_frame_idx": 20104, "task_index": 18}, {"db_idx": 20105, "episode_idx": 92, "frame_idx": 122, "global_frame_idx": 20105, "task_index": 18}, {"db_idx": 20106, "episode_idx": 92, "frame_idx": 123, "global_frame_idx": 20106, "task_index": 18}, {"db_idx": 20107, "episode_idx": 92, "frame_idx": 124, "global_frame_idx": 20107, "task_index": 18}, {"db_idx": 20108, "episode_idx": 93, "frame_idx": 0, "global_frame_idx": 20108, "task_index": 18}, {"db_idx": 20109, "episode_idx": 93, "frame_idx": 1, "global_frame_idx": 20109, "task_index": 18}, {"db_idx": 20110, "episode_idx": 93, "frame_idx": 2, "global_frame_idx": 20110, "task_index": 18}, {"db_idx": 20111, "episode_idx": 93, "frame_idx": 3, "global_frame_idx": 20111, "task_index": 18}, {"db_idx": 20112, "episode_idx": 93, "frame_idx": 4, "global_frame_idx": 20112, "task_index": 18}, {"db_idx": 20113, "episode_idx": 93, "frame_idx": 5, "global_frame_idx": 20113, "task_index": 18}, {"db_idx": 20114, "episode_idx": 93, "frame_idx": 6, "global_frame_idx": 
20114, "task_index": 18}, {"db_idx": 20115, "episode_idx": 93, "frame_idx": 7, "global_frame_idx": 20115, "task_index": 18}, {"db_idx": 20116, "episode_idx": 93, "frame_idx": 8, "global_frame_idx": 20116, "task_index": 18}, {"db_idx": 20117, "episode_idx": 93, "frame_idx": 9, "global_frame_idx": 20117, "task_index": 18}, {"db_idx": 20118, "episode_idx": 93, "frame_idx": 10, "global_frame_idx": 20118, "task_index": 18}, {"db_idx": 20119, "episode_idx": 93, "frame_idx": 11, "global_frame_idx": 20119, "task_index": 18}, {"db_idx": 20120, "episode_idx": 93, "frame_idx": 12, "global_frame_idx": 20120, "task_index": 18}, {"db_idx": 20121, "episode_idx": 93, "frame_idx": 13, "global_frame_idx": 20121, "task_index": 18}, {"db_idx": 20122, "episode_idx": 93, "frame_idx": 14, "global_frame_idx": 20122, "task_index": 18}, {"db_idx": 20123, "episode_idx": 93, "frame_idx": 15, "global_frame_idx": 20123, "task_index": 18}, {"db_idx": 20124, "episode_idx": 93, "frame_idx": 16, "global_frame_idx": 20124, "task_index": 18}, {"db_idx": 20125, "episode_idx": 93, "frame_idx": 17, "global_frame_idx": 20125, "task_index": 18}, {"db_idx": 20126, "episode_idx": 93, "frame_idx": 18, "global_frame_idx": 20126, "task_index": 18}, {"db_idx": 20127, "episode_idx": 93, "frame_idx": 19, "global_frame_idx": 20127, "task_index": 18}, {"db_idx": 20128, "episode_idx": 93, "frame_idx": 20, "global_frame_idx": 20128, "task_index": 18}, {"db_idx": 20129, "episode_idx": 93, "frame_idx": 21, "global_frame_idx": 20129, "task_index": 18}, {"db_idx": 20130, "episode_idx": 93, "frame_idx": 22, "global_frame_idx": 20130, "task_index": 18}, {"db_idx": 20131, "episode_idx": 93, "frame_idx": 23, "global_frame_idx": 20131, "task_index": 18}, {"db_idx": 20132, "episode_idx": 93, "frame_idx": 24, "global_frame_idx": 20132, "task_index": 18}, {"db_idx": 20133, "episode_idx": 93, "frame_idx": 25, "global_frame_idx": 20133, "task_index": 18}, {"db_idx": 20134, "episode_idx": 93, "frame_idx": 26, "global_frame_idx": 
20134, "task_index": 18}, {"db_idx": 20135, "episode_idx": 93, "frame_idx": 27, "global_frame_idx": 20135, "task_index": 18}, {"db_idx": 20136, "episode_idx": 93, "frame_idx": 28, "global_frame_idx": 20136, "task_index": 18}, {"db_idx": 20137, "episode_idx": 93, "frame_idx": 29, "global_frame_idx": 20137, "task_index": 18}, {"db_idx": 20138, "episode_idx": 93, "frame_idx": 30, "global_frame_idx": 20138, "task_index": 18}, {"db_idx": 20139, "episode_idx": 93, "frame_idx": 31, "global_frame_idx": 20139, "task_index": 18}, {"db_idx": 20140, "episode_idx": 93, "frame_idx": 32, "global_frame_idx": 20140, "task_index": 18}, {"db_idx": 20141, "episode_idx": 93, "frame_idx": 33, "global_frame_idx": 20141, "task_index": 18}, {"db_idx": 20142, "episode_idx": 93, "frame_idx": 34, "global_frame_idx": 20142, "task_index": 18}, {"db_idx": 20143, "episode_idx": 93, "frame_idx": 35, "global_frame_idx": 20143, "task_index": 18}, {"db_idx": 20144, "episode_idx": 93, "frame_idx": 36, "global_frame_idx": 20144, "task_index": 18}, {"db_idx": 20145, "episode_idx": 93, "frame_idx": 37, "global_frame_idx": 20145, "task_index": 18}, {"db_idx": 20146, "episode_idx": 93, "frame_idx": 38, "global_frame_idx": 20146, "task_index": 18}, {"db_idx": 20147, "episode_idx": 93, "frame_idx": 39, "global_frame_idx": 20147, "task_index": 18}, {"db_idx": 20148, "episode_idx": 93, "frame_idx": 40, "global_frame_idx": 20148, "task_index": 18}, {"db_idx": 20149, "episode_idx": 93, "frame_idx": 41, "global_frame_idx": 20149, "task_index": 18}, {"db_idx": 20150, "episode_idx": 93, "frame_idx": 42, "global_frame_idx": 20150, "task_index": 18}, {"db_idx": 20151, "episode_idx": 93, "frame_idx": 43, "global_frame_idx": 20151, "task_index": 18}, {"db_idx": 20152, "episode_idx": 93, "frame_idx": 44, "global_frame_idx": 20152, "task_index": 18}, {"db_idx": 20153, "episode_idx": 93, "frame_idx": 45, "global_frame_idx": 20153, "task_index": 18}, {"db_idx": 20154, "episode_idx": 93, "frame_idx": 46, "global_frame_idx": 
20154, "task_index": 18}, {"db_idx": 20155, "episode_idx": 93, "frame_idx": 47, "global_frame_idx": 20155, "task_index": 18}, {"db_idx": 20156, "episode_idx": 93, "frame_idx": 48, "global_frame_idx": 20156, "task_index": 18}, {"db_idx": 20157, "episode_idx": 93, "frame_idx": 49, "global_frame_idx": 20157, "task_index": 18}, {"db_idx": 20158, "episode_idx": 93, "frame_idx": 50, "global_frame_idx": 20158, "task_index": 18}, {"db_idx": 20159, "episode_idx": 93, "frame_idx": 51, "global_frame_idx": 20159, "task_index": 18}, {"db_idx": 20160, "episode_idx": 93, "frame_idx": 52, "global_frame_idx": 20160, "task_index": 18}, {"db_idx": 20161, "episode_idx": 93, "frame_idx": 53, "global_frame_idx": 20161, "task_index": 18}, {"db_idx": 20162, "episode_idx": 93, "frame_idx": 54, "global_frame_idx": 20162, "task_index": 18}, {"db_idx": 20163, "episode_idx": 93, "frame_idx": 55, "global_frame_idx": 20163, "task_index": 18}, {"db_idx": 20164, "episode_idx": 93, "frame_idx": 56, "global_frame_idx": 20164, "task_index": 18}, {"db_idx": 20165, "episode_idx": 93, "frame_idx": 57, "global_frame_idx": 20165, "task_index": 18}, {"db_idx": 20166, "episode_idx": 93, "frame_idx": 58, "global_frame_idx": 20166, "task_index": 18}, {"db_idx": 20167, "episode_idx": 93, "frame_idx": 59, "global_frame_idx": 20167, "task_index": 18}, {"db_idx": 20168, "episode_idx": 93, "frame_idx": 60, "global_frame_idx": 20168, "task_index": 18}, {"db_idx": 20169, "episode_idx": 93, "frame_idx": 61, "global_frame_idx": 20169, "task_index": 18}, {"db_idx": 20170, "episode_idx": 93, "frame_idx": 62, "global_frame_idx": 20170, "task_index": 18}, {"db_idx": 20171, "episode_idx": 93, "frame_idx": 63, "global_frame_idx": 20171, "task_index": 18}, {"db_idx": 20172, "episode_idx": 93, "frame_idx": 64, "global_frame_idx": 20172, "task_index": 18}, {"db_idx": 20173, "episode_idx": 93, "frame_idx": 65, "global_frame_idx": 20173, "task_index": 18}, {"db_idx": 20174, "episode_idx": 93, "frame_idx": 66, "global_frame_idx": 
20174, "task_index": 18}, {"db_idx": 20175, "episode_idx": 93, "frame_idx": 67, "global_frame_idx": 20175, "task_index": 18}, {"db_idx": 20176, "episode_idx": 93, "frame_idx": 68, "global_frame_idx": 20176, "task_index": 18}, {"db_idx": 20177, "episode_idx": 93, "frame_idx": 69, "global_frame_idx": 20177, "task_index": 18}, {"db_idx": 20178, "episode_idx": 93, "frame_idx": 70, "global_frame_idx": 20178, "task_index": 18}, {"db_idx": 20179, "episode_idx": 93, "frame_idx": 71, "global_frame_idx": 20179, "task_index": 18}, {"db_idx": 20180, "episode_idx": 93, "frame_idx": 72, "global_frame_idx": 20180, "task_index": 18}, {"db_idx": 20181, "episode_idx": 93, "frame_idx": 73, "global_frame_idx": 20181, "task_index": 18}, {"db_idx": 20182, "episode_idx": 93, "frame_idx": 74, "global_frame_idx": 20182, "task_index": 18}, {"db_idx": 20183, "episode_idx": 93, "frame_idx": 75, "global_frame_idx": 20183, "task_index": 18}, {"db_idx": 20184, "episode_idx": 93, "frame_idx": 76, "global_frame_idx": 20184, "task_index": 18}, {"db_idx": 20185, "episode_idx": 93, "frame_idx": 77, "global_frame_idx": 20185, "task_index": 18}, {"db_idx": 20186, "episode_idx": 93, "frame_idx": 78, "global_frame_idx": 20186, "task_index": 18}, {"db_idx": 20187, "episode_idx": 93, "frame_idx": 79, "global_frame_idx": 20187, "task_index": 18}, {"db_idx": 20188, "episode_idx": 93, "frame_idx": 80, "global_frame_idx": 20188, "task_index": 18}, {"db_idx": 20189, "episode_idx": 93, "frame_idx": 81, "global_frame_idx": 20189, "task_index": 18}, {"db_idx": 20190, "episode_idx": 93, "frame_idx": 82, "global_frame_idx": 20190, "task_index": 18}, {"db_idx": 20191, "episode_idx": 93, "frame_idx": 83, "global_frame_idx": 20191, "task_index": 18}, {"db_idx": 20192, "episode_idx": 93, "frame_idx": 84, "global_frame_idx": 20192, "task_index": 18}, {"db_idx": 20193, "episode_idx": 93, "frame_idx": 85, "global_frame_idx": 20193, "task_index": 18}, {"db_idx": 20194, "episode_idx": 93, "frame_idx": 86, "global_frame_idx": 
20194, "task_index": 18}, {"db_idx": 20195, "episode_idx": 93, "frame_idx": 87, "global_frame_idx": 20195, "task_index": 18}, {"db_idx": 20196, "episode_idx": 93, "frame_idx": 88, "global_frame_idx": 20196, "task_index": 18}, {"db_idx": 20197, "episode_idx": 93, "frame_idx": 89, "global_frame_idx": 20197, "task_index": 18}, {"db_idx": 20198, "episode_idx": 93, "frame_idx": 90, "global_frame_idx": 20198, "task_index": 18}, {"db_idx": 20199, "episode_idx": 93, "frame_idx": 91, "global_frame_idx": 20199, "task_index": 18}, {"db_idx": 20200, "episode_idx": 93, "frame_idx": 92, "global_frame_idx": 20200, "task_index": 18}, {"db_idx": 20201, "episode_idx": 93, "frame_idx": 93, "global_frame_idx": 20201, "task_index": 18}, {"db_idx": 20202, "episode_idx": 93, "frame_idx": 94, "global_frame_idx": 20202, "task_index": 18}, {"db_idx": 20203, "episode_idx": 93, "frame_idx": 95, "global_frame_idx": 20203, "task_index": 18}, {"db_idx": 20204, "episode_idx": 93, "frame_idx": 96, "global_frame_idx": 20204, "task_index": 18}, {"db_idx": 20205, "episode_idx": 93, "frame_idx": 97, "global_frame_idx": 20205, "task_index": 18}, {"db_idx": 20206, "episode_idx": 93, "frame_idx": 98, "global_frame_idx": 20206, "task_index": 18}, {"db_idx": 20207, "episode_idx": 93, "frame_idx": 99, "global_frame_idx": 20207, "task_index": 18}, {"db_idx": 20208, "episode_idx": 93, "frame_idx": 100, "global_frame_idx": 20208, "task_index": 18}, {"db_idx": 20209, "episode_idx": 93, "frame_idx": 101, "global_frame_idx": 20209, "task_index": 18}, {"db_idx": 20210, "episode_idx": 93, "frame_idx": 102, "global_frame_idx": 20210, "task_index": 18}, {"db_idx": 20211, "episode_idx": 93, "frame_idx": 103, "global_frame_idx": 20211, "task_index": 18}, {"db_idx": 20212, "episode_idx": 93, "frame_idx": 104, "global_frame_idx": 20212, "task_index": 18}, {"db_idx": 20213, "episode_idx": 93, "frame_idx": 105, "global_frame_idx": 20213, "task_index": 18}, {"db_idx": 20214, "episode_idx": 93, "frame_idx": 106, 
"global_frame_idx": 20214, "task_index": 18}, {"db_idx": 20215, "episode_idx": 93, "frame_idx": 107, "global_frame_idx": 20215, "task_index": 18}, {"db_idx": 20216, "episode_idx": 93, "frame_idx": 108, "global_frame_idx": 20216, "task_index": 18}, {"db_idx": 20217, "episode_idx": 93, "frame_idx": 109, "global_frame_idx": 20217, "task_index": 18}, {"db_idx": 20218, "episode_idx": 93, "frame_idx": 110, "global_frame_idx": 20218, "task_index": 18}, {"db_idx": 20219, "episode_idx": 93, "frame_idx": 111, "global_frame_idx": 20219, "task_index": 18}, {"db_idx": 20220, "episode_idx": 93, "frame_idx": 112, "global_frame_idx": 20220, "task_index": 18}, {"db_idx": 20221, "episode_idx": 93, "frame_idx": 113, "global_frame_idx": 20221, "task_index": 18}, {"db_idx": 20222, "episode_idx": 93, "frame_idx": 114, "global_frame_idx": 20222, "task_index": 18}, {"db_idx": 20223, "episode_idx": 93, "frame_idx": 115, "global_frame_idx": 20223, "task_index": 18}, {"db_idx": 20224, "episode_idx": 93, "frame_idx": 116, "global_frame_idx": 20224, "task_index": 18}, {"db_idx": 20225, "episode_idx": 93, "frame_idx": 117, "global_frame_idx": 20225, "task_index": 18}, {"db_idx": 20226, "episode_idx": 93, "frame_idx": 118, "global_frame_idx": 20226, "task_index": 18}, {"db_idx": 20227, "episode_idx": 93, "frame_idx": 119, "global_frame_idx": 20227, "task_index": 18}, {"db_idx": 20228, "episode_idx": 93, "frame_idx": 120, "global_frame_idx": 20228, "task_index": 18}, {"db_idx": 20229, "episode_idx": 93, "frame_idx": 121, "global_frame_idx": 20229, "task_index": 18}, {"db_idx": 20230, "episode_idx": 93, "frame_idx": 122, "global_frame_idx": 20230, "task_index": 18}, {"db_idx": 20231, "episode_idx": 93, "frame_idx": 123, "global_frame_idx": 20231, "task_index": 18}, {"db_idx": 20232, "episode_idx": 93, "frame_idx": 124, "global_frame_idx": 20232, "task_index": 18}, {"db_idx": 20233, "episode_idx": 93, "frame_idx": 125, "global_frame_idx": 20233, "task_index": 18}, {"db_idx": 20234, "episode_idx": 
93, "frame_idx": 126, "global_frame_idx": 20234, "task_index": 18}, {"db_idx": 20235, "episode_idx": 93, "frame_idx": 127, "global_frame_idx": 20235, "task_index": 18}, {"db_idx": 20236, "episode_idx": 93, "frame_idx": 128, "global_frame_idx": 20236, "task_index": 18}, {"db_idx": 20237, "episode_idx": 93, "frame_idx": 129, "global_frame_idx": 20237, "task_index": 18}, {"db_idx": 20238, "episode_idx": 93, "frame_idx": 130, "global_frame_idx": 20238, "task_index": 18}, {"db_idx": 20239, "episode_idx": 93, "frame_idx": 131, "global_frame_idx": 20239, "task_index": 18}, {"db_idx": 20240, "episode_idx": 93, "frame_idx": 132, "global_frame_idx": 20240, "task_index": 18}, {"db_idx": 20241, "episode_idx": 93, "frame_idx": 133, "global_frame_idx": 20241, "task_index": 18}, {"db_idx": 20242, "episode_idx": 93, "frame_idx": 134, "global_frame_idx": 20242, "task_index": 18}, {"db_idx": 20243, "episode_idx": 93, "frame_idx": 135, "global_frame_idx": 20243, "task_index": 18}, {"db_idx": 20244, "episode_idx": 93, "frame_idx": 136, "global_frame_idx": 20244, "task_index": 18}, {"db_idx": 20245, "episode_idx": 93, "frame_idx": 137, "global_frame_idx": 20245, "task_index": 18}, {"db_idx": 20246, "episode_idx": 93, "frame_idx": 138, "global_frame_idx": 20246, "task_index": 18}, {"db_idx": 20247, "episode_idx": 93, "frame_idx": 139, "global_frame_idx": 20247, "task_index": 18}, {"db_idx": 20248, "episode_idx": 93, "frame_idx": 140, "global_frame_idx": 20248, "task_index": 18}, {"db_idx": 20249, "episode_idx": 93, "frame_idx": 141, "global_frame_idx": 20249, "task_index": 18}, {"db_idx": 20250, "episode_idx": 93, "frame_idx": 142, "global_frame_idx": 20250, "task_index": 18}, {"db_idx": 20251, "episode_idx": 93, "frame_idx": 143, "global_frame_idx": 20251, "task_index": 18}, {"db_idx": 20252, "episode_idx": 93, "frame_idx": 144, "global_frame_idx": 20252, "task_index": 18}, {"db_idx": 20253, "episode_idx": 93, "frame_idx": 145, "global_frame_idx": 20253, "task_index": 18}, {"db_idx": 
20254, "episode_idx": 93, "frame_idx": 146, "global_frame_idx": 20254, "task_index": 18}, {"db_idx": 20255, "episode_idx": 93, "frame_idx": 147, "global_frame_idx": 20255, "task_index": 18}, {"db_idx": 20256, "episode_idx": 94, "frame_idx": 0, "global_frame_idx": 20256, "task_index": 18}, {"db_idx": 20257, "episode_idx": 94, "frame_idx": 1, "global_frame_idx": 20257, "task_index": 18}, {"db_idx": 20258, "episode_idx": 94, "frame_idx": 2, "global_frame_idx": 20258, "task_index": 18}, {"db_idx": 20259, "episode_idx": 94, "frame_idx": 3, "global_frame_idx": 20259, "task_index": 18}, {"db_idx": 20260, "episode_idx": 94, "frame_idx": 4, "global_frame_idx": 20260, "task_index": 18}, {"db_idx": 20261, "episode_idx": 94, "frame_idx": 5, "global_frame_idx": 20261, "task_index": 18}, {"db_idx": 20262, "episode_idx": 94, "frame_idx": 6, "global_frame_idx": 20262, "task_index": 18}, {"db_idx": 20263, "episode_idx": 94, "frame_idx": 7, "global_frame_idx": 20263, "task_index": 18}, {"db_idx": 20264, "episode_idx": 94, "frame_idx": 8, "global_frame_idx": 20264, "task_index": 18}, {"db_idx": 20265, "episode_idx": 94, "frame_idx": 9, "global_frame_idx": 20265, "task_index": 18}, {"db_idx": 20266, "episode_idx": 94, "frame_idx": 10, "global_frame_idx": 20266, "task_index": 18}, {"db_idx": 20267, "episode_idx": 94, "frame_idx": 11, "global_frame_idx": 20267, "task_index": 18}, {"db_idx": 20268, "episode_idx": 94, "frame_idx": 12, "global_frame_idx": 20268, "task_index": 18}, {"db_idx": 20269, "episode_idx": 94, "frame_idx": 13, "global_frame_idx": 20269, "task_index": 18}, {"db_idx": 20270, "episode_idx": 94, "frame_idx": 14, "global_frame_idx": 20270, "task_index": 18}, {"db_idx": 20271, "episode_idx": 94, "frame_idx": 15, "global_frame_idx": 20271, "task_index": 18}, {"db_idx": 20272, "episode_idx": 94, "frame_idx": 16, "global_frame_idx": 20272, "task_index": 18}, {"db_idx": 20273, "episode_idx": 94, "frame_idx": 17, "global_frame_idx": 20273, "task_index": 18}, {"db_idx": 20274, 
"episode_idx": 94, "frame_idx": 18, "global_frame_idx": 20274, "task_index": 18}, {"db_idx": 20275, "episode_idx": 94, "frame_idx": 19, "global_frame_idx": 20275, "task_index": 18}, {"db_idx": 20276, "episode_idx": 94, "frame_idx": 20, "global_frame_idx": 20276, "task_index": 18}, {"db_idx": 20277, "episode_idx": 94, "frame_idx": 21, "global_frame_idx": 20277, "task_index": 18}, {"db_idx": 20278, "episode_idx": 94, "frame_idx": 22, "global_frame_idx": 20278, "task_index": 18}, {"db_idx": 20279, "episode_idx": 94, "frame_idx": 23, "global_frame_idx": 20279, "task_index": 18}, {"db_idx": 20280, "episode_idx": 94, "frame_idx": 24, "global_frame_idx": 20280, "task_index": 18}, {"db_idx": 20281, "episode_idx": 94, "frame_idx": 25, "global_frame_idx": 20281, "task_index": 18}, {"db_idx": 20282, "episode_idx": 94, "frame_idx": 26, "global_frame_idx": 20282, "task_index": 18}, {"db_idx": 20283, "episode_idx": 94, "frame_idx": 27, "global_frame_idx": 20283, "task_index": 18}, {"db_idx": 20284, "episode_idx": 94, "frame_idx": 28, "global_frame_idx": 20284, "task_index": 18}, {"db_idx": 20285, "episode_idx": 94, "frame_idx": 29, "global_frame_idx": 20285, "task_index": 18}, {"db_idx": 20286, "episode_idx": 94, "frame_idx": 30, "global_frame_idx": 20286, "task_index": 18}, {"db_idx": 20287, "episode_idx": 94, "frame_idx": 31, "global_frame_idx": 20287, "task_index": 18}, {"db_idx": 20288, "episode_idx": 94, "frame_idx": 32, "global_frame_idx": 20288, "task_index": 18}, {"db_idx": 20289, "episode_idx": 94, "frame_idx": 33, "global_frame_idx": 20289, "task_index": 18}, {"db_idx": 20290, "episode_idx": 94, "frame_idx": 34, "global_frame_idx": 20290, "task_index": 18}, {"db_idx": 20291, "episode_idx": 94, "frame_idx": 35, "global_frame_idx": 20291, "task_index": 18}, {"db_idx": 20292, "episode_idx": 94, "frame_idx": 36, "global_frame_idx": 20292, "task_index": 18}, {"db_idx": 20293, "episode_idx": 94, "frame_idx": 37, "global_frame_idx": 20293, "task_index": 18}, {"db_idx": 20294, 
"episode_idx": 94, "frame_idx": 38, "global_frame_idx": 20294, "task_index": 18}, {"db_idx": 20295, "episode_idx": 94, "frame_idx": 39, "global_frame_idx": 20295, "task_index": 18}, {"db_idx": 20296, "episode_idx": 94, "frame_idx": 40, "global_frame_idx": 20296, "task_index": 18}, {"db_idx": 20297, "episode_idx": 94, "frame_idx": 41, "global_frame_idx": 20297, "task_index": 18}, {"db_idx": 20298, "episode_idx": 94, "frame_idx": 42, "global_frame_idx": 20298, "task_index": 18}, {"db_idx": 20299, "episode_idx": 94, "frame_idx": 43, "global_frame_idx": 20299, "task_index": 18}, {"db_idx": 20300, "episode_idx": 94, "frame_idx": 44, "global_frame_idx": 20300, "task_index": 18}, {"db_idx": 20301, "episode_idx": 94, "frame_idx": 45, "global_frame_idx": 20301, "task_index": 18}, {"db_idx": 20302, "episode_idx": 94, "frame_idx": 46, "global_frame_idx": 20302, "task_index": 18}, {"db_idx": 20303, "episode_idx": 94, "frame_idx": 47, "global_frame_idx": 20303, "task_index": 18}, {"db_idx": 20304, "episode_idx": 94, "frame_idx": 48, "global_frame_idx": 20304, "task_index": 18}, {"db_idx": 20305, "episode_idx": 94, "frame_idx": 49, "global_frame_idx": 20305, "task_index": 18}, {"db_idx": 20306, "episode_idx": 94, "frame_idx": 50, "global_frame_idx": 20306, "task_index": 18}, {"db_idx": 20307, "episode_idx": 94, "frame_idx": 51, "global_frame_idx": 20307, "task_index": 18}, {"db_idx": 20308, "episode_idx": 94, "frame_idx": 52, "global_frame_idx": 20308, "task_index": 18}, {"db_idx": 20309, "episode_idx": 94, "frame_idx": 53, "global_frame_idx": 20309, "task_index": 18}, {"db_idx": 20310, "episode_idx": 94, "frame_idx": 54, "global_frame_idx": 20310, "task_index": 18}, {"db_idx": 20311, "episode_idx": 94, "frame_idx": 55, "global_frame_idx": 20311, "task_index": 18}, {"db_idx": 20312, "episode_idx": 94, "frame_idx": 56, "global_frame_idx": 20312, "task_index": 18}, {"db_idx": 20313, "episode_idx": 94, "frame_idx": 57, "global_frame_idx": 20313, "task_index": 18}, {"db_idx": 20314, 
"episode_idx": 94, "frame_idx": 58, "global_frame_idx": 20314, "task_index": 18}, {"db_idx": 20315, "episode_idx": 94, "frame_idx": 59, "global_frame_idx": 20315, "task_index": 18}, {"db_idx": 20316, "episode_idx": 94, "frame_idx": 60, "global_frame_idx": 20316, "task_index": 18}, {"db_idx": 20317, "episode_idx": 94, "frame_idx": 61, "global_frame_idx": 20317, "task_index": 18}, {"db_idx": 20318, "episode_idx": 94, "frame_idx": 62, "global_frame_idx": 20318, "task_index": 18}, {"db_idx": 20319, "episode_idx": 94, "frame_idx": 63, "global_frame_idx": 20319, "task_index": 18}, {"db_idx": 20320, "episode_idx": 94, "frame_idx": 64, "global_frame_idx": 20320, "task_index": 18}, {"db_idx": 20321, "episode_idx": 94, "frame_idx": 65, "global_frame_idx": 20321, "task_index": 18}, {"db_idx": 20322, "episode_idx": 94, "frame_idx": 66, "global_frame_idx": 20322, "task_index": 18}, {"db_idx": 20323, "episode_idx": 94, "frame_idx": 67, "global_frame_idx": 20323, "task_index": 18}, {"db_idx": 20324, "episode_idx": 94, "frame_idx": 68, "global_frame_idx": 20324, "task_index": 18}, {"db_idx": 20325, "episode_idx": 94, "frame_idx": 69, "global_frame_idx": 20325, "task_index": 18}, {"db_idx": 20326, "episode_idx": 94, "frame_idx": 70, "global_frame_idx": 20326, "task_index": 18}, {"db_idx": 20327, "episode_idx": 94, "frame_idx": 71, "global_frame_idx": 20327, "task_index": 18}, {"db_idx": 20328, "episode_idx": 94, "frame_idx": 72, "global_frame_idx": 20328, "task_index": 18}, {"db_idx": 20329, "episode_idx": 94, "frame_idx": 73, "global_frame_idx": 20329, "task_index": 18}, {"db_idx": 20330, "episode_idx": 94, "frame_idx": 74, "global_frame_idx": 20330, "task_index": 18}, {"db_idx": 20331, "episode_idx": 94, "frame_idx": 75, "global_frame_idx": 20331, "task_index": 18}, {"db_idx": 20332, "episode_idx": 94, "frame_idx": 76, "global_frame_idx": 20332, "task_index": 18}, {"db_idx": 20333, "episode_idx": 94, "frame_idx": 77, "global_frame_idx": 20333, "task_index": 18}, {"db_idx": 20334, 
"episode_idx": 94, "frame_idx": 78, "global_frame_idx": 20334, "task_index": 18}, {"db_idx": 20335, "episode_idx": 94, "frame_idx": 79, "global_frame_idx": 20335, "task_index": 18}, {"db_idx": 20336, "episode_idx": 94, "frame_idx": 80, "global_frame_idx": 20336, "task_index": 18}, {"db_idx": 20337, "episode_idx": 94, "frame_idx": 81, "global_frame_idx": 20337, "task_index": 18}, {"db_idx": 20338, "episode_idx": 94, "frame_idx": 82, "global_frame_idx": 20338, "task_index": 18}, {"db_idx": 20339, "episode_idx": 94, "frame_idx": 83, "global_frame_idx": 20339, "task_index": 18}, {"db_idx": 20340, "episode_idx": 94, "frame_idx": 84, "global_frame_idx": 20340, "task_index": 18}, {"db_idx": 20341, "episode_idx": 94, "frame_idx": 85, "global_frame_idx": 20341, "task_index": 18}, {"db_idx": 20342, "episode_idx": 94, "frame_idx": 86, "global_frame_idx": 20342, "task_index": 18}, {"db_idx": 20343, "episode_idx": 94, "frame_idx": 87, "global_frame_idx": 20343, "task_index": 18}, {"db_idx": 20344, "episode_idx": 94, "frame_idx": 88, "global_frame_idx": 20344, "task_index": 18}, {"db_idx": 20345, "episode_idx": 94, "frame_idx": 89, "global_frame_idx": 20345, "task_index": 18}, {"db_idx": 20346, "episode_idx": 94, "frame_idx": 90, "global_frame_idx": 20346, "task_index": 18}, {"db_idx": 20347, "episode_idx": 94, "frame_idx": 91, "global_frame_idx": 20347, "task_index": 18}, {"db_idx": 20348, "episode_idx": 94, "frame_idx": 92, "global_frame_idx": 20348, "task_index": 18}, {"db_idx": 20349, "episode_idx": 94, "frame_idx": 93, "global_frame_idx": 20349, "task_index": 18}, {"db_idx": 20350, "episode_idx": 94, "frame_idx": 94, "global_frame_idx": 20350, "task_index": 18}, {"db_idx": 20351, "episode_idx": 94, "frame_idx": 95, "global_frame_idx": 20351, "task_index": 18}, {"db_idx": 20352, "episode_idx": 94, "frame_idx": 96, "global_frame_idx": 20352, "task_index": 18}, {"db_idx": 20353, "episode_idx": 94, "frame_idx": 97, "global_frame_idx": 20353, "task_index": 18}, {"db_idx": 20354, 
"episode_idx": 94, "frame_idx": 98, "global_frame_idx": 20354, "task_index": 18}, {"db_idx": 20355, "episode_idx": 94, "frame_idx": 99, "global_frame_idx": 20355, "task_index": 18}, {"db_idx": 20356, "episode_idx": 94, "frame_idx": 100, "global_frame_idx": 20356, "task_index": 18}, {"db_idx": 20357, "episode_idx": 94, "frame_idx": 101, "global_frame_idx": 20357, "task_index": 18}, {"db_idx": 20358, "episode_idx": 94, "frame_idx": 102, "global_frame_idx": 20358, "task_index": 18}, {"db_idx": 20359, "episode_idx": 94, "frame_idx": 103, "global_frame_idx": 20359, "task_index": 18}, {"db_idx": 20360, "episode_idx": 94, "frame_idx": 104, "global_frame_idx": 20360, "task_index": 18}, {"db_idx": 20361, "episode_idx": 94, "frame_idx": 105, "global_frame_idx": 20361, "task_index": 18}, {"db_idx": 20362, "episode_idx": 94, "frame_idx": 106, "global_frame_idx": 20362, "task_index": 18}, {"db_idx": 20363, "episode_idx": 94, "frame_idx": 107, "global_frame_idx": 20363, "task_index": 18}, {"db_idx": 20364, "episode_idx": 94, "frame_idx": 108, "global_frame_idx": 20364, "task_index": 18}, {"db_idx": 20365, "episode_idx": 94, "frame_idx": 109, "global_frame_idx": 20365, "task_index": 18}, {"db_idx": 20366, "episode_idx": 94, "frame_idx": 110, "global_frame_idx": 20366, "task_index": 18}, {"db_idx": 20367, "episode_idx": 94, "frame_idx": 111, "global_frame_idx": 20367, "task_index": 18}, {"db_idx": 20368, "episode_idx": 94, "frame_idx": 112, "global_frame_idx": 20368, "task_index": 18}, {"db_idx": 20369, "episode_idx": 94, "frame_idx": 113, "global_frame_idx": 20369, "task_index": 18}, {"db_idx": 20370, "episode_idx": 94, "frame_idx": 114, "global_frame_idx": 20370, "task_index": 18}, {"db_idx": 20371, "episode_idx": 94, "frame_idx": 115, "global_frame_idx": 20371, "task_index": 18}, {"db_idx": 20372, "episode_idx": 94, "frame_idx": 116, "global_frame_idx": 20372, "task_index": 18}, {"db_idx": 20373, "episode_idx": 94, "frame_idx": 117, "global_frame_idx": 20373, "task_index": 18}, 
{"db_idx": 20374, "episode_idx": 94, "frame_idx": 118, "global_frame_idx": 20374, "task_index": 18}, {"db_idx": 20375, "episode_idx": 94, "frame_idx": 119, "global_frame_idx": 20375, "task_index": 18}, {"db_idx": 20376, "episode_idx": 94, "frame_idx": 120, "global_frame_idx": 20376, "task_index": 18}, {"db_idx": 20377, "episode_idx": 94, "frame_idx": 121, "global_frame_idx": 20377, "task_index": 18}, {"db_idx": 20378, "episode_idx": 94, "frame_idx": 122, "global_frame_idx": 20378, "task_index": 18}, {"db_idx": 20379, "episode_idx": 94, "frame_idx": 123, "global_frame_idx": 20379, "task_index": 18}, {"db_idx": 20380, "episode_idx": 94, "frame_idx": 124, "global_frame_idx": 20380, "task_index": 18}, {"db_idx": 20381, "episode_idx": 94, "frame_idx": 125, "global_frame_idx": 20381, "task_index": 18}, {"db_idx": 20382, "episode_idx": 95, "frame_idx": 0, "global_frame_idx": 20382, "task_index": 19}, {"db_idx": 20383, "episode_idx": 95, "frame_idx": 1, "global_frame_idx": 20383, "task_index": 19}, {"db_idx": 20384, "episode_idx": 95, "frame_idx": 2, "global_frame_idx": 20384, "task_index": 19}, {"db_idx": 20385, "episode_idx": 95, "frame_idx": 3, "global_frame_idx": 20385, "task_index": 19}, {"db_idx": 20386, "episode_idx": 95, "frame_idx": 4, "global_frame_idx": 20386, "task_index": 19}, {"db_idx": 20387, "episode_idx": 95, "frame_idx": 5, "global_frame_idx": 20387, "task_index": 19}, {"db_idx": 20388, "episode_idx": 95, "frame_idx": 6, "global_frame_idx": 20388, "task_index": 19}, {"db_idx": 20389, "episode_idx": 95, "frame_idx": 7, "global_frame_idx": 20389, "task_index": 19}, {"db_idx": 20390, "episode_idx": 95, "frame_idx": 8, "global_frame_idx": 20390, "task_index": 19}, {"db_idx": 20391, "episode_idx": 95, "frame_idx": 9, "global_frame_idx": 20391, "task_index": 19}, {"db_idx": 20392, "episode_idx": 95, "frame_idx": 10, "global_frame_idx": 20392, "task_index": 19}, {"db_idx": 20393, "episode_idx": 95, "frame_idx": 11, "global_frame_idx": 20393, "task_index": 19}, 
{"db_idx": 20394, "episode_idx": 95, "frame_idx": 12, "global_frame_idx": 20394, "task_index": 19}, {"db_idx": 20395, "episode_idx": 95, "frame_idx": 13, "global_frame_idx": 20395, "task_index": 19}, {"db_idx": 20396, "episode_idx": 95, "frame_idx": 14, "global_frame_idx": 20396, "task_index": 19}, {"db_idx": 20397, "episode_idx": 95, "frame_idx": 15, "global_frame_idx": 20397, "task_index": 19}, {"db_idx": 20398, "episode_idx": 95, "frame_idx": 16, "global_frame_idx": 20398, "task_index": 19}, {"db_idx": 20399, "episode_idx": 95, "frame_idx": 17, "global_frame_idx": 20399, "task_index": 19}, {"db_idx": 20400, "episode_idx": 95, "frame_idx": 18, "global_frame_idx": 20400, "task_index": 19}, {"db_idx": 20401, "episode_idx": 95, "frame_idx": 19, "global_frame_idx": 20401, "task_index": 19}, {"db_idx": 20402, "episode_idx": 95, "frame_idx": 20, "global_frame_idx": 20402, "task_index": 19}, {"db_idx": 20403, "episode_idx": 95, "frame_idx": 21, "global_frame_idx": 20403, "task_index": 19}, {"db_idx": 20404, "episode_idx": 95, "frame_idx": 22, "global_frame_idx": 20404, "task_index": 19}, {"db_idx": 20405, "episode_idx": 95, "frame_idx": 23, "global_frame_idx": 20405, "task_index": 19}, {"db_idx": 20406, "episode_idx": 95, "frame_idx": 24, "global_frame_idx": 20406, "task_index": 19}, {"db_idx": 20407, "episode_idx": 95, "frame_idx": 25, "global_frame_idx": 20407, "task_index": 19}, {"db_idx": 20408, "episode_idx": 95, "frame_idx": 26, "global_frame_idx": 20408, "task_index": 19}, {"db_idx": 20409, "episode_idx": 95, "frame_idx": 27, "global_frame_idx": 20409, "task_index": 19}, {"db_idx": 20410, "episode_idx": 95, "frame_idx": 28, "global_frame_idx": 20410, "task_index": 19}, {"db_idx": 20411, "episode_idx": 95, "frame_idx": 29, "global_frame_idx": 20411, "task_index": 19}, {"db_idx": 20412, "episode_idx": 95, "frame_idx": 30, "global_frame_idx": 20412, "task_index": 19}, {"db_idx": 20413, "episode_idx": 95, "frame_idx": 31, "global_frame_idx": 20413, "task_index": 19}, 
{"db_idx": 20414, "episode_idx": 95, "frame_idx": 32, "global_frame_idx": 20414, "task_index": 19}, {"db_idx": 20415, "episode_idx": 95, "frame_idx": 33, "global_frame_idx": 20415, "task_index": 19}, {"db_idx": 20416, "episode_idx": 95, "frame_idx": 34, "global_frame_idx": 20416, "task_index": 19}, {"db_idx": 20417, "episode_idx": 95, "frame_idx": 35, "global_frame_idx": 20417, "task_index": 19}, {"db_idx": 20418, "episode_idx": 95, "frame_idx": 36, "global_frame_idx": 20418, "task_index": 19}, {"db_idx": 20419, "episode_idx": 95, "frame_idx": 37, "global_frame_idx": 20419, "task_index": 19}, {"db_idx": 20420, "episode_idx": 95, "frame_idx": 38, "global_frame_idx": 20420, "task_index": 19}, {"db_idx": 20421, "episode_idx": 95, "frame_idx": 39, "global_frame_idx": 20421, "task_index": 19}, {"db_idx": 20422, "episode_idx": 95, "frame_idx": 40, "global_frame_idx": 20422, "task_index": 19}, {"db_idx": 20423, "episode_idx": 95, "frame_idx": 41, "global_frame_idx": 20423, "task_index": 19}, {"db_idx": 20424, "episode_idx": 95, "frame_idx": 42, "global_frame_idx": 20424, "task_index": 19}, {"db_idx": 20425, "episode_idx": 95, "frame_idx": 43, "global_frame_idx": 20425, "task_index": 19}, {"db_idx": 20426, "episode_idx": 95, "frame_idx": 44, "global_frame_idx": 20426, "task_index": 19}, {"db_idx": 20427, "episode_idx": 95, "frame_idx": 45, "global_frame_idx": 20427, "task_index": 19}, {"db_idx": 20428, "episode_idx": 95, "frame_idx": 46, "global_frame_idx": 20428, "task_index": 19}, {"db_idx": 20429, "episode_idx": 95, "frame_idx": 47, "global_frame_idx": 20429, "task_index": 19}, {"db_idx": 20430, "episode_idx": 95, "frame_idx": 48, "global_frame_idx": 20430, "task_index": 19}, {"db_idx": 20431, "episode_idx": 95, "frame_idx": 49, "global_frame_idx": 20431, "task_index": 19}, {"db_idx": 20432, "episode_idx": 95, "frame_idx": 50, "global_frame_idx": 20432, "task_index": 19}, {"db_idx": 20433, "episode_idx": 95, "frame_idx": 51, "global_frame_idx": 20433, "task_index": 19}, 
{"db_idx": 20434, "episode_idx": 95, "frame_idx": 52, "global_frame_idx": 20434, "task_index": 19}, {"db_idx": 20435, "episode_idx": 95, "frame_idx": 53, "global_frame_idx": 20435, "task_index": 19}, {"db_idx": 20436, "episode_idx": 95, "frame_idx": 54, "global_frame_idx": 20436, "task_index": 19}, {"db_idx": 20437, "episode_idx": 95, "frame_idx": 55, "global_frame_idx": 20437, "task_index": 19}, {"db_idx": 20438, "episode_idx": 95, "frame_idx": 56, "global_frame_idx": 20438, "task_index": 19}, {"db_idx": 20439, "episode_idx": 95, "frame_idx": 57, "global_frame_idx": 20439, "task_index": 19}, {"db_idx": 20440, "episode_idx": 95, "frame_idx": 58, "global_frame_idx": 20440, "task_index": 19}, {"db_idx": 20441, "episode_idx": 95, "frame_idx": 59, "global_frame_idx": 20441, "task_index": 19}, {"db_idx": 20442, "episode_idx": 95, "frame_idx": 60, "global_frame_idx": 20442, "task_index": 19}, {"db_idx": 20443, "episode_idx": 95, "frame_idx": 61, "global_frame_idx": 20443, "task_index": 19}, {"db_idx": 20444, "episode_idx": 95, "frame_idx": 62, "global_frame_idx": 20444, "task_index": 19}, {"db_idx": 20445, "episode_idx": 95, "frame_idx": 63, "global_frame_idx": 20445, "task_index": 19}, {"db_idx": 20446, "episode_idx": 95, "frame_idx": 64, "global_frame_idx": 20446, "task_index": 19}, {"db_idx": 20447, "episode_idx": 95, "frame_idx": 65, "global_frame_idx": 20447, "task_index": 19}, {"db_idx": 20448, "episode_idx": 95, "frame_idx": 66, "global_frame_idx": 20448, "task_index": 19}, {"db_idx": 20449, "episode_idx": 95, "frame_idx": 67, "global_frame_idx": 20449, "task_index": 19}, {"db_idx": 20450, "episode_idx": 95, "frame_idx": 68, "global_frame_idx": 20450, "task_index": 19}, {"db_idx": 20451, "episode_idx": 95, "frame_idx": 69, "global_frame_idx": 20451, "task_index": 19}, {"db_idx": 20452, "episode_idx": 95, "frame_idx": 70, "global_frame_idx": 20452, "task_index": 19}, {"db_idx": 20453, "episode_idx": 95, "frame_idx": 71, "global_frame_idx": 20453, "task_index": 19}, 
{"db_idx": 20454, "episode_idx": 95, "frame_idx": 72, "global_frame_idx": 20454, "task_index": 19}, {"db_idx": 20455, "episode_idx": 95, "frame_idx": 73, "global_frame_idx": 20455, "task_index": 19}, {"db_idx": 20456, "episode_idx": 95, "frame_idx": 74, "global_frame_idx": 20456, "task_index": 19}, {"db_idx": 20457, "episode_idx": 95, "frame_idx": 75, "global_frame_idx": 20457, "task_index": 19}, {"db_idx": 20458, "episode_idx": 95, "frame_idx": 76, "global_frame_idx": 20458, "task_index": 19}, {"db_idx": 20459, "episode_idx": 95, "frame_idx": 77, "global_frame_idx": 20459, "task_index": 19}, {"db_idx": 20460, "episode_idx": 95, "frame_idx": 78, "global_frame_idx": 20460, "task_index": 19}, {"db_idx": 20461, "episode_idx": 95, "frame_idx": 79, "global_frame_idx": 20461, "task_index": 19}, {"db_idx": 20462, "episode_idx": 95, "frame_idx": 80, "global_frame_idx": 20462, "task_index": 19}, {"db_idx": 20463, "episode_idx": 95, "frame_idx": 81, "global_frame_idx": 20463, "task_index": 19}, {"db_idx": 20464, "episode_idx": 95, "frame_idx": 82, "global_frame_idx": 20464, "task_index": 19}, {"db_idx": 20465, "episode_idx": 95, "frame_idx": 83, "global_frame_idx": 20465, "task_index": 19}, {"db_idx": 20466, "episode_idx": 95, "frame_idx": 84, "global_frame_idx": 20466, "task_index": 19}, {"db_idx": 20467, "episode_idx": 95, "frame_idx": 85, "global_frame_idx": 20467, "task_index": 19}, {"db_idx": 20468, "episode_idx": 95, "frame_idx": 86, "global_frame_idx": 20468, "task_index": 19}, {"db_idx": 20469, "episode_idx": 95, "frame_idx": 87, "global_frame_idx": 20469, "task_index": 19}, {"db_idx": 20470, "episode_idx": 95, "frame_idx": 88, "global_frame_idx": 20470, "task_index": 19}, {"db_idx": 20471, "episode_idx": 95, "frame_idx": 89, "global_frame_idx": 20471, "task_index": 19}, {"db_idx": 20472, "episode_idx": 95, "frame_idx": 90, "global_frame_idx": 20472, "task_index": 19}, {"db_idx": 20473, "episode_idx": 95, "frame_idx": 91, "global_frame_idx": 20473, "task_index": 19}, 
{"db_idx": 20474, "episode_idx": 95, "frame_idx": 92, "global_frame_idx": 20474, "task_index": 19}, {"db_idx": 20475, "episode_idx": 95, "frame_idx": 93, "global_frame_idx": 20475, "task_index": 19}, {"db_idx": 20476, "episode_idx": 95, "frame_idx": 94, "global_frame_idx": 20476, "task_index": 19}, {"db_idx": 20477, "episode_idx": 95, "frame_idx": 95, "global_frame_idx": 20477, "task_index": 19}, {"db_idx": 20478, "episode_idx": 95, "frame_idx": 96, "global_frame_idx": 20478, "task_index": 19}, {"db_idx": 20479, "episode_idx": 95, "frame_idx": 97, "global_frame_idx": 20479, "task_index": 19}, {"db_idx": 20480, "episode_idx": 95, "frame_idx": 98, "global_frame_idx": 20480, "task_index": 19}, {"db_idx": 20481, "episode_idx": 95, "frame_idx": 99, "global_frame_idx": 20481, "task_index": 19}, {"db_idx": 20482, "episode_idx": 95, "frame_idx": 100, "global_frame_idx": 20482, "task_index": 19}, {"db_idx": 20483, "episode_idx": 95, "frame_idx": 101, "global_frame_idx": 20483, "task_index": 19}, {"db_idx": 20484, "episode_idx": 95, "frame_idx": 102, "global_frame_idx": 20484, "task_index": 19}, {"db_idx": 20485, "episode_idx": 95, "frame_idx": 103, "global_frame_idx": 20485, "task_index": 19}, {"db_idx": 20486, "episode_idx": 95, "frame_idx": 104, "global_frame_idx": 20486, "task_index": 19}, {"db_idx": 20487, "episode_idx": 95, "frame_idx": 105, "global_frame_idx": 20487, "task_index": 19}, {"db_idx": 20488, "episode_idx": 95, "frame_idx": 106, "global_frame_idx": 20488, "task_index": 19}, {"db_idx": 20489, "episode_idx": 95, "frame_idx": 107, "global_frame_idx": 20489, "task_index": 19}, {"db_idx": 20490, "episode_idx": 95, "frame_idx": 108, "global_frame_idx": 20490, "task_index": 19}, {"db_idx": 20491, "episode_idx": 95, "frame_idx": 109, "global_frame_idx": 20491, "task_index": 19}, {"db_idx": 20492, "episode_idx": 95, "frame_idx": 110, "global_frame_idx": 20492, "task_index": 19}, {"db_idx": 20493, "episode_idx": 95, "frame_idx": 111, "global_frame_idx": 20493, 
"task_index": 19}, {"db_idx": 20494, "episode_idx": 95, "frame_idx": 112, "global_frame_idx": 20494, "task_index": 19}, {"db_idx": 20495, "episode_idx": 95, "frame_idx": 113, "global_frame_idx": 20495, "task_index": 19}, {"db_idx": 20496, "episode_idx": 95, "frame_idx": 114, "global_frame_idx": 20496, "task_index": 19}, {"db_idx": 20497, "episode_idx": 95, "frame_idx": 115, "global_frame_idx": 20497, "task_index": 19}, {"db_idx": 20498, "episode_idx": 95, "frame_idx": 116, "global_frame_idx": 20498, "task_index": 19}, {"db_idx": 20499, "episode_idx": 95, "frame_idx": 117, "global_frame_idx": 20499, "task_index": 19}, {"db_idx": 20500, "episode_idx": 95, "frame_idx": 118, "global_frame_idx": 20500, "task_index": 19}, {"db_idx": 20501, "episode_idx": 95, "frame_idx": 119, "global_frame_idx": 20501, "task_index": 19}, {"db_idx": 20502, "episode_idx": 95, "frame_idx": 120, "global_frame_idx": 20502, "task_index": 19}, {"db_idx": 20503, "episode_idx": 95, "frame_idx": 121, "global_frame_idx": 20503, "task_index": 19}, {"db_idx": 20504, "episode_idx": 95, "frame_idx": 122, "global_frame_idx": 20504, "task_index": 19}, {"db_idx": 20505, "episode_idx": 95, "frame_idx": 123, "global_frame_idx": 20505, "task_index": 19}, {"db_idx": 20506, "episode_idx": 95, "frame_idx": 124, "global_frame_idx": 20506, "task_index": 19}, {"db_idx": 20507, "episode_idx": 95, "frame_idx": 125, "global_frame_idx": 20507, "task_index": 19}, {"db_idx": 20508, "episode_idx": 96, "frame_idx": 0, "global_frame_idx": 20508, "task_index": 19}, {"db_idx": 20509, "episode_idx": 96, "frame_idx": 1, "global_frame_idx": 20509, "task_index": 19}, {"db_idx": 20510, "episode_idx": 96, "frame_idx": 2, "global_frame_idx": 20510, "task_index": 19}, {"db_idx": 20511, "episode_idx": 96, "frame_idx": 3, "global_frame_idx": 20511, "task_index": 19}, {"db_idx": 20512, "episode_idx": 96, "frame_idx": 4, "global_frame_idx": 20512, "task_index": 19}, {"db_idx": 20513, "episode_idx": 96, "frame_idx": 5, 
"global_frame_idx": 20513, "task_index": 19}, {"db_idx": 20514, "episode_idx": 96, "frame_idx": 6, "global_frame_idx": 20514, "task_index": 19}, {"db_idx": 20515, "episode_idx": 96, "frame_idx": 7, "global_frame_idx": 20515, "task_index": 19}, {"db_idx": 20516, "episode_idx": 96, "frame_idx": 8, "global_frame_idx": 20516, "task_index": 19}, {"db_idx": 20517, "episode_idx": 96, "frame_idx": 9, "global_frame_idx": 20517, "task_index": 19}, {"db_idx": 20518, "episode_idx": 96, "frame_idx": 10, "global_frame_idx": 20518, "task_index": 19}, {"db_idx": 20519, "episode_idx": 96, "frame_idx": 11, "global_frame_idx": 20519, "task_index": 19}, {"db_idx": 20520, "episode_idx": 96, "frame_idx": 12, "global_frame_idx": 20520, "task_index": 19}, {"db_idx": 20521, "episode_idx": 96, "frame_idx": 13, "global_frame_idx": 20521, "task_index": 19}, {"db_idx": 20522, "episode_idx": 96, "frame_idx": 14, "global_frame_idx": 20522, "task_index": 19}, {"db_idx": 20523, "episode_idx": 96, "frame_idx": 15, "global_frame_idx": 20523, "task_index": 19}, {"db_idx": 20524, "episode_idx": 96, "frame_idx": 16, "global_frame_idx": 20524, "task_index": 19}, {"db_idx": 20525, "episode_idx": 96, "frame_idx": 17, "global_frame_idx": 20525, "task_index": 19}, {"db_idx": 20526, "episode_idx": 96, "frame_idx": 18, "global_frame_idx": 20526, "task_index": 19}, {"db_idx": 20527, "episode_idx": 96, "frame_idx": 19, "global_frame_idx": 20527, "task_index": 19}, {"db_idx": 20528, "episode_idx": 96, "frame_idx": 20, "global_frame_idx": 20528, "task_index": 19}, {"db_idx": 20529, "episode_idx": 96, "frame_idx": 21, "global_frame_idx": 20529, "task_index": 19}, {"db_idx": 20530, "episode_idx": 96, "frame_idx": 22, "global_frame_idx": 20530, "task_index": 19}, {"db_idx": 20531, "episode_idx": 96, "frame_idx": 23, "global_frame_idx": 20531, "task_index": 19}, {"db_idx": 20532, "episode_idx": 96, "frame_idx": 24, "global_frame_idx": 20532, "task_index": 19}, {"db_idx": 20533, "episode_idx": 96, "frame_idx": 25, 
"global_frame_idx": 20533, "task_index": 19}, {"db_idx": 20534, "episode_idx": 96, "frame_idx": 26, "global_frame_idx": 20534, "task_index": 19}, {"db_idx": 20535, "episode_idx": 96, "frame_idx": 27, "global_frame_idx": 20535, "task_index": 19}, {"db_idx": 20536, "episode_idx": 96, "frame_idx": 28, "global_frame_idx": 20536, "task_index": 19}, {"db_idx": 20537, "episode_idx": 96, "frame_idx": 29, "global_frame_idx": 20537, "task_index": 19}, {"db_idx": 20538, "episode_idx": 96, "frame_idx": 30, "global_frame_idx": 20538, "task_index": 19}, {"db_idx": 20539, "episode_idx": 96, "frame_idx": 31, "global_frame_idx": 20539, "task_index": 19}, {"db_idx": 20540, "episode_idx": 96, "frame_idx": 32, "global_frame_idx": 20540, "task_index": 19}, {"db_idx": 20541, "episode_idx": 96, "frame_idx": 33, "global_frame_idx": 20541, "task_index": 19}, {"db_idx": 20542, "episode_idx": 96, "frame_idx": 34, "global_frame_idx": 20542, "task_index": 19}, {"db_idx": 20543, "episode_idx": 96, "frame_idx": 35, "global_frame_idx": 20543, "task_index": 19}, {"db_idx": 20544, "episode_idx": 96, "frame_idx": 36, "global_frame_idx": 20544, "task_index": 19}, {"db_idx": 20545, "episode_idx": 96, "frame_idx": 37, "global_frame_idx": 20545, "task_index": 19}, {"db_idx": 20546, "episode_idx": 96, "frame_idx": 38, "global_frame_idx": 20546, "task_index": 19}, {"db_idx": 20547, "episode_idx": 96, "frame_idx": 39, "global_frame_idx": 20547, "task_index": 19}, {"db_idx": 20548, "episode_idx": 96, "frame_idx": 40, "global_frame_idx": 20548, "task_index": 19}, {"db_idx": 20549, "episode_idx": 96, "frame_idx": 41, "global_frame_idx": 20549, "task_index": 19}, {"db_idx": 20550, "episode_idx": 96, "frame_idx": 42, "global_frame_idx": 20550, "task_index": 19}, {"db_idx": 20551, "episode_idx": 96, "frame_idx": 43, "global_frame_idx": 20551, "task_index": 19}, {"db_idx": 20552, "episode_idx": 96, "frame_idx": 44, "global_frame_idx": 20552, "task_index": 19}, {"db_idx": 20553, "episode_idx": 96, "frame_idx": 45, 
"global_frame_idx": 20553, "task_index": 19}, {"db_idx": 20554, "episode_idx": 96, "frame_idx": 46, "global_frame_idx": 20554, "task_index": 19}, {"db_idx": 20555, "episode_idx": 96, "frame_idx": 47, "global_frame_idx": 20555, "task_index": 19}, {"db_idx": 20556, "episode_idx": 96, "frame_idx": 48, "global_frame_idx": 20556, "task_index": 19}, {"db_idx": 20557, "episode_idx": 96, "frame_idx": 49, "global_frame_idx": 20557, "task_index": 19}, {"db_idx": 20558, "episode_idx": 96, "frame_idx": 50, "global_frame_idx": 20558, "task_index": 19}, {"db_idx": 20559, "episode_idx": 96, "frame_idx": 51, "global_frame_idx": 20559, "task_index": 19}, {"db_idx": 20560, "episode_idx": 96, "frame_idx": 52, "global_frame_idx": 20560, "task_index": 19}, {"db_idx": 20561, "episode_idx": 96, "frame_idx": 53, "global_frame_idx": 20561, "task_index": 19}, {"db_idx": 20562, "episode_idx": 96, "frame_idx": 54, "global_frame_idx": 20562, "task_index": 19}, {"db_idx": 20563, "episode_idx": 96, "frame_idx": 55, "global_frame_idx": 20563, "task_index": 19}, {"db_idx": 20564, "episode_idx": 96, "frame_idx": 56, "global_frame_idx": 20564, "task_index": 19}, {"db_idx": 20565, "episode_idx": 96, "frame_idx": 57, "global_frame_idx": 20565, "task_index": 19}, {"db_idx": 20566, "episode_idx": 96, "frame_idx": 58, "global_frame_idx": 20566, "task_index": 19}, {"db_idx": 20567, "episode_idx": 96, "frame_idx": 59, "global_frame_idx": 20567, "task_index": 19}, {"db_idx": 20568, "episode_idx": 96, "frame_idx": 60, "global_frame_idx": 20568, "task_index": 19}, {"db_idx": 20569, "episode_idx": 96, "frame_idx": 61, "global_frame_idx": 20569, "task_index": 19}, {"db_idx": 20570, "episode_idx": 96, "frame_idx": 62, "global_frame_idx": 20570, "task_index": 19}, {"db_idx": 20571, "episode_idx": 96, "frame_idx": 63, "global_frame_idx": 20571, "task_index": 19}, {"db_idx": 20572, "episode_idx": 96, "frame_idx": 64, "global_frame_idx": 20572, "task_index": 19}, {"db_idx": 20573, "episode_idx": 96, "frame_idx": 65, 
"global_frame_idx": 20573, "task_index": 19}, {"db_idx": 20574, "episode_idx": 96, "frame_idx": 66, "global_frame_idx": 20574, "task_index": 19}, {"db_idx": 20575, "episode_idx": 96, "frame_idx": 67, "global_frame_idx": 20575, "task_index": 19}, {"db_idx": 20576, "episode_idx": 96, "frame_idx": 68, "global_frame_idx": 20576, "task_index": 19}, {"db_idx": 20577, "episode_idx": 96, "frame_idx": 69, "global_frame_idx": 20577, "task_index": 19}, {"db_idx": 20578, "episode_idx": 96, "frame_idx": 70, "global_frame_idx": 20578, "task_index": 19}, {"db_idx": 20579, "episode_idx": 96, "frame_idx": 71, "global_frame_idx": 20579, "task_index": 19}, {"db_idx": 20580, "episode_idx": 96, "frame_idx": 72, "global_frame_idx": 20580, "task_index": 19}, {"db_idx": 20581, "episode_idx": 96, "frame_idx": 73, "global_frame_idx": 20581, "task_index": 19}, {"db_idx": 20582, "episode_idx": 96, "frame_idx": 74, "global_frame_idx": 20582, "task_index": 19}, {"db_idx": 20583, "episode_idx": 96, "frame_idx": 75, "global_frame_idx": 20583, "task_index": 19}, {"db_idx": 20584, "episode_idx": 96, "frame_idx": 76, "global_frame_idx": 20584, "task_index": 19}, {"db_idx": 20585, "episode_idx": 96, "frame_idx": 77, "global_frame_idx": 20585, "task_index": 19}, {"db_idx": 20586, "episode_idx": 96, "frame_idx": 78, "global_frame_idx": 20586, "task_index": 19}, {"db_idx": 20587, "episode_idx": 96, "frame_idx": 79, "global_frame_idx": 20587, "task_index": 19}, {"db_idx": 20588, "episode_idx": 96, "frame_idx": 80, "global_frame_idx": 20588, "task_index": 19}, {"db_idx": 20589, "episode_idx": 96, "frame_idx": 81, "global_frame_idx": 20589, "task_index": 19}, {"db_idx": 20590, "episode_idx": 96, "frame_idx": 82, "global_frame_idx": 20590, "task_index": 19}, {"db_idx": 20591, "episode_idx": 96, "frame_idx": 83, "global_frame_idx": 20591, "task_index": 19}, {"db_idx": 20592, "episode_idx": 96, "frame_idx": 84, "global_frame_idx": 20592, "task_index": 19}, {"db_idx": 20593, "episode_idx": 96, "frame_idx": 85, 
"global_frame_idx": 20593, "task_index": 19}, {"db_idx": 20594, "episode_idx": 96, "frame_idx": 86, "global_frame_idx": 20594, "task_index": 19}, {"db_idx": 20595, "episode_idx": 96, "frame_idx": 87, "global_frame_idx": 20595, "task_index": 19}, {"db_idx": 20596, "episode_idx": 96, "frame_idx": 88, "global_frame_idx": 20596, "task_index": 19}, {"db_idx": 20597, "episode_idx": 96, "frame_idx": 89, "global_frame_idx": 20597, "task_index": 19}, {"db_idx": 20598, "episode_idx": 96, "frame_idx": 90, "global_frame_idx": 20598, "task_index": 19}, {"db_idx": 20599, "episode_idx": 96, "frame_idx": 91, "global_frame_idx": 20599, "task_index": 19}, {"db_idx": 20600, "episode_idx": 96, "frame_idx": 92, "global_frame_idx": 20600, "task_index": 19}, {"db_idx": 20601, "episode_idx": 96, "frame_idx": 93, "global_frame_idx": 20601, "task_index": 19}, {"db_idx": 20602, "episode_idx": 96, "frame_idx": 94, "global_frame_idx": 20602, "task_index": 19}, {"db_idx": 20603, "episode_idx": 96, "frame_idx": 95, "global_frame_idx": 20603, "task_index": 19}, {"db_idx": 20604, "episode_idx": 96, "frame_idx": 96, "global_frame_idx": 20604, "task_index": 19}, {"db_idx": 20605, "episode_idx": 96, "frame_idx": 97, "global_frame_idx": 20605, "task_index": 19}, {"db_idx": 20606, "episode_idx": 96, "frame_idx": 98, "global_frame_idx": 20606, "task_index": 19}, {"db_idx": 20607, "episode_idx": 96, "frame_idx": 99, "global_frame_idx": 20607, "task_index": 19}, {"db_idx": 20608, "episode_idx": 96, "frame_idx": 100, "global_frame_idx": 20608, "task_index": 19}, {"db_idx": 20609, "episode_idx": 96, "frame_idx": 101, "global_frame_idx": 20609, "task_index": 19}, {"db_idx": 20610, "episode_idx": 96, "frame_idx": 102, "global_frame_idx": 20610, "task_index": 19}, {"db_idx": 20611, "episode_idx": 96, "frame_idx": 103, "global_frame_idx": 20611, "task_index": 19}, {"db_idx": 20612, "episode_idx": 96, "frame_idx": 104, "global_frame_idx": 20612, "task_index": 19}, {"db_idx": 20613, "episode_idx": 96, 
"frame_idx": 105, "global_frame_idx": 20613, "task_index": 19}, {"db_idx": 20614, "episode_idx": 96, "frame_idx": 106, "global_frame_idx": 20614, "task_index": 19}, {"db_idx": 20615, "episode_idx": 96, "frame_idx": 107, "global_frame_idx": 20615, "task_index": 19}, {"db_idx": 20616, "episode_idx": 96, "frame_idx": 108, "global_frame_idx": 20616, "task_index": 19}, {"db_idx": 20617, "episode_idx": 96, "frame_idx": 109, "global_frame_idx": 20617, "task_index": 19}, {"db_idx": 20618, "episode_idx": 96, "frame_idx": 110, "global_frame_idx": 20618, "task_index": 19}, {"db_idx": 20619, "episode_idx": 96, "frame_idx": 111, "global_frame_idx": 20619, "task_index": 19}, {"db_idx": 20620, "episode_idx": 96, "frame_idx": 112, "global_frame_idx": 20620, "task_index": 19}, {"db_idx": 20621, "episode_idx": 96, "frame_idx": 113, "global_frame_idx": 20621, "task_index": 19}, {"db_idx": 20622, "episode_idx": 96, "frame_idx": 114, "global_frame_idx": 20622, "task_index": 19}, {"db_idx": 20623, "episode_idx": 96, "frame_idx": 115, "global_frame_idx": 20623, "task_index": 19}, {"db_idx": 20624, "episode_idx": 96, "frame_idx": 116, "global_frame_idx": 20624, "task_index": 19}, {"db_idx": 20625, "episode_idx": 96, "frame_idx": 117, "global_frame_idx": 20625, "task_index": 19}, {"db_idx": 20626, "episode_idx": 96, "frame_idx": 118, "global_frame_idx": 20626, "task_index": 19}, {"db_idx": 20627, "episode_idx": 96, "frame_idx": 119, "global_frame_idx": 20627, "task_index": 19}, {"db_idx": 20628, "episode_idx": 96, "frame_idx": 120, "global_frame_idx": 20628, "task_index": 19}, {"db_idx": 20629, "episode_idx": 96, "frame_idx": 121, "global_frame_idx": 20629, "task_index": 19}, {"db_idx": 20630, "episode_idx": 96, "frame_idx": 122, "global_frame_idx": 20630, "task_index": 19}, {"db_idx": 20631, "episode_idx": 96, "frame_idx": 123, "global_frame_idx": 20631, "task_index": 19}, {"db_idx": 20632, "episode_idx": 96, "frame_idx": 124, "global_frame_idx": 20632, "task_index": 19}, {"db_idx": 
20633, "episode_idx": 96, "frame_idx": 125, "global_frame_idx": 20633, "task_index": 19}, {"db_idx": 20634, "episode_idx": 96, "frame_idx": 126, "global_frame_idx": 20634, "task_index": 19}, {"db_idx": 20635, "episode_idx": 96, "frame_idx": 127, "global_frame_idx": 20635, "task_index": 19}, {"db_idx": 20636, "episode_idx": 96, "frame_idx": 128, "global_frame_idx": 20636, "task_index": 19}, {"db_idx": 20637, "episode_idx": 96, "frame_idx": 129, "global_frame_idx": 20637, "task_index": 19}, {"db_idx": 20638, "episode_idx": 96, "frame_idx": 130, "global_frame_idx": 20638, "task_index": 19}, {"db_idx": 20639, "episode_idx": 96, "frame_idx": 131, "global_frame_idx": 20639, "task_index": 19}, {"db_idx": 20640, "episode_idx": 96, "frame_idx": 132, "global_frame_idx": 20640, "task_index": 19}, {"db_idx": 20641, "episode_idx": 96, "frame_idx": 133, "global_frame_idx": 20641, "task_index": 19}, {"db_idx": 20642, "episode_idx": 96, "frame_idx": 134, "global_frame_idx": 20642, "task_index": 19}, {"db_idx": 20643, "episode_idx": 96, "frame_idx": 135, "global_frame_idx": 20643, "task_index": 19}, {"db_idx": 20644, "episode_idx": 96, "frame_idx": 136, "global_frame_idx": 20644, "task_index": 19}, {"db_idx": 20645, "episode_idx": 96, "frame_idx": 137, "global_frame_idx": 20645, "task_index": 19}, {"db_idx": 20646, "episode_idx": 96, "frame_idx": 138, "global_frame_idx": 20646, "task_index": 19}, {"db_idx": 20647, "episode_idx": 96, "frame_idx": 139, "global_frame_idx": 20647, "task_index": 19}, {"db_idx": 20648, "episode_idx": 96, "frame_idx": 140, "global_frame_idx": 20648, "task_index": 19}, {"db_idx": 20649, "episode_idx": 96, "frame_idx": 141, "global_frame_idx": 20649, "task_index": 19}, {"db_idx": 20650, "episode_idx": 96, "frame_idx": 142, "global_frame_idx": 20650, "task_index": 19}, {"db_idx": 20651, "episode_idx": 97, "frame_idx": 0, "global_frame_idx": 20651, "task_index": 19}, {"db_idx": 20652, "episode_idx": 97, "frame_idx": 1, "global_frame_idx": 20652, "task_index": 
19}, {"db_idx": 20653, "episode_idx": 97, "frame_idx": 2, "global_frame_idx": 20653, "task_index": 19}, {"db_idx": 20654, "episode_idx": 97, "frame_idx": 3, "global_frame_idx": 20654, "task_index": 19}, {"db_idx": 20655, "episode_idx": 97, "frame_idx": 4, "global_frame_idx": 20655, "task_index": 19}, {"db_idx": 20656, "episode_idx": 97, "frame_idx": 5, "global_frame_idx": 20656, "task_index": 19}, {"db_idx": 20657, "episode_idx": 97, "frame_idx": 6, "global_frame_idx": 20657, "task_index": 19}, {"db_idx": 20658, "episode_idx": 97, "frame_idx": 7, "global_frame_idx": 20658, "task_index": 19}, {"db_idx": 20659, "episode_idx": 97, "frame_idx": 8, "global_frame_idx": 20659, "task_index": 19}, {"db_idx": 20660, "episode_idx": 97, "frame_idx": 9, "global_frame_idx": 20660, "task_index": 19}, {"db_idx": 20661, "episode_idx": 97, "frame_idx": 10, "global_frame_idx": 20661, "task_index": 19}, {"db_idx": 20662, "episode_idx": 97, "frame_idx": 11, "global_frame_idx": 20662, "task_index": 19}, {"db_idx": 20663, "episode_idx": 97, "frame_idx": 12, "global_frame_idx": 20663, "task_index": 19}, {"db_idx": 20664, "episode_idx": 97, "frame_idx": 13, "global_frame_idx": 20664, "task_index": 19}, {"db_idx": 20665, "episode_idx": 97, "frame_idx": 14, "global_frame_idx": 20665, "task_index": 19}, {"db_idx": 20666, "episode_idx": 97, "frame_idx": 15, "global_frame_idx": 20666, "task_index": 19}, {"db_idx": 20667, "episode_idx": 97, "frame_idx": 16, "global_frame_idx": 20667, "task_index": 19}, {"db_idx": 20668, "episode_idx": 97, "frame_idx": 17, "global_frame_idx": 20668, "task_index": 19}, {"db_idx": 20669, "episode_idx": 97, "frame_idx": 18, "global_frame_idx": 20669, "task_index": 19}, {"db_idx": 20670, "episode_idx": 97, "frame_idx": 19, "global_frame_idx": 20670, "task_index": 19}, {"db_idx": 20671, "episode_idx": 97, "frame_idx": 20, "global_frame_idx": 20671, "task_index": 19}, {"db_idx": 20672, "episode_idx": 97, "frame_idx": 21, "global_frame_idx": 20672, "task_index": 19}, 
{"db_idx": 20673, "episode_idx": 97, "frame_idx": 22, "global_frame_idx": 20673, "task_index": 19}, {"db_idx": 20674, "episode_idx": 97, "frame_idx": 23, "global_frame_idx": 20674, "task_index": 19}, {"db_idx": 20675, "episode_idx": 97, "frame_idx": 24, "global_frame_idx": 20675, "task_index": 19}, {"db_idx": 20676, "episode_idx": 97, "frame_idx": 25, "global_frame_idx": 20676, "task_index": 19}, {"db_idx": 20677, "episode_idx": 97, "frame_idx": 26, "global_frame_idx": 20677, "task_index": 19}, {"db_idx": 20678, "episode_idx": 97, "frame_idx": 27, "global_frame_idx": 20678, "task_index": 19}, {"db_idx": 20679, "episode_idx": 97, "frame_idx": 28, "global_frame_idx": 20679, "task_index": 19}, {"db_idx": 20680, "episode_idx": 97, "frame_idx": 29, "global_frame_idx": 20680, "task_index": 19}, {"db_idx": 20681, "episode_idx": 97, "frame_idx": 30, "global_frame_idx": 20681, "task_index": 19}, {"db_idx": 20682, "episode_idx": 97, "frame_idx": 31, "global_frame_idx": 20682, "task_index": 19}, {"db_idx": 20683, "episode_idx": 97, "frame_idx": 32, "global_frame_idx": 20683, "task_index": 19}, {"db_idx": 20684, "episode_idx": 97, "frame_idx": 33, "global_frame_idx": 20684, "task_index": 19}, {"db_idx": 20685, "episode_idx": 97, "frame_idx": 34, "global_frame_idx": 20685, "task_index": 19}, {"db_idx": 20686, "episode_idx": 97, "frame_idx": 35, "global_frame_idx": 20686, "task_index": 19}, {"db_idx": 20687, "episode_idx": 97, "frame_idx": 36, "global_frame_idx": 20687, "task_index": 19}, {"db_idx": 20688, "episode_idx": 97, "frame_idx": 37, "global_frame_idx": 20688, "task_index": 19}, {"db_idx": 20689, "episode_idx": 97, "frame_idx": 38, "global_frame_idx": 20689, "task_index": 19}, {"db_idx": 20690, "episode_idx": 97, "frame_idx": 39, "global_frame_idx": 20690, "task_index": 19}, {"db_idx": 20691, "episode_idx": 97, "frame_idx": 40, "global_frame_idx": 20691, "task_index": 19}, {"db_idx": 20692, "episode_idx": 97, "frame_idx": 41, "global_frame_idx": 20692, "task_index": 19}, 
{"db_idx": 20693, "episode_idx": 97, "frame_idx": 42, "global_frame_idx": 20693, "task_index": 19}, {"db_idx": 20694, "episode_idx": 97, "frame_idx": 43, "global_frame_idx": 20694, "task_index": 19}, {"db_idx": 20695, "episode_idx": 97, "frame_idx": 44, "global_frame_idx": 20695, "task_index": 19}, {"db_idx": 20696, "episode_idx": 97, "frame_idx": 45, "global_frame_idx": 20696, "task_index": 19}, {"db_idx": 20697, "episode_idx": 97, "frame_idx": 46, "global_frame_idx": 20697, "task_index": 19}, {"db_idx": 20698, "episode_idx": 97, "frame_idx": 47, "global_frame_idx": 20698, "task_index": 19}, {"db_idx": 20699, "episode_idx": 97, "frame_idx": 48, "global_frame_idx": 20699, "task_index": 19}, {"db_idx": 20700, "episode_idx": 97, "frame_idx": 49, "global_frame_idx": 20700, "task_index": 19}, {"db_idx": 20701, "episode_idx": 97, "frame_idx": 50, "global_frame_idx": 20701, "task_index": 19}, {"db_idx": 20702, "episode_idx": 97, "frame_idx": 51, "global_frame_idx": 20702, "task_index": 19}, {"db_idx": 20703, "episode_idx": 97, "frame_idx": 52, "global_frame_idx": 20703, "task_index": 19}, {"db_idx": 20704, "episode_idx": 97, "frame_idx": 53, "global_frame_idx": 20704, "task_index": 19}, {"db_idx": 20705, "episode_idx": 97, "frame_idx": 54, "global_frame_idx": 20705, "task_index": 19}, {"db_idx": 20706, "episode_idx": 97, "frame_idx": 55, "global_frame_idx": 20706, "task_index": 19}, {"db_idx": 20707, "episode_idx": 97, "frame_idx": 56, "global_frame_idx": 20707, "task_index": 19}, {"db_idx": 20708, "episode_idx": 97, "frame_idx": 57, "global_frame_idx": 20708, "task_index": 19}, {"db_idx": 20709, "episode_idx": 97, "frame_idx": 58, "global_frame_idx": 20709, "task_index": 19}, {"db_idx": 20710, "episode_idx": 97, "frame_idx": 59, "global_frame_idx": 20710, "task_index": 19}, {"db_idx": 20711, "episode_idx": 97, "frame_idx": 60, "global_frame_idx": 20711, "task_index": 19}, {"db_idx": 20712, "episode_idx": 97, "frame_idx": 61, "global_frame_idx": 20712, "task_index": 19}, 
{"db_idx": 20713, "episode_idx": 97, "frame_idx": 62, "global_frame_idx": 20713, "task_index": 19}, {"db_idx": 20714, "episode_idx": 97, "frame_idx": 63, "global_frame_idx": 20714, "task_index": 19}, {"db_idx": 20715, "episode_idx": 97, "frame_idx": 64, "global_frame_idx": 20715, "task_index": 19}, {"db_idx": 20716, "episode_idx": 97, "frame_idx": 65, "global_frame_idx": 20716, "task_index": 19}, {"db_idx": 20717, "episode_idx": 97, "frame_idx": 66, "global_frame_idx": 20717, "task_index": 19}, {"db_idx": 20718, "episode_idx": 97, "frame_idx": 67, "global_frame_idx": 20718, "task_index": 19}, {"db_idx": 20719, "episode_idx": 97, "frame_idx": 68, "global_frame_idx": 20719, "task_index": 19}, {"db_idx": 20720, "episode_idx": 97, "frame_idx": 69, "global_frame_idx": 20720, "task_index": 19}, {"db_idx": 20721, "episode_idx": 97, "frame_idx": 70, "global_frame_idx": 20721, "task_index": 19}, {"db_idx": 20722, "episode_idx": 97, "frame_idx": 71, "global_frame_idx": 20722, "task_index": 19}, {"db_idx": 20723, "episode_idx": 97, "frame_idx": 72, "global_frame_idx": 20723, "task_index": 19}, {"db_idx": 20724, "episode_idx": 97, "frame_idx": 73, "global_frame_idx": 20724, "task_index": 19}, {"db_idx": 20725, "episode_idx": 97, "frame_idx": 74, "global_frame_idx": 20725, "task_index": 19}, {"db_idx": 20726, "episode_idx": 97, "frame_idx": 75, "global_frame_idx": 20726, "task_index": 19}, {"db_idx": 20727, "episode_idx": 97, "frame_idx": 76, "global_frame_idx": 20727, "task_index": 19}, {"db_idx": 20728, "episode_idx": 97, "frame_idx": 77, "global_frame_idx": 20728, "task_index": 19}, {"db_idx": 20729, "episode_idx": 97, "frame_idx": 78, "global_frame_idx": 20729, "task_index": 19}, {"db_idx": 20730, "episode_idx": 97, "frame_idx": 79, "global_frame_idx": 20730, "task_index": 19}, {"db_idx": 20731, "episode_idx": 97, "frame_idx": 80, "global_frame_idx": 20731, "task_index": 19}, {"db_idx": 20732, "episode_idx": 97, "frame_idx": 81, "global_frame_idx": 20732, "task_index": 19}, 
{"db_idx": 20733, "episode_idx": 97, "frame_idx": 82, "global_frame_idx": 20733, "task_index": 19}, {"db_idx": 20734, "episode_idx": 97, "frame_idx": 83, "global_frame_idx": 20734, "task_index": 19}, {"db_idx": 20735, "episode_idx": 97, "frame_idx": 84, "global_frame_idx": 20735, "task_index": 19}, {"db_idx": 20736, "episode_idx": 97, "frame_idx": 85, "global_frame_idx": 20736, "task_index": 19}, {"db_idx": 20737, "episode_idx": 97, "frame_idx": 86, "global_frame_idx": 20737, "task_index": 19}, {"db_idx": 20738, "episode_idx": 97, "frame_idx": 87, "global_frame_idx": 20738, "task_index": 19}, {"db_idx": 20739, "episode_idx": 97, "frame_idx": 88, "global_frame_idx": 20739, "task_index": 19}, {"db_idx": 20740, "episode_idx": 97, "frame_idx": 89, "global_frame_idx": 20740, "task_index": 19}, {"db_idx": 20741, "episode_idx": 97, "frame_idx": 90, "global_frame_idx": 20741, "task_index": 19}, {"db_idx": 20742, "episode_idx": 97, "frame_idx": 91, "global_frame_idx": 20742, "task_index": 19}, {"db_idx": 20743, "episode_idx": 97, "frame_idx": 92, "global_frame_idx": 20743, "task_index": 19}, {"db_idx": 20744, "episode_idx": 97, "frame_idx": 93, "global_frame_idx": 20744, "task_index": 19}, {"db_idx": 20745, "episode_idx": 97, "frame_idx": 94, "global_frame_idx": 20745, "task_index": 19}, {"db_idx": 20746, "episode_idx": 97, "frame_idx": 95, "global_frame_idx": 20746, "task_index": 19}, {"db_idx": 20747, "episode_idx": 97, "frame_idx": 96, "global_frame_idx": 20747, "task_index": 19}, {"db_idx": 20748, "episode_idx": 97, "frame_idx": 97, "global_frame_idx": 20748, "task_index": 19}, {"db_idx": 20749, "episode_idx": 97, "frame_idx": 98, "global_frame_idx": 20749, "task_index": 19}, {"db_idx": 20750, "episode_idx": 97, "frame_idx": 99, "global_frame_idx": 20750, "task_index": 19}, {"db_idx": 20751, "episode_idx": 97, "frame_idx": 100, "global_frame_idx": 20751, "task_index": 19}, {"db_idx": 20752, "episode_idx": 97, "frame_idx": 101, "global_frame_idx": 20752, "task_index": 
19}, {"db_idx": 20753, "episode_idx": 97, "frame_idx": 102, "global_frame_idx": 20753, "task_index": 19}, {"db_idx": 20754, "episode_idx": 97, "frame_idx": 103, "global_frame_idx": 20754, "task_index": 19}, {"db_idx": 20755, "episode_idx": 97, "frame_idx": 104, "global_frame_idx": 20755, "task_index": 19}, {"db_idx": 20756, "episode_idx": 97, "frame_idx": 105, "global_frame_idx": 20756, "task_index": 19}, {"db_idx": 20757, "episode_idx": 97, "frame_idx": 106, "global_frame_idx": 20757, "task_index": 19}, {"db_idx": 20758, "episode_idx": 97, "frame_idx": 107, "global_frame_idx": 20758, "task_index": 19}, {"db_idx": 20759, "episode_idx": 97, "frame_idx": 108, "global_frame_idx": 20759, "task_index": 19}, {"db_idx": 20760, "episode_idx": 97, "frame_idx": 109, "global_frame_idx": 20760, "task_index": 19}, {"db_idx": 20761, "episode_idx": 97, "frame_idx": 110, "global_frame_idx": 20761, "task_index": 19}, {"db_idx": 20762, "episode_idx": 97, "frame_idx": 111, "global_frame_idx": 20762, "task_index": 19}, {"db_idx": 20763, "episode_idx": 97, "frame_idx": 112, "global_frame_idx": 20763, "task_index": 19}, {"db_idx": 20764, "episode_idx": 97, "frame_idx": 113, "global_frame_idx": 20764, "task_index": 19}, {"db_idx": 20765, "episode_idx": 97, "frame_idx": 114, "global_frame_idx": 20765, "task_index": 19}, {"db_idx": 20766, "episode_idx": 97, "frame_idx": 115, "global_frame_idx": 20766, "task_index": 19}, {"db_idx": 20767, "episode_idx": 97, "frame_idx": 116, "global_frame_idx": 20767, "task_index": 19}, {"db_idx": 20768, "episode_idx": 97, "frame_idx": 117, "global_frame_idx": 20768, "task_index": 19}, {"db_idx": 20769, "episode_idx": 97, "frame_idx": 118, "global_frame_idx": 20769, "task_index": 19}, {"db_idx": 20770, "episode_idx": 97, "frame_idx": 119, "global_frame_idx": 20770, "task_index": 19}, {"db_idx": 20771, "episode_idx": 97, "frame_idx": 120, "global_frame_idx": 20771, "task_index": 19}, {"db_idx": 20772, "episode_idx": 97, "frame_idx": 121, "global_frame_idx": 
20772, "task_index": 19}, {"db_idx": 20773, "episode_idx": 97, "frame_idx": 122, "global_frame_idx": 20773, "task_index": 19}, {"db_idx": 20774, "episode_idx": 97, "frame_idx": 123, "global_frame_idx": 20774, "task_index": 19}, {"db_idx": 20775, "episode_idx": 97, "frame_idx": 124, "global_frame_idx": 20775, "task_index": 19}, {"db_idx": 20776, "episode_idx": 97, "frame_idx": 125, "global_frame_idx": 20776, "task_index": 19}, {"db_idx": 20777, "episode_idx": 97, "frame_idx": 126, "global_frame_idx": 20777, "task_index": 19}, {"db_idx": 20778, "episode_idx": 97, "frame_idx": 127, "global_frame_idx": 20778, "task_index": 19}, {"db_idx": 20779, "episode_idx": 97, "frame_idx": 128, "global_frame_idx": 20779, "task_index": 19}, {"db_idx": 20780, "episode_idx": 97, "frame_idx": 129, "global_frame_idx": 20780, "task_index": 19}, {"db_idx": 20781, "episode_idx": 97, "frame_idx": 130, "global_frame_idx": 20781, "task_index": 19}, {"db_idx": 20782, "episode_idx": 97, "frame_idx": 131, "global_frame_idx": 20782, "task_index": 19}, {"db_idx": 20783, "episode_idx": 97, "frame_idx": 132, "global_frame_idx": 20783, "task_index": 19}, {"db_idx": 20784, "episode_idx": 98, "frame_idx": 0, "global_frame_idx": 20784, "task_index": 19}, {"db_idx": 20785, "episode_idx": 98, "frame_idx": 1, "global_frame_idx": 20785, "task_index": 19}, {"db_idx": 20786, "episode_idx": 98, "frame_idx": 2, "global_frame_idx": 20786, "task_index": 19}, {"db_idx": 20787, "episode_idx": 98, "frame_idx": 3, "global_frame_idx": 20787, "task_index": 19}, {"db_idx": 20788, "episode_idx": 98, "frame_idx": 4, "global_frame_idx": 20788, "task_index": 19}, {"db_idx": 20789, "episode_idx": 98, "frame_idx": 5, "global_frame_idx": 20789, "task_index": 19}, {"db_idx": 20790, "episode_idx": 98, "frame_idx": 6, "global_frame_idx": 20790, "task_index": 19}, {"db_idx": 20791, "episode_idx": 98, "frame_idx": 7, "global_frame_idx": 20791, "task_index": 19}, {"db_idx": 20792, "episode_idx": 98, "frame_idx": 8, 
"global_frame_idx": 20792, "task_index": 19}, {"db_idx": 20793, "episode_idx": 98, "frame_idx": 9, "global_frame_idx": 20793, "task_index": 19}, {"db_idx": 20794, "episode_idx": 98, "frame_idx": 10, "global_frame_idx": 20794, "task_index": 19}, {"db_idx": 20795, "episode_idx": 98, "frame_idx": 11, "global_frame_idx": 20795, "task_index": 19}, {"db_idx": 20796, "episode_idx": 98, "frame_idx": 12, "global_frame_idx": 20796, "task_index": 19}, {"db_idx": 20797, "episode_idx": 98, "frame_idx": 13, "global_frame_idx": 20797, "task_index": 19}, {"db_idx": 20798, "episode_idx": 98, "frame_idx": 14, "global_frame_idx": 20798, "task_index": 19}, {"db_idx": 20799, "episode_idx": 98, "frame_idx": 15, "global_frame_idx": 20799, "task_index": 19}, {"db_idx": 20800, "episode_idx": 98, "frame_idx": 16, "global_frame_idx": 20800, "task_index": 19}, {"db_idx": 20801, "episode_idx": 98, "frame_idx": 17, "global_frame_idx": 20801, "task_index": 19}, {"db_idx": 20802, "episode_idx": 98, "frame_idx": 18, "global_frame_idx": 20802, "task_index": 19}, {"db_idx": 20803, "episode_idx": 98, "frame_idx": 19, "global_frame_idx": 20803, "task_index": 19}, {"db_idx": 20804, "episode_idx": 98, "frame_idx": 20, "global_frame_idx": 20804, "task_index": 19}, {"db_idx": 20805, "episode_idx": 98, "frame_idx": 21, "global_frame_idx": 20805, "task_index": 19}, {"db_idx": 20806, "episode_idx": 98, "frame_idx": 22, "global_frame_idx": 20806, "task_index": 19}, {"db_idx": 20807, "episode_idx": 98, "frame_idx": 23, "global_frame_idx": 20807, "task_index": 19}, {"db_idx": 20808, "episode_idx": 98, "frame_idx": 24, "global_frame_idx": 20808, "task_index": 19}, {"db_idx": 20809, "episode_idx": 98, "frame_idx": 25, "global_frame_idx": 20809, "task_index": 19}, {"db_idx": 20810, "episode_idx": 98, "frame_idx": 26, "global_frame_idx": 20810, "task_index": 19}, {"db_idx": 20811, "episode_idx": 98, "frame_idx": 27, "global_frame_idx": 20811, "task_index": 19}, {"db_idx": 20812, "episode_idx": 98, "frame_idx": 28, 
"global_frame_idx": 20812, "task_index": 19}, {"db_idx": 20813, "episode_idx": 98, "frame_idx": 29, "global_frame_idx": 20813, "task_index": 19}, {"db_idx": 20814, "episode_idx": 98, "frame_idx": 30, "global_frame_idx": 20814, "task_index": 19}, {"db_idx": 20815, "episode_idx": 98, "frame_idx": 31, "global_frame_idx": 20815, "task_index": 19}, {"db_idx": 20816, "episode_idx": 98, "frame_idx": 32, "global_frame_idx": 20816, "task_index": 19}, {"db_idx": 20817, "episode_idx": 98, "frame_idx": 33, "global_frame_idx": 20817, "task_index": 19}, {"db_idx": 20818, "episode_idx": 98, "frame_idx": 34, "global_frame_idx": 20818, "task_index": 19}, {"db_idx": 20819, "episode_idx": 98, "frame_idx": 35, "global_frame_idx": 20819, "task_index": 19}, {"db_idx": 20820, "episode_idx": 98, "frame_idx": 36, "global_frame_idx": 20820, "task_index": 19}, {"db_idx": 20821, "episode_idx": 98, "frame_idx": 37, "global_frame_idx": 20821, "task_index": 19}, {"db_idx": 20822, "episode_idx": 98, "frame_idx": 38, "global_frame_idx": 20822, "task_index": 19}, {"db_idx": 20823, "episode_idx": 98, "frame_idx": 39, "global_frame_idx": 20823, "task_index": 19}, {"db_idx": 20824, "episode_idx": 98, "frame_idx": 40, "global_frame_idx": 20824, "task_index": 19}, {"db_idx": 20825, "episode_idx": 98, "frame_idx": 41, "global_frame_idx": 20825, "task_index": 19}, {"db_idx": 20826, "episode_idx": 98, "frame_idx": 42, "global_frame_idx": 20826, "task_index": 19}, {"db_idx": 20827, "episode_idx": 98, "frame_idx": 43, "global_frame_idx": 20827, "task_index": 19}, {"db_idx": 20828, "episode_idx": 98, "frame_idx": 44, "global_frame_idx": 20828, "task_index": 19}, {"db_idx": 20829, "episode_idx": 98, "frame_idx": 45, "global_frame_idx": 20829, "task_index": 19}, {"db_idx": 20830, "episode_idx": 98, "frame_idx": 46, "global_frame_idx": 20830, "task_index": 19}, {"db_idx": 20831, "episode_idx": 98, "frame_idx": 47, "global_frame_idx": 20831, "task_index": 19}, {"db_idx": 20832, "episode_idx": 98, "frame_idx": 48, 
"global_frame_idx": 20832, "task_index": 19}, {"db_idx": 20833, "episode_idx": 98, "frame_idx": 49, "global_frame_idx": 20833, "task_index": 19}, {"db_idx": 20834, "episode_idx": 98, "frame_idx": 50, "global_frame_idx": 20834, "task_index": 19}, {"db_idx": 20835, "episode_idx": 98, "frame_idx": 51, "global_frame_idx": 20835, "task_index": 19}, {"db_idx": 20836, "episode_idx": 98, "frame_idx": 52, "global_frame_idx": 20836, "task_index": 19}, {"db_idx": 20837, "episode_idx": 98, "frame_idx": 53, "global_frame_idx": 20837, "task_index": 19}, {"db_idx": 20838, "episode_idx": 98, "frame_idx": 54, "global_frame_idx": 20838, "task_index": 19}, {"db_idx": 20839, "episode_idx": 98, "frame_idx": 55, "global_frame_idx": 20839, "task_index": 19}, {"db_idx": 20840, "episode_idx": 98, "frame_idx": 56, "global_frame_idx": 20840, "task_index": 19}, {"db_idx": 20841, "episode_idx": 98, "frame_idx": 57, "global_frame_idx": 20841, "task_index": 19}, {"db_idx": 20842, "episode_idx": 98, "frame_idx": 58, "global_frame_idx": 20842, "task_index": 19}, {"db_idx": 20843, "episode_idx": 98, "frame_idx": 59, "global_frame_idx": 20843, "task_index": 19}, {"db_idx": 20844, "episode_idx": 98, "frame_idx": 60, "global_frame_idx": 20844, "task_index": 19}, {"db_idx": 20845, "episode_idx": 98, "frame_idx": 61, "global_frame_idx": 20845, "task_index": 19}, {"db_idx": 20846, "episode_idx": 98, "frame_idx": 62, "global_frame_idx": 20846, "task_index": 19}, {"db_idx": 20847, "episode_idx": 98, "frame_idx": 63, "global_frame_idx": 20847, "task_index": 19}, {"db_idx": 20848, "episode_idx": 98, "frame_idx": 64, "global_frame_idx": 20848, "task_index": 19}, {"db_idx": 20849, "episode_idx": 98, "frame_idx": 65, "global_frame_idx": 20849, "task_index": 19}, {"db_idx": 20850, "episode_idx": 98, "frame_idx": 66, "global_frame_idx": 20850, "task_index": 19}, {"db_idx": 20851, "episode_idx": 98, "frame_idx": 67, "global_frame_idx": 20851, "task_index": 19}, {"db_idx": 20852, "episode_idx": 98, "frame_idx": 68, 
"global_frame_idx": 20852, "task_index": 19}, {"db_idx": 20853, "episode_idx": 98, "frame_idx": 69, "global_frame_idx": 20853, "task_index": 19}, {"db_idx": 20854, "episode_idx": 98, "frame_idx": 70, "global_frame_idx": 20854, "task_index": 19}, {"db_idx": 20855, "episode_idx": 98, "frame_idx": 71, "global_frame_idx": 20855, "task_index": 19}, {"db_idx": 20856, "episode_idx": 98, "frame_idx": 72, "global_frame_idx": 20856, "task_index": 19}, {"db_idx": 20857, "episode_idx": 98, "frame_idx": 73, "global_frame_idx": 20857, "task_index": 19}, {"db_idx": 20858, "episode_idx": 98, "frame_idx": 74, "global_frame_idx": 20858, "task_index": 19}, {"db_idx": 20859, "episode_idx": 98, "frame_idx": 75, "global_frame_idx": 20859, "task_index": 19}, {"db_idx": 20860, "episode_idx": 98, "frame_idx": 76, "global_frame_idx": 20860, "task_index": 19}, {"db_idx": 20861, "episode_idx": 98, "frame_idx": 77, "global_frame_idx": 20861, "task_index": 19}, {"db_idx": 20862, "episode_idx": 98, "frame_idx": 78, "global_frame_idx": 20862, "task_index": 19}, {"db_idx": 20863, "episode_idx": 98, "frame_idx": 79, "global_frame_idx": 20863, "task_index": 19}, {"db_idx": 20864, "episode_idx": 98, "frame_idx": 80, "global_frame_idx": 20864, "task_index": 19}, {"db_idx": 20865, "episode_idx": 98, "frame_idx": 81, "global_frame_idx": 20865, "task_index": 19}, {"db_idx": 20866, "episode_idx": 98, "frame_idx": 82, "global_frame_idx": 20866, "task_index": 19}, {"db_idx": 20867, "episode_idx": 98, "frame_idx": 83, "global_frame_idx": 20867, "task_index": 19}, {"db_idx": 20868, "episode_idx": 98, "frame_idx": 84, "global_frame_idx": 20868, "task_index": 19}, {"db_idx": 20869, "episode_idx": 98, "frame_idx": 85, "global_frame_idx": 20869, "task_index": 19}, {"db_idx": 20870, "episode_idx": 98, "frame_idx": 86, "global_frame_idx": 20870, "task_index": 19}, {"db_idx": 20871, "episode_idx": 98, "frame_idx": 87, "global_frame_idx": 20871, "task_index": 19}, {"db_idx": 20872, "episode_idx": 98, "frame_idx": 88, 
"global_frame_idx": 20872, "task_index": 19}, {"db_idx": 20873, "episode_idx": 98, "frame_idx": 89, "global_frame_idx": 20873, "task_index": 19}, {"db_idx": 20874, "episode_idx": 98, "frame_idx": 90, "global_frame_idx": 20874, "task_index": 19}, {"db_idx": 20875, "episode_idx": 98, "frame_idx": 91, "global_frame_idx": 20875, "task_index": 19}, {"db_idx": 20876, "episode_idx": 98, "frame_idx": 92, "global_frame_idx": 20876, "task_index": 19}, {"db_idx": 20877, "episode_idx": 98, "frame_idx": 93, "global_frame_idx": 20877, "task_index": 19}, {"db_idx": 20878, "episode_idx": 98, "frame_idx": 94, "global_frame_idx": 20878, "task_index": 19}, {"db_idx": 20879, "episode_idx": 98, "frame_idx": 95, "global_frame_idx": 20879, "task_index": 19}, {"db_idx": 20880, "episode_idx": 98, "frame_idx": 96, "global_frame_idx": 20880, "task_index": 19}, {"db_idx": 20881, "episode_idx": 98, "frame_idx": 97, "global_frame_idx": 20881, "task_index": 19}, {"db_idx": 20882, "episode_idx": 98, "frame_idx": 98, "global_frame_idx": 20882, "task_index": 19}, {"db_idx": 20883, "episode_idx": 98, "frame_idx": 99, "global_frame_idx": 20883, "task_index": 19}, {"db_idx": 20884, "episode_idx": 98, "frame_idx": 100, "global_frame_idx": 20884, "task_index": 19}, {"db_idx": 20885, "episode_idx": 98, "frame_idx": 101, "global_frame_idx": 20885, "task_index": 19}, {"db_idx": 20886, "episode_idx": 98, "frame_idx": 102, "global_frame_idx": 20886, "task_index": 19}, {"db_idx": 20887, "episode_idx": 98, "frame_idx": 103, "global_frame_idx": 20887, "task_index": 19}, {"db_idx": 20888, "episode_idx": 98, "frame_idx": 104, "global_frame_idx": 20888, "task_index": 19}, {"db_idx": 20889, "episode_idx": 98, "frame_idx": 105, "global_frame_idx": 20889, "task_index": 19}, {"db_idx": 20890, "episode_idx": 98, "frame_idx": 106, "global_frame_idx": 20890, "task_index": 19}, {"db_idx": 20891, "episode_idx": 98, "frame_idx": 107, "global_frame_idx": 20891, "task_index": 19}, {"db_idx": 20892, "episode_idx": 98, 
"frame_idx": 108, "global_frame_idx": 20892, "task_index": 19}, {"db_idx": 20893, "episode_idx": 98, "frame_idx": 109, "global_frame_idx": 20893, "task_index": 19}, {"db_idx": 20894, "episode_idx": 98, "frame_idx": 110, "global_frame_idx": 20894, "task_index": 19}, {"db_idx": 20895, "episode_idx": 98, "frame_idx": 111, "global_frame_idx": 20895, "task_index": 19}, {"db_idx": 20896, "episode_idx": 98, "frame_idx": 112, "global_frame_idx": 20896, "task_index": 19}, {"db_idx": 20897, "episode_idx": 98, "frame_idx": 113, "global_frame_idx": 20897, "task_index": 19}, {"db_idx": 20898, "episode_idx": 98, "frame_idx": 114, "global_frame_idx": 20898, "task_index": 19}, {"db_idx": 20899, "episode_idx": 98, "frame_idx": 115, "global_frame_idx": 20899, "task_index": 19}, {"db_idx": 20900, "episode_idx": 98, "frame_idx": 116, "global_frame_idx": 20900, "task_index": 19}, {"db_idx": 20901, "episode_idx": 98, "frame_idx": 117, "global_frame_idx": 20901, "task_index": 19}, {"db_idx": 20902, "episode_idx": 98, "frame_idx": 118, "global_frame_idx": 20902, "task_index": 19}, {"db_idx": 20903, "episode_idx": 98, "frame_idx": 119, "global_frame_idx": 20903, "task_index": 19}, {"db_idx": 20904, "episode_idx": 98, "frame_idx": 120, "global_frame_idx": 20904, "task_index": 19}, {"db_idx": 20905, "episode_idx": 98, "frame_idx": 121, "global_frame_idx": 20905, "task_index": 19}, {"db_idx": 20906, "episode_idx": 98, "frame_idx": 122, "global_frame_idx": 20906, "task_index": 19}, {"db_idx": 20907, "episode_idx": 98, "frame_idx": 123, "global_frame_idx": 20907, "task_index": 19}, {"db_idx": 20908, "episode_idx": 98, "frame_idx": 124, "global_frame_idx": 20908, "task_index": 19}, {"db_idx": 20909, "episode_idx": 98, "frame_idx": 125, "global_frame_idx": 20909, "task_index": 19}, {"db_idx": 20910, "episode_idx": 98, "frame_idx": 126, "global_frame_idx": 20910, "task_index": 19}, {"db_idx": 20911, "episode_idx": 98, "frame_idx": 127, "global_frame_idx": 20911, "task_index": 19}, {"db_idx": 
20912, "episode_idx": 98, "frame_idx": 128, "global_frame_idx": 20912, "task_index": 19}, {"db_idx": 20913, "episode_idx": 98, "frame_idx": 129, "global_frame_idx": 20913, "task_index": 19}, {"db_idx": 20914, "episode_idx": 98, "frame_idx": 130, "global_frame_idx": 20914, "task_index": 19}, {"db_idx": 20915, "episode_idx": 98, "frame_idx": 131, "global_frame_idx": 20915, "task_index": 19}, {"db_idx": 20916, "episode_idx": 98, "frame_idx": 132, "global_frame_idx": 20916, "task_index": 19}, {"db_idx": 20917, "episode_idx": 98, "frame_idx": 133, "global_frame_idx": 20917, "task_index": 19}, {"db_idx": 20918, "episode_idx": 98, "frame_idx": 134, "global_frame_idx": 20918, "task_index": 19}, {"db_idx": 20919, "episode_idx": 98, "frame_idx": 135, "global_frame_idx": 20919, "task_index": 19}, {"db_idx": 20920, "episode_idx": 98, "frame_idx": 136, "global_frame_idx": 20920, "task_index": 19}, {"db_idx": 20921, "episode_idx": 98, "frame_idx": 137, "global_frame_idx": 20921, "task_index": 19}, {"db_idx": 20922, "episode_idx": 98, "frame_idx": 138, "global_frame_idx": 20922, "task_index": 19}, {"db_idx": 20923, "episode_idx": 98, "frame_idx": 139, "global_frame_idx": 20923, "task_index": 19}, {"db_idx": 20924, "episode_idx": 98, "frame_idx": 140, "global_frame_idx": 20924, "task_index": 19}, {"db_idx": 20925, "episode_idx": 98, "frame_idx": 141, "global_frame_idx": 20925, "task_index": 19}, {"db_idx": 20926, "episode_idx": 98, "frame_idx": 142, "global_frame_idx": 20926, "task_index": 19}, {"db_idx": 20927, "episode_idx": 99, "frame_idx": 0, "global_frame_idx": 20927, "task_index": 19}, {"db_idx": 20928, "episode_idx": 99, "frame_idx": 1, "global_frame_idx": 20928, "task_index": 19}, {"db_idx": 20929, "episode_idx": 99, "frame_idx": 2, "global_frame_idx": 20929, "task_index": 19}, {"db_idx": 20930, "episode_idx": 99, "frame_idx": 3, "global_frame_idx": 20930, "task_index": 19}, {"db_idx": 20931, "episode_idx": 99, "frame_idx": 4, "global_frame_idx": 20931, "task_index": 19}, 
{"db_idx": 20932, "episode_idx": 99, "frame_idx": 5, "global_frame_idx": 20932, "task_index": 19}, {"db_idx": 20933, "episode_idx": 99, "frame_idx": 6, "global_frame_idx": 20933, "task_index": 19}, {"db_idx": 20934, "episode_idx": 99, "frame_idx": 7, "global_frame_idx": 20934, "task_index": 19}, {"db_idx": 20935, "episode_idx": 99, "frame_idx": 8, "global_frame_idx": 20935, "task_index": 19}, {"db_idx": 20936, "episode_idx": 99, "frame_idx": 9, "global_frame_idx": 20936, "task_index": 19}, {"db_idx": 20937, "episode_idx": 99, "frame_idx": 10, "global_frame_idx": 20937, "task_index": 19}, {"db_idx": 20938, "episode_idx": 99, "frame_idx": 11, "global_frame_idx": 20938, "task_index": 19}, {"db_idx": 20939, "episode_idx": 99, "frame_idx": 12, "global_frame_idx": 20939, "task_index": 19}, {"db_idx": 20940, "episode_idx": 99, "frame_idx": 13, "global_frame_idx": 20940, "task_index": 19}, {"db_idx": 20941, "episode_idx": 99, "frame_idx": 14, "global_frame_idx": 20941, "task_index": 19}, {"db_idx": 20942, "episode_idx": 99, "frame_idx": 15, "global_frame_idx": 20942, "task_index": 19}, {"db_idx": 20943, "episode_idx": 99, "frame_idx": 16, "global_frame_idx": 20943, "task_index": 19}, {"db_idx": 20944, "episode_idx": 99, "frame_idx": 17, "global_frame_idx": 20944, "task_index": 19}, {"db_idx": 20945, "episode_idx": 99, "frame_idx": 18, "global_frame_idx": 20945, "task_index": 19}, {"db_idx": 20946, "episode_idx": 99, "frame_idx": 19, "global_frame_idx": 20946, "task_index": 19}, {"db_idx": 20947, "episode_idx": 99, "frame_idx": 20, "global_frame_idx": 20947, "task_index": 19}, {"db_idx": 20948, "episode_idx": 99, "frame_idx": 21, "global_frame_idx": 20948, "task_index": 19}, {"db_idx": 20949, "episode_idx": 99, "frame_idx": 22, "global_frame_idx": 20949, "task_index": 19}, {"db_idx": 20950, "episode_idx": 99, "frame_idx": 23, "global_frame_idx": 20950, "task_index": 19}, {"db_idx": 20951, "episode_idx": 99, "frame_idx": 24, "global_frame_idx": 20951, "task_index": 19}, 
{"db_idx": 20952, "episode_idx": 99, "frame_idx": 25, "global_frame_idx": 20952, "task_index": 19}, {"db_idx": 20953, "episode_idx": 99, "frame_idx": 26, "global_frame_idx": 20953, "task_index": 19}, {"db_idx": 20954, "episode_idx": 99, "frame_idx": 27, "global_frame_idx": 20954, "task_index": 19}, {"db_idx": 20955, "episode_idx": 99, "frame_idx": 28, "global_frame_idx": 20955, "task_index": 19}, {"db_idx": 20956, "episode_idx": 99, "frame_idx": 29, "global_frame_idx": 20956, "task_index": 19}, {"db_idx": 20957, "episode_idx": 99, "frame_idx": 30, "global_frame_idx": 20957, "task_index": 19}, {"db_idx": 20958, "episode_idx": 99, "frame_idx": 31, "global_frame_idx": 20958, "task_index": 19}, {"db_idx": 20959, "episode_idx": 99, "frame_idx": 32, "global_frame_idx": 20959, "task_index": 19}, {"db_idx": 20960, "episode_idx": 99, "frame_idx": 33, "global_frame_idx": 20960, "task_index": 19}, {"db_idx": 20961, "episode_idx": 99, "frame_idx": 34, "global_frame_idx": 20961, "task_index": 19}, {"db_idx": 20962, "episode_idx": 99, "frame_idx": 35, "global_frame_idx": 20962, "task_index": 19}, {"db_idx": 20963, "episode_idx": 99, "frame_idx": 36, "global_frame_idx": 20963, "task_index": 19}, {"db_idx": 20964, "episode_idx": 99, "frame_idx": 37, "global_frame_idx": 20964, "task_index": 19}, {"db_idx": 20965, "episode_idx": 99, "frame_idx": 38, "global_frame_idx": 20965, "task_index": 19}, {"db_idx": 20966, "episode_idx": 99, "frame_idx": 39, "global_frame_idx": 20966, "task_index": 19}, {"db_idx": 20967, "episode_idx": 99, "frame_idx": 40, "global_frame_idx": 20967, "task_index": 19}, {"db_idx": 20968, "episode_idx": 99, "frame_idx": 41, "global_frame_idx": 20968, "task_index": 19}, {"db_idx": 20969, "episode_idx": 99, "frame_idx": 42, "global_frame_idx": 20969, "task_index": 19}, {"db_idx": 20970, "episode_idx": 99, "frame_idx": 43, "global_frame_idx": 20970, "task_index": 19}, {"db_idx": 20971, "episode_idx": 99, "frame_idx": 44, "global_frame_idx": 20971, "task_index": 19}, 
{"db_idx": 20972, "episode_idx": 99, "frame_idx": 45, "global_frame_idx": 20972, "task_index": 19}, {"db_idx": 20973, "episode_idx": 99, "frame_idx": 46, "global_frame_idx": 20973, "task_index": 19}, {"db_idx": 20974, "episode_idx": 99, "frame_idx": 47, "global_frame_idx": 20974, "task_index": 19}, {"db_idx": 20975, "episode_idx": 99, "frame_idx": 48, "global_frame_idx": 20975, "task_index": 19}, {"db_idx": 20976, "episode_idx": 99, "frame_idx": 49, "global_frame_idx": 20976, "task_index": 19}, {"db_idx": 20977, "episode_idx": 99, "frame_idx": 50, "global_frame_idx": 20977, "task_index": 19}, {"db_idx": 20978, "episode_idx": 99, "frame_idx": 51, "global_frame_idx": 20978, "task_index": 19}, {"db_idx": 20979, "episode_idx": 99, "frame_idx": 52, "global_frame_idx": 20979, "task_index": 19}, {"db_idx": 20980, "episode_idx": 99, "frame_idx": 53, "global_frame_idx": 20980, "task_index": 19}, {"db_idx": 20981, "episode_idx": 99, "frame_idx": 54, "global_frame_idx": 20981, "task_index": 19}, {"db_idx": 20982, "episode_idx": 99, "frame_idx": 55, "global_frame_idx": 20982, "task_index": 19}, {"db_idx": 20983, "episode_idx": 99, "frame_idx": 56, "global_frame_idx": 20983, "task_index": 19}, {"db_idx": 20984, "episode_idx": 99, "frame_idx": 57, "global_frame_idx": 20984, "task_index": 19}, {"db_idx": 20985, "episode_idx": 99, "frame_idx": 58, "global_frame_idx": 20985, "task_index": 19}, {"db_idx": 20986, "episode_idx": 99, "frame_idx": 59, "global_frame_idx": 20986, "task_index": 19}, {"db_idx": 20987, "episode_idx": 99, "frame_idx": 60, "global_frame_idx": 20987, "task_index": 19}, {"db_idx": 20988, "episode_idx": 99, "frame_idx": 61, "global_frame_idx": 20988, "task_index": 19}, {"db_idx": 20989, "episode_idx": 99, "frame_idx": 62, "global_frame_idx": 20989, "task_index": 19}, {"db_idx": 20990, "episode_idx": 99, "frame_idx": 63, "global_frame_idx": 20990, "task_index": 19}, {"db_idx": 20991, "episode_idx": 99, "frame_idx": 64, "global_frame_idx": 20991, "task_index": 19}, 
{"db_idx": 20992, "episode_idx": 99, "frame_idx": 65, "global_frame_idx": 20992, "task_index": 19}, {"db_idx": 20993, "episode_idx": 99, "frame_idx": 66, "global_frame_idx": 20993, "task_index": 19}, {"db_idx": 20994, "episode_idx": 99, "frame_idx": 67, "global_frame_idx": 20994, "task_index": 19}, {"db_idx": 20995, "episode_idx": 99, "frame_idx": 68, "global_frame_idx": 20995, "task_index": 19}, {"db_idx": 20996, "episode_idx": 99, "frame_idx": 69, "global_frame_idx": 20996, "task_index": 19}, {"db_idx": 20997, "episode_idx": 99, "frame_idx": 70, "global_frame_idx": 20997, "task_index": 19}, {"db_idx": 20998, "episode_idx": 99, "frame_idx": 71, "global_frame_idx": 20998, "task_index": 19}, {"db_idx": 20999, "episode_idx": 99, "frame_idx": 72, "global_frame_idx": 20999, "task_index": 19}, {"db_idx": 21000, "episode_idx": 99, "frame_idx": 73, "global_frame_idx": 21000, "task_index": 19}, {"db_idx": 21001, "episode_idx": 99, "frame_idx": 74, "global_frame_idx": 21001, "task_index": 19}, {"db_idx": 21002, "episode_idx": 99, "frame_idx": 75, "global_frame_idx": 21002, "task_index": 19}, {"db_idx": 21003, "episode_idx": 99, "frame_idx": 76, "global_frame_idx": 21003, "task_index": 19}, {"db_idx": 21004, "episode_idx": 99, "frame_idx": 77, "global_frame_idx": 21004, "task_index": 19}, {"db_idx": 21005, "episode_idx": 99, "frame_idx": 78, "global_frame_idx": 21005, "task_index": 19}, {"db_idx": 21006, "episode_idx": 99, "frame_idx": 79, "global_frame_idx": 21006, "task_index": 19}, {"db_idx": 21007, "episode_idx": 99, "frame_idx": 80, "global_frame_idx": 21007, "task_index": 19}, {"db_idx": 21008, "episode_idx": 99, "frame_idx": 81, "global_frame_idx": 21008, "task_index": 19}, {"db_idx": 21009, "episode_idx": 99, "frame_idx": 82, "global_frame_idx": 21009, "task_index": 19}, {"db_idx": 21010, "episode_idx": 99, "frame_idx": 83, "global_frame_idx": 21010, "task_index": 19}, {"db_idx": 21011, "episode_idx": 99, "frame_idx": 84, "global_frame_idx": 21011, "task_index": 19}, 
{"db_idx": 21012, "episode_idx": 99, "frame_idx": 85, "global_frame_idx": 21012, "task_index": 19}, {"db_idx": 21013, "episode_idx": 99, "frame_idx": 86, "global_frame_idx": 21013, "task_index": 19}, {"db_idx": 21014, "episode_idx": 99, "frame_idx": 87, "global_frame_idx": 21014, "task_index": 19}, {"db_idx": 21015, "episode_idx": 99, "frame_idx": 88, "global_frame_idx": 21015, "task_index": 19}, {"db_idx": 21016, "episode_idx": 99, "frame_idx": 89, "global_frame_idx": 21016, "task_index": 19}, {"db_idx": 21017, "episode_idx": 99, "frame_idx": 90, "global_frame_idx": 21017, "task_index": 19}, {"db_idx": 21018, "episode_idx": 99, "frame_idx": 91, "global_frame_idx": 21018, "task_index": 19}, {"db_idx": 21019, "episode_idx": 99, "frame_idx": 92, "global_frame_idx": 21019, "task_index": 19}, {"db_idx": 21020, "episode_idx": 99, "frame_idx": 93, "global_frame_idx": 21020, "task_index": 19}, {"db_idx": 21021, "episode_idx": 99, "frame_idx": 94, "global_frame_idx": 21021, "task_index": 19}, {"db_idx": 21022, "episode_idx": 99, "frame_idx": 95, "global_frame_idx": 21022, "task_index": 19}, {"db_idx": 21023, "episode_idx": 99, "frame_idx": 96, "global_frame_idx": 21023, "task_index": 19}, {"db_idx": 21024, "episode_idx": 99, "frame_idx": 97, "global_frame_idx": 21024, "task_index": 19}, {"db_idx": 21025, "episode_idx": 99, "frame_idx": 98, "global_frame_idx": 21025, "task_index": 19}, {"db_idx": 21026, "episode_idx": 99, "frame_idx": 99, "global_frame_idx": 21026, "task_index": 19}, {"db_idx": 21027, "episode_idx": 99, "frame_idx": 100, "global_frame_idx": 21027, "task_index": 19}, {"db_idx": 21028, "episode_idx": 99, "frame_idx": 101, "global_frame_idx": 21028, "task_index": 19}, {"db_idx": 21029, "episode_idx": 99, "frame_idx": 102, "global_frame_idx": 21029, "task_index": 19}, {"db_idx": 21030, "episode_idx": 99, "frame_idx": 103, "global_frame_idx": 21030, "task_index": 19}, {"db_idx": 21031, "episode_idx": 99, "frame_idx": 104, "global_frame_idx": 21031, "task_index": 
19}, {"db_idx": 21032, "episode_idx": 99, "frame_idx": 105, "global_frame_idx": 21032, "task_index": 19}, {"db_idx": 21033, "episode_idx": 99, "frame_idx": 106, "global_frame_idx": 21033, "task_index": 19}, {"db_idx": 21034, "episode_idx": 99, "frame_idx": 107, "global_frame_idx": 21034, "task_index": 19}, {"db_idx": 21035, "episode_idx": 99, "frame_idx": 108, "global_frame_idx": 21035, "task_index": 19}, {"db_idx": 21036, "episode_idx": 99, "frame_idx": 109, "global_frame_idx": 21036, "task_index": 19}, {"db_idx": 21037, "episode_idx": 99, "frame_idx": 110, "global_frame_idx": 21037, "task_index": 19}, {"db_idx": 21038, "episode_idx": 99, "frame_idx": 111, "global_frame_idx": 21038, "task_index": 19}, {"db_idx": 21039, "episode_idx": 99, "frame_idx": 112, "global_frame_idx": 21039, "task_index": 19}, {"db_idx": 21040, "episode_idx": 99, "frame_idx": 113, "global_frame_idx": 21040, "task_index": 19}, {"db_idx": 21041, "episode_idx": 99, "frame_idx": 114, "global_frame_idx": 21041, "task_index": 19}, {"db_idx": 21042, "episode_idx": 99, "frame_idx": 115, "global_frame_idx": 21042, "task_index": 19}, {"db_idx": 21043, "episode_idx": 99, "frame_idx": 116, "global_frame_idx": 21043, "task_index": 19}, {"db_idx": 21044, "episode_idx": 99, "frame_idx": 117, "global_frame_idx": 21044, "task_index": 19}, {"db_idx": 21045, "episode_idx": 99, "frame_idx": 118, "global_frame_idx": 21045, "task_index": 19}, {"db_idx": 21046, "episode_idx": 99, "frame_idx": 119, "global_frame_idx": 21046, "task_index": 19}, {"db_idx": 21047, "episode_idx": 99, "frame_idx": 120, "global_frame_idx": 21047, "task_index": 19}, {"db_idx": 21048, "episode_idx": 99, "frame_idx": 121, "global_frame_idx": 21048, "task_index": 19}, {"db_idx": 21049, "episode_idx": 99, "frame_idx": 122, "global_frame_idx": 21049, "task_index": 19}, {"db_idx": 21050, "episode_idx": 99, "frame_idx": 123, "global_frame_idx": 21050, "task_index": 19}, {"db_idx": 21051, "episode_idx": 99, "frame_idx": 124, "global_frame_idx": 
21051, "task_index": 19}, {"db_idx": 21052, "episode_idx": 99, "frame_idx": 125, "global_frame_idx": 21052, "task_index": 19}, {"db_idx": 21053, "episode_idx": 99, "frame_idx": 126, "global_frame_idx": 21053, "task_index": 19}, {"db_idx": 21054, "episode_idx": 99, "frame_idx": 127, "global_frame_idx": 21054, "task_index": 19}, {"db_idx": 21055, "episode_idx": 99, "frame_idx": 128, "global_frame_idx": 21055, "task_index": 19}, {"db_idx": 21056, "episode_idx": 99, "frame_idx": 129, "global_frame_idx": 21056, "task_index": 19}, {"db_idx": 21057, "episode_idx": 99, "frame_idx": 130, "global_frame_idx": 21057, "task_index": 19}, {"db_idx": 21058, "episode_idx": 99, "frame_idx": 131, "global_frame_idx": 21058, "task_index": 19}, {"db_idx": 21059, "episode_idx": 99, "frame_idx": 132, "global_frame_idx": 21059, "task_index": 19}, {"db_idx": 21060, "episode_idx": 99, "frame_idx": 133, "global_frame_idx": 21060, "task_index": 19}, {"db_idx": 21061, "episode_idx": 99, "frame_idx": 134, "global_frame_idx": 21061, "task_index": 19}, {"db_idx": 21062, "episode_idx": 99, "frame_idx": 135, "global_frame_idx": 21062, "task_index": 19}, {"db_idx": 21063, "episode_idx": 99, "frame_idx": 136, "global_frame_idx": 21063, "task_index": 19}, {"db_idx": 21064, "episode_idx": 99, "frame_idx": 137, "global_frame_idx": 21064, "task_index": 19}, {"db_idx": 21065, "episode_idx": 99, "frame_idx": 138, "global_frame_idx": 21065, "task_index": 19}, {"db_idx": 21066, "episode_idx": 99, "frame_idx": 139, "global_frame_idx": 21066, "task_index": 19}, {"db_idx": 21067, "episode_idx": 99, "frame_idx": 140, "global_frame_idx": 21067, "task_index": 19}, {"db_idx": 21068, "episode_idx": 99, "frame_idx": 141, "global_frame_idx": 21068, "task_index": 19}, {"db_idx": 21069, "episode_idx": 99, "frame_idx": 142, "global_frame_idx": 21069, "task_index": 19}, {"db_idx": 21070, "episode_idx": 99, "frame_idx": 143, "global_frame_idx": 21070, "task_index": 19}, {"db_idx": 21071, "episode_idx": 99, "frame_idx": 144, 
"global_frame_idx": 21071, "task_index": 19}, {"db_idx": 21072, "episode_idx": 99, "frame_idx": 145, "global_frame_idx": 21072, "task_index": 19}, {"db_idx": 21073, "episode_idx": 99, "frame_idx": 146, "global_frame_idx": 21073, "task_index": 19}, {"db_idx": 21074, "episode_idx": 99, "frame_idx": 147, "global_frame_idx": 21074, "task_index": 19}, {"db_idx": 21075, "episode_idx": 99, "frame_idx": 148, "global_frame_idx": 21075, "task_index": 19}, {"db_idx": 21076, "episode_idx": 99, "frame_idx": 149, "global_frame_idx": 21076, "task_index": 19}, {"db_idx": 21077, "episode_idx": 99, "frame_idx": 150, "global_frame_idx": 21077, "task_index": 19}, {"db_idx": 21078, "episode_idx": 99, "frame_idx": 151, "global_frame_idx": 21078, "task_index": 19}, {"db_idx": 21079, "episode_idx": 99, "frame_idx": 152, "global_frame_idx": 21079, "task_index": 19}, {"db_idx": 21080, "episode_idx": 99, "frame_idx": 153, "global_frame_idx": 21080, "task_index": 19}, {"db_idx": 21081, "episode_idx": 99, "frame_idx": 154, "global_frame_idx": 21081, "task_index": 19}, {"db_idx": 21082, "episode_idx": 99, "frame_idx": 155, "global_frame_idx": 21082, "task_index": 19}, {"db_idx": 21083, "episode_idx": 99, "frame_idx": 156, "global_frame_idx": 21083, "task_index": 19}, {"db_idx": 21084, "episode_idx": 99, "frame_idx": 157, "global_frame_idx": 21084, "task_index": 19}, {"db_idx": 21085, "episode_idx": 99, "frame_idx": 158, "global_frame_idx": 21085, "task_index": 19}, {"db_idx": 21086, "episode_idx": 99, "frame_idx": 159, "global_frame_idx": 21086, "task_index": 19}, {"db_idx": 21087, "episode_idx": 99, "frame_idx": 160, "global_frame_idx": 21087, "task_index": 19}, {"db_idx": 21088, "episode_idx": 99, "frame_idx": 161, "global_frame_idx": 21088, "task_index": 19}, {"db_idx": 21089, "episode_idx": 99, "frame_idx": 162, "global_frame_idx": 21089, "task_index": 19}, {"db_idx": 21090, "episode_idx": 99, "frame_idx": 163, "global_frame_idx": 21090, "task_index": 19}, {"db_idx": 21091, "episode_idx": 
99, "frame_idx": 164, "global_frame_idx": 21091, "task_index": 19}, {"db_idx": 21092, "episode_idx": 99, "frame_idx": 165, "global_frame_idx": 21092, "task_index": 19}, {"db_idx": 21093, "episode_idx": 99, "frame_idx": 166, "global_frame_idx": 21093, "task_index": 19}, {"db_idx": 21094, "episode_idx": 99, "frame_idx": 167, "global_frame_idx": 21094, "task_index": 19}, {"db_idx": 21095, "episode_idx": 99, "frame_idx": 168, "global_frame_idx": 21095, "task_index": 19}, {"db_idx": 21096, "episode_idx": 99, "frame_idx": 169, "global_frame_idx": 21096, "task_index": 19}, {"db_idx": 21097, "episode_idx": 99, "frame_idx": 170, "global_frame_idx": 21097, "task_index": 19}, {"db_idx": 21098, "episode_idx": 99, "frame_idx": 171, "global_frame_idx": 21098, "task_index": 19}, {"db_idx": 21099, "episode_idx": 99, "frame_idx": 172, "global_frame_idx": 21099, "task_index": 19}, {"db_idx": 21100, "episode_idx": 99, "frame_idx": 173, "global_frame_idx": 21100, "task_index": 19}, {"db_idx": 21101, "episode_idx": 99, "frame_idx": 174, "global_frame_idx": 21101, "task_index": 19}, {"db_idx": 21102, "episode_idx": 100, "frame_idx": 0, "global_frame_idx": 21102, "task_index": 20}, {"db_idx": 21103, "episode_idx": 100, "frame_idx": 1, "global_frame_idx": 21103, "task_index": 20}, {"db_idx": 21104, "episode_idx": 100, "frame_idx": 2, "global_frame_idx": 21104, "task_index": 20}, {"db_idx": 21105, "episode_idx": 100, "frame_idx": 3, "global_frame_idx": 21105, "task_index": 20}, {"db_idx": 21106, "episode_idx": 100, "frame_idx": 4, "global_frame_idx": 21106, "task_index": 20}, {"db_idx": 21107, "episode_idx": 100, "frame_idx": 5, "global_frame_idx": 21107, "task_index": 20}, {"db_idx": 21108, "episode_idx": 100, "frame_idx": 6, "global_frame_idx": 21108, "task_index": 20}, {"db_idx": 21109, "episode_idx": 100, "frame_idx": 7, "global_frame_idx": 21109, "task_index": 20}, {"db_idx": 21110, "episode_idx": 100, "frame_idx": 8, "global_frame_idx": 21110, "task_index": 20}, {"db_idx": 21111, 
"episode_idx": 100, "frame_idx": 9, "global_frame_idx": 21111, "task_index": 20}, {"db_idx": 21112, "episode_idx": 100, "frame_idx": 10, "global_frame_idx": 21112, "task_index": 20}, {"db_idx": 21113, "episode_idx": 100, "frame_idx": 11, "global_frame_idx": 21113, "task_index": 20}, {"db_idx": 21114, "episode_idx": 100, "frame_idx": 12, "global_frame_idx": 21114, "task_index": 20}, {"db_idx": 21115, "episode_idx": 100, "frame_idx": 13, "global_frame_idx": 21115, "task_index": 20}, {"db_idx": 21116, "episode_idx": 100, "frame_idx": 14, "global_frame_idx": 21116, "task_index": 20}, {"db_idx": 21117, "episode_idx": 100, "frame_idx": 15, "global_frame_idx": 21117, "task_index": 20}, {"db_idx": 21118, "episode_idx": 100, "frame_idx": 16, "global_frame_idx": 21118, "task_index": 20}, {"db_idx": 21119, "episode_idx": 100, "frame_idx": 17, "global_frame_idx": 21119, "task_index": 20}, {"db_idx": 21120, "episode_idx": 100, "frame_idx": 18, "global_frame_idx": 21120, "task_index": 20}, {"db_idx": 21121, "episode_idx": 100, "frame_idx": 19, "global_frame_idx": 21121, "task_index": 20}, {"db_idx": 21122, "episode_idx": 100, "frame_idx": 20, "global_frame_idx": 21122, "task_index": 20}, {"db_idx": 21123, "episode_idx": 100, "frame_idx": 21, "global_frame_idx": 21123, "task_index": 20}, {"db_idx": 21124, "episode_idx": 100, "frame_idx": 22, "global_frame_idx": 21124, "task_index": 20}, {"db_idx": 21125, "episode_idx": 100, "frame_idx": 23, "global_frame_idx": 21125, "task_index": 20}, {"db_idx": 21126, "episode_idx": 100, "frame_idx": 24, "global_frame_idx": 21126, "task_index": 20}, {"db_idx": 21127, "episode_idx": 100, "frame_idx": 25, "global_frame_idx": 21127, "task_index": 20}, {"db_idx": 21128, "episode_idx": 100, "frame_idx": 26, "global_frame_idx": 21128, "task_index": 20}, {"db_idx": 21129, "episode_idx": 100, "frame_idx": 27, "global_frame_idx": 21129, "task_index": 20}, {"db_idx": 21130, "episode_idx": 100, "frame_idx": 28, "global_frame_idx": 21130, "task_index": 
20}, {"db_idx": 21131, "episode_idx": 100, "frame_idx": 29, "global_frame_idx": 21131, "task_index": 20}, {"db_idx": 21132, "episode_idx": 100, "frame_idx": 30, "global_frame_idx": 21132, "task_index": 20}, {"db_idx": 21133, "episode_idx": 100, "frame_idx": 31, "global_frame_idx": 21133, "task_index": 20}, {"db_idx": 21134, "episode_idx": 100, "frame_idx": 32, "global_frame_idx": 21134, "task_index": 20}, {"db_idx": 21135, "episode_idx": 100, "frame_idx": 33, "global_frame_idx": 21135, "task_index": 20}, {"db_idx": 21136, "episode_idx": 100, "frame_idx": 34, "global_frame_idx": 21136, "task_index": 20}, {"db_idx": 21137, "episode_idx": 100, "frame_idx": 35, "global_frame_idx": 21137, "task_index": 20}, {"db_idx": 21138, "episode_idx": 100, "frame_idx": 36, "global_frame_idx": 21138, "task_index": 20}, {"db_idx": 21139, "episode_idx": 100, "frame_idx": 37, "global_frame_idx": 21139, "task_index": 20}, {"db_idx": 21140, "episode_idx": 100, "frame_idx": 38, "global_frame_idx": 21140, "task_index": 20}, {"db_idx": 21141, "episode_idx": 100, "frame_idx": 39, "global_frame_idx": 21141, "task_index": 20}, {"db_idx": 21142, "episode_idx": 100, "frame_idx": 40, "global_frame_idx": 21142, "task_index": 20}, {"db_idx": 21143, "episode_idx": 100, "frame_idx": 41, "global_frame_idx": 21143, "task_index": 20}, {"db_idx": 21144, "episode_idx": 100, "frame_idx": 42, "global_frame_idx": 21144, "task_index": 20}, {"db_idx": 21145, "episode_idx": 100, "frame_idx": 43, "global_frame_idx": 21145, "task_index": 20}, {"db_idx": 21146, "episode_idx": 100, "frame_idx": 44, "global_frame_idx": 21146, "task_index": 20}, {"db_idx": 21147, "episode_idx": 100, "frame_idx": 45, "global_frame_idx": 21147, "task_index": 20}, {"db_idx": 21148, "episode_idx": 100, "frame_idx": 46, "global_frame_idx": 21148, "task_index": 20}, {"db_idx": 21149, "episode_idx": 100, "frame_idx": 47, "global_frame_idx": 21149, "task_index": 20}, {"db_idx": 21150, "episode_idx": 100, "frame_idx": 48, "global_frame_idx": 
21150, "task_index": 20}, {"db_idx": 21151, "episode_idx": 100, "frame_idx": 49, "global_frame_idx": 21151, "task_index": 20}, {"db_idx": 21152, "episode_idx": 100, "frame_idx": 50, "global_frame_idx": 21152, "task_index": 20}, {"db_idx": 21153, "episode_idx": 100, "frame_idx": 51, "global_frame_idx": 21153, "task_index": 20}, {"db_idx": 21154, "episode_idx": 100, "frame_idx": 52, "global_frame_idx": 21154, "task_index": 20}, {"db_idx": 21155, "episode_idx": 100, "frame_idx": 53, "global_frame_idx": 21155, "task_index": 20}, {"db_idx": 21156, "episode_idx": 100, "frame_idx": 54, "global_frame_idx": 21156, "task_index": 20}, {"db_idx": 21157, "episode_idx": 100, "frame_idx": 55, "global_frame_idx": 21157, "task_index": 20}, {"db_idx": 21158, "episode_idx": 100, "frame_idx": 56, "global_frame_idx": 21158, "task_index": 20}, {"db_idx": 21159, "episode_idx": 100, "frame_idx": 57, "global_frame_idx": 21159, "task_index": 20}, {"db_idx": 21160, "episode_idx": 100, "frame_idx": 58, "global_frame_idx": 21160, "task_index": 20}, {"db_idx": 21161, "episode_idx": 100, "frame_idx": 59, "global_frame_idx": 21161, "task_index": 20}, {"db_idx": 21162, "episode_idx": 100, "frame_idx": 60, "global_frame_idx": 21162, "task_index": 20}, {"db_idx": 21163, "episode_idx": 100, "frame_idx": 61, "global_frame_idx": 21163, "task_index": 20}, {"db_idx": 21164, "episode_idx": 100, "frame_idx": 62, "global_frame_idx": 21164, "task_index": 20}, {"db_idx": 21165, "episode_idx": 100, "frame_idx": 63, "global_frame_idx": 21165, "task_index": 20}, {"db_idx": 21166, "episode_idx": 100, "frame_idx": 64, "global_frame_idx": 21166, "task_index": 20}, {"db_idx": 21167, "episode_idx": 100, "frame_idx": 65, "global_frame_idx": 21167, "task_index": 20}, {"db_idx": 21168, "episode_idx": 100, "frame_idx": 66, "global_frame_idx": 21168, "task_index": 20}, {"db_idx": 21169, "episode_idx": 100, "frame_idx": 67, "global_frame_idx": 21169, "task_index": 20}, {"db_idx": 21170, "episode_idx": 100, "frame_idx": 68, 
"global_frame_idx": 21170, "task_index": 20}, {"db_idx": 21171, "episode_idx": 100, "frame_idx": 69, "global_frame_idx": 21171, "task_index": 20}, {"db_idx": 21172, "episode_idx": 100, "frame_idx": 70, "global_frame_idx": 21172, "task_index": 20}, {"db_idx": 21173, "episode_idx": 100, "frame_idx": 71, "global_frame_idx": 21173, "task_index": 20}, {"db_idx": 21174, "episode_idx": 100, "frame_idx": 72, "global_frame_idx": 21174, "task_index": 20}, {"db_idx": 21175, "episode_idx": 100, "frame_idx": 73, "global_frame_idx": 21175, "task_index": 20}, {"db_idx": 21176, "episode_idx": 100, "frame_idx": 74, "global_frame_idx": 21176, "task_index": 20}, {"db_idx": 21177, "episode_idx": 100, "frame_idx": 75, "global_frame_idx": 21177, "task_index": 20}, {"db_idx": 21178, "episode_idx": 100, "frame_idx": 76, "global_frame_idx": 21178, "task_index": 20}, {"db_idx": 21179, "episode_idx": 100, "frame_idx": 77, "global_frame_idx": 21179, "task_index": 20}, {"db_idx": 21180, "episode_idx": 100, "frame_idx": 78, "global_frame_idx": 21180, "task_index": 20}, {"db_idx": 21181, "episode_idx": 100, "frame_idx": 79, "global_frame_idx": 21181, "task_index": 20}, {"db_idx": 21182, "episode_idx": 100, "frame_idx": 80, "global_frame_idx": 21182, "task_index": 20}, {"db_idx": 21183, "episode_idx": 100, "frame_idx": 81, "global_frame_idx": 21183, "task_index": 20}, {"db_idx": 21184, "episode_idx": 100, "frame_idx": 82, "global_frame_idx": 21184, "task_index": 20}, {"db_idx": 21185, "episode_idx": 100, "frame_idx": 83, "global_frame_idx": 21185, "task_index": 20}, {"db_idx": 21186, "episode_idx": 100, "frame_idx": 84, "global_frame_idx": 21186, "task_index": 20}, {"db_idx": 21187, "episode_idx": 100, "frame_idx": 85, "global_frame_idx": 21187, "task_index": 20}, {"db_idx": 21188, "episode_idx": 100, "frame_idx": 86, "global_frame_idx": 21188, "task_index": 20}, {"db_idx": 21189, "episode_idx": 100, "frame_idx": 87, "global_frame_idx": 21189, "task_index": 20}, {"db_idx": 21190, "episode_idx": 
100, "frame_idx": 88, "global_frame_idx": 21190, "task_index": 20}, {"db_idx": 21191, "episode_idx": 100, "frame_idx": 89, "global_frame_idx": 21191, "task_index": 20}, {"db_idx": 21192, "episode_idx": 100, "frame_idx": 90, "global_frame_idx": 21192, "task_index": 20}, {"db_idx": 21193, "episode_idx": 100, "frame_idx": 91, "global_frame_idx": 21193, "task_index": 20}, {"db_idx": 21194, "episode_idx": 100, "frame_idx": 92, "global_frame_idx": 21194, "task_index": 20}, {"db_idx": 21195, "episode_idx": 100, "frame_idx": 93, "global_frame_idx": 21195, "task_index": 20}, {"db_idx": 21196, "episode_idx": 100, "frame_idx": 94, "global_frame_idx": 21196, "task_index": 20}, {"db_idx": 21197, "episode_idx": 100, "frame_idx": 95, "global_frame_idx": 21197, "task_index": 20}, {"db_idx": 21198, "episode_idx": 101, "frame_idx": 0, "global_frame_idx": 21198, "task_index": 20}, {"db_idx": 21199, "episode_idx": 101, "frame_idx": 1, "global_frame_idx": 21199, "task_index": 20}, {"db_idx": 21200, "episode_idx": 101, "frame_idx": 2, "global_frame_idx": 21200, "task_index": 20}, {"db_idx": 21201, "episode_idx": 101, "frame_idx": 3, "global_frame_idx": 21201, "task_index": 20}, {"db_idx": 21202, "episode_idx": 101, "frame_idx": 4, "global_frame_idx": 21202, "task_index": 20}, {"db_idx": 21203, "episode_idx": 101, "frame_idx": 5, "global_frame_idx": 21203, "task_index": 20}, {"db_idx": 21204, "episode_idx": 101, "frame_idx": 6, "global_frame_idx": 21204, "task_index": 20}, {"db_idx": 21205, "episode_idx": 101, "frame_idx": 7, "global_frame_idx": 21205, "task_index": 20}, {"db_idx": 21206, "episode_idx": 101, "frame_idx": 8, "global_frame_idx": 21206, "task_index": 20}, {"db_idx": 21207, "episode_idx": 101, "frame_idx": 9, "global_frame_idx": 21207, "task_index": 20}, {"db_idx": 21208, "episode_idx": 101, "frame_idx": 10, "global_frame_idx": 21208, "task_index": 20}, {"db_idx": 21209, "episode_idx": 101, "frame_idx": 11, "global_frame_idx": 21209, "task_index": 20}, {"db_idx": 21210, 
"episode_idx": 101, "frame_idx": 12, "global_frame_idx": 21210, "task_index": 20}, {"db_idx": 21211, "episode_idx": 101, "frame_idx": 13, "global_frame_idx": 21211, "task_index": 20}, {"db_idx": 21212, "episode_idx": 101, "frame_idx": 14, "global_frame_idx": 21212, "task_index": 20}, {"db_idx": 21213, "episode_idx": 101, "frame_idx": 15, "global_frame_idx": 21213, "task_index": 20}, {"db_idx": 21214, "episode_idx": 101, "frame_idx": 16, "global_frame_idx": 21214, "task_index": 20}, {"db_idx": 21215, "episode_idx": 101, "frame_idx": 17, "global_frame_idx": 21215, "task_index": 20}, {"db_idx": 21216, "episode_idx": 101, "frame_idx": 18, "global_frame_idx": 21216, "task_index": 20}, {"db_idx": 21217, "episode_idx": 101, "frame_idx": 19, "global_frame_idx": 21217, "task_index": 20}, {"db_idx": 21218, "episode_idx": 101, "frame_idx": 20, "global_frame_idx": 21218, "task_index": 20}, {"db_idx": 21219, "episode_idx": 101, "frame_idx": 21, "global_frame_idx": 21219, "task_index": 20}, {"db_idx": 21220, "episode_idx": 101, "frame_idx": 22, "global_frame_idx": 21220, "task_index": 20}, {"db_idx": 21221, "episode_idx": 101, "frame_idx": 23, "global_frame_idx": 21221, "task_index": 20}, {"db_idx": 21222, "episode_idx": 101, "frame_idx": 24, "global_frame_idx": 21222, "task_index": 20}, {"db_idx": 21223, "episode_idx": 101, "frame_idx": 25, "global_frame_idx": 21223, "task_index": 20}, {"db_idx": 21224, "episode_idx": 101, "frame_idx": 26, "global_frame_idx": 21224, "task_index": 20}, {"db_idx": 21225, "episode_idx": 101, "frame_idx": 27, "global_frame_idx": 21225, "task_index": 20}, {"db_idx": 21226, "episode_idx": 101, "frame_idx": 28, "global_frame_idx": 21226, "task_index": 20}, {"db_idx": 21227, "episode_idx": 101, "frame_idx": 29, "global_frame_idx": 21227, "task_index": 20}, {"db_idx": 21228, "episode_idx": 101, "frame_idx": 30, "global_frame_idx": 21228, "task_index": 20}, {"db_idx": 21229, "episode_idx": 101, "frame_idx": 31, "global_frame_idx": 21229, "task_index": 
20}, {"db_idx": 21230, "episode_idx": 101, "frame_idx": 32, "global_frame_idx": 21230, "task_index": 20}, {"db_idx": 21231, "episode_idx": 101, "frame_idx": 33, "global_frame_idx": 21231, "task_index": 20}, {"db_idx": 21232, "episode_idx": 101, "frame_idx": 34, "global_frame_idx": 21232, "task_index": 20}, {"db_idx": 21233, "episode_idx": 101, "frame_idx": 35, "global_frame_idx": 21233, "task_index": 20}, {"db_idx": 21234, "episode_idx": 101, "frame_idx": 36, "global_frame_idx": 21234, "task_index": 20}, {"db_idx": 21235, "episode_idx": 101, "frame_idx": 37, "global_frame_idx": 21235, "task_index": 20}, {"db_idx": 21236, "episode_idx": 101, "frame_idx": 38, "global_frame_idx": 21236, "task_index": 20}, {"db_idx": 21237, "episode_idx": 101, "frame_idx": 39, "global_frame_idx": 21237, "task_index": 20}, {"db_idx": 21238, "episode_idx": 101, "frame_idx": 40, "global_frame_idx": 21238, "task_index": 20}, {"db_idx": 21239, "episode_idx": 101, "frame_idx": 41, "global_frame_idx": 21239, "task_index": 20}, {"db_idx": 21240, "episode_idx": 101, "frame_idx": 42, "global_frame_idx": 21240, "task_index": 20}, {"db_idx": 21241, "episode_idx": 101, "frame_idx": 43, "global_frame_idx": 21241, "task_index": 20}, {"db_idx": 21242, "episode_idx": 101, "frame_idx": 44, "global_frame_idx": 21242, "task_index": 20}, {"db_idx": 21243, "episode_idx": 101, "frame_idx": 45, "global_frame_idx": 21243, "task_index": 20}, {"db_idx": 21244, "episode_idx": 101, "frame_idx": 46, "global_frame_idx": 21244, "task_index": 20}, {"db_idx": 21245, "episode_idx": 101, "frame_idx": 47, "global_frame_idx": 21245, "task_index": 20}, {"db_idx": 21246, "episode_idx": 101, "frame_idx": 48, "global_frame_idx": 21246, "task_index": 20}, {"db_idx": 21247, "episode_idx": 101, "frame_idx": 49, "global_frame_idx": 21247, "task_index": 20}, {"db_idx": 21248, "episode_idx": 101, "frame_idx": 50, "global_frame_idx": 21248, "task_index": 20}, {"db_idx": 21249, "episode_idx": 101, "frame_idx": 51, "global_frame_idx": 
21249, "task_index": 20}, {"db_idx": 21250, "episode_idx": 101, "frame_idx": 52, "global_frame_idx": 21250, "task_index": 20}, {"db_idx": 21251, "episode_idx": 101, "frame_idx": 53, "global_frame_idx": 21251, "task_index": 20}, {"db_idx": 21252, "episode_idx": 101, "frame_idx": 54, "global_frame_idx": 21252, "task_index": 20}, {"db_idx": 21253, "episode_idx": 101, "frame_idx": 55, "global_frame_idx": 21253, "task_index": 20}, {"db_idx": 21254, "episode_idx": 101, "frame_idx": 56, "global_frame_idx": 21254, "task_index": 20}, {"db_idx": 21255, "episode_idx": 101, "frame_idx": 57, "global_frame_idx": 21255, "task_index": 20}, {"db_idx": 21256, "episode_idx": 101, "frame_idx": 58, "global_frame_idx": 21256, "task_index": 20}, {"db_idx": 21257, "episode_idx": 101, "frame_idx": 59, "global_frame_idx": 21257, "task_index": 20}, {"db_idx": 21258, "episode_idx": 101, "frame_idx": 60, "global_frame_idx": 21258, "task_index": 20}, {"db_idx": 21259, "episode_idx": 101, "frame_idx": 61, "global_frame_idx": 21259, "task_index": 20}, {"db_idx": 21260, "episode_idx": 101, "frame_idx": 62, "global_frame_idx": 21260, "task_index": 20}, {"db_idx": 21261, "episode_idx": 101, "frame_idx": 63, "global_frame_idx": 21261, "task_index": 20}, {"db_idx": 21262, "episode_idx": 101, "frame_idx": 64, "global_frame_idx": 21262, "task_index": 20}, {"db_idx": 21263, "episode_idx": 101, "frame_idx": 65, "global_frame_idx": 21263, "task_index": 20}, {"db_idx": 21264, "episode_idx": 101, "frame_idx": 66, "global_frame_idx": 21264, "task_index": 20}, {"db_idx": 21265, "episode_idx": 101, "frame_idx": 67, "global_frame_idx": 21265, "task_index": 20}, {"db_idx": 21266, "episode_idx": 101, "frame_idx": 68, "global_frame_idx": 21266, "task_index": 20}, {"db_idx": 21267, "episode_idx": 101, "frame_idx": 69, "global_frame_idx": 21267, "task_index": 20}, {"db_idx": 21268, "episode_idx": 101, "frame_idx": 70, "global_frame_idx": 21268, "task_index": 20}, {"db_idx": 21269, "episode_idx": 101, "frame_idx": 71, 
"global_frame_idx": 21269, "task_index": 20}, {"db_idx": 21270, "episode_idx": 101, "frame_idx": 72, "global_frame_idx": 21270, "task_index": 20}, {"db_idx": 21271, "episode_idx": 101, "frame_idx": 73, "global_frame_idx": 21271, "task_index": 20}, {"db_idx": 21272, "episode_idx": 101, "frame_idx": 74, "global_frame_idx": 21272, "task_index": 20}, {"db_idx": 21273, "episode_idx": 101, "frame_idx": 75, "global_frame_idx": 21273, "task_index": 20}, {"db_idx": 21274, "episode_idx": 101, "frame_idx": 76, "global_frame_idx": 21274, "task_index": 20}, {"db_idx": 21275, "episode_idx": 101, "frame_idx": 77, "global_frame_idx": 21275, "task_index": 20}, {"db_idx": 21276, "episode_idx": 101, "frame_idx": 78, "global_frame_idx": 21276, "task_index": 20}, {"db_idx": 21277, "episode_idx": 101, "frame_idx": 79, "global_frame_idx": 21277, "task_index": 20}, {"db_idx": 21278, "episode_idx": 101, "frame_idx": 80, "global_frame_idx": 21278, "task_index": 20}, {"db_idx": 21279, "episode_idx": 102, "frame_idx": 0, "global_frame_idx": 21279, "task_index": 20}, {"db_idx": 21280, "episode_idx": 102, "frame_idx": 1, "global_frame_idx": 21280, "task_index": 20}, {"db_idx": 21281, "episode_idx": 102, "frame_idx": 2, "global_frame_idx": 21281, "task_index": 20}, {"db_idx": 21282, "episode_idx": 102, "frame_idx": 3, "global_frame_idx": 21282, "task_index": 20}, {"db_idx": 21283, "episode_idx": 102, "frame_idx": 4, "global_frame_idx": 21283, "task_index": 20}, {"db_idx": 21284, "episode_idx": 102, "frame_idx": 5, "global_frame_idx": 21284, "task_index": 20}, {"db_idx": 21285, "episode_idx": 102, "frame_idx": 6, "global_frame_idx": 21285, "task_index": 20}, {"db_idx": 21286, "episode_idx": 102, "frame_idx": 7, "global_frame_idx": 21286, "task_index": 20}, {"db_idx": 21287, "episode_idx": 102, "frame_idx": 8, "global_frame_idx": 21287, "task_index": 20}, {"db_idx": 21288, "episode_idx": 102, "frame_idx": 9, "global_frame_idx": 21288, "task_index": 20}, {"db_idx": 21289, "episode_idx": 102, 
"frame_idx": 10, "global_frame_idx": 21289, "task_index": 20}, {"db_idx": 21290, "episode_idx": 102, "frame_idx": 11, "global_frame_idx": 21290, "task_index": 20}, {"db_idx": 21291, "episode_idx": 102, "frame_idx": 12, "global_frame_idx": 21291, "task_index": 20}, {"db_idx": 21292, "episode_idx": 102, "frame_idx": 13, "global_frame_idx": 21292, "task_index": 20}, {"db_idx": 21293, "episode_idx": 102, "frame_idx": 14, "global_frame_idx": 21293, "task_index": 20}, {"db_idx": 21294, "episode_idx": 102, "frame_idx": 15, "global_frame_idx": 21294, "task_index": 20}, {"db_idx": 21295, "episode_idx": 102, "frame_idx": 16, "global_frame_idx": 21295, "task_index": 20}, {"db_idx": 21296, "episode_idx": 102, "frame_idx": 17, "global_frame_idx": 21296, "task_index": 20}, {"db_idx": 21297, "episode_idx": 102, "frame_idx": 18, "global_frame_idx": 21297, "task_index": 20}, {"db_idx": 21298, "episode_idx": 102, "frame_idx": 19, "global_frame_idx": 21298, "task_index": 20}, {"db_idx": 21299, "episode_idx": 102, "frame_idx": 20, "global_frame_idx": 21299, "task_index": 20}, {"db_idx": 21300, "episode_idx": 102, "frame_idx": 21, "global_frame_idx": 21300, "task_index": 20}, {"db_idx": 21301, "episode_idx": 102, "frame_idx": 22, "global_frame_idx": 21301, "task_index": 20}, {"db_idx": 21302, "episode_idx": 102, "frame_idx": 23, "global_frame_idx": 21302, "task_index": 20}, {"db_idx": 21303, "episode_idx": 102, "frame_idx": 24, "global_frame_idx": 21303, "task_index": 20}, {"db_idx": 21304, "episode_idx": 102, "frame_idx": 25, "global_frame_idx": 21304, "task_index": 20}, {"db_idx": 21305, "episode_idx": 102, "frame_idx": 26, "global_frame_idx": 21305, "task_index": 20}, {"db_idx": 21306, "episode_idx": 102, "frame_idx": 27, "global_frame_idx": 21306, "task_index": 20}, {"db_idx": 21307, "episode_idx": 102, "frame_idx": 28, "global_frame_idx": 21307, "task_index": 20}, {"db_idx": 21308, "episode_idx": 102, "frame_idx": 29, "global_frame_idx": 21308, "task_index": 20}, {"db_idx": 21309, 
"episode_idx": 102, "frame_idx": 30, "global_frame_idx": 21309, "task_index": 20}, {"db_idx": 21310, "episode_idx": 102, "frame_idx": 31, "global_frame_idx": 21310, "task_index": 20}, {"db_idx": 21311, "episode_idx": 102, "frame_idx": 32, "global_frame_idx": 21311, "task_index": 20}, {"db_idx": 21312, "episode_idx": 102, "frame_idx": 33, "global_frame_idx": 21312, "task_index": 20}, {"db_idx": 21313, "episode_idx": 102, "frame_idx": 34, "global_frame_idx": 21313, "task_index": 20}, {"db_idx": 21314, "episode_idx": 102, "frame_idx": 35, "global_frame_idx": 21314, "task_index": 20}, {"db_idx": 21315, "episode_idx": 102, "frame_idx": 36, "global_frame_idx": 21315, "task_index": 20}, {"db_idx": 21316, "episode_idx": 102, "frame_idx": 37, "global_frame_idx": 21316, "task_index": 20}, {"db_idx": 21317, "episode_idx": 102, "frame_idx": 38, "global_frame_idx": 21317, "task_index": 20}, {"db_idx": 21318, "episode_idx": 102, "frame_idx": 39, "global_frame_idx": 21318, "task_index": 20}, {"db_idx": 21319, "episode_idx": 102, "frame_idx": 40, "global_frame_idx": 21319, "task_index": 20}, {"db_idx": 21320, "episode_idx": 102, "frame_idx": 41, "global_frame_idx": 21320, "task_index": 20}, {"db_idx": 21321, "episode_idx": 102, "frame_idx": 42, "global_frame_idx": 21321, "task_index": 20}, {"db_idx": 21322, "episode_idx": 102, "frame_idx": 43, "global_frame_idx": 21322, "task_index": 20}, {"db_idx": 21323, "episode_idx": 102, "frame_idx": 44, "global_frame_idx": 21323, "task_index": 20}, {"db_idx": 21324, "episode_idx": 102, "frame_idx": 45, "global_frame_idx": 21324, "task_index": 20}, {"db_idx": 21325, "episode_idx": 102, "frame_idx": 46, "global_frame_idx": 21325, "task_index": 20}, {"db_idx": 21326, "episode_idx": 102, "frame_idx": 47, "global_frame_idx": 21326, "task_index": 20}, {"db_idx": 21327, "episode_idx": 102, "frame_idx": 48, "global_frame_idx": 21327, "task_index": 20}, {"db_idx": 21328, "episode_idx": 102, "frame_idx": 49, "global_frame_idx": 21328, "task_index": 
20}, {"db_idx": 21329, "episode_idx": 102, "frame_idx": 50, "global_frame_idx": 21329, "task_index": 20}, {"db_idx": 21330, "episode_idx": 102, "frame_idx": 51, "global_frame_idx": 21330, "task_index": 20}, {"db_idx": 21331, "episode_idx": 102, "frame_idx": 52, "global_frame_idx": 21331, "task_index": 20}, {"db_idx": 21332, "episode_idx": 102, "frame_idx": 53, "global_frame_idx": 21332, "task_index": 20}, {"db_idx": 21333, "episode_idx": 102, "frame_idx": 54, "global_frame_idx": 21333, "task_index": 20}, {"db_idx": 21334, "episode_idx": 102, "frame_idx": 55, "global_frame_idx": 21334, "task_index": 20}, {"db_idx": 21335, "episode_idx": 102, "frame_idx": 56, "global_frame_idx": 21335, "task_index": 20}, {"db_idx": 21336, "episode_idx": 102, "frame_idx": 57, "global_frame_idx": 21336, "task_index": 20}, {"db_idx": 21337, "episode_idx": 102, "frame_idx": 58, "global_frame_idx": 21337, "task_index": 20}, {"db_idx": 21338, "episode_idx": 102, "frame_idx": 59, "global_frame_idx": 21338, "task_index": 20}, {"db_idx": 21339, "episode_idx": 102, "frame_idx": 60, "global_frame_idx": 21339, "task_index": 20}, {"db_idx": 21340, "episode_idx": 102, "frame_idx": 61, "global_frame_idx": 21340, "task_index": 20}, {"db_idx": 21341, "episode_idx": 102, "frame_idx": 62, "global_frame_idx": 21341, "task_index": 20}, {"db_idx": 21342, "episode_idx": 102, "frame_idx": 63, "global_frame_idx": 21342, "task_index": 20}, {"db_idx": 21343, "episode_idx": 102, "frame_idx": 64, "global_frame_idx": 21343, "task_index": 20}, {"db_idx": 21344, "episode_idx": 102, "frame_idx": 65, "global_frame_idx": 21344, "task_index": 20}, {"db_idx": 21345, "episode_idx": 102, "frame_idx": 66, "global_frame_idx": 21345, "task_index": 20}, {"db_idx": 21346, "episode_idx": 102, "frame_idx": 67, "global_frame_idx": 21346, "task_index": 20}, {"db_idx": 21347, "episode_idx": 102, "frame_idx": 68, "global_frame_idx": 21347, "task_index": 20}, {"db_idx": 21348, "episode_idx": 102, "frame_idx": 69, "global_frame_idx": 
21348, "task_index": 20}, {"db_idx": 21349, "episode_idx": 102, "frame_idx": 70, "global_frame_idx": 21349, "task_index": 20}, {"db_idx": 21350, "episode_idx": 102, "frame_idx": 71, "global_frame_idx": 21350, "task_index": 20}, {"db_idx": 21351, "episode_idx": 102, "frame_idx": 72, "global_frame_idx": 21351, "task_index": 20}, {"db_idx": 21352, "episode_idx": 102, "frame_idx": 73, "global_frame_idx": 21352, "task_index": 20}, {"db_idx": 21353, "episode_idx": 102, "frame_idx": 74, "global_frame_idx": 21353, "task_index": 20}, {"db_idx": 21354, "episode_idx": 102, "frame_idx": 75, "global_frame_idx": 21354, "task_index": 20}, {"db_idx": 21355, "episode_idx": 102, "frame_idx": 76, "global_frame_idx": 21355, "task_index": 20}, {"db_idx": 21356, "episode_idx": 102, "frame_idx": 77, "global_frame_idx": 21356, "task_index": 20}, {"db_idx": 21357, "episode_idx": 102, "frame_idx": 78, "global_frame_idx": 21357, "task_index": 20}, {"db_idx": 21358, "episode_idx": 102, "frame_idx": 79, "global_frame_idx": 21358, "task_index": 20}, {"db_idx": 21359, "episode_idx": 102, "frame_idx": 80, "global_frame_idx": 21359, "task_index": 20}, {"db_idx": 21360, "episode_idx": 102, "frame_idx": 81, "global_frame_idx": 21360, "task_index": 20}, {"db_idx": 21361, "episode_idx": 102, "frame_idx": 82, "global_frame_idx": 21361, "task_index": 20}, {"db_idx": 21362, "episode_idx": 102, "frame_idx": 83, "global_frame_idx": 21362, "task_index": 20}, {"db_idx": 21363, "episode_idx": 102, "frame_idx": 84, "global_frame_idx": 21363, "task_index": 20}, {"db_idx": 21364, "episode_idx": 102, "frame_idx": 85, "global_frame_idx": 21364, "task_index": 20}, {"db_idx": 21365, "episode_idx": 102, "frame_idx": 86, "global_frame_idx": 21365, "task_index": 20}, {"db_idx": 21366, "episode_idx": 102, "frame_idx": 87, "global_frame_idx": 21366, "task_index": 20}, {"db_idx": 21367, "episode_idx": 102, "frame_idx": 88, "global_frame_idx": 21367, "task_index": 20}, {"db_idx": 21368, "episode_idx": 102, "frame_idx": 89, 
"global_frame_idx": 21368, "task_index": 20}, {"db_idx": 21369, "episode_idx": 102, "frame_idx": 90, "global_frame_idx": 21369, "task_index": 20}, {"db_idx": 21370, "episode_idx": 102, "frame_idx": 91, "global_frame_idx": 21370, "task_index": 20}, {"db_idx": 21371, "episode_idx": 102, "frame_idx": 92, "global_frame_idx": 21371, "task_index": 20}, {"db_idx": 21372, "episode_idx": 102, "frame_idx": 93, "global_frame_idx": 21372, "task_index": 20}, {"db_idx": 21373, "episode_idx": 102, "frame_idx": 94, "global_frame_idx": 21373, "task_index": 20}, {"db_idx": 21374, "episode_idx": 102, "frame_idx": 95, "global_frame_idx": 21374, "task_index": 20}, {"db_idx": 21375, "episode_idx": 102, "frame_idx": 96, "global_frame_idx": 21375, "task_index": 20}, {"db_idx": 21376, "episode_idx": 103, "frame_idx": 0, "global_frame_idx": 21376, "task_index": 20}, {"db_idx": 21377, "episode_idx": 103, "frame_idx": 1, "global_frame_idx": 21377, "task_index": 20}, {"db_idx": 21378, "episode_idx": 103, "frame_idx": 2, "global_frame_idx": 21378, "task_index": 20}, {"db_idx": 21379, "episode_idx": 103, "frame_idx": 3, "global_frame_idx": 21379, "task_index": 20}, {"db_idx": 21380, "episode_idx": 103, "frame_idx": 4, "global_frame_idx": 21380, "task_index": 20}, {"db_idx": 21381, "episode_idx": 103, "frame_idx": 5, "global_frame_idx": 21381, "task_index": 20}, {"db_idx": 21382, "episode_idx": 103, "frame_idx": 6, "global_frame_idx": 21382, "task_index": 20}, {"db_idx": 21383, "episode_idx": 103, "frame_idx": 7, "global_frame_idx": 21383, "task_index": 20}, {"db_idx": 21384, "episode_idx": 103, "frame_idx": 8, "global_frame_idx": 21384, "task_index": 20}, {"db_idx": 21385, "episode_idx": 103, "frame_idx": 9, "global_frame_idx": 21385, "task_index": 20}, {"db_idx": 21386, "episode_idx": 103, "frame_idx": 10, "global_frame_idx": 21386, "task_index": 20}, {"db_idx": 21387, "episode_idx": 103, "frame_idx": 11, "global_frame_idx": 21387, "task_index": 20}, {"db_idx": 21388, "episode_idx": 103, 
"frame_idx": 12, "global_frame_idx": 21388, "task_index": 20}, {"db_idx": 21389, "episode_idx": 103, "frame_idx": 13, "global_frame_idx": 21389, "task_index": 20}, {"db_idx": 21390, "episode_idx": 103, "frame_idx": 14, "global_frame_idx": 21390, "task_index": 20}, {"db_idx": 21391, "episode_idx": 103, "frame_idx": 15, "global_frame_idx": 21391, "task_index": 20}, {"db_idx": 21392, "episode_idx": 103, "frame_idx": 16, "global_frame_idx": 21392, "task_index": 20}, {"db_idx": 21393, "episode_idx": 103, "frame_idx": 17, "global_frame_idx": 21393, "task_index": 20}, {"db_idx": 21394, "episode_idx": 103, "frame_idx": 18, "global_frame_idx": 21394, "task_index": 20}, {"db_idx": 21395, "episode_idx": 103, "frame_idx": 19, "global_frame_idx": 21395, "task_index": 20}, {"db_idx": 21396, "episode_idx": 103, "frame_idx": 20, "global_frame_idx": 21396, "task_index": 20}, {"db_idx": 21397, "episode_idx": 103, "frame_idx": 21, "global_frame_idx": 21397, "task_index": 20}, {"db_idx": 21398, "episode_idx": 103, "frame_idx": 22, "global_frame_idx": 21398, "task_index": 20}, {"db_idx": 21399, "episode_idx": 103, "frame_idx": 23, "global_frame_idx": 21399, "task_index": 20}, {"db_idx": 21400, "episode_idx": 103, "frame_idx": 24, "global_frame_idx": 21400, "task_index": 20}, {"db_idx": 21401, "episode_idx": 103, "frame_idx": 25, "global_frame_idx": 21401, "task_index": 20}, {"db_idx": 21402, "episode_idx": 103, "frame_idx": 26, "global_frame_idx": 21402, "task_index": 20}, {"db_idx": 21403, "episode_idx": 103, "frame_idx": 27, "global_frame_idx": 21403, "task_index": 20}, {"db_idx": 21404, "episode_idx": 103, "frame_idx": 28, "global_frame_idx": 21404, "task_index": 20}, {"db_idx": 21405, "episode_idx": 103, "frame_idx": 29, "global_frame_idx": 21405, "task_index": 20}, {"db_idx": 21406, "episode_idx": 103, "frame_idx": 30, "global_frame_idx": 21406, "task_index": 20}, {"db_idx": 21407, "episode_idx": 103, "frame_idx": 31, "global_frame_idx": 21407, "task_index": 20}, {"db_idx": 21408, 
"episode_idx": 103, "frame_idx": 32, "global_frame_idx": 21408, "task_index": 20}, {"db_idx": 21409, "episode_idx": 103, "frame_idx": 33, "global_frame_idx": 21409, "task_index": 20}, {"db_idx": 21410, "episode_idx": 103, "frame_idx": 34, "global_frame_idx": 21410, "task_index": 20}, {"db_idx": 21411, "episode_idx": 103, "frame_idx": 35, "global_frame_idx": 21411, "task_index": 20}, {"db_idx": 21412, "episode_idx": 103, "frame_idx": 36, "global_frame_idx": 21412, "task_index": 20}, {"db_idx": 21413, "episode_idx": 103, "frame_idx": 37, "global_frame_idx": 21413, "task_index": 20}, {"db_idx": 21414, "episode_idx": 103, "frame_idx": 38, "global_frame_idx": 21414, "task_index": 20}, {"db_idx": 21415, "episode_idx": 103, "frame_idx": 39, "global_frame_idx": 21415, "task_index": 20}, {"db_idx": 21416, "episode_idx": 103, "frame_idx": 40, "global_frame_idx": 21416, "task_index": 20}, {"db_idx": 21417, "episode_idx": 103, "frame_idx": 41, "global_frame_idx": 21417, "task_index": 20}, {"db_idx": 21418, "episode_idx": 103, "frame_idx": 42, "global_frame_idx": 21418, "task_index": 20}, {"db_idx": 21419, "episode_idx": 103, "frame_idx": 43, "global_frame_idx": 21419, "task_index": 20}, {"db_idx": 21420, "episode_idx": 103, "frame_idx": 44, "global_frame_idx": 21420, "task_index": 20}, {"db_idx": 21421, "episode_idx": 103, "frame_idx": 45, "global_frame_idx": 21421, "task_index": 20}, {"db_idx": 21422, "episode_idx": 103, "frame_idx": 46, "global_frame_idx": 21422, "task_index": 20}, {"db_idx": 21423, "episode_idx": 103, "frame_idx": 47, "global_frame_idx": 21423, "task_index": 20}, {"db_idx": 21424, "episode_idx": 103, "frame_idx": 48, "global_frame_idx": 21424, "task_index": 20}, {"db_idx": 21425, "episode_idx": 103, "frame_idx": 49, "global_frame_idx": 21425, "task_index": 20}, {"db_idx": 21426, "episode_idx": 103, "frame_idx": 50, "global_frame_idx": 21426, "task_index": 20}, {"db_idx": 21427, "episode_idx": 103, "frame_idx": 51, "global_frame_idx": 21427, "task_index": 
20}, {"db_idx": 21428, "episode_idx": 103, "frame_idx": 52, "global_frame_idx": 21428, "task_index": 20}, {"db_idx": 21429, "episode_idx": 103, "frame_idx": 53, "global_frame_idx": 21429, "task_index": 20}, {"db_idx": 21430, "episode_idx": 103, "frame_idx": 54, "global_frame_idx": 21430, "task_index": 20}, {"db_idx": 21431, "episode_idx": 103, "frame_idx": 55, "global_frame_idx": 21431, "task_index": 20}, {"db_idx": 21432, "episode_idx": 103, "frame_idx": 56, "global_frame_idx": 21432, "task_index": 20}, {"db_idx": 21433, "episode_idx": 103, "frame_idx": 57, "global_frame_idx": 21433, "task_index": 20}, {"db_idx": 21434, "episode_idx": 103, "frame_idx": 58, "global_frame_idx": 21434, "task_index": 20}, {"db_idx": 21435, "episode_idx": 103, "frame_idx": 59, "global_frame_idx": 21435, "task_index": 20}, {"db_idx": 21436, "episode_idx": 103, "frame_idx": 60, "global_frame_idx": 21436, "task_index": 20}, {"db_idx": 21437, "episode_idx": 103, "frame_idx": 61, "global_frame_idx": 21437, "task_index": 20}, {"db_idx": 21438, "episode_idx": 103, "frame_idx": 62, "global_frame_idx": 21438, "task_index": 20}, {"db_idx": 21439, "episode_idx": 103, "frame_idx": 63, "global_frame_idx": 21439, "task_index": 20}, {"db_idx": 21440, "episode_idx": 103, "frame_idx": 64, "global_frame_idx": 21440, "task_index": 20}, {"db_idx": 21441, "episode_idx": 103, "frame_idx": 65, "global_frame_idx": 21441, "task_index": 20}, {"db_idx": 21442, "episode_idx": 103, "frame_idx": 66, "global_frame_idx": 21442, "task_index": 20}, {"db_idx": 21443, "episode_idx": 103, "frame_idx": 67, "global_frame_idx": 21443, "task_index": 20}, {"db_idx": 21444, "episode_idx": 103, "frame_idx": 68, "global_frame_idx": 21444, "task_index": 20}, {"db_idx": 21445, "episode_idx": 103, "frame_idx": 69, "global_frame_idx": 21445, "task_index": 20}, {"db_idx": 21446, "episode_idx": 103, "frame_idx": 70, "global_frame_idx": 21446, "task_index": 20}, {"db_idx": 21447, "episode_idx": 103, "frame_idx": 71, "global_frame_idx": 
21447, "task_index": 20}, {"db_idx": 21448, "episode_idx": 103, "frame_idx": 72, "global_frame_idx": 21448, "task_index": 20}, {"db_idx": 21449, "episode_idx": 103, "frame_idx": 73, "global_frame_idx": 21449, "task_index": 20}, {"db_idx": 21450, "episode_idx": 103, "frame_idx": 74, "global_frame_idx": 21450, "task_index": 20}, {"db_idx": 21451, "episode_idx": 103, "frame_idx": 75, "global_frame_idx": 21451, "task_index": 20}, {"db_idx": 21452, "episode_idx": 103, "frame_idx": 76, "global_frame_idx": 21452, "task_index": 20}, {"db_idx": 21453, "episode_idx": 103, "frame_idx": 77, "global_frame_idx": 21453, "task_index": 20}, {"db_idx": 21454, "episode_idx": 103, "frame_idx": 78, "global_frame_idx": 21454, "task_index": 20}, {"db_idx": 21455, "episode_idx": 103, "frame_idx": 79, "global_frame_idx": 21455, "task_index": 20}, {"db_idx": 21456, "episode_idx": 103, "frame_idx": 80, "global_frame_idx": 21456, "task_index": 20}, {"db_idx": 21457, "episode_idx": 103, "frame_idx": 81, "global_frame_idx": 21457, "task_index": 20}, {"db_idx": 21458, "episode_idx": 103, "frame_idx": 82, "global_frame_idx": 21458, "task_index": 20}, {"db_idx": 21459, "episode_idx": 103, "frame_idx": 83, "global_frame_idx": 21459, "task_index": 20}, {"db_idx": 21460, "episode_idx": 103, "frame_idx": 84, "global_frame_idx": 21460, "task_index": 20}, {"db_idx": 21461, "episode_idx": 103, "frame_idx": 85, "global_frame_idx": 21461, "task_index": 20}, {"db_idx": 21462, "episode_idx": 103, "frame_idx": 86, "global_frame_idx": 21462, "task_index": 20}, {"db_idx": 21463, "episode_idx": 103, "frame_idx": 87, "global_frame_idx": 21463, "task_index": 20}, {"db_idx": 21464, "episode_idx": 103, "frame_idx": 88, "global_frame_idx": 21464, "task_index": 20}, {"db_idx": 21465, "episode_idx": 103, "frame_idx": 89, "global_frame_idx": 21465, "task_index": 20}, {"db_idx": 21466, "episode_idx": 103, "frame_idx": 90, "global_frame_idx": 21466, "task_index": 20}, {"db_idx": 21467, "episode_idx": 103, "frame_idx": 91, 
"global_frame_idx": 21467, "task_index": 20}, {"db_idx": 21468, "episode_idx": 104, "frame_idx": 0, "global_frame_idx": 21468, "task_index": 20}, {"db_idx": 21469, "episode_idx": 104, "frame_idx": 1, "global_frame_idx": 21469, "task_index": 20}, {"db_idx": 21470, "episode_idx": 104, "frame_idx": 2, "global_frame_idx": 21470, "task_index": 20}, {"db_idx": 21471, "episode_idx": 104, "frame_idx": 3, "global_frame_idx": 21471, "task_index": 20}, {"db_idx": 21472, "episode_idx": 104, "frame_idx": 4, "global_frame_idx": 21472, "task_index": 20}, {"db_idx": 21473, "episode_idx": 104, "frame_idx": 5, "global_frame_idx": 21473, "task_index": 20}, {"db_idx": 21474, "episode_idx": 104, "frame_idx": 6, "global_frame_idx": 21474, "task_index": 20}, {"db_idx": 21475, "episode_idx": 104, "frame_idx": 7, "global_frame_idx": 21475, "task_index": 20}, {"db_idx": 21476, "episode_idx": 104, "frame_idx": 8, "global_frame_idx": 21476, "task_index": 20}, {"db_idx": 21477, "episode_idx": 104, "frame_idx": 9, "global_frame_idx": 21477, "task_index": 20}, {"db_idx": 21478, "episode_idx": 104, "frame_idx": 10, "global_frame_idx": 21478, "task_index": 20}, {"db_idx": 21479, "episode_idx": 104, "frame_idx": 11, "global_frame_idx": 21479, "task_index": 20}, {"db_idx": 21480, "episode_idx": 104, "frame_idx": 12, "global_frame_idx": 21480, "task_index": 20}, {"db_idx": 21481, "episode_idx": 104, "frame_idx": 13, "global_frame_idx": 21481, "task_index": 20}, {"db_idx": 21482, "episode_idx": 104, "frame_idx": 14, "global_frame_idx": 21482, "task_index": 20}, {"db_idx": 21483, "episode_idx": 104, "frame_idx": 15, "global_frame_idx": 21483, "task_index": 20}, {"db_idx": 21484, "episode_idx": 104, "frame_idx": 16, "global_frame_idx": 21484, "task_index": 20}, {"db_idx": 21485, "episode_idx": 104, "frame_idx": 17, "global_frame_idx": 21485, "task_index": 20}, {"db_idx": 21486, "episode_idx": 104, "frame_idx": 18, "global_frame_idx": 21486, "task_index": 20}, {"db_idx": 21487, "episode_idx": 104, 
"frame_idx": 19, "global_frame_idx": 21487, "task_index": 20}, {"db_idx": 21488, "episode_idx": 104, "frame_idx": 20, "global_frame_idx": 21488, "task_index": 20}, {"db_idx": 21489, "episode_idx": 104, "frame_idx": 21, "global_frame_idx": 21489, "task_index": 20}, {"db_idx": 21490, "episode_idx": 104, "frame_idx": 22, "global_frame_idx": 21490, "task_index": 20}, {"db_idx": 21491, "episode_idx": 104, "frame_idx": 23, "global_frame_idx": 21491, "task_index": 20}, {"db_idx": 21492, "episode_idx": 104, "frame_idx": 24, "global_frame_idx": 21492, "task_index": 20}, {"db_idx": 21493, "episode_idx": 104, "frame_idx": 25, "global_frame_idx": 21493, "task_index": 20}, {"db_idx": 21494, "episode_idx": 104, "frame_idx": 26, "global_frame_idx": 21494, "task_index": 20}, {"db_idx": 21495, "episode_idx": 104, "frame_idx": 27, "global_frame_idx": 21495, "task_index": 20}, {"db_idx": 21496, "episode_idx": 104, "frame_idx": 28, "global_frame_idx": 21496, "task_index": 20}, {"db_idx": 21497, "episode_idx": 104, "frame_idx": 29, "global_frame_idx": 21497, "task_index": 20}, {"db_idx": 21498, "episode_idx": 104, "frame_idx": 30, "global_frame_idx": 21498, "task_index": 20}, {"db_idx": 21499, "episode_idx": 104, "frame_idx": 31, "global_frame_idx": 21499, "task_index": 20}, {"db_idx": 21500, "episode_idx": 104, "frame_idx": 32, "global_frame_idx": 21500, "task_index": 20}, {"db_idx": 21501, "episode_idx": 104, "frame_idx": 33, "global_frame_idx": 21501, "task_index": 20}, {"db_idx": 21502, "episode_idx": 104, "frame_idx": 34, "global_frame_idx": 21502, "task_index": 20}, {"db_idx": 21503, "episode_idx": 104, "frame_idx": 35, "global_frame_idx": 21503, "task_index": 20}, {"db_idx": 21504, "episode_idx": 104, "frame_idx": 36, "global_frame_idx": 21504, "task_index": 20}, {"db_idx": 21505, "episode_idx": 104, "frame_idx": 37, "global_frame_idx": 21505, "task_index": 20}, {"db_idx": 21506, "episode_idx": 104, "frame_idx": 38, "global_frame_idx": 21506, "task_index": 20}, {"db_idx": 21507, 
"episode_idx": 104, "frame_idx": 39, "global_frame_idx": 21507, "task_index": 20}, {"db_idx": 21508, "episode_idx": 104, "frame_idx": 40, "global_frame_idx": 21508, "task_index": 20}, {"db_idx": 21509, "episode_idx": 104, "frame_idx": 41, "global_frame_idx": 21509, "task_index": 20}, {"db_idx": 21510, "episode_idx": 104, "frame_idx": 42, "global_frame_idx": 21510, "task_index": 20}, {"db_idx": 21511, "episode_idx": 104, "frame_idx": 43, "global_frame_idx": 21511, "task_index": 20}, {"db_idx": 21512, "episode_idx": 104, "frame_idx": 44, "global_frame_idx": 21512, "task_index": 20}, {"db_idx": 21513, "episode_idx": 104, "frame_idx": 45, "global_frame_idx": 21513, "task_index": 20}, {"db_idx": 21514, "episode_idx": 104, "frame_idx": 46, "global_frame_idx": 21514, "task_index": 20}, {"db_idx": 21515, "episode_idx": 104, "frame_idx": 47, "global_frame_idx": 21515, "task_index": 20}, {"db_idx": 21516, "episode_idx": 104, "frame_idx": 48, "global_frame_idx": 21516, "task_index": 20}, {"db_idx": 21517, "episode_idx": 104, "frame_idx": 49, "global_frame_idx": 21517, "task_index": 20}, {"db_idx": 21518, "episode_idx": 104, "frame_idx": 50, "global_frame_idx": 21518, "task_index": 20}, {"db_idx": 21519, "episode_idx": 104, "frame_idx": 51, "global_frame_idx": 21519, "task_index": 20}, {"db_idx": 21520, "episode_idx": 104, "frame_idx": 52, "global_frame_idx": 21520, "task_index": 20}, {"db_idx": 21521, "episode_idx": 104, "frame_idx": 53, "global_frame_idx": 21521, "task_index": 20}, {"db_idx": 21522, "episode_idx": 104, "frame_idx": 54, "global_frame_idx": 21522, "task_index": 20}, {"db_idx": 21523, "episode_idx": 104, "frame_idx": 55, "global_frame_idx": 21523, "task_index": 20}, {"db_idx": 21524, "episode_idx": 104, "frame_idx": 56, "global_frame_idx": 21524, "task_index": 20}, {"db_idx": 21525, "episode_idx": 104, "frame_idx": 57, "global_frame_idx": 21525, "task_index": 20}, {"db_idx": 21526, "episode_idx": 104, "frame_idx": 58, "global_frame_idx": 21526, "task_index": 
20}, {"db_idx": 21527, "episode_idx": 104, "frame_idx": 59, "global_frame_idx": 21527, "task_index": 20}, {"db_idx": 21528, "episode_idx": 104, "frame_idx": 60, "global_frame_idx": 21528, "task_index": 20}, {"db_idx": 21529, "episode_idx": 104, "frame_idx": 61, "global_frame_idx": 21529, "task_index": 20}, {"db_idx": 21530, "episode_idx": 104, "frame_idx": 62, "global_frame_idx": 21530, "task_index": 20}, {"db_idx": 21531, "episode_idx": 104, "frame_idx": 63, "global_frame_idx": 21531, "task_index": 20}, {"db_idx": 21532, "episode_idx": 104, "frame_idx": 64, "global_frame_idx": 21532, "task_index": 20}, {"db_idx": 21533, "episode_idx": 104, "frame_idx": 65, "global_frame_idx": 21533, "task_index": 20}, {"db_idx": 21534, "episode_idx": 104, "frame_idx": 66, "global_frame_idx": 21534, "task_index": 20}, {"db_idx": 21535, "episode_idx": 104, "frame_idx": 67, "global_frame_idx": 21535, "task_index": 20}, {"db_idx": 21536, "episode_idx": 104, "frame_idx": 68, "global_frame_idx": 21536, "task_index": 20}, {"db_idx": 21537, "episode_idx": 104, "frame_idx": 69, "global_frame_idx": 21537, "task_index": 20}, {"db_idx": 21538, "episode_idx": 104, "frame_idx": 70, "global_frame_idx": 21538, "task_index": 20}, {"db_idx": 21539, "episode_idx": 104, "frame_idx": 71, "global_frame_idx": 21539, "task_index": 20}, {"db_idx": 21540, "episode_idx": 104, "frame_idx": 72, "global_frame_idx": 21540, "task_index": 20}, {"db_idx": 21541, "episode_idx": 104, "frame_idx": 73, "global_frame_idx": 21541, "task_index": 20}, {"db_idx": 21542, "episode_idx": 104, "frame_idx": 74, "global_frame_idx": 21542, "task_index": 20}, {"db_idx": 21543, "episode_idx": 104, "frame_idx": 75, "global_frame_idx": 21543, "task_index": 20}, {"db_idx": 21544, "episode_idx": 104, "frame_idx": 76, "global_frame_idx": 21544, "task_index": 20}, {"db_idx": 21545, "episode_idx": 105, "frame_idx": 0, "global_frame_idx": 21545, "task_index": 21}, {"db_idx": 21546, "episode_idx": 105, "frame_idx": 1, "global_frame_idx": 
21546, "task_index": 21}, {"db_idx": 21547, "episode_idx": 105, "frame_idx": 2, "global_frame_idx": 21547, "task_index": 21}, {"db_idx": 21548, "episode_idx": 105, "frame_idx": 3, "global_frame_idx": 21548, "task_index": 21}, {"db_idx": 21549, "episode_idx": 105, "frame_idx": 4, "global_frame_idx": 21549, "task_index": 21}, {"db_idx": 21550, "episode_idx": 105, "frame_idx": 5, "global_frame_idx": 21550, "task_index": 21}, {"db_idx": 21551, "episode_idx": 105, "frame_idx": 6, "global_frame_idx": 21551, "task_index": 21}, {"db_idx": 21552, "episode_idx": 105, "frame_idx": 7, "global_frame_idx": 21552, "task_index": 21}, {"db_idx": 21553, "episode_idx": 105, "frame_idx": 8, "global_frame_idx": 21553, "task_index": 21}, {"db_idx": 21554, "episode_idx": 105, "frame_idx": 9, "global_frame_idx": 21554, "task_index": 21}, {"db_idx": 21555, "episode_idx": 105, "frame_idx": 10, "global_frame_idx": 21555, "task_index": 21}, {"db_idx": 21556, "episode_idx": 105, "frame_idx": 11, "global_frame_idx": 21556, "task_index": 21}, {"db_idx": 21557, "episode_idx": 105, "frame_idx": 12, "global_frame_idx": 21557, "task_index": 21}, {"db_idx": 21558, "episode_idx": 105, "frame_idx": 13, "global_frame_idx": 21558, "task_index": 21}, {"db_idx": 21559, "episode_idx": 105, "frame_idx": 14, "global_frame_idx": 21559, "task_index": 21}, {"db_idx": 21560, "episode_idx": 105, "frame_idx": 15, "global_frame_idx": 21560, "task_index": 21}, {"db_idx": 21561, "episode_idx": 105, "frame_idx": 16, "global_frame_idx": 21561, "task_index": 21}, {"db_idx": 21562, "episode_idx": 105, "frame_idx": 17, "global_frame_idx": 21562, "task_index": 21}, {"db_idx": 21563, "episode_idx": 105, "frame_idx": 18, "global_frame_idx": 21563, "task_index": 21}, {"db_idx": 21564, "episode_idx": 105, "frame_idx": 19, "global_frame_idx": 21564, "task_index": 21}, {"db_idx": 21565, "episode_idx": 105, "frame_idx": 20, "global_frame_idx": 21565, "task_index": 21}, {"db_idx": 21566, "episode_idx": 105, "frame_idx": 21, 
"global_frame_idx": 21566, "task_index": 21}, {"db_idx": 21567, "episode_idx": 105, "frame_idx": 22, "global_frame_idx": 21567, "task_index": 21}, {"db_idx": 21568, "episode_idx": 105, "frame_idx": 23, "global_frame_idx": 21568, "task_index": 21}, {"db_idx": 21569, "episode_idx": 105, "frame_idx": 24, "global_frame_idx": 21569, "task_index": 21}, {"db_idx": 21570, "episode_idx": 105, "frame_idx": 25, "global_frame_idx": 21570, "task_index": 21}, {"db_idx": 21571, "episode_idx": 105, "frame_idx": 26, "global_frame_idx": 21571, "task_index": 21}, {"db_idx": 21572, "episode_idx": 105, "frame_idx": 27, "global_frame_idx": 21572, "task_index": 21}, {"db_idx": 21573, "episode_idx": 105, "frame_idx": 28, "global_frame_idx": 21573, "task_index": 21}, {"db_idx": 21574, "episode_idx": 105, "frame_idx": 29, "global_frame_idx": 21574, "task_index": 21}, {"db_idx": 21575, "episode_idx": 105, "frame_idx": 30, "global_frame_idx": 21575, "task_index": 21}, {"db_idx": 21576, "episode_idx": 105, "frame_idx": 31, "global_frame_idx": 21576, "task_index": 21}, {"db_idx": 21577, "episode_idx": 105, "frame_idx": 32, "global_frame_idx": 21577, "task_index": 21}, {"db_idx": 21578, "episode_idx": 105, "frame_idx": 33, "global_frame_idx": 21578, "task_index": 21}, {"db_idx": 21579, "episode_idx": 105, "frame_idx": 34, "global_frame_idx": 21579, "task_index": 21}, {"db_idx": 21580, "episode_idx": 105, "frame_idx": 35, "global_frame_idx": 21580, "task_index": 21}, {"db_idx": 21581, "episode_idx": 105, "frame_idx": 36, "global_frame_idx": 21581, "task_index": 21}, {"db_idx": 21582, "episode_idx": 105, "frame_idx": 37, "global_frame_idx": 21582, "task_index": 21}, {"db_idx": 21583, "episode_idx": 105, "frame_idx": 38, "global_frame_idx": 21583, "task_index": 21}, {"db_idx": 21584, "episode_idx": 105, "frame_idx": 39, "global_frame_idx": 21584, "task_index": 21}, {"db_idx": 21585, "episode_idx": 105, "frame_idx": 40, "global_frame_idx": 21585, "task_index": 21}, {"db_idx": 21586, "episode_idx": 
105, "frame_idx": 41, "global_frame_idx": 21586, "task_index": 21}, {"db_idx": 21587, "episode_idx": 105, "frame_idx": 42, "global_frame_idx": 21587, "task_index": 21}, {"db_idx": 21588, "episode_idx": 105, "frame_idx": 43, "global_frame_idx": 21588, "task_index": 21}, {"db_idx": 21589, "episode_idx": 105, "frame_idx": 44, "global_frame_idx": 21589, "task_index": 21}, {"db_idx": 21590, "episode_idx": 105, "frame_idx": 45, "global_frame_idx": 21590, "task_index": 21}, {"db_idx": 21591, "episode_idx": 105, "frame_idx": 46, "global_frame_idx": 21591, "task_index": 21}, {"db_idx": 21592, "episode_idx": 105, "frame_idx": 47, "global_frame_idx": 21592, "task_index": 21}, {"db_idx": 21593, "episode_idx": 105, "frame_idx": 48, "global_frame_idx": 21593, "task_index": 21}, {"db_idx": 21594, "episode_idx": 105, "frame_idx": 49, "global_frame_idx": 21594, "task_index": 21}, {"db_idx": 21595, "episode_idx": 105, "frame_idx": 50, "global_frame_idx": 21595, "task_index": 21}, {"db_idx": 21596, "episode_idx": 105, "frame_idx": 51, "global_frame_idx": 21596, "task_index": 21}, {"db_idx": 21597, "episode_idx": 105, "frame_idx": 52, "global_frame_idx": 21597, "task_index": 21}, {"db_idx": 21598, "episode_idx": 105, "frame_idx": 53, "global_frame_idx": 21598, "task_index": 21}, {"db_idx": 21599, "episode_idx": 105, "frame_idx": 54, "global_frame_idx": 21599, "task_index": 21}, {"db_idx": 21600, "episode_idx": 105, "frame_idx": 55, "global_frame_idx": 21600, "task_index": 21}, {"db_idx": 21601, "episode_idx": 105, "frame_idx": 56, "global_frame_idx": 21601, "task_index": 21}, {"db_idx": 21602, "episode_idx": 105, "frame_idx": 57, "global_frame_idx": 21602, "task_index": 21}, {"db_idx": 21603, "episode_idx": 105, "frame_idx": 58, "global_frame_idx": 21603, "task_index": 21}, {"db_idx": 21604, "episode_idx": 105, "frame_idx": 59, "global_frame_idx": 21604, "task_index": 21}, {"db_idx": 21605, "episode_idx": 105, "frame_idx": 60, "global_frame_idx": 21605, "task_index": 21}, {"db_idx": 
21606, "episode_idx": 105, "frame_idx": 61, "global_frame_idx": 21606, "task_index": 21}, {"db_idx": 21607, "episode_idx": 105, "frame_idx": 62, "global_frame_idx": 21607, "task_index": 21}, {"db_idx": 21608, "episode_idx": 105, "frame_idx": 63, "global_frame_idx": 21608, "task_index": 21}, {"db_idx": 21609, "episode_idx": 105, "frame_idx": 64, "global_frame_idx": 21609, "task_index": 21}, {"db_idx": 21610, "episode_idx": 105, "frame_idx": 65, "global_frame_idx": 21610, "task_index": 21}, {"db_idx": 21611, "episode_idx": 105, "frame_idx": 66, "global_frame_idx": 21611, "task_index": 21}, {"db_idx": 21612, "episode_idx": 105, "frame_idx": 67, "global_frame_idx": 21612, "task_index": 21}, {"db_idx": 21613, "episode_idx": 105, "frame_idx": 68, "global_frame_idx": 21613, "task_index": 21}, {"db_idx": 21614, "episode_idx": 105, "frame_idx": 69, "global_frame_idx": 21614, "task_index": 21}, {"db_idx": 21615, "episode_idx": 105, "frame_idx": 70, "global_frame_idx": 21615, "task_index": 21}, {"db_idx": 21616, "episode_idx": 105, "frame_idx": 71, "global_frame_idx": 21616, "task_index": 21}, {"db_idx": 21617, "episode_idx": 105, "frame_idx": 72, "global_frame_idx": 21617, "task_index": 21}, {"db_idx": 21618, "episode_idx": 105, "frame_idx": 73, "global_frame_idx": 21618, "task_index": 21}, {"db_idx": 21619, "episode_idx": 105, "frame_idx": 74, "global_frame_idx": 21619, "task_index": 21}, {"db_idx": 21620, "episode_idx": 105, "frame_idx": 75, "global_frame_idx": 21620, "task_index": 21}, {"db_idx": 21621, "episode_idx": 105, "frame_idx": 76, "global_frame_idx": 21621, "task_index": 21}, {"db_idx": 21622, "episode_idx": 105, "frame_idx": 77, "global_frame_idx": 21622, "task_index": 21}, {"db_idx": 21623, "episode_idx": 105, "frame_idx": 78, "global_frame_idx": 21623, "task_index": 21}, {"db_idx": 21624, "episode_idx": 105, "frame_idx": 79, "global_frame_idx": 21624, "task_index": 21}, {"db_idx": 21625, "episode_idx": 105, "frame_idx": 80, "global_frame_idx": 21625, 
"task_index": 21}, {"db_idx": 21626, "episode_idx": 105, "frame_idx": 81, "global_frame_idx": 21626, "task_index": 21}, {"db_idx": 21627, "episode_idx": 105, "frame_idx": 82, "global_frame_idx": 21627, "task_index": 21}, {"db_idx": 21628, "episode_idx": 105, "frame_idx": 83, "global_frame_idx": 21628, "task_index": 21}, {"db_idx": 21629, "episode_idx": 105, "frame_idx": 84, "global_frame_idx": 21629, "task_index": 21}, {"db_idx": 21630, "episode_idx": 105, "frame_idx": 85, "global_frame_idx": 21630, "task_index": 21}, {"db_idx": 21631, "episode_idx": 105, "frame_idx": 86, "global_frame_idx": 21631, "task_index": 21}, {"db_idx": 21632, "episode_idx": 105, "frame_idx": 87, "global_frame_idx": 21632, "task_index": 21}, {"db_idx": 21633, "episode_idx": 105, "frame_idx": 88, "global_frame_idx": 21633, "task_index": 21}, {"db_idx": 21634, "episode_idx": 105, "frame_idx": 89, "global_frame_idx": 21634, "task_index": 21}, {"db_idx": 21635, "episode_idx": 105, "frame_idx": 90, "global_frame_idx": 21635, "task_index": 21}, {"db_idx": 21636, "episode_idx": 105, "frame_idx": 91, "global_frame_idx": 21636, "task_index": 21}, {"db_idx": 21637, "episode_idx": 105, "frame_idx": 92, "global_frame_idx": 21637, "task_index": 21}, {"db_idx": 21638, "episode_idx": 105, "frame_idx": 93, "global_frame_idx": 21638, "task_index": 21}, {"db_idx": 21639, "episode_idx": 105, "frame_idx": 94, "global_frame_idx": 21639, "task_index": 21}, {"db_idx": 21640, "episode_idx": 105, "frame_idx": 95, "global_frame_idx": 21640, "task_index": 21}, {"db_idx": 21641, "episode_idx": 105, "frame_idx": 96, "global_frame_idx": 21641, "task_index": 21}, {"db_idx": 21642, "episode_idx": 105, "frame_idx": 97, "global_frame_idx": 21642, "task_index": 21}, {"db_idx": 21643, "episode_idx": 106, "frame_idx": 0, "global_frame_idx": 21643, "task_index": 21}, {"db_idx": 21644, "episode_idx": 106, "frame_idx": 1, "global_frame_idx": 21644, "task_index": 21}, {"db_idx": 21645, "episode_idx": 106, "frame_idx": 2, 
"global_frame_idx": 21645, "task_index": 21}, {"db_idx": 21646, "episode_idx": 106, "frame_idx": 3, "global_frame_idx": 21646, "task_index": 21}, {"db_idx": 21647, "episode_idx": 106, "frame_idx": 4, "global_frame_idx": 21647, "task_index": 21}, {"db_idx": 21648, "episode_idx": 106, "frame_idx": 5, "global_frame_idx": 21648, "task_index": 21}, {"db_idx": 21649, "episode_idx": 106, "frame_idx": 6, "global_frame_idx": 21649, "task_index": 21}, {"db_idx": 21650, "episode_idx": 106, "frame_idx": 7, "global_frame_idx": 21650, "task_index": 21}, {"db_idx": 21651, "episode_idx": 106, "frame_idx": 8, "global_frame_idx": 21651, "task_index": 21}, {"db_idx": 21652, "episode_idx": 106, "frame_idx": 9, "global_frame_idx": 21652, "task_index": 21}, {"db_idx": 21653, "episode_idx": 106, "frame_idx": 10, "global_frame_idx": 21653, "task_index": 21}, {"db_idx": 21654, "episode_idx": 106, "frame_idx": 11, "global_frame_idx": 21654, "task_index": 21}, {"db_idx": 21655, "episode_idx": 106, "frame_idx": 12, "global_frame_idx": 21655, "task_index": 21}, {"db_idx": 21656, "episode_idx": 106, "frame_idx": 13, "global_frame_idx": 21656, "task_index": 21}, {"db_idx": 21657, "episode_idx": 106, "frame_idx": 14, "global_frame_idx": 21657, "task_index": 21}, {"db_idx": 21658, "episode_idx": 106, "frame_idx": 15, "global_frame_idx": 21658, "task_index": 21}, {"db_idx": 21659, "episode_idx": 106, "frame_idx": 16, "global_frame_idx": 21659, "task_index": 21}, {"db_idx": 21660, "episode_idx": 106, "frame_idx": 17, "global_frame_idx": 21660, "task_index": 21}, {"db_idx": 21661, "episode_idx": 106, "frame_idx": 18, "global_frame_idx": 21661, "task_index": 21}, {"db_idx": 21662, "episode_idx": 106, "frame_idx": 19, "global_frame_idx": 21662, "task_index": 21}, {"db_idx": 21663, "episode_idx": 106, "frame_idx": 20, "global_frame_idx": 21663, "task_index": 21}, {"db_idx": 21664, "episode_idx": 106, "frame_idx": 21, "global_frame_idx": 21664, "task_index": 21}, {"db_idx": 21665, "episode_idx": 106, 
"frame_idx": 22, "global_frame_idx": 21665, "task_index": 21}, {"db_idx": 21666, "episode_idx": 106, "frame_idx": 23, "global_frame_idx": 21666, "task_index": 21}, {"db_idx": 21667, "episode_idx": 106, "frame_idx": 24, "global_frame_idx": 21667, "task_index": 21}, {"db_idx": 21668, "episode_idx": 106, "frame_idx": 25, "global_frame_idx": 21668, "task_index": 21}, {"db_idx": 21669, "episode_idx": 106, "frame_idx": 26, "global_frame_idx": 21669, "task_index": 21}, {"db_idx": 21670, "episode_idx": 106, "frame_idx": 27, "global_frame_idx": 21670, "task_index": 21}, {"db_idx": 21671, "episode_idx": 106, "frame_idx": 28, "global_frame_idx": 21671, "task_index": 21}, {"db_idx": 21672, "episode_idx": 106, "frame_idx": 29, "global_frame_idx": 21672, "task_index": 21}, {"db_idx": 21673, "episode_idx": 106, "frame_idx": 30, "global_frame_idx": 21673, "task_index": 21}, {"db_idx": 21674, "episode_idx": 106, "frame_idx": 31, "global_frame_idx": 21674, "task_index": 21}, {"db_idx": 21675, "episode_idx": 106, "frame_idx": 32, "global_frame_idx": 21675, "task_index": 21}, {"db_idx": 21676, "episode_idx": 106, "frame_idx": 33, "global_frame_idx": 21676, "task_index": 21}, {"db_idx": 21677, "episode_idx": 106, "frame_idx": 34, "global_frame_idx": 21677, "task_index": 21}, {"db_idx": 21678, "episode_idx": 106, "frame_idx": 35, "global_frame_idx": 21678, "task_index": 21}, {"db_idx": 21679, "episode_idx": 106, "frame_idx": 36, "global_frame_idx": 21679, "task_index": 21}, {"db_idx": 21680, "episode_idx": 106, "frame_idx": 37, "global_frame_idx": 21680, "task_index": 21}, {"db_idx": 21681, "episode_idx": 106, "frame_idx": 38, "global_frame_idx": 21681, "task_index": 21}, {"db_idx": 21682, "episode_idx": 106, "frame_idx": 39, "global_frame_idx": 21682, "task_index": 21}, {"db_idx": 21683, "episode_idx": 106, "frame_idx": 40, "global_frame_idx": 21683, "task_index": 21}, {"db_idx": 21684, "episode_idx": 106, "frame_idx": 41, "global_frame_idx": 21684, "task_index": 21}, {"db_idx": 21685, 
"episode_idx": 106, "frame_idx": 42, "global_frame_idx": 21685, "task_index": 21}, {"db_idx": 21686, "episode_idx": 106, "frame_idx": 43, "global_frame_idx": 21686, "task_index": 21}, {"db_idx": 21687, "episode_idx": 106, "frame_idx": 44, "global_frame_idx": 21687, "task_index": 21}, {"db_idx": 21688, "episode_idx": 106, "frame_idx": 45, "global_frame_idx": 21688, "task_index": 21}, {"db_idx": 21689, "episode_idx": 106, "frame_idx": 46, "global_frame_idx": 21689, "task_index": 21}, {"db_idx": 21690, "episode_idx": 106, "frame_idx": 47, "global_frame_idx": 21690, "task_index": 21}, {"db_idx": 21691, "episode_idx": 106, "frame_idx": 48, "global_frame_idx": 21691, "task_index": 21}, {"db_idx": 21692, "episode_idx": 106, "frame_idx": 49, "global_frame_idx": 21692, "task_index": 21}, {"db_idx": 21693, "episode_idx": 106, "frame_idx": 50, "global_frame_idx": 21693, "task_index": 21}, {"db_idx": 21694, "episode_idx": 106, "frame_idx": 51, "global_frame_idx": 21694, "task_index": 21}, {"db_idx": 21695, "episode_idx": 106, "frame_idx": 52, "global_frame_idx": 21695, "task_index": 21}, {"db_idx": 21696, "episode_idx": 106, "frame_idx": 53, "global_frame_idx": 21696, "task_index": 21}, {"db_idx": 21697, "episode_idx": 106, "frame_idx": 54, "global_frame_idx": 21697, "task_index": 21}, {"db_idx": 21698, "episode_idx": 106, "frame_idx": 55, "global_frame_idx": 21698, "task_index": 21}, {"db_idx": 21699, "episode_idx": 106, "frame_idx": 56, "global_frame_idx": 21699, "task_index": 21}, {"db_idx": 21700, "episode_idx": 106, "frame_idx": 57, "global_frame_idx": 21700, "task_index": 21}, {"db_idx": 21701, "episode_idx": 106, "frame_idx": 58, "global_frame_idx": 21701, "task_index": 21}, {"db_idx": 21702, "episode_idx": 106, "frame_idx": 59, "global_frame_idx": 21702, "task_index": 21}, {"db_idx": 21703, "episode_idx": 106, "frame_idx": 60, "global_frame_idx": 21703, "task_index": 21}, {"db_idx": 21704, "episode_idx": 106, "frame_idx": 61, "global_frame_idx": 21704, "task_index": 
21}, {"db_idx": 21705, "episode_idx": 106, "frame_idx": 62, "global_frame_idx": 21705, "task_index": 21}, {"db_idx": 21706, "episode_idx": 106, "frame_idx": 63, "global_frame_idx": 21706, "task_index": 21}, {"db_idx": 21707, "episode_idx": 106, "frame_idx": 64, "global_frame_idx": 21707, "task_index": 21}, {"db_idx": 21708, "episode_idx": 106, "frame_idx": 65, "global_frame_idx": 21708, "task_index": 21}, {"db_idx": 21709, "episode_idx": 106, "frame_idx": 66, "global_frame_idx": 21709, "task_index": 21}, {"db_idx": 21710, "episode_idx": 106, "frame_idx": 67, "global_frame_idx": 21710, "task_index": 21}, {"db_idx": 21711, "episode_idx": 106, "frame_idx": 68, "global_frame_idx": 21711, "task_index": 21}, {"db_idx": 21712, "episode_idx": 106, "frame_idx": 69, "global_frame_idx": 21712, "task_index": 21}, {"db_idx": 21713, "episode_idx": 106, "frame_idx": 70, "global_frame_idx": 21713, "task_index": 21}, {"db_idx": 21714, "episode_idx": 106, "frame_idx": 71, "global_frame_idx": 21714, "task_index": 21}, {"db_idx": 21715, "episode_idx": 106, "frame_idx": 72, "global_frame_idx": 21715, "task_index": 21}, {"db_idx": 21716, "episode_idx": 106, "frame_idx": 73, "global_frame_idx": 21716, "task_index": 21}, {"db_idx": 21717, "episode_idx": 106, "frame_idx": 74, "global_frame_idx": 21717, "task_index": 21}, {"db_idx": 21718, "episode_idx": 106, "frame_idx": 75, "global_frame_idx": 21718, "task_index": 21}, {"db_idx": 21719, "episode_idx": 106, "frame_idx": 76, "global_frame_idx": 21719, "task_index": 21}, {"db_idx": 21720, "episode_idx": 106, "frame_idx": 77, "global_frame_idx": 21720, "task_index": 21}, {"db_idx": 21721, "episode_idx": 106, "frame_idx": 78, "global_frame_idx": 21721, "task_index": 21}, {"db_idx": 21722, "episode_idx": 106, "frame_idx": 79, "global_frame_idx": 21722, "task_index": 21}, {"db_idx": 21723, "episode_idx": 106, "frame_idx": 80, "global_frame_idx": 21723, "task_index": 21}, {"db_idx": 21724, "episode_idx": 106, "frame_idx": 81, "global_frame_idx": 
21724, "task_index": 21}, {"db_idx": 21725, "episode_idx": 106, "frame_idx": 82, "global_frame_idx": 21725, "task_index": 21}, {"db_idx": 21726, "episode_idx": 106, "frame_idx": 83, "global_frame_idx": 21726, "task_index": 21}, {"db_idx": 21727, "episode_idx": 106, "frame_idx": 84, "global_frame_idx": 21727, "task_index": 21}, {"db_idx": 21728, "episode_idx": 106, "frame_idx": 85, "global_frame_idx": 21728, "task_index": 21}, {"db_idx": 21729, "episode_idx": 106, "frame_idx": 86, "global_frame_idx": 21729, "task_index": 21}, {"db_idx": 21730, "episode_idx": 106, "frame_idx": 87, "global_frame_idx": 21730, "task_index": 21}, {"db_idx": 21731, "episode_idx": 106, "frame_idx": 88, "global_frame_idx": 21731, "task_index": 21}, {"db_idx": 21732, "episode_idx": 106, "frame_idx": 89, "global_frame_idx": 21732, "task_index": 21}, {"db_idx": 21733, "episode_idx": 106, "frame_idx": 90, "global_frame_idx": 21733, "task_index": 21}, {"db_idx": 21734, "episode_idx": 106, "frame_idx": 91, "global_frame_idx": 21734, "task_index": 21}, {"db_idx": 21735, "episode_idx": 106, "frame_idx": 92, "global_frame_idx": 21735, "task_index": 21}, {"db_idx": 21736, "episode_idx": 106, "frame_idx": 93, "global_frame_idx": 21736, "task_index": 21}, {"db_idx": 21737, "episode_idx": 106, "frame_idx": 94, "global_frame_idx": 21737, "task_index": 21}, {"db_idx": 21738, "episode_idx": 106, "frame_idx": 95, "global_frame_idx": 21738, "task_index": 21}, {"db_idx": 21739, "episode_idx": 106, "frame_idx": 96, "global_frame_idx": 21739, "task_index": 21}, {"db_idx": 21740, "episode_idx": 106, "frame_idx": 97, "global_frame_idx": 21740, "task_index": 21}, {"db_idx": 21741, "episode_idx": 106, "frame_idx": 98, "global_frame_idx": 21741, "task_index": 21}, {"db_idx": 21742, "episode_idx": 106, "frame_idx": 99, "global_frame_idx": 21742, "task_index": 21}, {"db_idx": 21743, "episode_idx": 106, "frame_idx": 100, "global_frame_idx": 21743, "task_index": 21}, {"db_idx": 21744, "episode_idx": 106, "frame_idx": 
101, "global_frame_idx": 21744, "task_index": 21}, {"db_idx": 21745, "episode_idx": 106, "frame_idx": 102, "global_frame_idx": 21745, "task_index": 21}, {"db_idx": 21746, "episode_idx": 106, "frame_idx": 103, "global_frame_idx": 21746, "task_index": 21}, {"db_idx": 21747, "episode_idx": 106, "frame_idx": 104, "global_frame_idx": 21747, "task_index": 21}, {"db_idx": 21748, "episode_idx": 106, "frame_idx": 105, "global_frame_idx": 21748, "task_index": 21}, {"db_idx": 21749, "episode_idx": 106, "frame_idx": 106, "global_frame_idx": 21749, "task_index": 21}, {"db_idx": 21750, "episode_idx": 106, "frame_idx": 107, "global_frame_idx": 21750, "task_index": 21}, {"db_idx": 21751, "episode_idx": 106, "frame_idx": 108, "global_frame_idx": 21751, "task_index": 21}, {"db_idx": 21752, "episode_idx": 106, "frame_idx": 109, "global_frame_idx": 21752, "task_index": 21}, {"db_idx": 21753, "episode_idx": 106, "frame_idx": 110, "global_frame_idx": 21753, "task_index": 21}, {"db_idx": 21754, "episode_idx": 106, "frame_idx": 111, "global_frame_idx": 21754, "task_index": 21}, {"db_idx": 21755, "episode_idx": 107, "frame_idx": 0, "global_frame_idx": 21755, "task_index": 21}, {"db_idx": 21756, "episode_idx": 107, "frame_idx": 1, "global_frame_idx": 21756, "task_index": 21}, {"db_idx": 21757, "episode_idx": 107, "frame_idx": 2, "global_frame_idx": 21757, "task_index": 21}, {"db_idx": 21758, "episode_idx": 107, "frame_idx": 3, "global_frame_idx": 21758, "task_index": 21}, {"db_idx": 21759, "episode_idx": 107, "frame_idx": 4, "global_frame_idx": 21759, "task_index": 21}, {"db_idx": 21760, "episode_idx": 107, "frame_idx": 5, "global_frame_idx": 21760, "task_index": 21}, {"db_idx": 21761, "episode_idx": 107, "frame_idx": 6, "global_frame_idx": 21761, "task_index": 21}, {"db_idx": 21762, "episode_idx": 107, "frame_idx": 7, "global_frame_idx": 21762, "task_index": 21}, {"db_idx": 21763, "episode_idx": 107, "frame_idx": 8, "global_frame_idx": 21763, "task_index": 21}, {"db_idx": 21764, 
"episode_idx": 107, "frame_idx": 9, "global_frame_idx": 21764, "task_index": 21}, {"db_idx": 21765, "episode_idx": 107, "frame_idx": 10, "global_frame_idx": 21765, "task_index": 21}, {"db_idx": 21766, "episode_idx": 107, "frame_idx": 11, "global_frame_idx": 21766, "task_index": 21}, {"db_idx": 21767, "episode_idx": 107, "frame_idx": 12, "global_frame_idx": 21767, "task_index": 21}, {"db_idx": 21768, "episode_idx": 107, "frame_idx": 13, "global_frame_idx": 21768, "task_index": 21}, {"db_idx": 21769, "episode_idx": 107, "frame_idx": 14, "global_frame_idx": 21769, "task_index": 21}, {"db_idx": 21770, "episode_idx": 107, "frame_idx": 15, "global_frame_idx": 21770, "task_index": 21}, {"db_idx": 21771, "episode_idx": 107, "frame_idx": 16, "global_frame_idx": 21771, "task_index": 21}, {"db_idx": 21772, "episode_idx": 107, "frame_idx": 17, "global_frame_idx": 21772, "task_index": 21}, {"db_idx": 21773, "episode_idx": 107, "frame_idx": 18, "global_frame_idx": 21773, "task_index": 21}, {"db_idx": 21774, "episode_idx": 107, "frame_idx": 19, "global_frame_idx": 21774, "task_index": 21}, {"db_idx": 21775, "episode_idx": 107, "frame_idx": 20, "global_frame_idx": 21775, "task_index": 21}, {"db_idx": 21776, "episode_idx": 107, "frame_idx": 21, "global_frame_idx": 21776, "task_index": 21}, {"db_idx": 21777, "episode_idx": 107, "frame_idx": 22, "global_frame_idx": 21777, "task_index": 21}, {"db_idx": 21778, "episode_idx": 107, "frame_idx": 23, "global_frame_idx": 21778, "task_index": 21}, {"db_idx": 21779, "episode_idx": 107, "frame_idx": 24, "global_frame_idx": 21779, "task_index": 21}, {"db_idx": 21780, "episode_idx": 107, "frame_idx": 25, "global_frame_idx": 21780, "task_index": 21}, {"db_idx": 21781, "episode_idx": 107, "frame_idx": 26, "global_frame_idx": 21781, "task_index": 21}, {"db_idx": 21782, "episode_idx": 107, "frame_idx": 27, "global_frame_idx": 21782, "task_index": 21}, {"db_idx": 21783, "episode_idx": 107, "frame_idx": 28, "global_frame_idx": 21783, "task_index": 
21}, {"db_idx": 21784, "episode_idx": 107, "frame_idx": 29, "global_frame_idx": 21784, "task_index": 21}, {"db_idx": 21785, "episode_idx": 107, "frame_idx": 30, "global_frame_idx": 21785, "task_index": 21}, {"db_idx": 21786, "episode_idx": 107, "frame_idx": 31, "global_frame_idx": 21786, "task_index": 21}, {"db_idx": 21787, "episode_idx": 107, "frame_idx": 32, "global_frame_idx": 21787, "task_index": 21}, {"db_idx": 21788, "episode_idx": 107, "frame_idx": 33, "global_frame_idx": 21788, "task_index": 21}, {"db_idx": 21789, "episode_idx": 107, "frame_idx": 34, "global_frame_idx": 21789, "task_index": 21}, {"db_idx": 21790, "episode_idx": 107, "frame_idx": 35, "global_frame_idx": 21790, "task_index": 21}, {"db_idx": 21791, "episode_idx": 107, "frame_idx": 36, "global_frame_idx": 21791, "task_index": 21}, {"db_idx": 21792, "episode_idx": 107, "frame_idx": 37, "global_frame_idx": 21792, "task_index": 21}, {"db_idx": 21793, "episode_idx": 107, "frame_idx": 38, "global_frame_idx": 21793, "task_index": 21}, {"db_idx": 21794, "episode_idx": 107, "frame_idx": 39, "global_frame_idx": 21794, "task_index": 21}, {"db_idx": 21795, "episode_idx": 107, "frame_idx": 40, "global_frame_idx": 21795, "task_index": 21}, {"db_idx": 21796, "episode_idx": 107, "frame_idx": 41, "global_frame_idx": 21796, "task_index": 21}, {"db_idx": 21797, "episode_idx": 107, "frame_idx": 42, "global_frame_idx": 21797, "task_index": 21}, {"db_idx": 21798, "episode_idx": 107, "frame_idx": 43, "global_frame_idx": 21798, "task_index": 21}, {"db_idx": 21799, "episode_idx": 107, "frame_idx": 44, "global_frame_idx": 21799, "task_index": 21}, {"db_idx": 21800, "episode_idx": 107, "frame_idx": 45, "global_frame_idx": 21800, "task_index": 21}, {"db_idx": 21801, "episode_idx": 107, "frame_idx": 46, "global_frame_idx": 21801, "task_index": 21}, {"db_idx": 21802, "episode_idx": 107, "frame_idx": 47, "global_frame_idx": 21802, "task_index": 21}, {"db_idx": 21803, "episode_idx": 107, "frame_idx": 48, "global_frame_idx": 
21803, "task_index": 21}, {"db_idx": 21804, "episode_idx": 107, "frame_idx": 49, "global_frame_idx": 21804, "task_index": 21}, {"db_idx": 21805, "episode_idx": 107, "frame_idx": 50, "global_frame_idx": 21805, "task_index": 21}, {"db_idx": 21806, "episode_idx": 107, "frame_idx": 51, "global_frame_idx": 21806, "task_index": 21}, {"db_idx": 21807, "episode_idx": 107, "frame_idx": 52, "global_frame_idx": 21807, "task_index": 21}, {"db_idx": 21808, "episode_idx": 107, "frame_idx": 53, "global_frame_idx": 21808, "task_index": 21}, {"db_idx": 21809, "episode_idx": 107, "frame_idx": 54, "global_frame_idx": 21809, "task_index": 21}, {"db_idx": 21810, "episode_idx": 107, "frame_idx": 55, "global_frame_idx": 21810, "task_index": 21}, {"db_idx": 21811, "episode_idx": 107, "frame_idx": 56, "global_frame_idx": 21811, "task_index": 21}, {"db_idx": 21812, "episode_idx": 107, "frame_idx": 57, "global_frame_idx": 21812, "task_index": 21}, {"db_idx": 21813, "episode_idx": 107, "frame_idx": 58, "global_frame_idx": 21813, "task_index": 21}, {"db_idx": 21814, "episode_idx": 107, "frame_idx": 59, "global_frame_idx": 21814, "task_index": 21}, {"db_idx": 21815, "episode_idx": 107, "frame_idx": 60, "global_frame_idx": 21815, "task_index": 21}, {"db_idx": 21816, "episode_idx": 107, "frame_idx": 61, "global_frame_idx": 21816, "task_index": 21}, {"db_idx": 21817, "episode_idx": 107, "frame_idx": 62, "global_frame_idx": 21817, "task_index": 21}, {"db_idx": 21818, "episode_idx": 107, "frame_idx": 63, "global_frame_idx": 21818, "task_index": 21}, {"db_idx": 21819, "episode_idx": 107, "frame_idx": 64, "global_frame_idx": 21819, "task_index": 21}, {"db_idx": 21820, "episode_idx": 107, "frame_idx": 65, "global_frame_idx": 21820, "task_index": 21}, {"db_idx": 21821, "episode_idx": 107, "frame_idx": 66, "global_frame_idx": 21821, "task_index": 21}, {"db_idx": 21822, "episode_idx": 107, "frame_idx": 67, "global_frame_idx": 21822, "task_index": 21}, {"db_idx": 21823, "episode_idx": 107, "frame_idx": 68, 
"global_frame_idx": 21823, "task_index": 21}, {"db_idx": 21824, "episode_idx": 107, "frame_idx": 69, "global_frame_idx": 21824, "task_index": 21}, {"db_idx": 21825, "episode_idx": 107, "frame_idx": 70, "global_frame_idx": 21825, "task_index": 21}, {"db_idx": 21826, "episode_idx": 107, "frame_idx": 71, "global_frame_idx": 21826, "task_index": 21}, {"db_idx": 21827, "episode_idx": 107, "frame_idx": 72, "global_frame_idx": 21827, "task_index": 21}, {"db_idx": 21828, "episode_idx": 107, "frame_idx": 73, "global_frame_idx": 21828, "task_index": 21}, {"db_idx": 21829, "episode_idx": 107, "frame_idx": 74, "global_frame_idx": 21829, "task_index": 21}, {"db_idx": 21830, "episode_idx": 107, "frame_idx": 75, "global_frame_idx": 21830, "task_index": 21}, {"db_idx": 21831, "episode_idx": 107, "frame_idx": 76, "global_frame_idx": 21831, "task_index": 21}, {"db_idx": 21832, "episode_idx": 107, "frame_idx": 77, "global_frame_idx": 21832, "task_index": 21}, {"db_idx": 21833, "episode_idx": 107, "frame_idx": 78, "global_frame_idx": 21833, "task_index": 21}, {"db_idx": 21834, "episode_idx": 107, "frame_idx": 79, "global_frame_idx": 21834, "task_index": 21}, {"db_idx": 21835, "episode_idx": 107, "frame_idx": 80, "global_frame_idx": 21835, "task_index": 21}, {"db_idx": 21836, "episode_idx": 107, "frame_idx": 81, "global_frame_idx": 21836, "task_index": 21}, {"db_idx": 21837, "episode_idx": 107, "frame_idx": 82, "global_frame_idx": 21837, "task_index": 21}, {"db_idx": 21838, "episode_idx": 107, "frame_idx": 83, "global_frame_idx": 21838, "task_index": 21}, {"db_idx": 21839, "episode_idx": 107, "frame_idx": 84, "global_frame_idx": 21839, "task_index": 21}, {"db_idx": 21840, "episode_idx": 107, "frame_idx": 85, "global_frame_idx": 21840, "task_index": 21}, {"db_idx": 21841, "episode_idx": 107, "frame_idx": 86, "global_frame_idx": 21841, "task_index": 21}, {"db_idx": 21842, "episode_idx": 107, "frame_idx": 87, "global_frame_idx": 21842, "task_index": 21}, {"db_idx": 21843, "episode_idx": 
107, "frame_idx": 88, "global_frame_idx": 21843, "task_index": 21}, {"db_idx": 21844, "episode_idx": 107, "frame_idx": 89, "global_frame_idx": 21844, "task_index": 21}, {"db_idx": 21845, "episode_idx": 107, "frame_idx": 90, "global_frame_idx": 21845, "task_index": 21}, {"db_idx": 21846, "episode_idx": 107, "frame_idx": 91, "global_frame_idx": 21846, "task_index": 21}, {"db_idx": 21847, "episode_idx": 107, "frame_idx": 92, "global_frame_idx": 21847, "task_index": 21}, {"db_idx": 21848, "episode_idx": 108, "frame_idx": 0, "global_frame_idx": 21848, "task_index": 21}, {"db_idx": 21849, "episode_idx": 108, "frame_idx": 1, "global_frame_idx": 21849, "task_index": 21}, {"db_idx": 21850, "episode_idx": 108, "frame_idx": 2, "global_frame_idx": 21850, "task_index": 21}, {"db_idx": 21851, "episode_idx": 108, "frame_idx": 3, "global_frame_idx": 21851, "task_index": 21}, {"db_idx": 21852, "episode_idx": 108, "frame_idx": 4, "global_frame_idx": 21852, "task_index": 21}, {"db_idx": 21853, "episode_idx": 108, "frame_idx": 5, "global_frame_idx": 21853, "task_index": 21}, {"db_idx": 21854, "episode_idx": 108, "frame_idx": 6, "global_frame_idx": 21854, "task_index": 21}, {"db_idx": 21855, "episode_idx": 108, "frame_idx": 7, "global_frame_idx": 21855, "task_index": 21}, {"db_idx": 21856, "episode_idx": 108, "frame_idx": 8, "global_frame_idx": 21856, "task_index": 21}, {"db_idx": 21857, "episode_idx": 108, "frame_idx": 9, "global_frame_idx": 21857, "task_index": 21}, {"db_idx": 21858, "episode_idx": 108, "frame_idx": 10, "global_frame_idx": 21858, "task_index": 21}, {"db_idx": 21859, "episode_idx": 108, "frame_idx": 11, "global_frame_idx": 21859, "task_index": 21}, {"db_idx": 21860, "episode_idx": 108, "frame_idx": 12, "global_frame_idx": 21860, "task_index": 21}, {"db_idx": 21861, "episode_idx": 108, "frame_idx": 13, "global_frame_idx": 21861, "task_index": 21}, {"db_idx": 21862, "episode_idx": 108, "frame_idx": 14, "global_frame_idx": 21862, "task_index": 21}, {"db_idx": 21863, 
"episode_idx": 108, "frame_idx": 15, "global_frame_idx": 21863, "task_index": 21}, {"db_idx": 21864, "episode_idx": 108, "frame_idx": 16, "global_frame_idx": 21864, "task_index": 21}, {"db_idx": 21865, "episode_idx": 108, "frame_idx": 17, "global_frame_idx": 21865, "task_index": 21}, {"db_idx": 21866, "episode_idx": 108, "frame_idx": 18, "global_frame_idx": 21866, "task_index": 21}, {"db_idx": 21867, "episode_idx": 108, "frame_idx": 19, "global_frame_idx": 21867, "task_index": 21}, {"db_idx": 21868, "episode_idx": 108, "frame_idx": 20, "global_frame_idx": 21868, "task_index": 21}, {"db_idx": 21869, "episode_idx": 108, "frame_idx": 21, "global_frame_idx": 21869, "task_index": 21}, {"db_idx": 21870, "episode_idx": 108, "frame_idx": 22, "global_frame_idx": 21870, "task_index": 21}, {"db_idx": 21871, "episode_idx": 108, "frame_idx": 23, "global_frame_idx": 21871, "task_index": 21}, {"db_idx": 21872, "episode_idx": 108, "frame_idx": 24, "global_frame_idx": 21872, "task_index": 21}, {"db_idx": 21873, "episode_idx": 108, "frame_idx": 25, "global_frame_idx": 21873, "task_index": 21}, {"db_idx": 21874, "episode_idx": 108, "frame_idx": 26, "global_frame_idx": 21874, "task_index": 21}, {"db_idx": 21875, "episode_idx": 108, "frame_idx": 27, "global_frame_idx": 21875, "task_index": 21}, {"db_idx": 21876, "episode_idx": 108, "frame_idx": 28, "global_frame_idx": 21876, "task_index": 21}, {"db_idx": 21877, "episode_idx": 108, "frame_idx": 29, "global_frame_idx": 21877, "task_index": 21}, {"db_idx": 21878, "episode_idx": 108, "frame_idx": 30, "global_frame_idx": 21878, "task_index": 21}, {"db_idx": 21879, "episode_idx": 108, "frame_idx": 31, "global_frame_idx": 21879, "task_index": 21}, {"db_idx": 21880, "episode_idx": 108, "frame_idx": 32, "global_frame_idx": 21880, "task_index": 21}, {"db_idx": 21881, "episode_idx": 108, "frame_idx": 33, "global_frame_idx": 21881, "task_index": 21}, {"db_idx": 21882, "episode_idx": 108, "frame_idx": 34, "global_frame_idx": 21882, "task_index": 
21}, {"db_idx": 21883, "episode_idx": 108, "frame_idx": 35, "global_frame_idx": 21883, "task_index": 21}, {"db_idx": 21884, "episode_idx": 108, "frame_idx": 36, "global_frame_idx": 21884, "task_index": 21}, {"db_idx": 21885, "episode_idx": 108, "frame_idx": 37, "global_frame_idx": 21885, "task_index": 21}, {"db_idx": 21886, "episode_idx": 108, "frame_idx": 38, "global_frame_idx": 21886, "task_index": 21}, {"db_idx": 21887, "episode_idx": 108, "frame_idx": 39, "global_frame_idx": 21887, "task_index": 21}, {"db_idx": 21888, "episode_idx": 108, "frame_idx": 40, "global_frame_idx": 21888, "task_index": 21}, {"db_idx": 21889, "episode_idx": 108, "frame_idx": 41, "global_frame_idx": 21889, "task_index": 21}, {"db_idx": 21890, "episode_idx": 108, "frame_idx": 42, "global_frame_idx": 21890, "task_index": 21}, {"db_idx": 21891, "episode_idx": 108, "frame_idx": 43, "global_frame_idx": 21891, "task_index": 21}, {"db_idx": 21892, "episode_idx": 108, "frame_idx": 44, "global_frame_idx": 21892, "task_index": 21}, {"db_idx": 21893, "episode_idx": 108, "frame_idx": 45, "global_frame_idx": 21893, "task_index": 21}, {"db_idx": 21894, "episode_idx": 108, "frame_idx": 46, "global_frame_idx": 21894, "task_index": 21}, {"db_idx": 21895, "episode_idx": 108, "frame_idx": 47, "global_frame_idx": 21895, "task_index": 21}, {"db_idx": 21896, "episode_idx": 108, "frame_idx": 48, "global_frame_idx": 21896, "task_index": 21}, {"db_idx": 21897, "episode_idx": 108, "frame_idx": 49, "global_frame_idx": 21897, "task_index": 21}, {"db_idx": 21898, "episode_idx": 108, "frame_idx": 50, "global_frame_idx": 21898, "task_index": 21}, {"db_idx": 21899, "episode_idx": 108, "frame_idx": 51, "global_frame_idx": 21899, "task_index": 21}, {"db_idx": 21900, "episode_idx": 108, "frame_idx": 52, "global_frame_idx": 21900, "task_index": 21}, {"db_idx": 21901, "episode_idx": 108, "frame_idx": 53, "global_frame_idx": 21901, "task_index": 21}, {"db_idx": 21902, "episode_idx": 108, "frame_idx": 54, "global_frame_idx": 
21902, "task_index": 21}, {"db_idx": 21903, "episode_idx": 108, "frame_idx": 55, "global_frame_idx": 21903, "task_index": 21}, {"db_idx": 21904, "episode_idx": 108, "frame_idx": 56, "global_frame_idx": 21904, "task_index": 21}, {"db_idx": 21905, "episode_idx": 108, "frame_idx": 57, "global_frame_idx": 21905, "task_index": 21}, {"db_idx": 21906, "episode_idx": 108, "frame_idx": 58, "global_frame_idx": 21906, "task_index": 21}, {"db_idx": 21907, "episode_idx": 108, "frame_idx": 59, "global_frame_idx": 21907, "task_index": 21}, {"db_idx": 21908, "episode_idx": 108, "frame_idx": 60, "global_frame_idx": 21908, "task_index": 21}, {"db_idx": 21909, "episode_idx": 108, "frame_idx": 61, "global_frame_idx": 21909, "task_index": 21}, {"db_idx": 21910, "episode_idx": 108, "frame_idx": 62, "global_frame_idx": 21910, "task_index": 21}, {"db_idx": 21911, "episode_idx": 108, "frame_idx": 63, "global_frame_idx": 21911, "task_index": 21}, {"db_idx": 21912, "episode_idx": 108, "frame_idx": 64, "global_frame_idx": 21912, "task_index": 21}, {"db_idx": 21913, "episode_idx": 108, "frame_idx": 65, "global_frame_idx": 21913, "task_index": 21}, {"db_idx": 21914, "episode_idx": 108, "frame_idx": 66, "global_frame_idx": 21914, "task_index": 21}, {"db_idx": 21915, "episode_idx": 108, "frame_idx": 67, "global_frame_idx": 21915, "task_index": 21}, {"db_idx": 21916, "episode_idx": 108, "frame_idx": 68, "global_frame_idx": 21916, "task_index": 21}, {"db_idx": 21917, "episode_idx": 108, "frame_idx": 69, "global_frame_idx": 21917, "task_index": 21}, {"db_idx": 21918, "episode_idx": 108, "frame_idx": 70, "global_frame_idx": 21918, "task_index": 21}, {"db_idx": 21919, "episode_idx": 108, "frame_idx": 71, "global_frame_idx": 21919, "task_index": 21}, {"db_idx": 21920, "episode_idx": 108, "frame_idx": 72, "global_frame_idx": 21920, "task_index": 21}, {"db_idx": 21921, "episode_idx": 108, "frame_idx": 73, "global_frame_idx": 21921, "task_index": 21}, {"db_idx": 21922, "episode_idx": 108, "frame_idx": 74, 
"global_frame_idx": 21922, "task_index": 21}, {"db_idx": 21923, "episode_idx": 108, "frame_idx": 75, "global_frame_idx": 21923, "task_index": 21}, {"db_idx": 21924, "episode_idx": 108, "frame_idx": 76, "global_frame_idx": 21924, "task_index": 21}, {"db_idx": 21925, "episode_idx": 108, "frame_idx": 77, "global_frame_idx": 21925, "task_index": 21}, {"db_idx": 21926, "episode_idx": 108, "frame_idx": 78, "global_frame_idx": 21926, "task_index": 21}, {"db_idx": 21927, "episode_idx": 108, "frame_idx": 79, "global_frame_idx": 21927, "task_index": 21}, {"db_idx": 21928, "episode_idx": 108, "frame_idx": 80, "global_frame_idx": 21928, "task_index": 21}, {"db_idx": 21929, "episode_idx": 108, "frame_idx": 81, "global_frame_idx": 21929, "task_index": 21}, {"db_idx": 21930, "episode_idx": 108, "frame_idx": 82, "global_frame_idx": 21930, "task_index": 21}, {"db_idx": 21931, "episode_idx": 108, "frame_idx": 83, "global_frame_idx": 21931, "task_index": 21}, {"db_idx": 21932, "episode_idx": 108, "frame_idx": 84, "global_frame_idx": 21932, "task_index": 21}, {"db_idx": 21933, "episode_idx": 108, "frame_idx": 85, "global_frame_idx": 21933, "task_index": 21}, {"db_idx": 21934, "episode_idx": 108, "frame_idx": 86, "global_frame_idx": 21934, "task_index": 21}, {"db_idx": 21935, "episode_idx": 108, "frame_idx": 87, "global_frame_idx": 21935, "task_index": 21}, {"db_idx": 21936, "episode_idx": 108, "frame_idx": 88, "global_frame_idx": 21936, "task_index": 21}, {"db_idx": 21937, "episode_idx": 108, "frame_idx": 89, "global_frame_idx": 21937, "task_index": 21}, {"db_idx": 21938, "episode_idx": 108, "frame_idx": 90, "global_frame_idx": 21938, "task_index": 21}, {"db_idx": 21939, "episode_idx": 108, "frame_idx": 91, "global_frame_idx": 21939, "task_index": 21}, {"db_idx": 21940, "episode_idx": 108, "frame_idx": 92, "global_frame_idx": 21940, "task_index": 21}, {"db_idx": 21941, "episode_idx": 108, "frame_idx": 93, "global_frame_idx": 21941, "task_index": 21}, {"db_idx": 21942, "episode_idx": 
108, "frame_idx": 94, "global_frame_idx": 21942, "task_index": 21}, {"db_idx": 21943, "episode_idx": 108, "frame_idx": 95, "global_frame_idx": 21943, "task_index": 21}, {"db_idx": 21944, "episode_idx": 108, "frame_idx": 96, "global_frame_idx": 21944, "task_index": 21}, {"db_idx": 21945, "episode_idx": 108, "frame_idx": 97, "global_frame_idx": 21945, "task_index": 21}, {"db_idx": 21946, "episode_idx": 108, "frame_idx": 98, "global_frame_idx": 21946, "task_index": 21}, {"db_idx": 21947, "episode_idx": 108, "frame_idx": 99, "global_frame_idx": 21947, "task_index": 21}, {"db_idx": 21948, "episode_idx": 108, "frame_idx": 100, "global_frame_idx": 21948, "task_index": 21}, {"db_idx": 21949, "episode_idx": 108, "frame_idx": 101, "global_frame_idx": 21949, "task_index": 21}, {"db_idx": 21950, "episode_idx": 108, "frame_idx": 102, "global_frame_idx": 21950, "task_index": 21}, {"db_idx": 21951, "episode_idx": 108, "frame_idx": 103, "global_frame_idx": 21951, "task_index": 21}, {"db_idx": 21952, "episode_idx": 108, "frame_idx": 104, "global_frame_idx": 21952, "task_index": 21}, {"db_idx": 21953, "episode_idx": 108, "frame_idx": 105, "global_frame_idx": 21953, "task_index": 21}, {"db_idx": 21954, "episode_idx": 108, "frame_idx": 106, "global_frame_idx": 21954, "task_index": 21}, {"db_idx": 21955, "episode_idx": 108, "frame_idx": 107, "global_frame_idx": 21955, "task_index": 21}, {"db_idx": 21956, "episode_idx": 108, "frame_idx": 108, "global_frame_idx": 21956, "task_index": 21}, {"db_idx": 21957, "episode_idx": 108, "frame_idx": 109, "global_frame_idx": 21957, "task_index": 21}, {"db_idx": 21958, "episode_idx": 108, "frame_idx": 110, "global_frame_idx": 21958, "task_index": 21}, {"db_idx": 21959, "episode_idx": 108, "frame_idx": 111, "global_frame_idx": 21959, "task_index": 21}, {"db_idx": 21960, "episode_idx": 108, "frame_idx": 112, "global_frame_idx": 21960, "task_index": 21}, {"db_idx": 21961, "episode_idx": 108, "frame_idx": 113, "global_frame_idx": 21961, "task_index": 
21}, {"db_idx": 21962, "episode_idx": 108, "frame_idx": 114, "global_frame_idx": 21962, "task_index": 21}, {"db_idx": 21963, "episode_idx": 108, "frame_idx": 115, "global_frame_idx": 21963, "task_index": 21}, {"db_idx": 21964, "episode_idx": 108, "frame_idx": 116, "global_frame_idx": 21964, "task_index": 21}, {"db_idx": 21965, "episode_idx": 108, "frame_idx": 117, "global_frame_idx": 21965, "task_index": 21}, {"db_idx": 21966, "episode_idx": 108, "frame_idx": 118, "global_frame_idx": 21966, "task_index": 21}, {"db_idx": 21967, "episode_idx": 108, "frame_idx": 119, "global_frame_idx": 21967, "task_index": 21}, {"db_idx": 21968, "episode_idx": 108, "frame_idx": 120, "global_frame_idx": 21968, "task_index": 21}, {"db_idx": 21969, "episode_idx": 108, "frame_idx": 121, "global_frame_idx": 21969, "task_index": 21}, {"db_idx": 21970, "episode_idx": 108, "frame_idx": 122, "global_frame_idx": 21970, "task_index": 21}, {"db_idx": 21971, "episode_idx": 109, "frame_idx": 0, "global_frame_idx": 21971, "task_index": 21}, {"db_idx": 21972, "episode_idx": 109, "frame_idx": 1, "global_frame_idx": 21972, "task_index": 21}, {"db_idx": 21973, "episode_idx": 109, "frame_idx": 2, "global_frame_idx": 21973, "task_index": 21}, {"db_idx": 21974, "episode_idx": 109, "frame_idx": 3, "global_frame_idx": 21974, "task_index": 21}, {"db_idx": 21975, "episode_idx": 109, "frame_idx": 4, "global_frame_idx": 21975, "task_index": 21}, {"db_idx": 21976, "episode_idx": 109, "frame_idx": 5, "global_frame_idx": 21976, "task_index": 21}, {"db_idx": 21977, "episode_idx": 109, "frame_idx": 6, "global_frame_idx": 21977, "task_index": 21}, {"db_idx": 21978, "episode_idx": 109, "frame_idx": 7, "global_frame_idx": 21978, "task_index": 21}, {"db_idx": 21979, "episode_idx": 109, "frame_idx": 8, "global_frame_idx": 21979, "task_index": 21}, {"db_idx": 21980, "episode_idx": 109, "frame_idx": 9, "global_frame_idx": 21980, "task_index": 21}, {"db_idx": 21981, "episode_idx": 109, "frame_idx": 10, "global_frame_idx": 
21981, "task_index": 21}, {"db_idx": 21982, "episode_idx": 109, "frame_idx": 11, "global_frame_idx": 21982, "task_index": 21}, {"db_idx": 21983, "episode_idx": 109, "frame_idx": 12, "global_frame_idx": 21983, "task_index": 21}, {"db_idx": 21984, "episode_idx": 109, "frame_idx": 13, "global_frame_idx": 21984, "task_index": 21}, {"db_idx": 21985, "episode_idx": 109, "frame_idx": 14, "global_frame_idx": 21985, "task_index": 21}, {"db_idx": 21986, "episode_idx": 109, "frame_idx": 15, "global_frame_idx": 21986, "task_index": 21}, {"db_idx": 21987, "episode_idx": 109, "frame_idx": 16, "global_frame_idx": 21987, "task_index": 21}, {"db_idx": 21988, "episode_idx": 109, "frame_idx": 17, "global_frame_idx": 21988, "task_index": 21}, {"db_idx": 21989, "episode_idx": 109, "frame_idx": 18, "global_frame_idx": 21989, "task_index": 21}, {"db_idx": 21990, "episode_idx": 109, "frame_idx": 19, "global_frame_idx": 21990, "task_index": 21}, {"db_idx": 21991, "episode_idx": 109, "frame_idx": 20, "global_frame_idx": 21991, "task_index": 21}, {"db_idx": 21992, "episode_idx": 109, "frame_idx": 21, "global_frame_idx": 21992, "task_index": 21}, {"db_idx": 21993, "episode_idx": 109, "frame_idx": 22, "global_frame_idx": 21993, "task_index": 21}, {"db_idx": 21994, "episode_idx": 109, "frame_idx": 23, "global_frame_idx": 21994, "task_index": 21}, {"db_idx": 21995, "episode_idx": 109, "frame_idx": 24, "global_frame_idx": 21995, "task_index": 21}, {"db_idx": 21996, "episode_idx": 109, "frame_idx": 25, "global_frame_idx": 21996, "task_index": 21}, {"db_idx": 21997, "episode_idx": 109, "frame_idx": 26, "global_frame_idx": 21997, "task_index": 21}, {"db_idx": 21998, "episode_idx": 109, "frame_idx": 27, "global_frame_idx": 21998, "task_index": 21}, {"db_idx": 21999, "episode_idx": 109, "frame_idx": 28, "global_frame_idx": 21999, "task_index": 21}, {"db_idx": 22000, "episode_idx": 109, "frame_idx": 29, "global_frame_idx": 22000, "task_index": 21}, {"db_idx": 22001, "episode_idx": 109, "frame_idx": 30, 
"global_frame_idx": 22001, "task_index": 21}, {"db_idx": 22002, "episode_idx": 109, "frame_idx": 31, "global_frame_idx": 22002, "task_index": 21}, {"db_idx": 22003, "episode_idx": 109, "frame_idx": 32, "global_frame_idx": 22003, "task_index": 21}, {"db_idx": 22004, "episode_idx": 109, "frame_idx": 33, "global_frame_idx": 22004, "task_index": 21}, {"db_idx": 22005, "episode_idx": 109, "frame_idx": 34, "global_frame_idx": 22005, "task_index": 21}, {"db_idx": 22006, "episode_idx": 109, "frame_idx": 35, "global_frame_idx": 22006, "task_index": 21}, {"db_idx": 22007, "episode_idx": 109, "frame_idx": 36, "global_frame_idx": 22007, "task_index": 21}, {"db_idx": 22008, "episode_idx": 109, "frame_idx": 37, "global_frame_idx": 22008, "task_index": 21}, {"db_idx": 22009, "episode_idx": 109, "frame_idx": 38, "global_frame_idx": 22009, "task_index": 21}, {"db_idx": 22010, "episode_idx": 109, "frame_idx": 39, "global_frame_idx": 22010, "task_index": 21}, {"db_idx": 22011, "episode_idx": 109, "frame_idx": 40, "global_frame_idx": 22011, "task_index": 21}, {"db_idx": 22012, "episode_idx": 109, "frame_idx": 41, "global_frame_idx": 22012, "task_index": 21}, {"db_idx": 22013, "episode_idx": 109, "frame_idx": 42, "global_frame_idx": 22013, "task_index": 21}, {"db_idx": 22014, "episode_idx": 109, "frame_idx": 43, "global_frame_idx": 22014, "task_index": 21}, {"db_idx": 22015, "episode_idx": 109, "frame_idx": 44, "global_frame_idx": 22015, "task_index": 21}, {"db_idx": 22016, "episode_idx": 109, "frame_idx": 45, "global_frame_idx": 22016, "task_index": 21}, {"db_idx": 22017, "episode_idx": 109, "frame_idx": 46, "global_frame_idx": 22017, "task_index": 21}, {"db_idx": 22018, "episode_idx": 109, "frame_idx": 47, "global_frame_idx": 22018, "task_index": 21}, {"db_idx": 22019, "episode_idx": 109, "frame_idx": 48, "global_frame_idx": 22019, "task_index": 21}, {"db_idx": 22020, "episode_idx": 109, "frame_idx": 49, "global_frame_idx": 22020, "task_index": 21}, {"db_idx": 22021, "episode_idx": 
109, "frame_idx": 50, "global_frame_idx": 22021, "task_index": 21}, {"db_idx": 22022, "episode_idx": 109, "frame_idx": 51, "global_frame_idx": 22022, "task_index": 21}, {"db_idx": 22023, "episode_idx": 109, "frame_idx": 52, "global_frame_idx": 22023, "task_index": 21}, {"db_idx": 22024, "episode_idx": 109, "frame_idx": 53, "global_frame_idx": 22024, "task_index": 21}, {"db_idx": 22025, "episode_idx": 109, "frame_idx": 54, "global_frame_idx": 22025, "task_index": 21}, {"db_idx": 22026, "episode_idx": 109, "frame_idx": 55, "global_frame_idx": 22026, "task_index": 21}, {"db_idx": 22027, "episode_idx": 109, "frame_idx": 56, "global_frame_idx": 22027, "task_index": 21}, {"db_idx": 22028, "episode_idx": 109, "frame_idx": 57, "global_frame_idx": 22028, "task_index": 21}, {"db_idx": 22029, "episode_idx": 109, "frame_idx": 58, "global_frame_idx": 22029, "task_index": 21}, {"db_idx": 22030, "episode_idx": 109, "frame_idx": 59, "global_frame_idx": 22030, "task_index": 21}, {"db_idx": 22031, "episode_idx": 109, "frame_idx": 60, "global_frame_idx": 22031, "task_index": 21}, {"db_idx": 22032, "episode_idx": 109, "frame_idx": 61, "global_frame_idx": 22032, "task_index": 21}, {"db_idx": 22033, "episode_idx": 109, "frame_idx": 62, "global_frame_idx": 22033, "task_index": 21}, {"db_idx": 22034, "episode_idx": 109, "frame_idx": 63, "global_frame_idx": 22034, "task_index": 21}, {"db_idx": 22035, "episode_idx": 109, "frame_idx": 64, "global_frame_idx": 22035, "task_index": 21}, {"db_idx": 22036, "episode_idx": 109, "frame_idx": 65, "global_frame_idx": 22036, "task_index": 21}, {"db_idx": 22037, "episode_idx": 109, "frame_idx": 66, "global_frame_idx": 22037, "task_index": 21}, {"db_idx": 22038, "episode_idx": 109, "frame_idx": 67, "global_frame_idx": 22038, "task_index": 21}, {"db_idx": 22039, "episode_idx": 109, "frame_idx": 68, "global_frame_idx": 22039, "task_index": 21}, {"db_idx": 22040, "episode_idx": 109, "frame_idx": 69, "global_frame_idx": 22040, "task_index": 21}, {"db_idx": 
22041, "episode_idx": 109, "frame_idx": 70, "global_frame_idx": 22041, "task_index": 21}, {"db_idx": 22042, "episode_idx": 109, "frame_idx": 71, "global_frame_idx": 22042, "task_index": 21}, {"db_idx": 22043, "episode_idx": 109, "frame_idx": 72, "global_frame_idx": 22043, "task_index": 21}, {"db_idx": 22044, "episode_idx": 109, "frame_idx": 73, "global_frame_idx": 22044, "task_index": 21}, {"db_idx": 22045, "episode_idx": 109, "frame_idx": 74, "global_frame_idx": 22045, "task_index": 21}, {"db_idx": 22046, "episode_idx": 109, "frame_idx": 75, "global_frame_idx": 22046, "task_index": 21}, {"db_idx": 22047, "episode_idx": 109, "frame_idx": 76, "global_frame_idx": 22047, "task_index": 21}, {"db_idx": 22048, "episode_idx": 109, "frame_idx": 77, "global_frame_idx": 22048, "task_index": 21}, {"db_idx": 22049, "episode_idx": 109, "frame_idx": 78, "global_frame_idx": 22049, "task_index": 21}, {"db_idx": 22050, "episode_idx": 109, "frame_idx": 79, "global_frame_idx": 22050, "task_index": 21}, {"db_idx": 22051, "episode_idx": 109, "frame_idx": 80, "global_frame_idx": 22051, "task_index": 21}, {"db_idx": 22052, "episode_idx": 109, "frame_idx": 81, "global_frame_idx": 22052, "task_index": 21}, {"db_idx": 22053, "episode_idx": 109, "frame_idx": 82, "global_frame_idx": 22053, "task_index": 21}, {"db_idx": 22054, "episode_idx": 109, "frame_idx": 83, "global_frame_idx": 22054, "task_index": 21}, {"db_idx": 22055, "episode_idx": 109, "frame_idx": 84, "global_frame_idx": 22055, "task_index": 21}, {"db_idx": 22056, "episode_idx": 109, "frame_idx": 85, "global_frame_idx": 22056, "task_index": 21}, {"db_idx": 22057, "episode_idx": 109, "frame_idx": 86, "global_frame_idx": 22057, "task_index": 21}, {"db_idx": 22058, "episode_idx": 109, "frame_idx": 87, "global_frame_idx": 22058, "task_index": 21}, {"db_idx": 22059, "episode_idx": 109, "frame_idx": 88, "global_frame_idx": 22059, "task_index": 21}, {"db_idx": 22060, "episode_idx": 109, "frame_idx": 89, "global_frame_idx": 22060, 
"task_index": 21}, {"db_idx": 22061, "episode_idx": 109, "frame_idx": 90, "global_frame_idx": 22061, "task_index": 21}, {"db_idx": 22062, "episode_idx": 109, "frame_idx": 91, "global_frame_idx": 22062, "task_index": 21}, {"db_idx": 22063, "episode_idx": 109, "frame_idx": 92, "global_frame_idx": 22063, "task_index": 21}, {"db_idx": 22064, "episode_idx": 109, "frame_idx": 93, "global_frame_idx": 22064, "task_index": 21}, {"db_idx": 22065, "episode_idx": 109, "frame_idx": 94, "global_frame_idx": 22065, "task_index": 21}, {"db_idx": 22066, "episode_idx": 109, "frame_idx": 95, "global_frame_idx": 22066, "task_index": 21}, {"db_idx": 22067, "episode_idx": 109, "frame_idx": 96, "global_frame_idx": 22067, "task_index": 21}, {"db_idx": 22068, "episode_idx": 109, "frame_idx": 97, "global_frame_idx": 22068, "task_index": 21}, {"db_idx": 22069, "episode_idx": 110, "frame_idx": 0, "global_frame_idx": 22069, "task_index": 22}, {"db_idx": 22070, "episode_idx": 110, "frame_idx": 1, "global_frame_idx": 22070, "task_index": 22}, {"db_idx": 22071, "episode_idx": 110, "frame_idx": 2, "global_frame_idx": 22071, "task_index": 22}, {"db_idx": 22072, "episode_idx": 110, "frame_idx": 3, "global_frame_idx": 22072, "task_index": 22}, {"db_idx": 22073, "episode_idx": 110, "frame_idx": 4, "global_frame_idx": 22073, "task_index": 22}, {"db_idx": 22074, "episode_idx": 110, "frame_idx": 5, "global_frame_idx": 22074, "task_index": 22}, {"db_idx": 22075, "episode_idx": 110, "frame_idx": 6, "global_frame_idx": 22075, "task_index": 22}, {"db_idx": 22076, "episode_idx": 110, "frame_idx": 7, "global_frame_idx": 22076, "task_index": 22}, {"db_idx": 22077, "episode_idx": 110, "frame_idx": 8, "global_frame_idx": 22077, "task_index": 22}, {"db_idx": 22078, "episode_idx": 110, "frame_idx": 9, "global_frame_idx": 22078, "task_index": 22}, {"db_idx": 22079, "episode_idx": 110, "frame_idx": 10, "global_frame_idx": 22079, "task_index": 22}, {"db_idx": 22080, "episode_idx": 110, "frame_idx": 11, 
"global_frame_idx": 22080, "task_index": 22}, {"db_idx": 22081, "episode_idx": 110, "frame_idx": 12, "global_frame_idx": 22081, "task_index": 22}, {"db_idx": 22082, "episode_idx": 110, "frame_idx": 13, "global_frame_idx": 22082, "task_index": 22}, {"db_idx": 22083, "episode_idx": 110, "frame_idx": 14, "global_frame_idx": 22083, "task_index": 22}, {"db_idx": 22084, "episode_idx": 110, "frame_idx": 15, "global_frame_idx": 22084, "task_index": 22}, {"db_idx": 22085, "episode_idx": 110, "frame_idx": 16, "global_frame_idx": 22085, "task_index": 22}, {"db_idx": 22086, "episode_idx": 110, "frame_idx": 17, "global_frame_idx": 22086, "task_index": 22}, {"db_idx": 22087, "episode_idx": 110, "frame_idx": 18, "global_frame_idx": 22087, "task_index": 22}, {"db_idx": 22088, "episode_idx": 110, "frame_idx": 19, "global_frame_idx": 22088, "task_index": 22}, {"db_idx": 22089, "episode_idx": 110, "frame_idx": 20, "global_frame_idx": 22089, "task_index": 22}, {"db_idx": 22090, "episode_idx": 110, "frame_idx": 21, "global_frame_idx": 22090, "task_index": 22}, {"db_idx": 22091, "episode_idx": 110, "frame_idx": 22, "global_frame_idx": 22091, "task_index": 22}, {"db_idx": 22092, "episode_idx": 110, "frame_idx": 23, "global_frame_idx": 22092, "task_index": 22}, {"db_idx": 22093, "episode_idx": 110, "frame_idx": 24, "global_frame_idx": 22093, "task_index": 22}, {"db_idx": 22094, "episode_idx": 110, "frame_idx": 25, "global_frame_idx": 22094, "task_index": 22}, {"db_idx": 22095, "episode_idx": 110, "frame_idx": 26, "global_frame_idx": 22095, "task_index": 22}, {"db_idx": 22096, "episode_idx": 110, "frame_idx": 27, "global_frame_idx": 22096, "task_index": 22}, {"db_idx": 22097, "episode_idx": 110, "frame_idx": 28, "global_frame_idx": 22097, "task_index": 22}, {"db_idx": 22098, "episode_idx": 110, "frame_idx": 29, "global_frame_idx": 22098, "task_index": 22}, {"db_idx": 22099, "episode_idx": 110, "frame_idx": 30, "global_frame_idx": 22099, "task_index": 22}, {"db_idx": 22100, "episode_idx": 
110, "frame_idx": 31, "global_frame_idx": 22100, "task_index": 22}, {"db_idx": 22101, "episode_idx": 110, "frame_idx": 32, "global_frame_idx": 22101, "task_index": 22}, {"db_idx": 22102, "episode_idx": 110, "frame_idx": 33, "global_frame_idx": 22102, "task_index": 22}, {"db_idx": 22103, "episode_idx": 110, "frame_idx": 34, "global_frame_idx": 22103, "task_index": 22}, {"db_idx": 22104, "episode_idx": 110, "frame_idx": 35, "global_frame_idx": 22104, "task_index": 22}, {"db_idx": 22105, "episode_idx": 110, "frame_idx": 36, "global_frame_idx": 22105, "task_index": 22}, {"db_idx": 22106, "episode_idx": 110, "frame_idx": 37, "global_frame_idx": 22106, "task_index": 22}, {"db_idx": 22107, "episode_idx": 110, "frame_idx": 38, "global_frame_idx": 22107, "task_index": 22}, {"db_idx": 22108, "episode_idx": 110, "frame_idx": 39, "global_frame_idx": 22108, "task_index": 22}, {"db_idx": 22109, "episode_idx": 110, "frame_idx": 40, "global_frame_idx": 22109, "task_index": 22}, {"db_idx": 22110, "episode_idx": 110, "frame_idx": 41, "global_frame_idx": 22110, "task_index": 22}, {"db_idx": 22111, "episode_idx": 110, "frame_idx": 42, "global_frame_idx": 22111, "task_index": 22}, {"db_idx": 22112, "episode_idx": 110, "frame_idx": 43, "global_frame_idx": 22112, "task_index": 22}, {"db_idx": 22113, "episode_idx": 110, "frame_idx": 44, "global_frame_idx": 22113, "task_index": 22}, {"db_idx": 22114, "episode_idx": 110, "frame_idx": 45, "global_frame_idx": 22114, "task_index": 22}, {"db_idx": 22115, "episode_idx": 110, "frame_idx": 46, "global_frame_idx": 22115, "task_index": 22}, {"db_idx": 22116, "episode_idx": 110, "frame_idx": 47, "global_frame_idx": 22116, "task_index": 22}, {"db_idx": 22117, "episode_idx": 110, "frame_idx": 48, "global_frame_idx": 22117, "task_index": 22}, {"db_idx": 22118, "episode_idx": 110, "frame_idx": 49, "global_frame_idx": 22118, "task_index": 22}, {"db_idx": 22119, "episode_idx": 110, "frame_idx": 50, "global_frame_idx": 22119, "task_index": 22}, {"db_idx": 
22120, "episode_idx": 110, "frame_idx": 51, "global_frame_idx": 22120, "task_index": 22}, {"db_idx": 22121, "episode_idx": 110, "frame_idx": 52, "global_frame_idx": 22121, "task_index": 22}, {"db_idx": 22122, "episode_idx": 110, "frame_idx": 53, "global_frame_idx": 22122, "task_index": 22}, {"db_idx": 22123, "episode_idx": 110, "frame_idx": 54, "global_frame_idx": 22123, "task_index": 22}, {"db_idx": 22124, "episode_idx": 110, "frame_idx": 55, "global_frame_idx": 22124, "task_index": 22}, {"db_idx": 22125, "episode_idx": 110, "frame_idx": 56, "global_frame_idx": 22125, "task_index": 22}, {"db_idx": 22126, "episode_idx": 110, "frame_idx": 57, "global_frame_idx": 22126, "task_index": 22}, {"db_idx": 22127, "episode_idx": 110, "frame_idx": 58, "global_frame_idx": 22127, "task_index": 22}, {"db_idx": 22128, "episode_idx": 110, "frame_idx": 59, "global_frame_idx": 22128, "task_index": 22}, {"db_idx": 22129, "episode_idx": 110, "frame_idx": 60, "global_frame_idx": 22129, "task_index": 22}, {"db_idx": 22130, "episode_idx": 110, "frame_idx": 61, "global_frame_idx": 22130, "task_index": 22}, {"db_idx": 22131, "episode_idx": 110, "frame_idx": 62, "global_frame_idx": 22131, "task_index": 22}, {"db_idx": 22132, "episode_idx": 110, "frame_idx": 63, "global_frame_idx": 22132, "task_index": 22}, {"db_idx": 22133, "episode_idx": 110, "frame_idx": 64, "global_frame_idx": 22133, "task_index": 22}, {"db_idx": 22134, "episode_idx": 110, "frame_idx": 65, "global_frame_idx": 22134, "task_index": 22}, {"db_idx": 22135, "episode_idx": 110, "frame_idx": 66, "global_frame_idx": 22135, "task_index": 22}, {"db_idx": 22136, "episode_idx": 110, "frame_idx": 67, "global_frame_idx": 22136, "task_index": 22}, {"db_idx": 22137, "episode_idx": 110, "frame_idx": 68, "global_frame_idx": 22137, "task_index": 22}, {"db_idx": 22138, "episode_idx": 110, "frame_idx": 69, "global_frame_idx": 22138, "task_index": 22}, {"db_idx": 22139, "episode_idx": 110, "frame_idx": 70, "global_frame_idx": 22139, 
"task_index": 22}, {"db_idx": 22140, "episode_idx": 110, "frame_idx": 71, "global_frame_idx": 22140, "task_index": 22}, {"db_idx": 22141, "episode_idx": 110, "frame_idx": 72, "global_frame_idx": 22141, "task_index": 22}, {"db_idx": 22142, "episode_idx": 110, "frame_idx": 73, "global_frame_idx": 22142, "task_index": 22}, {"db_idx": 22143, "episode_idx": 110, "frame_idx": 74, "global_frame_idx": 22143, "task_index": 22}, {"db_idx": 22144, "episode_idx": 110, "frame_idx": 75, "global_frame_idx": 22144, "task_index": 22}, {"db_idx": 22145, "episode_idx": 110, "frame_idx": 76, "global_frame_idx": 22145, "task_index": 22}, {"db_idx": 22146, "episode_idx": 110, "frame_idx": 77, "global_frame_idx": 22146, "task_index": 22}, {"db_idx": 22147, "episode_idx": 110, "frame_idx": 78, "global_frame_idx": 22147, "task_index": 22}, {"db_idx": 22148, "episode_idx": 110, "frame_idx": 79, "global_frame_idx": 22148, "task_index": 22}, {"db_idx": 22149, "episode_idx": 110, "frame_idx": 80, "global_frame_idx": 22149, "task_index": 22}, {"db_idx": 22150, "episode_idx": 110, "frame_idx": 81, "global_frame_idx": 22150, "task_index": 22}, {"db_idx": 22151, "episode_idx": 110, "frame_idx": 82, "global_frame_idx": 22151, "task_index": 22}, {"db_idx": 22152, "episode_idx": 110, "frame_idx": 83, "global_frame_idx": 22152, "task_index": 22}, {"db_idx": 22153, "episode_idx": 110, "frame_idx": 84, "global_frame_idx": 22153, "task_index": 22}, {"db_idx": 22154, "episode_idx": 110, "frame_idx": 85, "global_frame_idx": 22154, "task_index": 22}, {"db_idx": 22155, "episode_idx": 110, "frame_idx": 86, "global_frame_idx": 22155, "task_index": 22}, {"db_idx": 22156, "episode_idx": 110, "frame_idx": 87, "global_frame_idx": 22156, "task_index": 22}, {"db_idx": 22157, "episode_idx": 110, "frame_idx": 88, "global_frame_idx": 22157, "task_index": 22}, {"db_idx": 22158, "episode_idx": 110, "frame_idx": 89, "global_frame_idx": 22158, "task_index": 22}, {"db_idx": 22159, "episode_idx": 111, "frame_idx": 0, 
"global_frame_idx": 22159, "task_index": 22}, {"db_idx": 22160, "episode_idx": 111, "frame_idx": 1, "global_frame_idx": 22160, "task_index": 22}, {"db_idx": 22161, "episode_idx": 111, "frame_idx": 2, "global_frame_idx": 22161, "task_index": 22}, {"db_idx": 22162, "episode_idx": 111, "frame_idx": 3, "global_frame_idx": 22162, "task_index": 22}, {"db_idx": 22163, "episode_idx": 111, "frame_idx": 4, "global_frame_idx": 22163, "task_index": 22}, {"db_idx": 22164, "episode_idx": 111, "frame_idx": 5, "global_frame_idx": 22164, "task_index": 22}, {"db_idx": 22165, "episode_idx": 111, "frame_idx": 6, "global_frame_idx": 22165, "task_index": 22}, {"db_idx": 22166, "episode_idx": 111, "frame_idx": 7, "global_frame_idx": 22166, "task_index": 22}, {"db_idx": 22167, "episode_idx": 111, "frame_idx": 8, "global_frame_idx": 22167, "task_index": 22}, {"db_idx": 22168, "episode_idx": 111, "frame_idx": 9, "global_frame_idx": 22168, "task_index": 22}, {"db_idx": 22169, "episode_idx": 111, "frame_idx": 10, "global_frame_idx": 22169, "task_index": 22}, {"db_idx": 22170, "episode_idx": 111, "frame_idx": 11, "global_frame_idx": 22170, "task_index": 22}, {"db_idx": 22171, "episode_idx": 111, "frame_idx": 12, "global_frame_idx": 22171, "task_index": 22}, {"db_idx": 22172, "episode_idx": 111, "frame_idx": 13, "global_frame_idx": 22172, "task_index": 22}, {"db_idx": 22173, "episode_idx": 111, "frame_idx": 14, "global_frame_idx": 22173, "task_index": 22}, {"db_idx": 22174, "episode_idx": 111, "frame_idx": 15, "global_frame_idx": 22174, "task_index": 22}, {"db_idx": 22175, "episode_idx": 111, "frame_idx": 16, "global_frame_idx": 22175, "task_index": 22}, {"db_idx": 22176, "episode_idx": 111, "frame_idx": 17, "global_frame_idx": 22176, "task_index": 22}, {"db_idx": 22177, "episode_idx": 111, "frame_idx": 18, "global_frame_idx": 22177, "task_index": 22}, {"db_idx": 22178, "episode_idx": 111, "frame_idx": 19, "global_frame_idx": 22178, "task_index": 22}, {"db_idx": 22179, "episode_idx": 111, 
"frame_idx": 20, "global_frame_idx": 22179, "task_index": 22}, {"db_idx": 22180, "episode_idx": 111, "frame_idx": 21, "global_frame_idx": 22180, "task_index": 22}, {"db_idx": 22181, "episode_idx": 111, "frame_idx": 22, "global_frame_idx": 22181, "task_index": 22}, {"db_idx": 22182, "episode_idx": 111, "frame_idx": 23, "global_frame_idx": 22182, "task_index": 22}, {"db_idx": 22183, "episode_idx": 111, "frame_idx": 24, "global_frame_idx": 22183, "task_index": 22}, {"db_idx": 22184, "episode_idx": 111, "frame_idx": 25, "global_frame_idx": 22184, "task_index": 22}, {"db_idx": 22185, "episode_idx": 111, "frame_idx": 26, "global_frame_idx": 22185, "task_index": 22}, {"db_idx": 22186, "episode_idx": 111, "frame_idx": 27, "global_frame_idx": 22186, "task_index": 22}, {"db_idx": 22187, "episode_idx": 111, "frame_idx": 28, "global_frame_idx": 22187, "task_index": 22}, {"db_idx": 22188, "episode_idx": 111, "frame_idx": 29, "global_frame_idx": 22188, "task_index": 22}, {"db_idx": 22189, "episode_idx": 111, "frame_idx": 30, "global_frame_idx": 22189, "task_index": 22}, {"db_idx": 22190, "episode_idx": 111, "frame_idx": 31, "global_frame_idx": 22190, "task_index": 22}, {"db_idx": 22191, "episode_idx": 111, "frame_idx": 32, "global_frame_idx": 22191, "task_index": 22}, {"db_idx": 22192, "episode_idx": 111, "frame_idx": 33, "global_frame_idx": 22192, "task_index": 22}, {"db_idx": 22193, "episode_idx": 111, "frame_idx": 34, "global_frame_idx": 22193, "task_index": 22}, {"db_idx": 22194, "episode_idx": 111, "frame_idx": 35, "global_frame_idx": 22194, "task_index": 22}, {"db_idx": 22195, "episode_idx": 111, "frame_idx": 36, "global_frame_idx": 22195, "task_index": 22}, {"db_idx": 22196, "episode_idx": 111, "frame_idx": 37, "global_frame_idx": 22196, "task_index": 22}, {"db_idx": 22197, "episode_idx": 111, "frame_idx": 38, "global_frame_idx": 22197, "task_index": 22}, {"db_idx": 22198, "episode_idx": 111, "frame_idx": 39, "global_frame_idx": 22198, "task_index": 22}, {"db_idx": 22199, 
"episode_idx": 111, "frame_idx": 40, "global_frame_idx": 22199, "task_index": 22}, {"db_idx": 22200, "episode_idx": 111, "frame_idx": 41, "global_frame_idx": 22200, "task_index": 22}, {"db_idx": 22201, "episode_idx": 111, "frame_idx": 42, "global_frame_idx": 22201, "task_index": 22}, {"db_idx": 22202, "episode_idx": 111, "frame_idx": 43, "global_frame_idx": 22202, "task_index": 22}, {"db_idx": 22203, "episode_idx": 111, "frame_idx": 44, "global_frame_idx": 22203, "task_index": 22}, {"db_idx": 22204, "episode_idx": 111, "frame_idx": 45, "global_frame_idx": 22204, "task_index": 22}, {"db_idx": 22205, "episode_idx": 111, "frame_idx": 46, "global_frame_idx": 22205, "task_index": 22}, {"db_idx": 22206, "episode_idx": 111, "frame_idx": 47, "global_frame_idx": 22206, "task_index": 22}, {"db_idx": 22207, "episode_idx": 111, "frame_idx": 48, "global_frame_idx": 22207, "task_index": 22}, {"db_idx": 22208, "episode_idx": 111, "frame_idx": 49, "global_frame_idx": 22208, "task_index": 22}, {"db_idx": 22209, "episode_idx": 111, "frame_idx": 50, "global_frame_idx": 22209, "task_index": 22}, {"db_idx": 22210, "episode_idx": 111, "frame_idx": 51, "global_frame_idx": 22210, "task_index": 22}, {"db_idx": 22211, "episode_idx": 111, "frame_idx": 52, "global_frame_idx": 22211, "task_index": 22}, {"db_idx": 22212, "episode_idx": 111, "frame_idx": 53, "global_frame_idx": 22212, "task_index": 22}, {"db_idx": 22213, "episode_idx": 111, "frame_idx": 54, "global_frame_idx": 22213, "task_index": 22}, {"db_idx": 22214, "episode_idx": 111, "frame_idx": 55, "global_frame_idx": 22214, "task_index": 22}, {"db_idx": 22215, "episode_idx": 111, "frame_idx": 56, "global_frame_idx": 22215, "task_index": 22}, {"db_idx": 22216, "episode_idx": 111, "frame_idx": 57, "global_frame_idx": 22216, "task_index": 22}, {"db_idx": 22217, "episode_idx": 111, "frame_idx": 58, "global_frame_idx": 22217, "task_index": 22}, {"db_idx": 22218, "episode_idx": 111, "frame_idx": 59, "global_frame_idx": 22218, "task_index": 
22}, {"db_idx": 22219, "episode_idx": 111, "frame_idx": 60, "global_frame_idx": 22219, "task_index": 22}, {"db_idx": 22220, "episode_idx": 111, "frame_idx": 61, "global_frame_idx": 22220, "task_index": 22}, {"db_idx": 22221, "episode_idx": 111, "frame_idx": 62, "global_frame_idx": 22221, "task_index": 22}, {"db_idx": 22222, "episode_idx": 111, "frame_idx": 63, "global_frame_idx": 22222, "task_index": 22}, {"db_idx": 22223, "episode_idx": 111, "frame_idx": 64, "global_frame_idx": 22223, "task_index": 22}, {"db_idx": 22224, "episode_idx": 111, "frame_idx": 65, "global_frame_idx": 22224, "task_index": 22}, {"db_idx": 22225, "episode_idx": 111, "frame_idx": 66, "global_frame_idx": 22225, "task_index": 22}, {"db_idx": 22226, "episode_idx": 111, "frame_idx": 67, "global_frame_idx": 22226, "task_index": 22}, {"db_idx": 22227, "episode_idx": 111, "frame_idx": 68, "global_frame_idx": 22227, "task_index": 22}, {"db_idx": 22228, "episode_idx": 111, "frame_idx": 69, "global_frame_idx": 22228, "task_index": 22}, {"db_idx": 22229, "episode_idx": 111, "frame_idx": 70, "global_frame_idx": 22229, "task_index": 22}, {"db_idx": 22230, "episode_idx": 111, "frame_idx": 71, "global_frame_idx": 22230, "task_index": 22}, {"db_idx": 22231, "episode_idx": 111, "frame_idx": 72, "global_frame_idx": 22231, "task_index": 22}, {"db_idx": 22232, "episode_idx": 111, "frame_idx": 73, "global_frame_idx": 22232, "task_index": 22}, {"db_idx": 22233, "episode_idx": 111, "frame_idx": 74, "global_frame_idx": 22233, "task_index": 22}, {"db_idx": 22234, "episode_idx": 111, "frame_idx": 75, "global_frame_idx": 22234, "task_index": 22}, {"db_idx": 22235, "episode_idx": 111, "frame_idx": 76, "global_frame_idx": 22235, "task_index": 22}, {"db_idx": 22236, "episode_idx": 111, "frame_idx": 77, "global_frame_idx": 22236, "task_index": 22}, {"db_idx": 22237, "episode_idx": 111, "frame_idx": 78, "global_frame_idx": 22237, "task_index": 22}, {"db_idx": 22238, "episode_idx": 111, "frame_idx": 79, "global_frame_idx": 
22238, "task_index": 22}, {"db_idx": 22239, "episode_idx": 111, "frame_idx": 80, "global_frame_idx": 22239, "task_index": 22}, {"db_idx": 22240, "episode_idx": 111, "frame_idx": 81, "global_frame_idx": 22240, "task_index": 22}, {"db_idx": 22241, "episode_idx": 111, "frame_idx": 82, "global_frame_idx": 22241, "task_index": 22}, {"db_idx": 22242, "episode_idx": 111, "frame_idx": 83, "global_frame_idx": 22242, "task_index": 22}, {"db_idx": 22243, "episode_idx": 111, "frame_idx": 84, "global_frame_idx": 22243, "task_index": 22}, {"db_idx": 22244, "episode_idx": 111, "frame_idx": 85, "global_frame_idx": 22244, "task_index": 22}, {"db_idx": 22245, "episode_idx": 111, "frame_idx": 86, "global_frame_idx": 22245, "task_index": 22}, {"db_idx": 22246, "episode_idx": 112, "frame_idx": 0, "global_frame_idx": 22246, "task_index": 22}, {"db_idx": 22247, "episode_idx": 112, "frame_idx": 1, "global_frame_idx": 22247, "task_index": 22}, {"db_idx": 22248, "episode_idx": 112, "frame_idx": 2, "global_frame_idx": 22248, "task_index": 22}, {"db_idx": 22249, "episode_idx": 112, "frame_idx": 3, "global_frame_idx": 22249, "task_index": 22}, {"db_idx": 22250, "episode_idx": 112, "frame_idx": 4, "global_frame_idx": 22250, "task_index": 22}, {"db_idx": 22251, "episode_idx": 112, "frame_idx": 5, "global_frame_idx": 22251, "task_index": 22}, {"db_idx": 22252, "episode_idx": 112, "frame_idx": 6, "global_frame_idx": 22252, "task_index": 22}, {"db_idx": 22253, "episode_idx": 112, "frame_idx": 7, "global_frame_idx": 22253, "task_index": 22}, {"db_idx": 22254, "episode_idx": 112, "frame_idx": 8, "global_frame_idx": 22254, "task_index": 22}, {"db_idx": 22255, "episode_idx": 112, "frame_idx": 9, "global_frame_idx": 22255, "task_index": 22}, {"db_idx": 22256, "episode_idx": 112, "frame_idx": 10, "global_frame_idx": 22256, "task_index": 22}, {"db_idx": 22257, "episode_idx": 112, "frame_idx": 11, "global_frame_idx": 22257, "task_index": 22}, {"db_idx": 22258, "episode_idx": 112, "frame_idx": 12, 
"global_frame_idx": 22258, "task_index": 22}, {"db_idx": 22259, "episode_idx": 112, "frame_idx": 13, "global_frame_idx": 22259, "task_index": 22}, {"db_idx": 22260, "episode_idx": 112, "frame_idx": 14, "global_frame_idx": 22260, "task_index": 22}, {"db_idx": 22261, "episode_idx": 112, "frame_idx": 15, "global_frame_idx": 22261, "task_index": 22}, {"db_idx": 22262, "episode_idx": 112, "frame_idx": 16, "global_frame_idx": 22262, "task_index": 22}, {"db_idx": 22263, "episode_idx": 112, "frame_idx": 17, "global_frame_idx": 22263, "task_index": 22}, {"db_idx": 22264, "episode_idx": 112, "frame_idx": 18, "global_frame_idx": 22264, "task_index": 22}, {"db_idx": 22265, "episode_idx": 112, "frame_idx": 19, "global_frame_idx": 22265, "task_index": 22}, {"db_idx": 22266, "episode_idx": 112, "frame_idx": 20, "global_frame_idx": 22266, "task_index": 22}, {"db_idx": 22267, "episode_idx": 112, "frame_idx": 21, "global_frame_idx": 22267, "task_index": 22}, {"db_idx": 22268, "episode_idx": 112, "frame_idx": 22, "global_frame_idx": 22268, "task_index": 22}, {"db_idx": 22269, "episode_idx": 112, "frame_idx": 23, "global_frame_idx": 22269, "task_index": 22}, {"db_idx": 22270, "episode_idx": 112, "frame_idx": 24, "global_frame_idx": 22270, "task_index": 22}, {"db_idx": 22271, "episode_idx": 112, "frame_idx": 25, "global_frame_idx": 22271, "task_index": 22}, {"db_idx": 22272, "episode_idx": 112, "frame_idx": 26, "global_frame_idx": 22272, "task_index": 22}, {"db_idx": 22273, "episode_idx": 112, "frame_idx": 27, "global_frame_idx": 22273, "task_index": 22}, {"db_idx": 22274, "episode_idx": 112, "frame_idx": 28, "global_frame_idx": 22274, "task_index": 22}, {"db_idx": 22275, "episode_idx": 112, "frame_idx": 29, "global_frame_idx": 22275, "task_index": 22}, {"db_idx": 22276, "episode_idx": 112, "frame_idx": 30, "global_frame_idx": 22276, "task_index": 22}, {"db_idx": 22277, "episode_idx": 112, "frame_idx": 31, "global_frame_idx": 22277, "task_index": 22}, {"db_idx": 22278, "episode_idx": 
112, "frame_idx": 32, "global_frame_idx": 22278, "task_index": 22}, {"db_idx": 22279, "episode_idx": 112, "frame_idx": 33, "global_frame_idx": 22279, "task_index": 22}, {"db_idx": 22280, "episode_idx": 112, "frame_idx": 34, "global_frame_idx": 22280, "task_index": 22}, {"db_idx": 22281, "episode_idx": 112, "frame_idx": 35, "global_frame_idx": 22281, "task_index": 22}, {"db_idx": 22282, "episode_idx": 112, "frame_idx": 36, "global_frame_idx": 22282, "task_index": 22}, {"db_idx": 22283, "episode_idx": 112, "frame_idx": 37, "global_frame_idx": 22283, "task_index": 22}, {"db_idx": 22284, "episode_idx": 112, "frame_idx": 38, "global_frame_idx": 22284, "task_index": 22}, {"db_idx": 22285, "episode_idx": 112, "frame_idx": 39, "global_frame_idx": 22285, "task_index": 22}, {"db_idx": 22286, "episode_idx": 112, "frame_idx": 40, "global_frame_idx": 22286, "task_index": 22}, {"db_idx": 22287, "episode_idx": 112, "frame_idx": 41, "global_frame_idx": 22287, "task_index": 22}, {"db_idx": 22288, "episode_idx": 112, "frame_idx": 42, "global_frame_idx": 22288, "task_index": 22}, {"db_idx": 22289, "episode_idx": 112, "frame_idx": 43, "global_frame_idx": 22289, "task_index": 22}, {"db_idx": 22290, "episode_idx": 112, "frame_idx": 44, "global_frame_idx": 22290, "task_index": 22}, {"db_idx": 22291, "episode_idx": 112, "frame_idx": 45, "global_frame_idx": 22291, "task_index": 22}, {"db_idx": 22292, "episode_idx": 112, "frame_idx": 46, "global_frame_idx": 22292, "task_index": 22}, {"db_idx": 22293, "episode_idx": 112, "frame_idx": 47, "global_frame_idx": 22293, "task_index": 22}, {"db_idx": 22294, "episode_idx": 112, "frame_idx": 48, "global_frame_idx": 22294, "task_index": 22}, {"db_idx": 22295, "episode_idx": 112, "frame_idx": 49, "global_frame_idx": 22295, "task_index": 22}, {"db_idx": 22296, "episode_idx": 112, "frame_idx": 50, "global_frame_idx": 22296, "task_index": 22}, {"db_idx": 22297, "episode_idx": 112, "frame_idx": 51, "global_frame_idx": 22297, "task_index": 22}, {"db_idx": 
22298, "episode_idx": 112, "frame_idx": 52, "global_frame_idx": 22298, "task_index": 22}, {"db_idx": 22299, "episode_idx": 112, "frame_idx": 53, "global_frame_idx": 22299, "task_index": 22}, {"db_idx": 22300, "episode_idx": 112, "frame_idx": 54, "global_frame_idx": 22300, "task_index": 22}, {"db_idx": 22301, "episode_idx": 112, "frame_idx": 55, "global_frame_idx": 22301, "task_index": 22}, {"db_idx": 22302, "episode_idx": 112, "frame_idx": 56, "global_frame_idx": 22302, "task_index": 22}, {"db_idx": 22303, "episode_idx": 112, "frame_idx": 57, "global_frame_idx": 22303, "task_index": 22}, {"db_idx": 22304, "episode_idx": 112, "frame_idx": 58, "global_frame_idx": 22304, "task_index": 22}, {"db_idx": 22305, "episode_idx": 112, "frame_idx": 59, "global_frame_idx": 22305, "task_index": 22}, {"db_idx": 22306, "episode_idx": 112, "frame_idx": 60, "global_frame_idx": 22306, "task_index": 22}, {"db_idx": 22307, "episode_idx": 112, "frame_idx": 61, "global_frame_idx": 22307, "task_index": 22}, {"db_idx": 22308, "episode_idx": 112, "frame_idx": 62, "global_frame_idx": 22308, "task_index": 22}, {"db_idx": 22309, "episode_idx": 112, "frame_idx": 63, "global_frame_idx": 22309, "task_index": 22}, {"db_idx": 22310, "episode_idx": 112, "frame_idx": 64, "global_frame_idx": 22310, "task_index": 22}, {"db_idx": 22311, "episode_idx": 112, "frame_idx": 65, "global_frame_idx": 22311, "task_index": 22}, {"db_idx": 22312, "episode_idx": 112, "frame_idx": 66, "global_frame_idx": 22312, "task_index": 22}, {"db_idx": 22313, "episode_idx": 112, "frame_idx": 67, "global_frame_idx": 22313, "task_index": 22}, {"db_idx": 22314, "episode_idx": 112, "frame_idx": 68, "global_frame_idx": 22314, "task_index": 22}, {"db_idx": 22315, "episode_idx": 112, "frame_idx": 69, "global_frame_idx": 22315, "task_index": 22}, {"db_idx": 22316, "episode_idx": 112, "frame_idx": 70, "global_frame_idx": 22316, "task_index": 22}, {"db_idx": 22317, "episode_idx": 112, "frame_idx": 71, "global_frame_idx": 22317, 
"task_index": 22}, {"db_idx": 22318, "episode_idx": 112, "frame_idx": 72, "global_frame_idx": 22318, "task_index": 22}, {"db_idx": 22319, "episode_idx": 112, "frame_idx": 73, "global_frame_idx": 22319, "task_index": 22}, {"db_idx": 22320, "episode_idx": 112, "frame_idx": 74, "global_frame_idx": 22320, "task_index": 22}, {"db_idx": 22321, "episode_idx": 112, "frame_idx": 75, "global_frame_idx": 22321, "task_index": 22}, {"db_idx": 22322, "episode_idx": 112, "frame_idx": 76, "global_frame_idx": 22322, "task_index": 22}, {"db_idx": 22323, "episode_idx": 112, "frame_idx": 77, "global_frame_idx": 22323, "task_index": 22}, {"db_idx": 22324, "episode_idx": 112, "frame_idx": 78, "global_frame_idx": 22324, "task_index": 22}, {"db_idx": 22325, "episode_idx": 112, "frame_idx": 79, "global_frame_idx": 22325, "task_index": 22}, {"db_idx": 22326, "episode_idx": 112, "frame_idx": 80, "global_frame_idx": 22326, "task_index": 22}, {"db_idx": 22327, "episode_idx": 112, "frame_idx": 81, "global_frame_idx": 22327, "task_index": 22}, {"db_idx": 22328, "episode_idx": 112, "frame_idx": 82, "global_frame_idx": 22328, "task_index": 22}, {"db_idx": 22329, "episode_idx": 112, "frame_idx": 83, "global_frame_idx": 22329, "task_index": 22}, {"db_idx": 22330, "episode_idx": 112, "frame_idx": 84, "global_frame_idx": 22330, "task_index": 22}, {"db_idx": 22331, "episode_idx": 112, "frame_idx": 85, "global_frame_idx": 22331, "task_index": 22}, {"db_idx": 22332, "episode_idx": 112, "frame_idx": 86, "global_frame_idx": 22332, "task_index": 22}, {"db_idx": 22333, "episode_idx": 112, "frame_idx": 87, "global_frame_idx": 22333, "task_index": 22}, {"db_idx": 22334, "episode_idx": 112, "frame_idx": 88, "global_frame_idx": 22334, "task_index": 22}, {"db_idx": 22335, "episode_idx": 112, "frame_idx": 89, "global_frame_idx": 22335, "task_index": 22}, {"db_idx": 22336, "episode_idx": 112, "frame_idx": 90, "global_frame_idx": 22336, "task_index": 22}, {"db_idx": 22337, "episode_idx": 112, "frame_idx": 91, 
"global_frame_idx": 22337, "task_index": 22}, {"db_idx": 22338, "episode_idx": 112, "frame_idx": 92, "global_frame_idx": 22338, "task_index": 22}, {"db_idx": 22339, "episode_idx": 112, "frame_idx": 93, "global_frame_idx": 22339, "task_index": 22}, {"db_idx": 22340, "episode_idx": 112, "frame_idx": 94, "global_frame_idx": 22340, "task_index": 22}, {"db_idx": 22341, "episode_idx": 112, "frame_idx": 95, "global_frame_idx": 22341, "task_index": 22}, {"db_idx": 22342, "episode_idx": 112, "frame_idx": 96, "global_frame_idx": 22342, "task_index": 22}, {"db_idx": 22343, "episode_idx": 112, "frame_idx": 97, "global_frame_idx": 22343, "task_index": 22}, {"db_idx": 22344, "episode_idx": 113, "frame_idx": 0, "global_frame_idx": 22344, "task_index": 22}, {"db_idx": 22345, "episode_idx": 113, "frame_idx": 1, "global_frame_idx": 22345, "task_index": 22}, {"db_idx": 22346, "episode_idx": 113, "frame_idx": 2, "global_frame_idx": 22346, "task_index": 22}, {"db_idx": 22347, "episode_idx": 113, "frame_idx": 3, "global_frame_idx": 22347, "task_index": 22}, {"db_idx": 22348, "episode_idx": 113, "frame_idx": 4, "global_frame_idx": 22348, "task_index": 22}, {"db_idx": 22349, "episode_idx": 113, "frame_idx": 5, "global_frame_idx": 22349, "task_index": 22}, {"db_idx": 22350, "episode_idx": 113, "frame_idx": 6, "global_frame_idx": 22350, "task_index": 22}, {"db_idx": 22351, "episode_idx": 113, "frame_idx": 7, "global_frame_idx": 22351, "task_index": 22}, {"db_idx": 22352, "episode_idx": 113, "frame_idx": 8, "global_frame_idx": 22352, "task_index": 22}, {"db_idx": 22353, "episode_idx": 113, "frame_idx": 9, "global_frame_idx": 22353, "task_index": 22}, {"db_idx": 22354, "episode_idx": 113, "frame_idx": 10, "global_frame_idx": 22354, "task_index": 22}, {"db_idx": 22355, "episode_idx": 113, "frame_idx": 11, "global_frame_idx": 22355, "task_index": 22}, {"db_idx": 22356, "episode_idx": 113, "frame_idx": 12, "global_frame_idx": 22356, "task_index": 22}, {"db_idx": 22357, "episode_idx": 113, 
"frame_idx": 13, "global_frame_idx": 22357, "task_index": 22}, {"db_idx": 22358, "episode_idx": 113, "frame_idx": 14, "global_frame_idx": 22358, "task_index": 22}, {"db_idx": 22359, "episode_idx": 113, "frame_idx": 15, "global_frame_idx": 22359, "task_index": 22}, {"db_idx": 22360, "episode_idx": 113, "frame_idx": 16, "global_frame_idx": 22360, "task_index": 22}, {"db_idx": 22361, "episode_idx": 113, "frame_idx": 17, "global_frame_idx": 22361, "task_index": 22}, {"db_idx": 22362, "episode_idx": 113, "frame_idx": 18, "global_frame_idx": 22362, "task_index": 22}, {"db_idx": 22363, "episode_idx": 113, "frame_idx": 19, "global_frame_idx": 22363, "task_index": 22}, {"db_idx": 22364, "episode_idx": 113, "frame_idx": 20, "global_frame_idx": 22364, "task_index": 22}, {"db_idx": 22365, "episode_idx": 113, "frame_idx": 21, "global_frame_idx": 22365, "task_index": 22}, {"db_idx": 22366, "episode_idx": 113, "frame_idx": 22, "global_frame_idx": 22366, "task_index": 22}, {"db_idx": 22367, "episode_idx": 113, "frame_idx": 23, "global_frame_idx": 22367, "task_index": 22}, {"db_idx": 22368, "episode_idx": 113, "frame_idx": 24, "global_frame_idx": 22368, "task_index": 22}, {"db_idx": 22369, "episode_idx": 113, "frame_idx": 25, "global_frame_idx": 22369, "task_index": 22}, {"db_idx": 22370, "episode_idx": 113, "frame_idx": 26, "global_frame_idx": 22370, "task_index": 22}, {"db_idx": 22371, "episode_idx": 113, "frame_idx": 27, "global_frame_idx": 22371, "task_index": 22}, {"db_idx": 22372, "episode_idx": 113, "frame_idx": 28, "global_frame_idx": 22372, "task_index": 22}, {"db_idx": 22373, "episode_idx": 113, "frame_idx": 29, "global_frame_idx": 22373, "task_index": 22}, {"db_idx": 22374, "episode_idx": 113, "frame_idx": 30, "global_frame_idx": 22374, "task_index": 22}, {"db_idx": 22375, "episode_idx": 113, "frame_idx": 31, "global_frame_idx": 22375, "task_index": 22}, {"db_idx": 22376, "episode_idx": 113, "frame_idx": 32, "global_frame_idx": 22376, "task_index": 22}, {"db_idx": 22377, 
"episode_idx": 113, "frame_idx": 33, "global_frame_idx": 22377, "task_index": 22}, {"db_idx": 22378, "episode_idx": 113, "frame_idx": 34, "global_frame_idx": 22378, "task_index": 22}, {"db_idx": 22379, "episode_idx": 113, "frame_idx": 35, "global_frame_idx": 22379, "task_index": 22}, {"db_idx": 22380, "episode_idx": 113, "frame_idx": 36, "global_frame_idx": 22380, "task_index": 22}, {"db_idx": 22381, "episode_idx": 113, "frame_idx": 37, "global_frame_idx": 22381, "task_index": 22}, {"db_idx": 22382, "episode_idx": 113, "frame_idx": 38, "global_frame_idx": 22382, "task_index": 22}, {"db_idx": 22383, "episode_idx": 113, "frame_idx": 39, "global_frame_idx": 22383, "task_index": 22}, {"db_idx": 22384, "episode_idx": 113, "frame_idx": 40, "global_frame_idx": 22384, "task_index": 22}, {"db_idx": 22385, "episode_idx": 113, "frame_idx": 41, "global_frame_idx": 22385, "task_index": 22}, {"db_idx": 22386, "episode_idx": 113, "frame_idx": 42, "global_frame_idx": 22386, "task_index": 22}, {"db_idx": 22387, "episode_idx": 113, "frame_idx": 43, "global_frame_idx": 22387, "task_index": 22}, {"db_idx": 22388, "episode_idx": 113, "frame_idx": 44, "global_frame_idx": 22388, "task_index": 22}, {"db_idx": 22389, "episode_idx": 113, "frame_idx": 45, "global_frame_idx": 22389, "task_index": 22}, {"db_idx": 22390, "episode_idx": 113, "frame_idx": 46, "global_frame_idx": 22390, "task_index": 22}, {"db_idx": 22391, "episode_idx": 113, "frame_idx": 47, "global_frame_idx": 22391, "task_index": 22}, {"db_idx": 22392, "episode_idx": 113, "frame_idx": 48, "global_frame_idx": 22392, "task_index": 22}, {"db_idx": 22393, "episode_idx": 113, "frame_idx": 49, "global_frame_idx": 22393, "task_index": 22}, {"db_idx": 22394, "episode_idx": 113, "frame_idx": 50, "global_frame_idx": 22394, "task_index": 22}, {"db_idx": 22395, "episode_idx": 113, "frame_idx": 51, "global_frame_idx": 22395, "task_index": 22}, {"db_idx": 22396, "episode_idx": 113, "frame_idx": 52, "global_frame_idx": 22396, "task_index": 
22}, {"db_idx": 22397, "episode_idx": 113, "frame_idx": 53, "global_frame_idx": 22397, "task_index": 22}, {"db_idx": 22398, "episode_idx": 113, "frame_idx": 54, "global_frame_idx": 22398, "task_index": 22}, {"db_idx": 22399, "episode_idx": 113, "frame_idx": 55, "global_frame_idx": 22399, "task_index": 22}, {"db_idx": 22400, "episode_idx": 113, "frame_idx": 56, "global_frame_idx": 22400, "task_index": 22}, {"db_idx": 22401, "episode_idx": 113, "frame_idx": 57, "global_frame_idx": 22401, "task_index": 22}, {"db_idx": 22402, "episode_idx": 113, "frame_idx": 58, "global_frame_idx": 22402, "task_index": 22}, {"db_idx": 22403, "episode_idx": 113, "frame_idx": 59, "global_frame_idx": 22403, "task_index": 22}, {"db_idx": 22404, "episode_idx": 113, "frame_idx": 60, "global_frame_idx": 22404, "task_index": 22}, {"db_idx": 22405, "episode_idx": 113, "frame_idx": 61, "global_frame_idx": 22405, "task_index": 22}, {"db_idx": 22406, "episode_idx": 113, "frame_idx": 62, "global_frame_idx": 22406, "task_index": 22}, {"db_idx": 22407, "episode_idx": 113, "frame_idx": 63, "global_frame_idx": 22407, "task_index": 22}, {"db_idx": 22408, "episode_idx": 113, "frame_idx": 64, "global_frame_idx": 22408, "task_index": 22}, {"db_idx": 22409, "episode_idx": 113, "frame_idx": 65, "global_frame_idx": 22409, "task_index": 22}, {"db_idx": 22410, "episode_idx": 113, "frame_idx": 66, "global_frame_idx": 22410, "task_index": 22}, {"db_idx": 22411, "episode_idx": 113, "frame_idx": 67, "global_frame_idx": 22411, "task_index": 22}, {"db_idx": 22412, "episode_idx": 113, "frame_idx": 68, "global_frame_idx": 22412, "task_index": 22}, {"db_idx": 22413, "episode_idx": 113, "frame_idx": 69, "global_frame_idx": 22413, "task_index": 22}, {"db_idx": 22414, "episode_idx": 113, "frame_idx": 70, "global_frame_idx": 22414, "task_index": 22}, {"db_idx": 22415, "episode_idx": 113, "frame_idx": 71, "global_frame_idx": 22415, "task_index": 22}, {"db_idx": 22416, "episode_idx": 113, "frame_idx": 72, "global_frame_idx": 
22416, "task_index": 22}, {"db_idx": 22417, "episode_idx": 113, "frame_idx": 73, "global_frame_idx": 22417, "task_index": 22}, {"db_idx": 22418, "episode_idx": 113, "frame_idx": 74, "global_frame_idx": 22418, "task_index": 22}, {"db_idx": 22419, "episode_idx": 113, "frame_idx": 75, "global_frame_idx": 22419, "task_index": 22}, {"db_idx": 22420, "episode_idx": 113, "frame_idx": 76, "global_frame_idx": 22420, "task_index": 22}, {"db_idx": 22421, "episode_idx": 113, "frame_idx": 77, "global_frame_idx": 22421, "task_index": 22}, {"db_idx": 22422, "episode_idx": 113, "frame_idx": 78, "global_frame_idx": 22422, "task_index": 22}, {"db_idx": 22423, "episode_idx": 113, "frame_idx": 79, "global_frame_idx": 22423, "task_index": 22}, {"db_idx": 22424, "episode_idx": 113, "frame_idx": 80, "global_frame_idx": 22424, "task_index": 22}, {"db_idx": 22425, "episode_idx": 113, "frame_idx": 81, "global_frame_idx": 22425, "task_index": 22}, {"db_idx": 22426, "episode_idx": 113, "frame_idx": 82, "global_frame_idx": 22426, "task_index": 22}, {"db_idx": 22427, "episode_idx": 113, "frame_idx": 83, "global_frame_idx": 22427, "task_index": 22}, {"db_idx": 22428, "episode_idx": 113, "frame_idx": 84, "global_frame_idx": 22428, "task_index": 22}, {"db_idx": 22429, "episode_idx": 113, "frame_idx": 85, "global_frame_idx": 22429, "task_index": 22}, {"db_idx": 22430, "episode_idx": 113, "frame_idx": 86, "global_frame_idx": 22430, "task_index": 22}, {"db_idx": 22431, "episode_idx": 113, "frame_idx": 87, "global_frame_idx": 22431, "task_index": 22}, {"db_idx": 22432, "episode_idx": 113, "frame_idx": 88, "global_frame_idx": 22432, "task_index": 22}, {"db_idx": 22433, "episode_idx": 113, "frame_idx": 89, "global_frame_idx": 22433, "task_index": 22}, {"db_idx": 22434, "episode_idx": 113, "frame_idx": 90, "global_frame_idx": 22434, "task_index": 22}, {"db_idx": 22435, "episode_idx": 113, "frame_idx": 91, "global_frame_idx": 22435, "task_index": 22}, {"db_idx": 22436, "episode_idx": 113, "frame_idx": 92, 
"global_frame_idx": 22436, "task_index": 22}, {"db_idx": 22437, "episode_idx": 113, "frame_idx": 93, "global_frame_idx": 22437, "task_index": 22}, {"db_idx": 22438, "episode_idx": 113, "frame_idx": 94, "global_frame_idx": 22438, "task_index": 22}, {"db_idx": 22439, "episode_idx": 114, "frame_idx": 0, "global_frame_idx": 22439, "task_index": 22}, {"db_idx": 22440, "episode_idx": 114, "frame_idx": 1, "global_frame_idx": 22440, "task_index": 22}, {"db_idx": 22441, "episode_idx": 114, "frame_idx": 2, "global_frame_idx": 22441, "task_index": 22}, {"db_idx": 22442, "episode_idx": 114, "frame_idx": 3, "global_frame_idx": 22442, "task_index": 22}, {"db_idx": 22443, "episode_idx": 114, "frame_idx": 4, "global_frame_idx": 22443, "task_index": 22}, {"db_idx": 22444, "episode_idx": 114, "frame_idx": 5, "global_frame_idx": 22444, "task_index": 22}, {"db_idx": 22445, "episode_idx": 114, "frame_idx": 6, "global_frame_idx": 22445, "task_index": 22}, {"db_idx": 22446, "episode_idx": 114, "frame_idx": 7, "global_frame_idx": 22446, "task_index": 22}, {"db_idx": 22447, "episode_idx": 114, "frame_idx": 8, "global_frame_idx": 22447, "task_index": 22}, {"db_idx": 22448, "episode_idx": 114, "frame_idx": 9, "global_frame_idx": 22448, "task_index": 22}, {"db_idx": 22449, "episode_idx": 114, "frame_idx": 10, "global_frame_idx": 22449, "task_index": 22}, {"db_idx": 22450, "episode_idx": 114, "frame_idx": 11, "global_frame_idx": 22450, "task_index": 22}, {"db_idx": 22451, "episode_idx": 114, "frame_idx": 12, "global_frame_idx": 22451, "task_index": 22}, {"db_idx": 22452, "episode_idx": 114, "frame_idx": 13, "global_frame_idx": 22452, "task_index": 22}, {"db_idx": 22453, "episode_idx": 114, "frame_idx": 14, "global_frame_idx": 22453, "task_index": 22}, {"db_idx": 22454, "episode_idx": 114, "frame_idx": 15, "global_frame_idx": 22454, "task_index": 22}, {"db_idx": 22455, "episode_idx": 114, "frame_idx": 16, "global_frame_idx": 22455, "task_index": 22}, {"db_idx": 22456, "episode_idx": 114, 
"frame_idx": 17, "global_frame_idx": 22456, "task_index": 22}, {"db_idx": 22457, "episode_idx": 114, "frame_idx": 18, "global_frame_idx": 22457, "task_index": 22}, {"db_idx": 22458, "episode_idx": 114, "frame_idx": 19, "global_frame_idx": 22458, "task_index": 22}, {"db_idx": 22459, "episode_idx": 114, "frame_idx": 20, "global_frame_idx": 22459, "task_index": 22}, {"db_idx": 22460, "episode_idx": 114, "frame_idx": 21, "global_frame_idx": 22460, "task_index": 22}, {"db_idx": 22461, "episode_idx": 114, "frame_idx": 22, "global_frame_idx": 22461, "task_index": 22}, {"db_idx": 22462, "episode_idx": 114, "frame_idx": 23, "global_frame_idx": 22462, "task_index": 22}, {"db_idx": 22463, "episode_idx": 114, "frame_idx": 24, "global_frame_idx": 22463, "task_index": 22}, {"db_idx": 22464, "episode_idx": 114, "frame_idx": 25, "global_frame_idx": 22464, "task_index": 22}, {"db_idx": 22465, "episode_idx": 114, "frame_idx": 26, "global_frame_idx": 22465, "task_index": 22}, {"db_idx": 22466, "episode_idx": 114, "frame_idx": 27, "global_frame_idx": 22466, "task_index": 22}, {"db_idx": 22467, "episode_idx": 114, "frame_idx": 28, "global_frame_idx": 22467, "task_index": 22}, {"db_idx": 22468, "episode_idx": 114, "frame_idx": 29, "global_frame_idx": 22468, "task_index": 22}, {"db_idx": 22469, "episode_idx": 114, "frame_idx": 30, "global_frame_idx": 22469, "task_index": 22}, {"db_idx": 22470, "episode_idx": 114, "frame_idx": 31, "global_frame_idx": 22470, "task_index": 22}, {"db_idx": 22471, "episode_idx": 114, "frame_idx": 32, "global_frame_idx": 22471, "task_index": 22}, {"db_idx": 22472, "episode_idx": 114, "frame_idx": 33, "global_frame_idx": 22472, "task_index": 22}, {"db_idx": 22473, "episode_idx": 114, "frame_idx": 34, "global_frame_idx": 22473, "task_index": 22}, {"db_idx": 22474, "episode_idx": 114, "frame_idx": 35, "global_frame_idx": 22474, "task_index": 22}, {"db_idx": 22475, "episode_idx": 114, "frame_idx": 36, "global_frame_idx": 22475, "task_index": 22}, {"db_idx": 22476, 
"episode_idx": 114, "frame_idx": 37, "global_frame_idx": 22476, "task_index": 22}, {"db_idx": 22477, "episode_idx": 114, "frame_idx": 38, "global_frame_idx": 22477, "task_index": 22}, {"db_idx": 22478, "episode_idx": 114, "frame_idx": 39, "global_frame_idx": 22478, "task_index": 22}, {"db_idx": 22479, "episode_idx": 114, "frame_idx": 40, "global_frame_idx": 22479, "task_index": 22}, {"db_idx": 22480, "episode_idx": 114, "frame_idx": 41, "global_frame_idx": 22480, "task_index": 22}, {"db_idx": 22481, "episode_idx": 114, "frame_idx": 42, "global_frame_idx": 22481, "task_index": 22}, {"db_idx": 22482, "episode_idx": 114, "frame_idx": 43, "global_frame_idx": 22482, "task_index": 22}, {"db_idx": 22483, "episode_idx": 114, "frame_idx": 44, "global_frame_idx": 22483, "task_index": 22}, {"db_idx": 22484, "episode_idx": 114, "frame_idx": 45, "global_frame_idx": 22484, "task_index": 22}, {"db_idx": 22485, "episode_idx": 114, "frame_idx": 46, "global_frame_idx": 22485, "task_index": 22}, {"db_idx": 22486, "episode_idx": 114, "frame_idx": 47, "global_frame_idx": 22486, "task_index": 22}, {"db_idx": 22487, "episode_idx": 114, "frame_idx": 48, "global_frame_idx": 22487, "task_index": 22}, {"db_idx": 22488, "episode_idx": 114, "frame_idx": 49, "global_frame_idx": 22488, "task_index": 22}, {"db_idx": 22489, "episode_idx": 114, "frame_idx": 50, "global_frame_idx": 22489, "task_index": 22}, {"db_idx": 22490, "episode_idx": 114, "frame_idx": 51, "global_frame_idx": 22490, "task_index": 22}, {"db_idx": 22491, "episode_idx": 114, "frame_idx": 52, "global_frame_idx": 22491, "task_index": 22}, {"db_idx": 22492, "episode_idx": 114, "frame_idx": 53, "global_frame_idx": 22492, "task_index": 22}, {"db_idx": 22493, "episode_idx": 114, "frame_idx": 54, "global_frame_idx": 22493, "task_index": 22}, {"db_idx": 22494, "episode_idx": 114, "frame_idx": 55, "global_frame_idx": 22494, "task_index": 22}, {"db_idx": 22495, "episode_idx": 114, "frame_idx": 56, "global_frame_idx": 22495, "task_index": 
22}, {"db_idx": 22496, "episode_idx": 114, "frame_idx": 57, "global_frame_idx": 22496, "task_index": 22}, {"db_idx": 22497, "episode_idx": 114, "frame_idx": 58, "global_frame_idx": 22497, "task_index": 22}, {"db_idx": 22498, "episode_idx": 114, "frame_idx": 59, "global_frame_idx": 22498, "task_index": 22}, {"db_idx": 22499, "episode_idx": 114, "frame_idx": 60, "global_frame_idx": 22499, "task_index": 22}, {"db_idx": 22500, "episode_idx": 114, "frame_idx": 61, "global_frame_idx": 22500, "task_index": 22}, {"db_idx": 22501, "episode_idx": 114, "frame_idx": 62, "global_frame_idx": 22501, "task_index": 22}, {"db_idx": 22502, "episode_idx": 114, "frame_idx": 63, "global_frame_idx": 22502, "task_index": 22}, {"db_idx": 22503, "episode_idx": 114, "frame_idx": 64, "global_frame_idx": 22503, "task_index": 22}, {"db_idx": 22504, "episode_idx": 114, "frame_idx": 65, "global_frame_idx": 22504, "task_index": 22}, {"db_idx": 22505, "episode_idx": 114, "frame_idx": 66, "global_frame_idx": 22505, "task_index": 22}, {"db_idx": 22506, "episode_idx": 114, "frame_idx": 67, "global_frame_idx": 22506, "task_index": 22}, {"db_idx": 22507, "episode_idx": 114, "frame_idx": 68, "global_frame_idx": 22507, "task_index": 22}, {"db_idx": 22508, "episode_idx": 114, "frame_idx": 69, "global_frame_idx": 22508, "task_index": 22}, {"db_idx": 22509, "episode_idx": 114, "frame_idx": 70, "global_frame_idx": 22509, "task_index": 22}, {"db_idx": 22510, "episode_idx": 114, "frame_idx": 71, "global_frame_idx": 22510, "task_index": 22}, {"db_idx": 22511, "episode_idx": 114, "frame_idx": 72, "global_frame_idx": 22511, "task_index": 22}, {"db_idx": 22512, "episode_idx": 114, "frame_idx": 73, "global_frame_idx": 22512, "task_index": 22}, {"db_idx": 22513, "episode_idx": 114, "frame_idx": 74, "global_frame_idx": 22513, "task_index": 22}, {"db_idx": 22514, "episode_idx": 114, "frame_idx": 75, "global_frame_idx": 22514, "task_index": 22}, {"db_idx": 22515, "episode_idx": 114, "frame_idx": 76, "global_frame_idx": 
22515, "task_index": 22}, {"db_idx": 22516, "episode_idx": 114, "frame_idx": 77, "global_frame_idx": 22516, "task_index": 22}, {"db_idx": 22517, "episode_idx": 114, "frame_idx": 78, "global_frame_idx": 22517, "task_index": 22}, {"db_idx": 22518, "episode_idx": 114, "frame_idx": 79, "global_frame_idx": 22518, "task_index": 22}, {"db_idx": 22519, "episode_idx": 114, "frame_idx": 80, "global_frame_idx": 22519, "task_index": 22}, {"db_idx": 22520, "episode_idx": 114, "frame_idx": 81, "global_frame_idx": 22520, "task_index": 22}, {"db_idx": 22521, "episode_idx": 114, "frame_idx": 82, "global_frame_idx": 22521, "task_index": 22}, {"db_idx": 22522, "episode_idx": 114, "frame_idx": 83, "global_frame_idx": 22522, "task_index": 22}, {"db_idx": 22523, "episode_idx": 114, "frame_idx": 84, "global_frame_idx": 22523, "task_index": 22}, {"db_idx": 22524, "episode_idx": 114, "frame_idx": 85, "global_frame_idx": 22524, "task_index": 22}, {"db_idx": 22525, "episode_idx": 114, "frame_idx": 86, "global_frame_idx": 22525, "task_index": 22}, {"db_idx": 22526, "episode_idx": 114, "frame_idx": 87, "global_frame_idx": 22526, "task_index": 22}, {"db_idx": 22527, "episode_idx": 114, "frame_idx": 88, "global_frame_idx": 22527, "task_index": 22}, {"db_idx": 22528, "episode_idx": 115, "frame_idx": 0, "global_frame_idx": 22528, "task_index": 23}, {"db_idx": 22529, "episode_idx": 115, "frame_idx": 1, "global_frame_idx": 22529, "task_index": 23}, {"db_idx": 22530, "episode_idx": 115, "frame_idx": 2, "global_frame_idx": 22530, "task_index": 23}, {"db_idx": 22531, "episode_idx": 115, "frame_idx": 3, "global_frame_idx": 22531, "task_index": 23}, {"db_idx": 22532, "episode_idx": 115, "frame_idx": 4, "global_frame_idx": 22532, "task_index": 23}, {"db_idx": 22533, "episode_idx": 115, "frame_idx": 5, "global_frame_idx": 22533, "task_index": 23}, {"db_idx": 22534, "episode_idx": 115, "frame_idx": 6, "global_frame_idx": 22534, "task_index": 23}, {"db_idx": 22535, "episode_idx": 115, "frame_idx": 7, 
"global_frame_idx": 22535, "task_index": 23}, {"db_idx": 22536, "episode_idx": 115, "frame_idx": 8, "global_frame_idx": 22536, "task_index": 23}, {"db_idx": 22537, "episode_idx": 115, "frame_idx": 9, "global_frame_idx": 22537, "task_index": 23}, {"db_idx": 22538, "episode_idx": 115, "frame_idx": 10, "global_frame_idx": 22538, "task_index": 23}, {"db_idx": 22539, "episode_idx": 115, "frame_idx": 11, "global_frame_idx": 22539, "task_index": 23}, {"db_idx": 22540, "episode_idx": 115, "frame_idx": 12, "global_frame_idx": 22540, "task_index": 23}, {"db_idx": 22541, "episode_idx": 115, "frame_idx": 13, "global_frame_idx": 22541, "task_index": 23}, {"db_idx": 22542, "episode_idx": 115, "frame_idx": 14, "global_frame_idx": 22542, "task_index": 23}, {"db_idx": 22543, "episode_idx": 115, "frame_idx": 15, "global_frame_idx": 22543, "task_index": 23}, {"db_idx": 22544, "episode_idx": 115, "frame_idx": 16, "global_frame_idx": 22544, "task_index": 23}, {"db_idx": 22545, "episode_idx": 115, "frame_idx": 17, "global_frame_idx": 22545, "task_index": 23}, {"db_idx": 22546, "episode_idx": 115, "frame_idx": 18, "global_frame_idx": 22546, "task_index": 23}, {"db_idx": 22547, "episode_idx": 115, "frame_idx": 19, "global_frame_idx": 22547, "task_index": 23}, {"db_idx": 22548, "episode_idx": 115, "frame_idx": 20, "global_frame_idx": 22548, "task_index": 23}, {"db_idx": 22549, "episode_idx": 115, "frame_idx": 21, "global_frame_idx": 22549, "task_index": 23}, {"db_idx": 22550, "episode_idx": 115, "frame_idx": 22, "global_frame_idx": 22550, "task_index": 23}, {"db_idx": 22551, "episode_idx": 115, "frame_idx": 23, "global_frame_idx": 22551, "task_index": 23}, {"db_idx": 22552, "episode_idx": 115, "frame_idx": 24, "global_frame_idx": 22552, "task_index": 23}, {"db_idx": 22553, "episode_idx": 115, "frame_idx": 25, "global_frame_idx": 22553, "task_index": 23}, {"db_idx": 22554, "episode_idx": 115, "frame_idx": 26, "global_frame_idx": 22554, "task_index": 23}, {"db_idx": 22555, "episode_idx": 
115, "frame_idx": 27, "global_frame_idx": 22555, "task_index": 23}, {"db_idx": 22556, "episode_idx": 115, "frame_idx": 28, "global_frame_idx": 22556, "task_index": 23}, {"db_idx": 22557, "episode_idx": 115, "frame_idx": 29, "global_frame_idx": 22557, "task_index": 23}, {"db_idx": 22558, "episode_idx": 115, "frame_idx": 30, "global_frame_idx": 22558, "task_index": 23}, {"db_idx": 22559, "episode_idx": 115, "frame_idx": 31, "global_frame_idx": 22559, "task_index": 23}, {"db_idx": 22560, "episode_idx": 115, "frame_idx": 32, "global_frame_idx": 22560, "task_index": 23}, {"db_idx": 22561, "episode_idx": 115, "frame_idx": 33, "global_frame_idx": 22561, "task_index": 23}, {"db_idx": 22562, "episode_idx": 115, "frame_idx": 34, "global_frame_idx": 22562, "task_index": 23}, {"db_idx": 22563, "episode_idx": 115, "frame_idx": 35, "global_frame_idx": 22563, "task_index": 23}, {"db_idx": 22564, "episode_idx": 115, "frame_idx": 36, "global_frame_idx": 22564, "task_index": 23}, {"db_idx": 22565, "episode_idx": 115, "frame_idx": 37, "global_frame_idx": 22565, "task_index": 23}, {"db_idx": 22566, "episode_idx": 115, "frame_idx": 38, "global_frame_idx": 22566, "task_index": 23}, {"db_idx": 22567, "episode_idx": 115, "frame_idx": 39, "global_frame_idx": 22567, "task_index": 23}, {"db_idx": 22568, "episode_idx": 115, "frame_idx": 40, "global_frame_idx": 22568, "task_index": 23}, {"db_idx": 22569, "episode_idx": 115, "frame_idx": 41, "global_frame_idx": 22569, "task_index": 23}, {"db_idx": 22570, "episode_idx": 115, "frame_idx": 42, "global_frame_idx": 22570, "task_index": 23}, {"db_idx": 22571, "episode_idx": 115, "frame_idx": 43, "global_frame_idx": 22571, "task_index": 23}, {"db_idx": 22572, "episode_idx": 115, "frame_idx": 44, "global_frame_idx": 22572, "task_index": 23}, {"db_idx": 22573, "episode_idx": 115, "frame_idx": 45, "global_frame_idx": 22573, "task_index": 23}, {"db_idx": 22574, "episode_idx": 115, "frame_idx": 46, "global_frame_idx": 22574, "task_index": 23}, {"db_idx": 
22575, "episode_idx": 115, "frame_idx": 47, "global_frame_idx": 22575, "task_index": 23}, {"db_idx": 22576, "episode_idx": 115, "frame_idx": 48, "global_frame_idx": 22576, "task_index": 23}, {"db_idx": 22577, "episode_idx": 115, "frame_idx": 49, "global_frame_idx": 22577, "task_index": 23}, {"db_idx": 22578, "episode_idx": 115, "frame_idx": 50, "global_frame_idx": 22578, "task_index": 23}, {"db_idx": 22579, "episode_idx": 115, "frame_idx": 51, "global_frame_idx": 22579, "task_index": 23}, {"db_idx": 22580, "episode_idx": 115, "frame_idx": 52, "global_frame_idx": 22580, "task_index": 23}, {"db_idx": 22581, "episode_idx": 115, "frame_idx": 53, "global_frame_idx": 22581, "task_index": 23}, {"db_idx": 22582, "episode_idx": 115, "frame_idx": 54, "global_frame_idx": 22582, "task_index": 23}, {"db_idx": 22583, "episode_idx": 115, "frame_idx": 55, "global_frame_idx": 22583, "task_index": 23}, {"db_idx": 22584, "episode_idx": 115, "frame_idx": 56, "global_frame_idx": 22584, "task_index": 23}, {"db_idx": 22585, "episode_idx": 115, "frame_idx": 57, "global_frame_idx": 22585, "task_index": 23}, {"db_idx": 22586, "episode_idx": 115, "frame_idx": 58, "global_frame_idx": 22586, "task_index": 23}, {"db_idx": 22587, "episode_idx": 115, "frame_idx": 59, "global_frame_idx": 22587, "task_index": 23}, {"db_idx": 22588, "episode_idx": 115, "frame_idx": 60, "global_frame_idx": 22588, "task_index": 23}, {"db_idx": 22589, "episode_idx": 115, "frame_idx": 61, "global_frame_idx": 22589, "task_index": 23}, {"db_idx": 22590, "episode_idx": 115, "frame_idx": 62, "global_frame_idx": 22590, "task_index": 23}, {"db_idx": 22591, "episode_idx": 115, "frame_idx": 63, "global_frame_idx": 22591, "task_index": 23}, {"db_idx": 22592, "episode_idx": 115, "frame_idx": 64, "global_frame_idx": 22592, "task_index": 23}, {"db_idx": 22593, "episode_idx": 115, "frame_idx": 65, "global_frame_idx": 22593, "task_index": 23}, {"db_idx": 22594, "episode_idx": 115, "frame_idx": 66, "global_frame_idx": 22594, 
"task_index": 23}, {"db_idx": 22595, "episode_idx": 115, "frame_idx": 67, "global_frame_idx": 22595, "task_index": 23}, {"db_idx": 22596, "episode_idx": 115, "frame_idx": 68, "global_frame_idx": 22596, "task_index": 23}, {"db_idx": 22597, "episode_idx": 115, "frame_idx": 69, "global_frame_idx": 22597, "task_index": 23}, {"db_idx": 22598, "episode_idx": 115, "frame_idx": 70, "global_frame_idx": 22598, "task_index": 23}, {"db_idx": 22599, "episode_idx": 115, "frame_idx": 71, "global_frame_idx": 22599, "task_index": 23}, {"db_idx": 22600, "episode_idx": 115, "frame_idx": 72, "global_frame_idx": 22600, "task_index": 23}, {"db_idx": 22601, "episode_idx": 115, "frame_idx": 73, "global_frame_idx": 22601, "task_index": 23}, {"db_idx": 22602, "episode_idx": 115, "frame_idx": 74, "global_frame_idx": 22602, "task_index": 23}, {"db_idx": 22603, "episode_idx": 115, "frame_idx": 75, "global_frame_idx": 22603, "task_index": 23}, {"db_idx": 22604, "episode_idx": 115, "frame_idx": 76, "global_frame_idx": 22604, "task_index": 23}, {"db_idx": 22605, "episode_idx": 115, "frame_idx": 77, "global_frame_idx": 22605, "task_index": 23}, {"db_idx": 22606, "episode_idx": 115, "frame_idx": 78, "global_frame_idx": 22606, "task_index": 23}, {"db_idx": 22607, "episode_idx": 115, "frame_idx": 79, "global_frame_idx": 22607, "task_index": 23}, {"db_idx": 22608, "episode_idx": 115, "frame_idx": 80, "global_frame_idx": 22608, "task_index": 23}, {"db_idx": 22609, "episode_idx": 115, "frame_idx": 81, "global_frame_idx": 22609, "task_index": 23}, {"db_idx": 22610, "episode_idx": 115, "frame_idx": 82, "global_frame_idx": 22610, "task_index": 23}, {"db_idx": 22611, "episode_idx": 115, "frame_idx": 83, "global_frame_idx": 22611, "task_index": 23}, {"db_idx": 22612, "episode_idx": 115, "frame_idx": 84, "global_frame_idx": 22612, "task_index": 23}, {"db_idx": 22613, "episode_idx": 115, "frame_idx": 85, "global_frame_idx": 22613, "task_index": 23}, {"db_idx": 22614, "episode_idx": 115, "frame_idx": 86, 
"global_frame_idx": 22614, "task_index": 23}, {"db_idx": 22615, "episode_idx": 115, "frame_idx": 87, "global_frame_idx": 22615, "task_index": 23}, {"db_idx": 22616, "episode_idx": 115, "frame_idx": 88, "global_frame_idx": 22616, "task_index": 23}, {"db_idx": 22617, "episode_idx": 115, "frame_idx": 89, "global_frame_idx": 22617, "task_index": 23}, {"db_idx": 22618, "episode_idx": 115, "frame_idx": 90, "global_frame_idx": 22618, "task_index": 23}, {"db_idx": 22619, "episode_idx": 115, "frame_idx": 91, "global_frame_idx": 22619, "task_index": 23}, {"db_idx": 22620, "episode_idx": 115, "frame_idx": 92, "global_frame_idx": 22620, "task_index": 23}, {"db_idx": 22621, "episode_idx": 115, "frame_idx": 93, "global_frame_idx": 22621, "task_index": 23}, {"db_idx": 22622, "episode_idx": 115, "frame_idx": 94, "global_frame_idx": 22622, "task_index": 23}, {"db_idx": 22623, "episode_idx": 115, "frame_idx": 95, "global_frame_idx": 22623, "task_index": 23}, {"db_idx": 22624, "episode_idx": 115, "frame_idx": 96, "global_frame_idx": 22624, "task_index": 23}, {"db_idx": 22625, "episode_idx": 115, "frame_idx": 97, "global_frame_idx": 22625, "task_index": 23}, {"db_idx": 22626, "episode_idx": 115, "frame_idx": 98, "global_frame_idx": 22626, "task_index": 23}, {"db_idx": 22627, "episode_idx": 115, "frame_idx": 99, "global_frame_idx": 22627, "task_index": 23}, {"db_idx": 22628, "episode_idx": 115, "frame_idx": 100, "global_frame_idx": 22628, "task_index": 23}, {"db_idx": 22629, "episode_idx": 115, "frame_idx": 101, "global_frame_idx": 22629, "task_index": 23}, {"db_idx": 22630, "episode_idx": 115, "frame_idx": 102, "global_frame_idx": 22630, "task_index": 23}, {"db_idx": 22631, "episode_idx": 115, "frame_idx": 103, "global_frame_idx": 22631, "task_index": 23}, {"db_idx": 22632, "episode_idx": 115, "frame_idx": 104, "global_frame_idx": 22632, "task_index": 23}, {"db_idx": 22633, "episode_idx": 115, "frame_idx": 105, "global_frame_idx": 22633, "task_index": 23}, {"db_idx": 22634, 
"episode_idx": 115, "frame_idx": 106, "global_frame_idx": 22634, "task_index": 23}, {"db_idx": 22635, "episode_idx": 115, "frame_idx": 107, "global_frame_idx": 22635, "task_index": 23}, {"db_idx": 22636, "episode_idx": 115, "frame_idx": 108, "global_frame_idx": 22636, "task_index": 23}, {"db_idx": 22637, "episode_idx": 115, "frame_idx": 109, "global_frame_idx": 22637, "task_index": 23}, {"db_idx": 22638, "episode_idx": 115, "frame_idx": 110, "global_frame_idx": 22638, "task_index": 23}, {"db_idx": 22639, "episode_idx": 115, "frame_idx": 111, "global_frame_idx": 22639, "task_index": 23}, {"db_idx": 22640, "episode_idx": 115, "frame_idx": 112, "global_frame_idx": 22640, "task_index": 23}, {"db_idx": 22641, "episode_idx": 115, "frame_idx": 113, "global_frame_idx": 22641, "task_index": 23}, {"db_idx": 22642, "episode_idx": 116, "frame_idx": 0, "global_frame_idx": 22642, "task_index": 23}, {"db_idx": 22643, "episode_idx": 116, "frame_idx": 1, "global_frame_idx": 22643, "task_index": 23}, {"db_idx": 22644, "episode_idx": 116, "frame_idx": 2, "global_frame_idx": 22644, "task_index": 23}, {"db_idx": 22645, "episode_idx": 116, "frame_idx": 3, "global_frame_idx": 22645, "task_index": 23}, {"db_idx": 22646, "episode_idx": 116, "frame_idx": 4, "global_frame_idx": 22646, "task_index": 23}, {"db_idx": 22647, "episode_idx": 116, "frame_idx": 5, "global_frame_idx": 22647, "task_index": 23}, {"db_idx": 22648, "episode_idx": 116, "frame_idx": 6, "global_frame_idx": 22648, "task_index": 23}, {"db_idx": 22649, "episode_idx": 116, "frame_idx": 7, "global_frame_idx": 22649, "task_index": 23}, {"db_idx": 22650, "episode_idx": 116, "frame_idx": 8, "global_frame_idx": 22650, "task_index": 23}, {"db_idx": 22651, "episode_idx": 116, "frame_idx": 9, "global_frame_idx": 22651, "task_index": 23}, {"db_idx": 22652, "episode_idx": 116, "frame_idx": 10, "global_frame_idx": 22652, "task_index": 23}, {"db_idx": 22653, "episode_idx": 116, "frame_idx": 11, "global_frame_idx": 22653, "task_index": 23}, 
{"db_idx": 22654, "episode_idx": 116, "frame_idx": 12, "global_frame_idx": 22654, "task_index": 23}, {"db_idx": 22655, "episode_idx": 116, "frame_idx": 13, "global_frame_idx": 22655, "task_index": 23}, {"db_idx": 22656, "episode_idx": 116, "frame_idx": 14, "global_frame_idx": 22656, "task_index": 23}, {"db_idx": 22657, "episode_idx": 116, "frame_idx": 15, "global_frame_idx": 22657, "task_index": 23}, {"db_idx": 22658, "episode_idx": 116, "frame_idx": 16, "global_frame_idx": 22658, "task_index": 23}, {"db_idx": 22659, "episode_idx": 116, "frame_idx": 17, "global_frame_idx": 22659, "task_index": 23}, {"db_idx": 22660, "episode_idx": 116, "frame_idx": 18, "global_frame_idx": 22660, "task_index": 23}, {"db_idx": 22661, "episode_idx": 116, "frame_idx": 19, "global_frame_idx": 22661, "task_index": 23}, {"db_idx": 22662, "episode_idx": 116, "frame_idx": 20, "global_frame_idx": 22662, "task_index": 23}, {"db_idx": 22663, "episode_idx": 116, "frame_idx": 21, "global_frame_idx": 22663, "task_index": 23}, {"db_idx": 22664, "episode_idx": 116, "frame_idx": 22, "global_frame_idx": 22664, "task_index": 23}, {"db_idx": 22665, "episode_idx": 116, "frame_idx": 23, "global_frame_idx": 22665, "task_index": 23}, {"db_idx": 22666, "episode_idx": 116, "frame_idx": 24, "global_frame_idx": 22666, "task_index": 23}, {"db_idx": 22667, "episode_idx": 116, "frame_idx": 25, "global_frame_idx": 22667, "task_index": 23}, {"db_idx": 22668, "episode_idx": 116, "frame_idx": 26, "global_frame_idx": 22668, "task_index": 23}, {"db_idx": 22669, "episode_idx": 116, "frame_idx": 27, "global_frame_idx": 22669, "task_index": 23}, {"db_idx": 22670, "episode_idx": 116, "frame_idx": 28, "global_frame_idx": 22670, "task_index": 23}, {"db_idx": 22671, "episode_idx": 116, "frame_idx": 29, "global_frame_idx": 22671, "task_index": 23}, {"db_idx": 22672, "episode_idx": 116, "frame_idx": 30, "global_frame_idx": 22672, "task_index": 23}, {"db_idx": 22673, "episode_idx": 116, "frame_idx": 31, "global_frame_idx": 
22673, "task_index": 23}, {"db_idx": 22674, "episode_idx": 116, "frame_idx": 32, "global_frame_idx": 22674, "task_index": 23}, {"db_idx": 22675, "episode_idx": 116, "frame_idx": 33, "global_frame_idx": 22675, "task_index": 23}, {"db_idx": 22676, "episode_idx": 116, "frame_idx": 34, "global_frame_idx": 22676, "task_index": 23}, {"db_idx": 22677, "episode_idx": 116, "frame_idx": 35, "global_frame_idx": 22677, "task_index": 23}, {"db_idx": 22678, "episode_idx": 116, "frame_idx": 36, "global_frame_idx": 22678, "task_index": 23}, {"db_idx": 22679, "episode_idx": 116, "frame_idx": 37, "global_frame_idx": 22679, "task_index": 23}, {"db_idx": 22680, "episode_idx": 116, "frame_idx": 38, "global_frame_idx": 22680, "task_index": 23}, {"db_idx": 22681, "episode_idx": 116, "frame_idx": 39, "global_frame_idx": 22681, "task_index": 23}, {"db_idx": 22682, "episode_idx": 116, "frame_idx": 40, "global_frame_idx": 22682, "task_index": 23}, {"db_idx": 22683, "episode_idx": 116, "frame_idx": 41, "global_frame_idx": 22683, "task_index": 23}, {"db_idx": 22684, "episode_idx": 116, "frame_idx": 42, "global_frame_idx": 22684, "task_index": 23}, {"db_idx": 22685, "episode_idx": 116, "frame_idx": 43, "global_frame_idx": 22685, "task_index": 23}, {"db_idx": 22686, "episode_idx": 116, "frame_idx": 44, "global_frame_idx": 22686, "task_index": 23}, {"db_idx": 22687, "episode_idx": 116, "frame_idx": 45, "global_frame_idx": 22687, "task_index": 23}, {"db_idx": 22688, "episode_idx": 116, "frame_idx": 46, "global_frame_idx": 22688, "task_index": 23}, {"db_idx": 22689, "episode_idx": 116, "frame_idx": 47, "global_frame_idx": 22689, "task_index": 23}, {"db_idx": 22690, "episode_idx": 116, "frame_idx": 48, "global_frame_idx": 22690, "task_index": 23}, {"db_idx": 22691, "episode_idx": 116, "frame_idx": 49, "global_frame_idx": 22691, "task_index": 23}, {"db_idx": 22692, "episode_idx": 116, "frame_idx": 50, "global_frame_idx": 22692, "task_index": 23}, {"db_idx": 22693, "episode_idx": 116, "frame_idx": 51, 
"global_frame_idx": 22693, "task_index": 23}, {"db_idx": 22694, "episode_idx": 116, "frame_idx": 52, "global_frame_idx": 22694, "task_index": 23}, {"db_idx": 22695, "episode_idx": 116, "frame_idx": 53, "global_frame_idx": 22695, "task_index": 23}, {"db_idx": 22696, "episode_idx": 116, "frame_idx": 54, "global_frame_idx": 22696, "task_index": 23}, {"db_idx": 22697, "episode_idx": 116, "frame_idx": 55, "global_frame_idx": 22697, "task_index": 23}, {"db_idx": 22698, "episode_idx": 116, "frame_idx": 56, "global_frame_idx": 22698, "task_index": 23}, {"db_idx": 22699, "episode_idx": 116, "frame_idx": 57, "global_frame_idx": 22699, "task_index": 23}, {"db_idx": 22700, "episode_idx": 116, "frame_idx": 58, "global_frame_idx": 22700, "task_index": 23}, {"db_idx": 22701, "episode_idx": 116, "frame_idx": 59, "global_frame_idx": 22701, "task_index": 23}, {"db_idx": 22702, "episode_idx": 116, "frame_idx": 60, "global_frame_idx": 22702, "task_index": 23}, {"db_idx": 22703, "episode_idx": 116, "frame_idx": 61, "global_frame_idx": 22703, "task_index": 23}, {"db_idx": 22704, "episode_idx": 116, "frame_idx": 62, "global_frame_idx": 22704, "task_index": 23}, {"db_idx": 22705, "episode_idx": 116, "frame_idx": 63, "global_frame_idx": 22705, "task_index": 23}, {"db_idx": 22706, "episode_idx": 116, "frame_idx": 64, "global_frame_idx": 22706, "task_index": 23}, {"db_idx": 22707, "episode_idx": 116, "frame_idx": 65, "global_frame_idx": 22707, "task_index": 23}, {"db_idx": 22708, "episode_idx": 116, "frame_idx": 66, "global_frame_idx": 22708, "task_index": 23}, {"db_idx": 22709, "episode_idx": 116, "frame_idx": 67, "global_frame_idx": 22709, "task_index": 23}, {"db_idx": 22710, "episode_idx": 116, "frame_idx": 68, "global_frame_idx": 22710, "task_index": 23}, {"db_idx": 22711, "episode_idx": 116, "frame_idx": 69, "global_frame_idx": 22711, "task_index": 23}, {"db_idx": 22712, "episode_idx": 116, "frame_idx": 70, "global_frame_idx": 22712, "task_index": 23}, {"db_idx": 22713, "episode_idx": 
116, "frame_idx": 71, "global_frame_idx": 22713, "task_index": 23}, {"db_idx": 22714, "episode_idx": 116, "frame_idx": 72, "global_frame_idx": 22714, "task_index": 23}, {"db_idx": 22715, "episode_idx": 116, "frame_idx": 73, "global_frame_idx": 22715, "task_index": 23}, {"db_idx": 22716, "episode_idx": 116, "frame_idx": 74, "global_frame_idx": 22716, "task_index": 23}, {"db_idx": 22717, "episode_idx": 116, "frame_idx": 75, "global_frame_idx": 22717, "task_index": 23}, {"db_idx": 22718, "episode_idx": 116, "frame_idx": 76, "global_frame_idx": 22718, "task_index": 23}, {"db_idx": 22719, "episode_idx": 116, "frame_idx": 77, "global_frame_idx": 22719, "task_index": 23}, {"db_idx": 22720, "episode_idx": 116, "frame_idx": 78, "global_frame_idx": 22720, "task_index": 23}, {"db_idx": 22721, "episode_idx": 116, "frame_idx": 79, "global_frame_idx": 22721, "task_index": 23}, {"db_idx": 22722, "episode_idx": 116, "frame_idx": 80, "global_frame_idx": 22722, "task_index": 23}, {"db_idx": 22723, "episode_idx": 116, "frame_idx": 81, "global_frame_idx": 22723, "task_index": 23}, {"db_idx": 22724, "episode_idx": 116, "frame_idx": 82, "global_frame_idx": 22724, "task_index": 23}, {"db_idx": 22725, "episode_idx": 116, "frame_idx": 83, "global_frame_idx": 22725, "task_index": 23}, {"db_idx": 22726, "episode_idx": 116, "frame_idx": 84, "global_frame_idx": 22726, "task_index": 23}, {"db_idx": 22727, "episode_idx": 116, "frame_idx": 85, "global_frame_idx": 22727, "task_index": 23}, {"db_idx": 22728, "episode_idx": 116, "frame_idx": 86, "global_frame_idx": 22728, "task_index": 23}, {"db_idx": 22729, "episode_idx": 116, "frame_idx": 87, "global_frame_idx": 22729, "task_index": 23}, {"db_idx": 22730, "episode_idx": 116, "frame_idx": 88, "global_frame_idx": 22730, "task_index": 23}, {"db_idx": 22731, "episode_idx": 116, "frame_idx": 89, "global_frame_idx": 22731, "task_index": 23}, {"db_idx": 22732, "episode_idx": 116, "frame_idx": 90, "global_frame_idx": 22732, "task_index": 23}, {"db_idx": 
22733, "episode_idx": 116, "frame_idx": 91, "global_frame_idx": 22733, "task_index": 23}, {"db_idx": 22734, "episode_idx": 116, "frame_idx": 92, "global_frame_idx": 22734, "task_index": 23}, {"db_idx": 22735, "episode_idx": 116, "frame_idx": 93, "global_frame_idx": 22735, "task_index": 23}, {"db_idx": 22736, "episode_idx": 116, "frame_idx": 94, "global_frame_idx": 22736, "task_index": 23}, {"db_idx": 22737, "episode_idx": 116, "frame_idx": 95, "global_frame_idx": 22737, "task_index": 23}, {"db_idx": 22738, "episode_idx": 116, "frame_idx": 96, "global_frame_idx": 22738, "task_index": 23}, {"db_idx": 22739, "episode_idx": 117, "frame_idx": 0, "global_frame_idx": 22739, "task_index": 23}, {"db_idx": 22740, "episode_idx": 117, "frame_idx": 1, "global_frame_idx": 22740, "task_index": 23}, {"db_idx": 22741, "episode_idx": 117, "frame_idx": 2, "global_frame_idx": 22741, "task_index": 23}, {"db_idx": 22742, "episode_idx": 117, "frame_idx": 3, "global_frame_idx": 22742, "task_index": 23}, {"db_idx": 22743, "episode_idx": 117, "frame_idx": 4, "global_frame_idx": 22743, "task_index": 23}, {"db_idx": 22744, "episode_idx": 117, "frame_idx": 5, "global_frame_idx": 22744, "task_index": 23}, {"db_idx": 22745, "episode_idx": 117, "frame_idx": 6, "global_frame_idx": 22745, "task_index": 23}, {"db_idx": 22746, "episode_idx": 117, "frame_idx": 7, "global_frame_idx": 22746, "task_index": 23}, {"db_idx": 22747, "episode_idx": 117, "frame_idx": 8, "global_frame_idx": 22747, "task_index": 23}, {"db_idx": 22748, "episode_idx": 117, "frame_idx": 9, "global_frame_idx": 22748, "task_index": 23}, {"db_idx": 22749, "episode_idx": 117, "frame_idx": 10, "global_frame_idx": 22749, "task_index": 23}, {"db_idx": 22750, "episode_idx": 117, "frame_idx": 11, "global_frame_idx": 22750, "task_index": 23}, {"db_idx": 22751, "episode_idx": 117, "frame_idx": 12, "global_frame_idx": 22751, "task_index": 23}, {"db_idx": 22752, "episode_idx": 117, "frame_idx": 13, "global_frame_idx": 22752, "task_index": 23}, 
{"db_idx": 22753, "episode_idx": 117, "frame_idx": 14, "global_frame_idx": 22753, "task_index": 23}, {"db_idx": 22754, "episode_idx": 117, "frame_idx": 15, "global_frame_idx": 22754, "task_index": 23}, {"db_idx": 22755, "episode_idx": 117, "frame_idx": 16, "global_frame_idx": 22755, "task_index": 23}, {"db_idx": 22756, "episode_idx": 117, "frame_idx": 17, "global_frame_idx": 22756, "task_index": 23}, {"db_idx": 22757, "episode_idx": 117, "frame_idx": 18, "global_frame_idx": 22757, "task_index": 23}, {"db_idx": 22758, "episode_idx": 117, "frame_idx": 19, "global_frame_idx": 22758, "task_index": 23}, {"db_idx": 22759, "episode_idx": 117, "frame_idx": 20, "global_frame_idx": 22759, "task_index": 23}, {"db_idx": 22760, "episode_idx": 117, "frame_idx": 21, "global_frame_idx": 22760, "task_index": 23}, {"db_idx": 22761, "episode_idx": 117, "frame_idx": 22, "global_frame_idx": 22761, "task_index": 23}, {"db_idx": 22762, "episode_idx": 117, "frame_idx": 23, "global_frame_idx": 22762, "task_index": 23}, {"db_idx": 22763, "episode_idx": 117, "frame_idx": 24, "global_frame_idx": 22763, "task_index": 23}, {"db_idx": 22764, "episode_idx": 117, "frame_idx": 25, "global_frame_idx": 22764, "task_index": 23}, {"db_idx": 22765, "episode_idx": 117, "frame_idx": 26, "global_frame_idx": 22765, "task_index": 23}, {"db_idx": 22766, "episode_idx": 117, "frame_idx": 27, "global_frame_idx": 22766, "task_index": 23}, {"db_idx": 22767, "episode_idx": 117, "frame_idx": 28, "global_frame_idx": 22767, "task_index": 23}, {"db_idx": 22768, "episode_idx": 117, "frame_idx": 29, "global_frame_idx": 22768, "task_index": 23}, {"db_idx": 22769, "episode_idx": 117, "frame_idx": 30, "global_frame_idx": 22769, "task_index": 23}, {"db_idx": 22770, "episode_idx": 117, "frame_idx": 31, "global_frame_idx": 22770, "task_index": 23}, {"db_idx": 22771, "episode_idx": 117, "frame_idx": 32, "global_frame_idx": 22771, "task_index": 23}, {"db_idx": 22772, "episode_idx": 117, "frame_idx": 33, "global_frame_idx": 
22772, "task_index": 23}, {"db_idx": 22773, "episode_idx": 117, "frame_idx": 34, "global_frame_idx": 22773, "task_index": 23}, {"db_idx": 22774, "episode_idx": 117, "frame_idx": 35, "global_frame_idx": 22774, "task_index": 23}, {"db_idx": 22775, "episode_idx": 117, "frame_idx": 36, "global_frame_idx": 22775, "task_index": 23}, {"db_idx": 22776, "episode_idx": 117, "frame_idx": 37, "global_frame_idx": 22776, "task_index": 23}, {"db_idx": 22777, "episode_idx": 117, "frame_idx": 38, "global_frame_idx": 22777, "task_index": 23}, {"db_idx": 22778, "episode_idx": 117, "frame_idx": 39, "global_frame_idx": 22778, "task_index": 23}, {"db_idx": 22779, "episode_idx": 117, "frame_idx": 40, "global_frame_idx": 22779, "task_index": 23}, {"db_idx": 22780, "episode_idx": 117, "frame_idx": 41, "global_frame_idx": 22780, "task_index": 23}, {"db_idx": 22781, "episode_idx": 117, "frame_idx": 42, "global_frame_idx": 22781, "task_index": 23}, {"db_idx": 22782, "episode_idx": 117, "frame_idx": 43, "global_frame_idx": 22782, "task_index": 23}, {"db_idx": 22783, "episode_idx": 117, "frame_idx": 44, "global_frame_idx": 22783, "task_index": 23}, {"db_idx": 22784, "episode_idx": 117, "frame_idx": 45, "global_frame_idx": 22784, "task_index": 23}, {"db_idx": 22785, "episode_idx": 117, "frame_idx": 46, "global_frame_idx": 22785, "task_index": 23}, {"db_idx": 22786, "episode_idx": 117, "frame_idx": 47, "global_frame_idx": 22786, "task_index": 23}, {"db_idx": 22787, "episode_idx": 117, "frame_idx": 48, "global_frame_idx": 22787, "task_index": 23}, {"db_idx": 22788, "episode_idx": 117, "frame_idx": 49, "global_frame_idx": 22788, "task_index": 23}, {"db_idx": 22789, "episode_idx": 117, "frame_idx": 50, "global_frame_idx": 22789, "task_index": 23}, {"db_idx": 22790, "episode_idx": 117, "frame_idx": 51, "global_frame_idx": 22790, "task_index": 23}, {"db_idx": 22791, "episode_idx": 117, "frame_idx": 52, "global_frame_idx": 22791, "task_index": 23}, {"db_idx": 22792, "episode_idx": 117, "frame_idx": 53, 
"global_frame_idx": 22792, "task_index": 23}, {"db_idx": 22793, "episode_idx": 117, "frame_idx": 54, "global_frame_idx": 22793, "task_index": 23}, {"db_idx": 22794, "episode_idx": 117, "frame_idx": 55, "global_frame_idx": 22794, "task_index": 23}, {"db_idx": 22795, "episode_idx": 117, "frame_idx": 56, "global_frame_idx": 22795, "task_index": 23}, {"db_idx": 22796, "episode_idx": 117, "frame_idx": 57, "global_frame_idx": 22796, "task_index": 23}, {"db_idx": 22797, "episode_idx": 117, "frame_idx": 58, "global_frame_idx": 22797, "task_index": 23}, {"db_idx": 22798, "episode_idx": 117, "frame_idx": 59, "global_frame_idx": 22798, "task_index": 23}, {"db_idx": 22799, "episode_idx": 117, "frame_idx": 60, "global_frame_idx": 22799, "task_index": 23}, {"db_idx": 22800, "episode_idx": 117, "frame_idx": 61, "global_frame_idx": 22800, "task_index": 23}, {"db_idx": 22801, "episode_idx": 117, "frame_idx": 62, "global_frame_idx": 22801, "task_index": 23}, {"db_idx": 22802, "episode_idx": 117, "frame_idx": 63, "global_frame_idx": 22802, "task_index": 23}, {"db_idx": 22803, "episode_idx": 117, "frame_idx": 64, "global_frame_idx": 22803, "task_index": 23}, {"db_idx": 22804, "episode_idx": 117, "frame_idx": 65, "global_frame_idx": 22804, "task_index": 23}, {"db_idx": 22805, "episode_idx": 117, "frame_idx": 66, "global_frame_idx": 22805, "task_index": 23}, {"db_idx": 22806, "episode_idx": 117, "frame_idx": 67, "global_frame_idx": 22806, "task_index": 23}, {"db_idx": 22807, "episode_idx": 117, "frame_idx": 68, "global_frame_idx": 22807, "task_index": 23}, {"db_idx": 22808, "episode_idx": 117, "frame_idx": 69, "global_frame_idx": 22808, "task_index": 23}, {"db_idx": 22809, "episode_idx": 117, "frame_idx": 70, "global_frame_idx": 22809, "task_index": 23}, {"db_idx": 22810, "episode_idx": 117, "frame_idx": 71, "global_frame_idx": 22810, "task_index": 23}, {"db_idx": 22811, "episode_idx": 117, "frame_idx": 72, "global_frame_idx": 22811, "task_index": 23}, {"db_idx": 22812, "episode_idx": 
117, "frame_idx": 73, "global_frame_idx": 22812, "task_index": 23}, {"db_idx": 22813, "episode_idx": 117, "frame_idx": 74, "global_frame_idx": 22813, "task_index": 23}, {"db_idx": 22814, "episode_idx": 117, "frame_idx": 75, "global_frame_idx": 22814, "task_index": 23}, {"db_idx": 22815, "episode_idx": 117, "frame_idx": 76, "global_frame_idx": 22815, "task_index": 23}, {"db_idx": 22816, "episode_idx": 117, "frame_idx": 77, "global_frame_idx": 22816, "task_index": 23}, {"db_idx": 22817, "episode_idx": 117, "frame_idx": 78, "global_frame_idx": 22817, "task_index": 23}, {"db_idx": 22818, "episode_idx": 117, "frame_idx": 79, "global_frame_idx": 22818, "task_index": 23}, {"db_idx": 22819, "episode_idx": 117, "frame_idx": 80, "global_frame_idx": 22819, "task_index": 23}, {"db_idx": 22820, "episode_idx": 117, "frame_idx": 81, "global_frame_idx": 22820, "task_index": 23}, {"db_idx": 22821, "episode_idx": 117, "frame_idx": 82, "global_frame_idx": 22821, "task_index": 23}, {"db_idx": 22822, "episode_idx": 117, "frame_idx": 83, "global_frame_idx": 22822, "task_index": 23}, {"db_idx": 22823, "episode_idx": 117, "frame_idx": 84, "global_frame_idx": 22823, "task_index": 23}, {"db_idx": 22824, "episode_idx": 117, "frame_idx": 85, "global_frame_idx": 22824, "task_index": 23}, {"db_idx": 22825, "episode_idx": 117, "frame_idx": 86, "global_frame_idx": 22825, "task_index": 23}, {"db_idx": 22826, "episode_idx": 117, "frame_idx": 87, "global_frame_idx": 22826, "task_index": 23}, {"db_idx": 22827, "episode_idx": 117, "frame_idx": 88, "global_frame_idx": 22827, "task_index": 23}, {"db_idx": 22828, "episode_idx": 117, "frame_idx": 89, "global_frame_idx": 22828, "task_index": 23}, {"db_idx": 22829, "episode_idx": 117, "frame_idx": 90, "global_frame_idx": 22829, "task_index": 23}, {"db_idx": 22830, "episode_idx": 117, "frame_idx": 91, "global_frame_idx": 22830, "task_index": 23}, {"db_idx": 22831, "episode_idx": 117, "frame_idx": 92, "global_frame_idx": 22831, "task_index": 23}, {"db_idx": 
22832, "episode_idx": 117, "frame_idx": 93, "global_frame_idx": 22832, "task_index": 23}, {"db_idx": 22833, "episode_idx": 117, "frame_idx": 94, "global_frame_idx": 22833, "task_index": 23}, {"db_idx": 22834, "episode_idx": 117, "frame_idx": 95, "global_frame_idx": 22834, "task_index": 23}, {"db_idx": 22835, "episode_idx": 117, "frame_idx": 96, "global_frame_idx": 22835, "task_index": 23}, {"db_idx": 22836, "episode_idx": 117, "frame_idx": 97, "global_frame_idx": 22836, "task_index": 23}, {"db_idx": 22837, "episode_idx": 117, "frame_idx": 98, "global_frame_idx": 22837, "task_index": 23}, {"db_idx": 22838, "episode_idx": 117, "frame_idx": 99, "global_frame_idx": 22838, "task_index": 23}, {"db_idx": 22839, "episode_idx": 117, "frame_idx": 100, "global_frame_idx": 22839, "task_index": 23}, {"db_idx": 22840, "episode_idx": 117, "frame_idx": 101, "global_frame_idx": 22840, "task_index": 23}, {"db_idx": 22841, "episode_idx": 118, "frame_idx": 0, "global_frame_idx": 22841, "task_index": 23}, {"db_idx": 22842, "episode_idx": 118, "frame_idx": 1, "global_frame_idx": 22842, "task_index": 23}, {"db_idx": 22843, "episode_idx": 118, "frame_idx": 2, "global_frame_idx": 22843, "task_index": 23}, {"db_idx": 22844, "episode_idx": 118, "frame_idx": 3, "global_frame_idx": 22844, "task_index": 23}, {"db_idx": 22845, "episode_idx": 118, "frame_idx": 4, "global_frame_idx": 22845, "task_index": 23}, {"db_idx": 22846, "episode_idx": 118, "frame_idx": 5, "global_frame_idx": 22846, "task_index": 23}, {"db_idx": 22847, "episode_idx": 118, "frame_idx": 6, "global_frame_idx": 22847, "task_index": 23}, {"db_idx": 22848, "episode_idx": 118, "frame_idx": 7, "global_frame_idx": 22848, "task_index": 23}, {"db_idx": 22849, "episode_idx": 118, "frame_idx": 8, "global_frame_idx": 22849, "task_index": 23}, {"db_idx": 22850, "episode_idx": 118, "frame_idx": 9, "global_frame_idx": 22850, "task_index": 23}, {"db_idx": 22851, "episode_idx": 118, "frame_idx": 10, "global_frame_idx": 22851, "task_index": 
23}, {"db_idx": 22852, "episode_idx": 118, "frame_idx": 11, "global_frame_idx": 22852, "task_index": 23}, {"db_idx": 22853, "episode_idx": 118, "frame_idx": 12, "global_frame_idx": 22853, "task_index": 23}, {"db_idx": 22854, "episode_idx": 118, "frame_idx": 13, "global_frame_idx": 22854, "task_index": 23}, {"db_idx": 22855, "episode_idx": 118, "frame_idx": 14, "global_frame_idx": 22855, "task_index": 23}, {"db_idx": 22856, "episode_idx": 118, "frame_idx": 15, "global_frame_idx": 22856, "task_index": 23}, {"db_idx": 22857, "episode_idx": 118, "frame_idx": 16, "global_frame_idx": 22857, "task_index": 23}, {"db_idx": 22858, "episode_idx": 118, "frame_idx": 17, "global_frame_idx": 22858, "task_index": 23}, {"db_idx": 22859, "episode_idx": 118, "frame_idx": 18, "global_frame_idx": 22859, "task_index": 23}, {"db_idx": 22860, "episode_idx": 118, "frame_idx": 19, "global_frame_idx": 22860, "task_index": 23}, {"db_idx": 22861, "episode_idx": 118, "frame_idx": 20, "global_frame_idx": 22861, "task_index": 23}, {"db_idx": 22862, "episode_idx": 118, "frame_idx": 21, "global_frame_idx": 22862, "task_index": 23}, {"db_idx": 22863, "episode_idx": 118, "frame_idx": 22, "global_frame_idx": 22863, "task_index": 23}, {"db_idx": 22864, "episode_idx": 118, "frame_idx": 23, "global_frame_idx": 22864, "task_index": 23}, {"db_idx": 22865, "episode_idx": 118, "frame_idx": 24, "global_frame_idx": 22865, "task_index": 23}, {"db_idx": 22866, "episode_idx": 118, "frame_idx": 25, "global_frame_idx": 22866, "task_index": 23}, {"db_idx": 22867, "episode_idx": 118, "frame_idx": 26, "global_frame_idx": 22867, "task_index": 23}, {"db_idx": 22868, "episode_idx": 118, "frame_idx": 27, "global_frame_idx": 22868, "task_index": 23}, {"db_idx": 22869, "episode_idx": 118, "frame_idx": 28, "global_frame_idx": 22869, "task_index": 23}, {"db_idx": 22870, "episode_idx": 118, "frame_idx": 29, "global_frame_idx": 22870, "task_index": 23}, {"db_idx": 22871, "episode_idx": 118, "frame_idx": 30, "global_frame_idx": 
22871, "task_index": 23}, {"db_idx": 22872, "episode_idx": 118, "frame_idx": 31, "global_frame_idx": 22872, "task_index": 23}, {"db_idx": 22873, "episode_idx": 118, "frame_idx": 32, "global_frame_idx": 22873, "task_index": 23}, {"db_idx": 22874, "episode_idx": 118, "frame_idx": 33, "global_frame_idx": 22874, "task_index": 23}, {"db_idx": 22875, "episode_idx": 118, "frame_idx": 34, "global_frame_idx": 22875, "task_index": 23}, {"db_idx": 22876, "episode_idx": 118, "frame_idx": 35, "global_frame_idx": 22876, "task_index": 23}, {"db_idx": 22877, "episode_idx": 118, "frame_idx": 36, "global_frame_idx": 22877, "task_index": 23}, {"db_idx": 22878, "episode_idx": 118, "frame_idx": 37, "global_frame_idx": 22878, "task_index": 23}, {"db_idx": 22879, "episode_idx": 118, "frame_idx": 38, "global_frame_idx": 22879, "task_index": 23}, {"db_idx": 22880, "episode_idx": 118, "frame_idx": 39, "global_frame_idx": 22880, "task_index": 23}, {"db_idx": 22881, "episode_idx": 118, "frame_idx": 40, "global_frame_idx": 22881, "task_index": 23}, {"db_idx": 22882, "episode_idx": 118, "frame_idx": 41, "global_frame_idx": 22882, "task_index": 23}, {"db_idx": 22883, "episode_idx": 118, "frame_idx": 42, "global_frame_idx": 22883, "task_index": 23}, {"db_idx": 22884, "episode_idx": 118, "frame_idx": 43, "global_frame_idx": 22884, "task_index": 23}, {"db_idx": 22885, "episode_idx": 118, "frame_idx": 44, "global_frame_idx": 22885, "task_index": 23}, {"db_idx": 22886, "episode_idx": 118, "frame_idx": 45, "global_frame_idx": 22886, "task_index": 23}, {"db_idx": 22887, "episode_idx": 118, "frame_idx": 46, "global_frame_idx": 22887, "task_index": 23}, {"db_idx": 22888, "episode_idx": 118, "frame_idx": 47, "global_frame_idx": 22888, "task_index": 23}, {"db_idx": 22889, "episode_idx": 118, "frame_idx": 48, "global_frame_idx": 22889, "task_index": 23}, {"db_idx": 22890, "episode_idx": 118, "frame_idx": 49, "global_frame_idx": 22890, "task_index": 23}, {"db_idx": 22891, "episode_idx": 118, "frame_idx": 50, 
"global_frame_idx": 22891, "task_index": 23}, {"db_idx": 22892, "episode_idx": 118, "frame_idx": 51, "global_frame_idx": 22892, "task_index": 23}, {"db_idx": 22893, "episode_idx": 118, "frame_idx": 52, "global_frame_idx": 22893, "task_index": 23}, {"db_idx": 22894, "episode_idx": 118, "frame_idx": 53, "global_frame_idx": 22894, "task_index": 23}, {"db_idx": 22895, "episode_idx": 118, "frame_idx": 54, "global_frame_idx": 22895, "task_index": 23}, {"db_idx": 22896, "episode_idx": 118, "frame_idx": 55, "global_frame_idx": 22896, "task_index": 23}, {"db_idx": 22897, "episode_idx": 118, "frame_idx": 56, "global_frame_idx": 22897, "task_index": 23}, {"db_idx": 22898, "episode_idx": 118, "frame_idx": 57, "global_frame_idx": 22898, "task_index": 23}, {"db_idx": 22899, "episode_idx": 118, "frame_idx": 58, "global_frame_idx": 22899, "task_index": 23}, {"db_idx": 22900, "episode_idx": 118, "frame_idx": 59, "global_frame_idx": 22900, "task_index": 23}, {"db_idx": 22901, "episode_idx": 118, "frame_idx": 60, "global_frame_idx": 22901, "task_index": 23}, {"db_idx": 22902, "episode_idx": 118, "frame_idx": 61, "global_frame_idx": 22902, "task_index": 23}, {"db_idx": 22903, "episode_idx": 118, "frame_idx": 62, "global_frame_idx": 22903, "task_index": 23}, {"db_idx": 22904, "episode_idx": 118, "frame_idx": 63, "global_frame_idx": 22904, "task_index": 23}, {"db_idx": 22905, "episode_idx": 118, "frame_idx": 64, "global_frame_idx": 22905, "task_index": 23}, {"db_idx": 22906, "episode_idx": 118, "frame_idx": 65, "global_frame_idx": 22906, "task_index": 23}, {"db_idx": 22907, "episode_idx": 118, "frame_idx": 66, "global_frame_idx": 22907, "task_index": 23}, {"db_idx": 22908, "episode_idx": 118, "frame_idx": 67, "global_frame_idx": 22908, "task_index": 23}, {"db_idx": 22909, "episode_idx": 118, "frame_idx": 68, "global_frame_idx": 22909, "task_index": 23}, {"db_idx": 22910, "episode_idx": 118, "frame_idx": 69, "global_frame_idx": 22910, "task_index": 23}, {"db_idx": 22911, "episode_idx": 
118, "frame_idx": 70, "global_frame_idx": 22911, "task_index": 23}, {"db_idx": 22912, "episode_idx": 118, "frame_idx": 71, "global_frame_idx": 22912, "task_index": 23}, {"db_idx": 22913, "episode_idx": 118, "frame_idx": 72, "global_frame_idx": 22913, "task_index": 23}, {"db_idx": 22914, "episode_idx": 118, "frame_idx": 73, "global_frame_idx": 22914, "task_index": 23}, {"db_idx": 22915, "episode_idx": 118, "frame_idx": 74, "global_frame_idx": 22915, "task_index": 23}, {"db_idx": 22916, "episode_idx": 118, "frame_idx": 75, "global_frame_idx": 22916, "task_index": 23}, {"db_idx": 22917, "episode_idx": 118, "frame_idx": 76, "global_frame_idx": 22917, "task_index": 23}, {"db_idx": 22918, "episode_idx": 118, "frame_idx": 77, "global_frame_idx": 22918, "task_index": 23}, {"db_idx": 22919, "episode_idx": 118, "frame_idx": 78, "global_frame_idx": 22919, "task_index": 23}, {"db_idx": 22920, "episode_idx": 118, "frame_idx": 79, "global_frame_idx": 22920, "task_index": 23}, {"db_idx": 22921, "episode_idx": 118, "frame_idx": 80, "global_frame_idx": 22921, "task_index": 23}, {"db_idx": 22922, "episode_idx": 118, "frame_idx": 81, "global_frame_idx": 22922, "task_index": 23}, {"db_idx": 22923, "episode_idx": 118, "frame_idx": 82, "global_frame_idx": 22923, "task_index": 23}, {"db_idx": 22924, "episode_idx": 118, "frame_idx": 83, "global_frame_idx": 22924, "task_index": 23}, {"db_idx": 22925, "episode_idx": 118, "frame_idx": 84, "global_frame_idx": 22925, "task_index": 23}, {"db_idx": 22926, "episode_idx": 118, "frame_idx": 85, "global_frame_idx": 22926, "task_index": 23}, {"db_idx": 22927, "episode_idx": 118, "frame_idx": 86, "global_frame_idx": 22927, "task_index": 23}, {"db_idx": 22928, "episode_idx": 118, "frame_idx": 87, "global_frame_idx": 22928, "task_index": 23}, {"db_idx": 22929, "episode_idx": 118, "frame_idx": 88, "global_frame_idx": 22929, "task_index": 23}, {"db_idx": 22930, "episode_idx": 118, "frame_idx": 89, "global_frame_idx": 22930, "task_index": 23}, {"db_idx": 
22931, "episode_idx": 118, "frame_idx": 90, "global_frame_idx": 22931, "task_index": 23}, {"db_idx": 22932, "episode_idx": 118, "frame_idx": 91, "global_frame_idx": 22932, "task_index": 23}, {"db_idx": 22933, "episode_idx": 118, "frame_idx": 92, "global_frame_idx": 22933, "task_index": 23}, {"db_idx": 22934, "episode_idx": 118, "frame_idx": 93, "global_frame_idx": 22934, "task_index": 23}, {"db_idx": 22935, "episode_idx": 118, "frame_idx": 94, "global_frame_idx": 22935, "task_index": 23}, {"db_idx": 22936, "episode_idx": 118, "frame_idx": 95, "global_frame_idx": 22936, "task_index": 23}, {"db_idx": 22937, "episode_idx": 118, "frame_idx": 96, "global_frame_idx": 22937, "task_index": 23}, {"db_idx": 22938, "episode_idx": 118, "frame_idx": 97, "global_frame_idx": 22938, "task_index": 23}, {"db_idx": 22939, "episode_idx": 118, "frame_idx": 98, "global_frame_idx": 22939, "task_index": 23}, {"db_idx": 22940, "episode_idx": 119, "frame_idx": 0, "global_frame_idx": 22940, "task_index": 23}, {"db_idx": 22941, "episode_idx": 119, "frame_idx": 1, "global_frame_idx": 22941, "task_index": 23}, {"db_idx": 22942, "episode_idx": 119, "frame_idx": 2, "global_frame_idx": 22942, "task_index": 23}, {"db_idx": 22943, "episode_idx": 119, "frame_idx": 3, "global_frame_idx": 22943, "task_index": 23}, {"db_idx": 22944, "episode_idx": 119, "frame_idx": 4, "global_frame_idx": 22944, "task_index": 23}, {"db_idx": 22945, "episode_idx": 119, "frame_idx": 5, "global_frame_idx": 22945, "task_index": 23}, {"db_idx": 22946, "episode_idx": 119, "frame_idx": 6, "global_frame_idx": 22946, "task_index": 23}, {"db_idx": 22947, "episode_idx": 119, "frame_idx": 7, "global_frame_idx": 22947, "task_index": 23}, {"db_idx": 22948, "episode_idx": 119, "frame_idx": 8, "global_frame_idx": 22948, "task_index": 23}, {"db_idx": 22949, "episode_idx": 119, "frame_idx": 9, "global_frame_idx": 22949, "task_index": 23}, {"db_idx": 22950, "episode_idx": 119, "frame_idx": 10, "global_frame_idx": 22950, "task_index": 23}, 
{"db_idx": 22951, "episode_idx": 119, "frame_idx": 11, "global_frame_idx": 22951, "task_index": 23}, {"db_idx": 22952, "episode_idx": 119, "frame_idx": 12, "global_frame_idx": 22952, "task_index": 23}, {"db_idx": 22953, "episode_idx": 119, "frame_idx": 13, "global_frame_idx": 22953, "task_index": 23}, {"db_idx": 22954, "episode_idx": 119, "frame_idx": 14, "global_frame_idx": 22954, "task_index": 23}, {"db_idx": 22955, "episode_idx": 119, "frame_idx": 15, "global_frame_idx": 22955, "task_index": 23}, {"db_idx": 22956, "episode_idx": 119, "frame_idx": 16, "global_frame_idx": 22956, "task_index": 23}, {"db_idx": 22957, "episode_idx": 119, "frame_idx": 17, "global_frame_idx": 22957, "task_index": 23}, {"db_idx": 22958, "episode_idx": 119, "frame_idx": 18, "global_frame_idx": 22958, "task_index": 23}, {"db_idx": 22959, "episode_idx": 119, "frame_idx": 19, "global_frame_idx": 22959, "task_index": 23}, {"db_idx": 22960, "episode_idx": 119, "frame_idx": 20, "global_frame_idx": 22960, "task_index": 23}, {"db_idx": 22961, "episode_idx": 119, "frame_idx": 21, "global_frame_idx": 22961, "task_index": 23}, {"db_idx": 22962, "episode_idx": 119, "frame_idx": 22, "global_frame_idx": 22962, "task_index": 23}, {"db_idx": 22963, "episode_idx": 119, "frame_idx": 23, "global_frame_idx": 22963, "task_index": 23}, {"db_idx": 22964, "episode_idx": 119, "frame_idx": 24, "global_frame_idx": 22964, "task_index": 23}, {"db_idx": 22965, "episode_idx": 119, "frame_idx": 25, "global_frame_idx": 22965, "task_index": 23}, {"db_idx": 22966, "episode_idx": 119, "frame_idx": 26, "global_frame_idx": 22966, "task_index": 23}, {"db_idx": 22967, "episode_idx": 119, "frame_idx": 27, "global_frame_idx": 22967, "task_index": 23}, {"db_idx": 22968, "episode_idx": 119, "frame_idx": 28, "global_frame_idx": 22968, "task_index": 23}, {"db_idx": 22969, "episode_idx": 119, "frame_idx": 29, "global_frame_idx": 22969, "task_index": 23}, {"db_idx": 22970, "episode_idx": 119, "frame_idx": 30, "global_frame_idx": 
22970, "task_index": 23}, {"db_idx": 22971, "episode_idx": 119, "frame_idx": 31, "global_frame_idx": 22971, "task_index": 23}, {"db_idx": 22972, "episode_idx": 119, "frame_idx": 32, "global_frame_idx": 22972, "task_index": 23}, {"db_idx": 22973, "episode_idx": 119, "frame_idx": 33, "global_frame_idx": 22973, "task_index": 23}, {"db_idx": 22974, "episode_idx": 119, "frame_idx": 34, "global_frame_idx": 22974, "task_index": 23}, {"db_idx": 22975, "episode_idx": 119, "frame_idx": 35, "global_frame_idx": 22975, "task_index": 23}, {"db_idx": 22976, "episode_idx": 119, "frame_idx": 36, "global_frame_idx": 22976, "task_index": 23}, {"db_idx": 22977, "episode_idx": 119, "frame_idx": 37, "global_frame_idx": 22977, "task_index": 23}, {"db_idx": 22978, "episode_idx": 119, "frame_idx": 38, "global_frame_idx": 22978, "task_index": 23}, {"db_idx": 22979, "episode_idx": 119, "frame_idx": 39, "global_frame_idx": 22979, "task_index": 23}, {"db_idx": 22980, "episode_idx": 119, "frame_idx": 40, "global_frame_idx": 22980, "task_index": 23}, {"db_idx": 22981, "episode_idx": 119, "frame_idx": 41, "global_frame_idx": 22981, "task_index": 23}, {"db_idx": 22982, "episode_idx": 119, "frame_idx": 42, "global_frame_idx": 22982, "task_index": 23}, {"db_idx": 22983, "episode_idx": 119, "frame_idx": 43, "global_frame_idx": 22983, "task_index": 23}, {"db_idx": 22984, "episode_idx": 119, "frame_idx": 44, "global_frame_idx": 22984, "task_index": 23}, {"db_idx": 22985, "episode_idx": 119, "frame_idx": 45, "global_frame_idx": 22985, "task_index": 23}, {"db_idx": 22986, "episode_idx": 119, "frame_idx": 46, "global_frame_idx": 22986, "task_index": 23}, {"db_idx": 22987, "episode_idx": 119, "frame_idx": 47, "global_frame_idx": 22987, "task_index": 23}, {"db_idx": 22988, "episode_idx": 119, "frame_idx": 48, "global_frame_idx": 22988, "task_index": 23}, {"db_idx": 22989, "episode_idx": 119, "frame_idx": 49, "global_frame_idx": 22989, "task_index": 23}, {"db_idx": 22990, "episode_idx": 119, "frame_idx": 50, 
"global_frame_idx": 22990, "task_index": 23}, {"db_idx": 22991, "episode_idx": 119, "frame_idx": 51, "global_frame_idx": 22991, "task_index": 23}, {"db_idx": 22992, "episode_idx": 119, "frame_idx": 52, "global_frame_idx": 22992, "task_index": 23}, {"db_idx": 22993, "episode_idx": 119, "frame_idx": 53, "global_frame_idx": 22993, "task_index": 23}, {"db_idx": 22994, "episode_idx": 119, "frame_idx": 54, "global_frame_idx": 22994, "task_index": 23}, {"db_idx": 22995, "episode_idx": 119, "frame_idx": 55, "global_frame_idx": 22995, "task_index": 23}, {"db_idx": 22996, "episode_idx": 119, "frame_idx": 56, "global_frame_idx": 22996, "task_index": 23}, {"db_idx": 22997, "episode_idx": 119, "frame_idx": 57, "global_frame_idx": 22997, "task_index": 23}, {"db_idx": 22998, "episode_idx": 119, "frame_idx": 58, "global_frame_idx": 22998, "task_index": 23}, {"db_idx": 22999, "episode_idx": 119, "frame_idx": 59, "global_frame_idx": 22999, "task_index": 23}, {"db_idx": 23000, "episode_idx": 119, "frame_idx": 60, "global_frame_idx": 23000, "task_index": 23}, {"db_idx": 23001, "episode_idx": 119, "frame_idx": 61, "global_frame_idx": 23001, "task_index": 23}, {"db_idx": 23002, "episode_idx": 119, "frame_idx": 62, "global_frame_idx": 23002, "task_index": 23}, {"db_idx": 23003, "episode_idx": 119, "frame_idx": 63, "global_frame_idx": 23003, "task_index": 23}, {"db_idx": 23004, "episode_idx": 119, "frame_idx": 64, "global_frame_idx": 23004, "task_index": 23}, {"db_idx": 23005, "episode_idx": 119, "frame_idx": 65, "global_frame_idx": 23005, "task_index": 23}, {"db_idx": 23006, "episode_idx": 119, "frame_idx": 66, "global_frame_idx": 23006, "task_index": 23}, {"db_idx": 23007, "episode_idx": 119, "frame_idx": 67, "global_frame_idx": 23007, "task_index": 23}, {"db_idx": 23008, "episode_idx": 119, "frame_idx": 68, "global_frame_idx": 23008, "task_index": 23}, {"db_idx": 23009, "episode_idx": 119, "frame_idx": 69, "global_frame_idx": 23009, "task_index": 23}, {"db_idx": 23010, "episode_idx": 
119, "frame_idx": 70, "global_frame_idx": 23010, "task_index": 23}, {"db_idx": 23011, "episode_idx": 119, "frame_idx": 71, "global_frame_idx": 23011, "task_index": 23}, {"db_idx": 23012, "episode_idx": 119, "frame_idx": 72, "global_frame_idx": 23012, "task_index": 23}, {"db_idx": 23013, "episode_idx": 119, "frame_idx": 73, "global_frame_idx": 23013, "task_index": 23}, {"db_idx": 23014, "episode_idx": 119, "frame_idx": 74, "global_frame_idx": 23014, "task_index": 23}, {"db_idx": 23015, "episode_idx": 119, "frame_idx": 75, "global_frame_idx": 23015, "task_index": 23}, {"db_idx": 23016, "episode_idx": 119, "frame_idx": 76, "global_frame_idx": 23016, "task_index": 23}, {"db_idx": 23017, "episode_idx": 119, "frame_idx": 77, "global_frame_idx": 23017, "task_index": 23}, {"db_idx": 23018, "episode_idx": 119, "frame_idx": 78, "global_frame_idx": 23018, "task_index": 23}, {"db_idx": 23019, "episode_idx": 119, "frame_idx": 79, "global_frame_idx": 23019, "task_index": 23}, {"db_idx": 23020, "episode_idx": 119, "frame_idx": 80, "global_frame_idx": 23020, "task_index": 23}, {"db_idx": 23021, "episode_idx": 119, "frame_idx": 81, "global_frame_idx": 23021, "task_index": 23}, {"db_idx": 23022, "episode_idx": 119, "frame_idx": 82, "global_frame_idx": 23022, "task_index": 23}, {"db_idx": 23023, "episode_idx": 119, "frame_idx": 83, "global_frame_idx": 23023, "task_index": 23}, {"db_idx": 23024, "episode_idx": 119, "frame_idx": 84, "global_frame_idx": 23024, "task_index": 23}, {"db_idx": 23025, "episode_idx": 119, "frame_idx": 85, "global_frame_idx": 23025, "task_index": 23}, {"db_idx": 23026, "episode_idx": 119, "frame_idx": 86, "global_frame_idx": 23026, "task_index": 23}, {"db_idx": 23027, "episode_idx": 119, "frame_idx": 87, "global_frame_idx": 23027, "task_index": 23}, {"db_idx": 23028, "episode_idx": 119, "frame_idx": 88, "global_frame_idx": 23028, "task_index": 23}, {"db_idx": 23029, "episode_idx": 119, "frame_idx": 89, "global_frame_idx": 23029, "task_index": 23}, {"db_idx": 
23030, "episode_idx": 119, "frame_idx": 90, "global_frame_idx": 23030, "task_index": 23}, {"db_idx": 23031, "episode_idx": 119, "frame_idx": 91, "global_frame_idx": 23031, "task_index": 23}, {"db_idx": 23032, "episode_idx": 119, "frame_idx": 92, "global_frame_idx": 23032, "task_index": 23}, {"db_idx": 23033, "episode_idx": 119, "frame_idx": 93, "global_frame_idx": 23033, "task_index": 23}, {"db_idx": 23034, "episode_idx": 119, "frame_idx": 94, "global_frame_idx": 23034, "task_index": 23}, {"db_idx": 23035, "episode_idx": 119, "frame_idx": 95, "global_frame_idx": 23035, "task_index": 23}, {"db_idx": 23036, "episode_idx": 119, "frame_idx": 96, "global_frame_idx": 23036, "task_index": 23}, {"db_idx": 23037, "episode_idx": 119, "frame_idx": 97, "global_frame_idx": 23037, "task_index": 23}, {"db_idx": 23038, "episode_idx": 119, "frame_idx": 98, "global_frame_idx": 23038, "task_index": 23}, {"db_idx": 23039, "episode_idx": 119, "frame_idx": 99, "global_frame_idx": 23039, "task_index": 23}, {"db_idx": 23040, "episode_idx": 119, "frame_idx": 100, "global_frame_idx": 23040, "task_index": 23}, {"db_idx": 23041, "episode_idx": 119, "frame_idx": 101, "global_frame_idx": 23041, "task_index": 23}, {"db_idx": 23042, "episode_idx": 119, "frame_idx": 102, "global_frame_idx": 23042, "task_index": 23}, {"db_idx": 23043, "episode_idx": 119, "frame_idx": 103, "global_frame_idx": 23043, "task_index": 23}, {"db_idx": 23044, "episode_idx": 119, "frame_idx": 104, "global_frame_idx": 23044, "task_index": 23}, {"db_idx": 23045, "episode_idx": 119, "frame_idx": 105, "global_frame_idx": 23045, "task_index": 23}, {"db_idx": 23046, "episode_idx": 119, "frame_idx": 106, "global_frame_idx": 23046, "task_index": 23}, {"db_idx": 23047, "episode_idx": 119, "frame_idx": 107, "global_frame_idx": 23047, "task_index": 23}, {"db_idx": 23048, "episode_idx": 119, "frame_idx": 108, "global_frame_idx": 23048, "task_index": 23}, {"db_idx": 23049, "episode_idx": 119, "frame_idx": 109, "global_frame_idx": 23049, 
"task_index": 23}, {"db_idx": 23050, "episode_idx": 119, "frame_idx": 110, "global_frame_idx": 23050, "task_index": 23}, {"db_idx": 23051, "episode_idx": 119, "frame_idx": 111, "global_frame_idx": 23051, "task_index": 23}, {"db_idx": 23052, "episode_idx": 119, "frame_idx": 112, "global_frame_idx": 23052, "task_index": 23}, {"db_idx": 23053, "episode_idx": 120, "frame_idx": 0, "global_frame_idx": 23053, "task_index": 24}, {"db_idx": 23054, "episode_idx": 120, "frame_idx": 1, "global_frame_idx": 23054, "task_index": 24}, {"db_idx": 23055, "episode_idx": 120, "frame_idx": 2, "global_frame_idx": 23055, "task_index": 24}, {"db_idx": 23056, "episode_idx": 120, "frame_idx": 3, "global_frame_idx": 23056, "task_index": 24}, {"db_idx": 23057, "episode_idx": 120, "frame_idx": 4, "global_frame_idx": 23057, "task_index": 24}, {"db_idx": 23058, "episode_idx": 120, "frame_idx": 5, "global_frame_idx": 23058, "task_index": 24}, {"db_idx": 23059, "episode_idx": 120, "frame_idx": 6, "global_frame_idx": 23059, "task_index": 24}, {"db_idx": 23060, "episode_idx": 120, "frame_idx": 7, "global_frame_idx": 23060, "task_index": 24}, {"db_idx": 23061, "episode_idx": 120, "frame_idx": 8, "global_frame_idx": 23061, "task_index": 24}, {"db_idx": 23062, "episode_idx": 120, "frame_idx": 9, "global_frame_idx": 23062, "task_index": 24}, {"db_idx": 23063, "episode_idx": 120, "frame_idx": 10, "global_frame_idx": 23063, "task_index": 24}, {"db_idx": 23064, "episode_idx": 120, "frame_idx": 11, "global_frame_idx": 23064, "task_index": 24}, {"db_idx": 23065, "episode_idx": 120, "frame_idx": 12, "global_frame_idx": 23065, "task_index": 24}, {"db_idx": 23066, "episode_idx": 120, "frame_idx": 13, "global_frame_idx": 23066, "task_index": 24}, {"db_idx": 23067, "episode_idx": 120, "frame_idx": 14, "global_frame_idx": 23067, "task_index": 24}, {"db_idx": 23068, "episode_idx": 120, "frame_idx": 15, "global_frame_idx": 23068, "task_index": 24}, {"db_idx": 23069, "episode_idx": 120, "frame_idx": 16, 
"global_frame_idx": 23069, "task_index": 24}, {"db_idx": 23070, "episode_idx": 120, "frame_idx": 17, "global_frame_idx": 23070, "task_index": 24}, {"db_idx": 23071, "episode_idx": 120, "frame_idx": 18, "global_frame_idx": 23071, "task_index": 24}, {"db_idx": 23072, "episode_idx": 120, "frame_idx": 19, "global_frame_idx": 23072, "task_index": 24}, {"db_idx": 23073, "episode_idx": 120, "frame_idx": 20, "global_frame_idx": 23073, "task_index": 24}, {"db_idx": 23074, "episode_idx": 120, "frame_idx": 21, "global_frame_idx": 23074, "task_index": 24}, {"db_idx": 23075, "episode_idx": 120, "frame_idx": 22, "global_frame_idx": 23075, "task_index": 24}, {"db_idx": 23076, "episode_idx": 120, "frame_idx": 23, "global_frame_idx": 23076, "task_index": 24}, {"db_idx": 23077, "episode_idx": 120, "frame_idx": 24, "global_frame_idx": 23077, "task_index": 24}, {"db_idx": 23078, "episode_idx": 120, "frame_idx": 25, "global_frame_idx": 23078, "task_index": 24}, {"db_idx": 23079, "episode_idx": 120, "frame_idx": 26, "global_frame_idx": 23079, "task_index": 24}, {"db_idx": 23080, "episode_idx": 120, "frame_idx": 27, "global_frame_idx": 23080, "task_index": 24}, {"db_idx": 23081, "episode_idx": 120, "frame_idx": 28, "global_frame_idx": 23081, "task_index": 24}, {"db_idx": 23082, "episode_idx": 120, "frame_idx": 29, "global_frame_idx": 23082, "task_index": 24}, {"db_idx": 23083, "episode_idx": 120, "frame_idx": 30, "global_frame_idx": 23083, "task_index": 24}, {"db_idx": 23084, "episode_idx": 120, "frame_idx": 31, "global_frame_idx": 23084, "task_index": 24}, {"db_idx": 23085, "episode_idx": 120, "frame_idx": 32, "global_frame_idx": 23085, "task_index": 24}, {"db_idx": 23086, "episode_idx": 120, "frame_idx": 33, "global_frame_idx": 23086, "task_index": 24}, {"db_idx": 23087, "episode_idx": 120, "frame_idx": 34, "global_frame_idx": 23087, "task_index": 24}, {"db_idx": 23088, "episode_idx": 120, "frame_idx": 35, "global_frame_idx": 23088, "task_index": 24}, {"db_idx": 23089, "episode_idx": 
120, "frame_idx": 36, "global_frame_idx": 23089, "task_index": 24}, {"db_idx": 23090, "episode_idx": 120, "frame_idx": 37, "global_frame_idx": 23090, "task_index": 24}, {"db_idx": 23091, "episode_idx": 120, "frame_idx": 38, "global_frame_idx": 23091, "task_index": 24}, {"db_idx": 23092, "episode_idx": 120, "frame_idx": 39, "global_frame_idx": 23092, "task_index": 24}, {"db_idx": 23093, "episode_idx": 120, "frame_idx": 40, "global_frame_idx": 23093, "task_index": 24}, {"db_idx": 23094, "episode_idx": 120, "frame_idx": 41, "global_frame_idx": 23094, "task_index": 24}, {"db_idx": 23095, "episode_idx": 120, "frame_idx": 42, "global_frame_idx": 23095, "task_index": 24}, {"db_idx": 23096, "episode_idx": 120, "frame_idx": 43, "global_frame_idx": 23096, "task_index": 24}, {"db_idx": 23097, "episode_idx": 120, "frame_idx": 44, "global_frame_idx": 23097, "task_index": 24}, {"db_idx": 23098, "episode_idx": 120, "frame_idx": 45, "global_frame_idx": 23098, "task_index": 24}, {"db_idx": 23099, "episode_idx": 120, "frame_idx": 46, "global_frame_idx": 23099, "task_index": 24}, {"db_idx": 23100, "episode_idx": 120, "frame_idx": 47, "global_frame_idx": 23100, "task_index": 24}, {"db_idx": 23101, "episode_idx": 120, "frame_idx": 48, "global_frame_idx": 23101, "task_index": 24}, {"db_idx": 23102, "episode_idx": 120, "frame_idx": 49, "global_frame_idx": 23102, "task_index": 24}, {"db_idx": 23103, "episode_idx": 120, "frame_idx": 50, "global_frame_idx": 23103, "task_index": 24}, {"db_idx": 23104, "episode_idx": 120, "frame_idx": 51, "global_frame_idx": 23104, "task_index": 24}, {"db_idx": 23105, "episode_idx": 120, "frame_idx": 52, "global_frame_idx": 23105, "task_index": 24}, {"db_idx": 23106, "episode_idx": 120, "frame_idx": 53, "global_frame_idx": 23106, "task_index": 24}, {"db_idx": 23107, "episode_idx": 120, "frame_idx": 54, "global_frame_idx": 23107, "task_index": 24}, {"db_idx": 23108, "episode_idx": 120, "frame_idx": 55, "global_frame_idx": 23108, "task_index": 24}, {"db_idx": 
23109, "episode_idx": 120, "frame_idx": 56, "global_frame_idx": 23109, "task_index": 24}, {"db_idx": 23110, "episode_idx": 120, "frame_idx": 57, "global_frame_idx": 23110, "task_index": 24}, {"db_idx": 23111, "episode_idx": 120, "frame_idx": 58, "global_frame_idx": 23111, "task_index": 24}, {"db_idx": 23112, "episode_idx": 120, "frame_idx": 59, "global_frame_idx": 23112, "task_index": 24}, {"db_idx": 23113, "episode_idx": 120, "frame_idx": 60, "global_frame_idx": 23113, "task_index": 24}, {"db_idx": 23114, "episode_idx": 120, "frame_idx": 61, "global_frame_idx": 23114, "task_index": 24}, {"db_idx": 23115, "episode_idx": 120, "frame_idx": 62, "global_frame_idx": 23115, "task_index": 24}, {"db_idx": 23116, "episode_idx": 120, "frame_idx": 63, "global_frame_idx": 23116, "task_index": 24}, {"db_idx": 23117, "episode_idx": 120, "frame_idx": 64, "global_frame_idx": 23117, "task_index": 24}, {"db_idx": 23118, "episode_idx": 120, "frame_idx": 65, "global_frame_idx": 23118, "task_index": 24}, {"db_idx": 23119, "episode_idx": 120, "frame_idx": 66, "global_frame_idx": 23119, "task_index": 24}, {"db_idx": 23120, "episode_idx": 120, "frame_idx": 67, "global_frame_idx": 23120, "task_index": 24}, {"db_idx": 23121, "episode_idx": 120, "frame_idx": 68, "global_frame_idx": 23121, "task_index": 24}, {"db_idx": 23122, "episode_idx": 120, "frame_idx": 69, "global_frame_idx": 23122, "task_index": 24}, {"db_idx": 23123, "episode_idx": 120, "frame_idx": 70, "global_frame_idx": 23123, "task_index": 24}, {"db_idx": 23124, "episode_idx": 120, "frame_idx": 71, "global_frame_idx": 23124, "task_index": 24}, {"db_idx": 23125, "episode_idx": 120, "frame_idx": 72, "global_frame_idx": 23125, "task_index": 24}, {"db_idx": 23126, "episode_idx": 120, "frame_idx": 73, "global_frame_idx": 23126, "task_index": 24}, {"db_idx": 23127, "episode_idx": 120, "frame_idx": 74, "global_frame_idx": 23127, "task_index": 24}, {"db_idx": 23128, "episode_idx": 120, "frame_idx": 75, "global_frame_idx": 23128, 
"task_index": 24}, {"db_idx": 23129, "episode_idx": 120, "frame_idx": 76, "global_frame_idx": 23129, "task_index": 24}, {"db_idx": 23130, "episode_idx": 120, "frame_idx": 77, "global_frame_idx": 23130, "task_index": 24}, {"db_idx": 23131, "episode_idx": 120, "frame_idx": 78, "global_frame_idx": 23131, "task_index": 24}, {"db_idx": 23132, "episode_idx": 120, "frame_idx": 79, "global_frame_idx": 23132, "task_index": 24}, {"db_idx": 23133, "episode_idx": 120, "frame_idx": 80, "global_frame_idx": 23133, "task_index": 24}, {"db_idx": 23134, "episode_idx": 120, "frame_idx": 81, "global_frame_idx": 23134, "task_index": 24}, {"db_idx": 23135, "episode_idx": 120, "frame_idx": 82, "global_frame_idx": 23135, "task_index": 24}, {"db_idx": 23136, "episode_idx": 120, "frame_idx": 83, "global_frame_idx": 23136, "task_index": 24}, {"db_idx": 23137, "episode_idx": 120, "frame_idx": 84, "global_frame_idx": 23137, "task_index": 24}, {"db_idx": 23138, "episode_idx": 120, "frame_idx": 85, "global_frame_idx": 23138, "task_index": 24}, {"db_idx": 23139, "episode_idx": 120, "frame_idx": 86, "global_frame_idx": 23139, "task_index": 24}, {"db_idx": 23140, "episode_idx": 120, "frame_idx": 87, "global_frame_idx": 23140, "task_index": 24}, {"db_idx": 23141, "episode_idx": 120, "frame_idx": 88, "global_frame_idx": 23141, "task_index": 24}, {"db_idx": 23142, "episode_idx": 120, "frame_idx": 89, "global_frame_idx": 23142, "task_index": 24}, {"db_idx": 23143, "episode_idx": 120, "frame_idx": 90, "global_frame_idx": 23143, "task_index": 24}, {"db_idx": 23144, "episode_idx": 120, "frame_idx": 91, "global_frame_idx": 23144, "task_index": 24}, {"db_idx": 23145, "episode_idx": 120, "frame_idx": 92, "global_frame_idx": 23145, "task_index": 24}, {"db_idx": 23146, "episode_idx": 120, "frame_idx": 93, "global_frame_idx": 23146, "task_index": 24}, {"db_idx": 23147, "episode_idx": 120, "frame_idx": 94, "global_frame_idx": 23147, "task_index": 24}, {"db_idx": 23148, "episode_idx": 120, "frame_idx": 95, 
"global_frame_idx": 23148, "task_index": 24}, {"db_idx": 23149, "episode_idx": 120, "frame_idx": 96, "global_frame_idx": 23149, "task_index": 24}, {"db_idx": 23150, "episode_idx": 120, "frame_idx": 97, "global_frame_idx": 23150, "task_index": 24}, {"db_idx": 23151, "episode_idx": 120, "frame_idx": 98, "global_frame_idx": 23151, "task_index": 24}, {"db_idx": 23152, "episode_idx": 120, "frame_idx": 99, "global_frame_idx": 23152, "task_index": 24}, {"db_idx": 23153, "episode_idx": 120, "frame_idx": 100, "global_frame_idx": 23153, "task_index": 24}, {"db_idx": 23154, "episode_idx": 120, "frame_idx": 101, "global_frame_idx": 23154, "task_index": 24}, {"db_idx": 23155, "episode_idx": 120, "frame_idx": 102, "global_frame_idx": 23155, "task_index": 24}, {"db_idx": 23156, "episode_idx": 120, "frame_idx": 103, "global_frame_idx": 23156, "task_index": 24}, {"db_idx": 23157, "episode_idx": 120, "frame_idx": 104, "global_frame_idx": 23157, "task_index": 24}, {"db_idx": 23158, "episode_idx": 120, "frame_idx": 105, "global_frame_idx": 23158, "task_index": 24}, {"db_idx": 23159, "episode_idx": 120, "frame_idx": 106, "global_frame_idx": 23159, "task_index": 24}, {"db_idx": 23160, "episode_idx": 120, "frame_idx": 107, "global_frame_idx": 23160, "task_index": 24}, {"db_idx": 23161, "episode_idx": 120, "frame_idx": 108, "global_frame_idx": 23161, "task_index": 24}, {"db_idx": 23162, "episode_idx": 120, "frame_idx": 109, "global_frame_idx": 23162, "task_index": 24}, {"db_idx": 23163, "episode_idx": 120, "frame_idx": 110, "global_frame_idx": 23163, "task_index": 24}, {"db_idx": 23164, "episode_idx": 120, "frame_idx": 111, "global_frame_idx": 23164, "task_index": 24}, {"db_idx": 23165, "episode_idx": 120, "frame_idx": 112, "global_frame_idx": 23165, "task_index": 24}, {"db_idx": 23166, "episode_idx": 120, "frame_idx": 113, "global_frame_idx": 23166, "task_index": 24}, {"db_idx": 23167, "episode_idx": 120, "frame_idx": 114, "global_frame_idx": 23167, "task_index": 24}, {"db_idx": 23168, 
"episode_idx": 120, "frame_idx": 115, "global_frame_idx": 23168, "task_index": 24}, {"db_idx": 23169, "episode_idx": 120, "frame_idx": 116, "global_frame_idx": 23169, "task_index": 24}, {"db_idx": 23170, "episode_idx": 120, "frame_idx": 117, "global_frame_idx": 23170, "task_index": 24}, {"db_idx": 23171, "episode_idx": 120, "frame_idx": 118, "global_frame_idx": 23171, "task_index": 24}, {"db_idx": 23172, "episode_idx": 120, "frame_idx": 119, "global_frame_idx": 23172, "task_index": 24}, {"db_idx": 23173, "episode_idx": 120, "frame_idx": 120, "global_frame_idx": 23173, "task_index": 24}, {"db_idx": 23174, "episode_idx": 120, "frame_idx": 121, "global_frame_idx": 23174, "task_index": 24}, {"db_idx": 23175, "episode_idx": 120, "frame_idx": 122, "global_frame_idx": 23175, "task_index": 24}, {"db_idx": 23176, "episode_idx": 120, "frame_idx": 123, "global_frame_idx": 23176, "task_index": 24}, {"db_idx": 23177, "episode_idx": 120, "frame_idx": 124, "global_frame_idx": 23177, "task_index": 24}, {"db_idx": 23178, "episode_idx": 120, "frame_idx": 125, "global_frame_idx": 23178, "task_index": 24}, {"db_idx": 23179, "episode_idx": 120, "frame_idx": 126, "global_frame_idx": 23179, "task_index": 24}, {"db_idx": 23180, "episode_idx": 120, "frame_idx": 127, "global_frame_idx": 23180, "task_index": 24}, {"db_idx": 23181, "episode_idx": 120, "frame_idx": 128, "global_frame_idx": 23181, "task_index": 24}, {"db_idx": 23182, "episode_idx": 120, "frame_idx": 129, "global_frame_idx": 23182, "task_index": 24}, {"db_idx": 23183, "episode_idx": 120, "frame_idx": 130, "global_frame_idx": 23183, "task_index": 24}, {"db_idx": 23184, "episode_idx": 120, "frame_idx": 131, "global_frame_idx": 23184, "task_index": 24}, {"db_idx": 23185, "episode_idx": 120, "frame_idx": 132, "global_frame_idx": 23185, "task_index": 24}, {"db_idx": 23186, "episode_idx": 120, "frame_idx": 133, "global_frame_idx": 23186, "task_index": 24}, {"db_idx": 23187, "episode_idx": 120, "frame_idx": 134, "global_frame_idx": 
23187, "task_index": 24}, {"db_idx": 23188, "episode_idx": 120, "frame_idx": 135, "global_frame_idx": 23188, "task_index": 24}, {"db_idx": 23189, "episode_idx": 120, "frame_idx": 136, "global_frame_idx": 23189, "task_index": 24}, {"db_idx": 23190, "episode_idx": 120, "frame_idx": 137, "global_frame_idx": 23190, "task_index": 24}, {"db_idx": 23191, "episode_idx": 120, "frame_idx": 138, "global_frame_idx": 23191, "task_index": 24}, {"db_idx": 23192, "episode_idx": 120, "frame_idx": 139, "global_frame_idx": 23192, "task_index": 24}, {"db_idx": 23193, "episode_idx": 120, "frame_idx": 140, "global_frame_idx": 23193, "task_index": 24}, {"db_idx": 23194, "episode_idx": 120, "frame_idx": 141, "global_frame_idx": 23194, "task_index": 24}, {"db_idx": 23195, "episode_idx": 120, "frame_idx": 142, "global_frame_idx": 23195, "task_index": 24}, {"db_idx": 23196, "episode_idx": 120, "frame_idx": 143, "global_frame_idx": 23196, "task_index": 24}, {"db_idx": 23197, "episode_idx": 120, "frame_idx": 144, "global_frame_idx": 23197, "task_index": 24}, {"db_idx": 23198, "episode_idx": 120, "frame_idx": 145, "global_frame_idx": 23198, "task_index": 24}, {"db_idx": 23199, "episode_idx": 120, "frame_idx": 146, "global_frame_idx": 23199, "task_index": 24}, {"db_idx": 23200, "episode_idx": 120, "frame_idx": 147, "global_frame_idx": 23200, "task_index": 24}, {"db_idx": 23201, "episode_idx": 120, "frame_idx": 148, "global_frame_idx": 23201, "task_index": 24}, {"db_idx": 23202, "episode_idx": 121, "frame_idx": 0, "global_frame_idx": 23202, "task_index": 24}, {"db_idx": 23203, "episode_idx": 121, "frame_idx": 1, "global_frame_idx": 23203, "task_index": 24}, {"db_idx": 23204, "episode_idx": 121, "frame_idx": 2, "global_frame_idx": 23204, "task_index": 24}, {"db_idx": 23205, "episode_idx": 121, "frame_idx": 3, "global_frame_idx": 23205, "task_index": 24}, {"db_idx": 23206, "episode_idx": 121, "frame_idx": 4, "global_frame_idx": 23206, "task_index": 24}, {"db_idx": 23207, "episode_idx": 121, 
"frame_idx": 5, "global_frame_idx": 23207, "task_index": 24}, {"db_idx": 23208, "episode_idx": 121, "frame_idx": 6, "global_frame_idx": 23208, "task_index": 24}, {"db_idx": 23209, "episode_idx": 121, "frame_idx": 7, "global_frame_idx": 23209, "task_index": 24}, {"db_idx": 23210, "episode_idx": 121, "frame_idx": 8, "global_frame_idx": 23210, "task_index": 24}, {"db_idx": 23211, "episode_idx": 121, "frame_idx": 9, "global_frame_idx": 23211, "task_index": 24}, {"db_idx": 23212, "episode_idx": 121, "frame_idx": 10, "global_frame_idx": 23212, "task_index": 24}, {"db_idx": 23213, "episode_idx": 121, "frame_idx": 11, "global_frame_idx": 23213, "task_index": 24}, {"db_idx": 23214, "episode_idx": 121, "frame_idx": 12, "global_frame_idx": 23214, "task_index": 24}, {"db_idx": 23215, "episode_idx": 121, "frame_idx": 13, "global_frame_idx": 23215, "task_index": 24}, {"db_idx": 23216, "episode_idx": 121, "frame_idx": 14, "global_frame_idx": 23216, "task_index": 24}, {"db_idx": 23217, "episode_idx": 121, "frame_idx": 15, "global_frame_idx": 23217, "task_index": 24}, {"db_idx": 23218, "episode_idx": 121, "frame_idx": 16, "global_frame_idx": 23218, "task_index": 24}, {"db_idx": 23219, "episode_idx": 121, "frame_idx": 17, "global_frame_idx": 23219, "task_index": 24}, {"db_idx": 23220, "episode_idx": 121, "frame_idx": 18, "global_frame_idx": 23220, "task_index": 24}, {"db_idx": 23221, "episode_idx": 121, "frame_idx": 19, "global_frame_idx": 23221, "task_index": 24}, {"db_idx": 23222, "episode_idx": 121, "frame_idx": 20, "global_frame_idx": 23222, "task_index": 24}, {"db_idx": 23223, "episode_idx": 121, "frame_idx": 21, "global_frame_idx": 23223, "task_index": 24}, {"db_idx": 23224, "episode_idx": 121, "frame_idx": 22, "global_frame_idx": 23224, "task_index": 24}, {"db_idx": 23225, "episode_idx": 121, "frame_idx": 23, "global_frame_idx": 23225, "task_index": 24}, {"db_idx": 23226, "episode_idx": 121, "frame_idx": 24, "global_frame_idx": 23226, "task_index": 24}, {"db_idx": 23227, 
"episode_idx": 121, "frame_idx": 25, "global_frame_idx": 23227, "task_index": 24}, {"db_idx": 23228, "episode_idx": 121, "frame_idx": 26, "global_frame_idx": 23228, "task_index": 24}, {"db_idx": 23229, "episode_idx": 121, "frame_idx": 27, "global_frame_idx": 23229, "task_index": 24}, {"db_idx": 23230, "episode_idx": 121, "frame_idx": 28, "global_frame_idx": 23230, "task_index": 24}, {"db_idx": 23231, "episode_idx": 121, "frame_idx": 29, "global_frame_idx": 23231, "task_index": 24}, {"db_idx": 23232, "episode_idx": 121, "frame_idx": 30, "global_frame_idx": 23232, "task_index": 24}, {"db_idx": 23233, "episode_idx": 121, "frame_idx": 31, "global_frame_idx": 23233, "task_index": 24}, {"db_idx": 23234, "episode_idx": 121, "frame_idx": 32, "global_frame_idx": 23234, "task_index": 24}, {"db_idx": 23235, "episode_idx": 121, "frame_idx": 33, "global_frame_idx": 23235, "task_index": 24}, {"db_idx": 23236, "episode_idx": 121, "frame_idx": 34, "global_frame_idx": 23236, "task_index": 24}, {"db_idx": 23237, "episode_idx": 121, "frame_idx": 35, "global_frame_idx": 23237, "task_index": 24}, {"db_idx": 23238, "episode_idx": 121, "frame_idx": 36, "global_frame_idx": 23238, "task_index": 24}, {"db_idx": 23239, "episode_idx": 121, "frame_idx": 37, "global_frame_idx": 23239, "task_index": 24}, {"db_idx": 23240, "episode_idx": 121, "frame_idx": 38, "global_frame_idx": 23240, "task_index": 24}, {"db_idx": 23241, "episode_idx": 121, "frame_idx": 39, "global_frame_idx": 23241, "task_index": 24}, {"db_idx": 23242, "episode_idx": 121, "frame_idx": 40, "global_frame_idx": 23242, "task_index": 24}, {"db_idx": 23243, "episode_idx": 121, "frame_idx": 41, "global_frame_idx": 23243, "task_index": 24}, {"db_idx": 23244, "episode_idx": 121, "frame_idx": 42, "global_frame_idx": 23244, "task_index": 24}, {"db_idx": 23245, "episode_idx": 121, "frame_idx": 43, "global_frame_idx": 23245, "task_index": 24}, {"db_idx": 23246, "episode_idx": 121, "frame_idx": 44, "global_frame_idx": 23246, "task_index": 
24}, {"db_idx": 23247, "episode_idx": 121, "frame_idx": 45, "global_frame_idx": 23247, "task_index": 24}, {"db_idx": 23248, "episode_idx": 121, "frame_idx": 46, "global_frame_idx": 23248, "task_index": 24}, {"db_idx": 23249, "episode_idx": 121, "frame_idx": 47, "global_frame_idx": 23249, "task_index": 24}, {"db_idx": 23250, "episode_idx": 121, "frame_idx": 48, "global_frame_idx": 23250, "task_index": 24}, {"db_idx": 23251, "episode_idx": 121, "frame_idx": 49, "global_frame_idx": 23251, "task_index": 24}, {"db_idx": 23252, "episode_idx": 121, "frame_idx": 50, "global_frame_idx": 23252, "task_index": 24}, {"db_idx": 23253, "episode_idx": 121, "frame_idx": 51, "global_frame_idx": 23253, "task_index": 24}, {"db_idx": 23254, "episode_idx": 121, "frame_idx": 52, "global_frame_idx": 23254, "task_index": 24}, {"db_idx": 23255, "episode_idx": 121, "frame_idx": 53, "global_frame_idx": 23255, "task_index": 24}, {"db_idx": 23256, "episode_idx": 121, "frame_idx": 54, "global_frame_idx": 23256, "task_index": 24}, {"db_idx": 23257, "episode_idx": 121, "frame_idx": 55, "global_frame_idx": 23257, "task_index": 24}, {"db_idx": 23258, "episode_idx": 121, "frame_idx": 56, "global_frame_idx": 23258, "task_index": 24}, {"db_idx": 23259, "episode_idx": 121, "frame_idx": 57, "global_frame_idx": 23259, "task_index": 24}, {"db_idx": 23260, "episode_idx": 121, "frame_idx": 58, "global_frame_idx": 23260, "task_index": 24}, {"db_idx": 23261, "episode_idx": 121, "frame_idx": 59, "global_frame_idx": 23261, "task_index": 24}, {"db_idx": 23262, "episode_idx": 121, "frame_idx": 60, "global_frame_idx": 23262, "task_index": 24}, {"db_idx": 23263, "episode_idx": 121, "frame_idx": 61, "global_frame_idx": 23263, "task_index": 24}, {"db_idx": 23264, "episode_idx": 121, "frame_idx": 62, "global_frame_idx": 23264, "task_index": 24}, {"db_idx": 23265, "episode_idx": 121, "frame_idx": 63, "global_frame_idx": 23265, "task_index": 24}, {"db_idx": 23266, "episode_idx": 121, "frame_idx": 64, "global_frame_idx": 
23266, "task_index": 24}, {"db_idx": 23267, "episode_idx": 121, "frame_idx": 65, "global_frame_idx": 23267, "task_index": 24}, {"db_idx": 23268, "episode_idx": 121, "frame_idx": 66, "global_frame_idx": 23268, "task_index": 24}, {"db_idx": 23269, "episode_idx": 121, "frame_idx": 67, "global_frame_idx": 23269, "task_index": 24}, {"db_idx": 23270, "episode_idx": 121, "frame_idx": 68, "global_frame_idx": 23270, "task_index": 24}, {"db_idx": 23271, "episode_idx": 121, "frame_idx": 69, "global_frame_idx": 23271, "task_index": 24}, {"db_idx": 23272, "episode_idx": 121, "frame_idx": 70, "global_frame_idx": 23272, "task_index": 24}, {"db_idx": 23273, "episode_idx": 121, "frame_idx": 71, "global_frame_idx": 23273, "task_index": 24}, {"db_idx": 23274, "episode_idx": 121, "frame_idx": 72, "global_frame_idx": 23274, "task_index": 24}, {"db_idx": 23275, "episode_idx": 121, "frame_idx": 73, "global_frame_idx": 23275, "task_index": 24}, {"db_idx": 23276, "episode_idx": 121, "frame_idx": 74, "global_frame_idx": 23276, "task_index": 24}, {"db_idx": 23277, "episode_idx": 121, "frame_idx": 75, "global_frame_idx": 23277, "task_index": 24}, {"db_idx": 23278, "episode_idx": 121, "frame_idx": 76, "global_frame_idx": 23278, "task_index": 24}, {"db_idx": 23279, "episode_idx": 121, "frame_idx": 77, "global_frame_idx": 23279, "task_index": 24}, {"db_idx": 23280, "episode_idx": 121, "frame_idx": 78, "global_frame_idx": 23280, "task_index": 24}, {"db_idx": 23281, "episode_idx": 121, "frame_idx": 79, "global_frame_idx": 23281, "task_index": 24}, {"db_idx": 23282, "episode_idx": 121, "frame_idx": 80, "global_frame_idx": 23282, "task_index": 24}, {"db_idx": 23283, "episode_idx": 121, "frame_idx": 81, "global_frame_idx": 23283, "task_index": 24}, {"db_idx": 23284, "episode_idx": 121, "frame_idx": 82, "global_frame_idx": 23284, "task_index": 24}, {"db_idx": 23285, "episode_idx": 121, "frame_idx": 83, "global_frame_idx": 23285, "task_index": 24}, {"db_idx": 23286, "episode_idx": 121, "frame_idx": 84, 
"global_frame_idx": 23286, "task_index": 24}, {"db_idx": 23287, "episode_idx": 121, "frame_idx": 85, "global_frame_idx": 23287, "task_index": 24}, {"db_idx": 23288, "episode_idx": 121, "frame_idx": 86, "global_frame_idx": 23288, "task_index": 24}, {"db_idx": 23289, "episode_idx": 121, "frame_idx": 87, "global_frame_idx": 23289, "task_index": 24}, {"db_idx": 23290, "episode_idx": 121, "frame_idx": 88, "global_frame_idx": 23290, "task_index": 24}, {"db_idx": 23291, "episode_idx": 121, "frame_idx": 89, "global_frame_idx": 23291, "task_index": 24}, {"db_idx": 23292, "episode_idx": 121, "frame_idx": 90, "global_frame_idx": 23292, "task_index": 24}, {"db_idx": 23293, "episode_idx": 121, "frame_idx": 91, "global_frame_idx": 23293, "task_index": 24}, {"db_idx": 23294, "episode_idx": 121, "frame_idx": 92, "global_frame_idx": 23294, "task_index": 24}, {"db_idx": 23295, "episode_idx": 121, "frame_idx": 93, "global_frame_idx": 23295, "task_index": 24}, {"db_idx": 23296, "episode_idx": 121, "frame_idx": 94, "global_frame_idx": 23296, "task_index": 24}, {"db_idx": 23297, "episode_idx": 121, "frame_idx": 95, "global_frame_idx": 23297, "task_index": 24}, {"db_idx": 23298, "episode_idx": 121, "frame_idx": 96, "global_frame_idx": 23298, "task_index": 24}, {"db_idx": 23299, "episode_idx": 121, "frame_idx": 97, "global_frame_idx": 23299, "task_index": 24}, {"db_idx": 23300, "episode_idx": 121, "frame_idx": 98, "global_frame_idx": 23300, "task_index": 24}, {"db_idx": 23301, "episode_idx": 121, "frame_idx": 99, "global_frame_idx": 23301, "task_index": 24}, {"db_idx": 23302, "episode_idx": 121, "frame_idx": 100, "global_frame_idx": 23302, "task_index": 24}, {"db_idx": 23303, "episode_idx": 121, "frame_idx": 101, "global_frame_idx": 23303, "task_index": 24}, {"db_idx": 23304, "episode_idx": 121, "frame_idx": 102, "global_frame_idx": 23304, "task_index": 24}, {"db_idx": 23305, "episode_idx": 121, "frame_idx": 103, "global_frame_idx": 23305, "task_index": 24}, {"db_idx": 23306, 
"episode_idx": 121, "frame_idx": 104, "global_frame_idx": 23306, "task_index": 24}, {"db_idx": 23307, "episode_idx": 121, "frame_idx": 105, "global_frame_idx": 23307, "task_index": 24}, {"db_idx": 23308, "episode_idx": 121, "frame_idx": 106, "global_frame_idx": 23308, "task_index": 24}, {"db_idx": 23309, "episode_idx": 121, "frame_idx": 107, "global_frame_idx": 23309, "task_index": 24}, {"db_idx": 23310, "episode_idx": 121, "frame_idx": 108, "global_frame_idx": 23310, "task_index": 24}, {"db_idx": 23311, "episode_idx": 121, "frame_idx": 109, "global_frame_idx": 23311, "task_index": 24}, {"db_idx": 23312, "episode_idx": 121, "frame_idx": 110, "global_frame_idx": 23312, "task_index": 24}, {"db_idx": 23313, "episode_idx": 121, "frame_idx": 111, "global_frame_idx": 23313, "task_index": 24}, {"db_idx": 23314, "episode_idx": 121, "frame_idx": 112, "global_frame_idx": 23314, "task_index": 24}, {"db_idx": 23315, "episode_idx": 121, "frame_idx": 113, "global_frame_idx": 23315, "task_index": 24}, {"db_idx": 23316, "episode_idx": 121, "frame_idx": 114, "global_frame_idx": 23316, "task_index": 24}, {"db_idx": 23317, "episode_idx": 121, "frame_idx": 115, "global_frame_idx": 23317, "task_index": 24}, {"db_idx": 23318, "episode_idx": 121, "frame_idx": 116, "global_frame_idx": 23318, "task_index": 24}, {"db_idx": 23319, "episode_idx": 121, "frame_idx": 117, "global_frame_idx": 23319, "task_index": 24}, {"db_idx": 23320, "episode_idx": 122, "frame_idx": 0, "global_frame_idx": 23320, "task_index": 24}, {"db_idx": 23321, "episode_idx": 122, "frame_idx": 1, "global_frame_idx": 23321, "task_index": 24}, {"db_idx": 23322, "episode_idx": 122, "frame_idx": 2, "global_frame_idx": 23322, "task_index": 24}, {"db_idx": 23323, "episode_idx": 122, "frame_idx": 3, "global_frame_idx": 23323, "task_index": 24}, {"db_idx": 23324, "episode_idx": 122, "frame_idx": 4, "global_frame_idx": 23324, "task_index": 24}, {"db_idx": 23325, "episode_idx": 122, "frame_idx": 5, "global_frame_idx": 23325, 
"task_index": 24}, {"db_idx": 23326, "episode_idx": 122, "frame_idx": 6, "global_frame_idx": 23326, "task_index": 24}, {"db_idx": 23327, "episode_idx": 122, "frame_idx": 7, "global_frame_idx": 23327, "task_index": 24}, {"db_idx": 23328, "episode_idx": 122, "frame_idx": 8, "global_frame_idx": 23328, "task_index": 24}, {"db_idx": 23329, "episode_idx": 122, "frame_idx": 9, "global_frame_idx": 23329, "task_index": 24}, {"db_idx": 23330, "episode_idx": 122, "frame_idx": 10, "global_frame_idx": 23330, "task_index": 24}, {"db_idx": 23331, "episode_idx": 122, "frame_idx": 11, "global_frame_idx": 23331, "task_index": 24}, {"db_idx": 23332, "episode_idx": 122, "frame_idx": 12, "global_frame_idx": 23332, "task_index": 24}, {"db_idx": 23333, "episode_idx": 122, "frame_idx": 13, "global_frame_idx": 23333, "task_index": 24}, {"db_idx": 23334, "episode_idx": 122, "frame_idx": 14, "global_frame_idx": 23334, "task_index": 24}, {"db_idx": 23335, "episode_idx": 122, "frame_idx": 15, "global_frame_idx": 23335, "task_index": 24}, {"db_idx": 23336, "episode_idx": 122, "frame_idx": 16, "global_frame_idx": 23336, "task_index": 24}, {"db_idx": 23337, "episode_idx": 122, "frame_idx": 17, "global_frame_idx": 23337, "task_index": 24}, {"db_idx": 23338, "episode_idx": 122, "frame_idx": 18, "global_frame_idx": 23338, "task_index": 24}, {"db_idx": 23339, "episode_idx": 122, "frame_idx": 19, "global_frame_idx": 23339, "task_index": 24}, {"db_idx": 23340, "episode_idx": 122, "frame_idx": 20, "global_frame_idx": 23340, "task_index": 24}, {"db_idx": 23341, "episode_idx": 122, "frame_idx": 21, "global_frame_idx": 23341, "task_index": 24}, {"db_idx": 23342, "episode_idx": 122, "frame_idx": 22, "global_frame_idx": 23342, "task_index": 24}, {"db_idx": 23343, "episode_idx": 122, "frame_idx": 23, "global_frame_idx": 23343, "task_index": 24}, {"db_idx": 23344, "episode_idx": 122, "frame_idx": 24, "global_frame_idx": 23344, "task_index": 24}, {"db_idx": 23345, "episode_idx": 122, "frame_idx": 25, 
"global_frame_idx": 23345, "task_index": 24}, {"db_idx": 23346, "episode_idx": 122, "frame_idx": 26, "global_frame_idx": 23346, "task_index": 24}, {"db_idx": 23347, "episode_idx": 122, "frame_idx": 27, "global_frame_idx": 23347, "task_index": 24}, {"db_idx": 23348, "episode_idx": 122, "frame_idx": 28, "global_frame_idx": 23348, "task_index": 24}, {"db_idx": 23349, "episode_idx": 122, "frame_idx": 29, "global_frame_idx": 23349, "task_index": 24}, {"db_idx": 23350, "episode_idx": 122, "frame_idx": 30, "global_frame_idx": 23350, "task_index": 24}, {"db_idx": 23351, "episode_idx": 122, "frame_idx": 31, "global_frame_idx": 23351, "task_index": 24}, {"db_idx": 23352, "episode_idx": 122, "frame_idx": 32, "global_frame_idx": 23352, "task_index": 24}, {"db_idx": 23353, "episode_idx": 122, "frame_idx": 33, "global_frame_idx": 23353, "task_index": 24}, {"db_idx": 23354, "episode_idx": 122, "frame_idx": 34, "global_frame_idx": 23354, "task_index": 24}, {"db_idx": 23355, "episode_idx": 122, "frame_idx": 35, "global_frame_idx": 23355, "task_index": 24}, {"db_idx": 23356, "episode_idx": 122, "frame_idx": 36, "global_frame_idx": 23356, "task_index": 24}, {"db_idx": 23357, "episode_idx": 122, "frame_idx": 37, "global_frame_idx": 23357, "task_index": 24}, {"db_idx": 23358, "episode_idx": 122, "frame_idx": 38, "global_frame_idx": 23358, "task_index": 24}, {"db_idx": 23359, "episode_idx": 122, "frame_idx": 39, "global_frame_idx": 23359, "task_index": 24}, {"db_idx": 23360, "episode_idx": 122, "frame_idx": 40, "global_frame_idx": 23360, "task_index": 24}, {"db_idx": 23361, "episode_idx": 122, "frame_idx": 41, "global_frame_idx": 23361, "task_index": 24}, {"db_idx": 23362, "episode_idx": 122, "frame_idx": 42, "global_frame_idx": 23362, "task_index": 24}, {"db_idx": 23363, "episode_idx": 122, "frame_idx": 43, "global_frame_idx": 23363, "task_index": 24}, {"db_idx": 23364, "episode_idx": 122, "frame_idx": 44, "global_frame_idx": 23364, "task_index": 24}, {"db_idx": 23365, "episode_idx": 
122, "frame_idx": 45, "global_frame_idx": 23365, "task_index": 24}, {"db_idx": 23366, "episode_idx": 122, "frame_idx": 46, "global_frame_idx": 23366, "task_index": 24}, {"db_idx": 23367, "episode_idx": 122, "frame_idx": 47, "global_frame_idx": 23367, "task_index": 24}, {"db_idx": 23368, "episode_idx": 122, "frame_idx": 48, "global_frame_idx": 23368, "task_index": 24}, {"db_idx": 23369, "episode_idx": 122, "frame_idx": 49, "global_frame_idx": 23369, "task_index": 24}, {"db_idx": 23370, "episode_idx": 122, "frame_idx": 50, "global_frame_idx": 23370, "task_index": 24}, {"db_idx": 23371, "episode_idx": 122, "frame_idx": 51, "global_frame_idx": 23371, "task_index": 24}, {"db_idx": 23372, "episode_idx": 122, "frame_idx": 52, "global_frame_idx": 23372, "task_index": 24}, {"db_idx": 23373, "episode_idx": 122, "frame_idx": 53, "global_frame_idx": 23373, "task_index": 24}, {"db_idx": 23374, "episode_idx": 122, "frame_idx": 54, "global_frame_idx": 23374, "task_index": 24}, {"db_idx": 23375, "episode_idx": 122, "frame_idx": 55, "global_frame_idx": 23375, "task_index": 24}, {"db_idx": 23376, "episode_idx": 122, "frame_idx": 56, "global_frame_idx": 23376, "task_index": 24}, {"db_idx": 23377, "episode_idx": 122, "frame_idx": 57, "global_frame_idx": 23377, "task_index": 24}, {"db_idx": 23378, "episode_idx": 122, "frame_idx": 58, "global_frame_idx": 23378, "task_index": 24}, {"db_idx": 23379, "episode_idx": 122, "frame_idx": 59, "global_frame_idx": 23379, "task_index": 24}, {"db_idx": 23380, "episode_idx": 122, "frame_idx": 60, "global_frame_idx": 23380, "task_index": 24}, {"db_idx": 23381, "episode_idx": 122, "frame_idx": 61, "global_frame_idx": 23381, "task_index": 24}, {"db_idx": 23382, "episode_idx": 122, "frame_idx": 62, "global_frame_idx": 23382, "task_index": 24}, {"db_idx": 23383, "episode_idx": 122, "frame_idx": 63, "global_frame_idx": 23383, "task_index": 24}, {"db_idx": 23384, "episode_idx": 122, "frame_idx": 64, "global_frame_idx": 23384, "task_index": 24}, {"db_idx": 
23385, "episode_idx": 122, "frame_idx": 65, "global_frame_idx": 23385, "task_index": 24}, {"db_idx": 23386, "episode_idx": 122, "frame_idx": 66, "global_frame_idx": 23386, "task_index": 24}, {"db_idx": 23387, "episode_idx": 122, "frame_idx": 67, "global_frame_idx": 23387, "task_index": 24}, {"db_idx": 23388, "episode_idx": 122, "frame_idx": 68, "global_frame_idx": 23388, "task_index": 24}, {"db_idx": 23389, "episode_idx": 122, "frame_idx": 69, "global_frame_idx": 23389, "task_index": 24}, {"db_idx": 23390, "episode_idx": 122, "frame_idx": 70, "global_frame_idx": 23390, "task_index": 24}, {"db_idx": 23391, "episode_idx": 122, "frame_idx": 71, "global_frame_idx": 23391, "task_index": 24}, {"db_idx": 23392, "episode_idx": 122, "frame_idx": 72, "global_frame_idx": 23392, "task_index": 24}, {"db_idx": 23393, "episode_idx": 122, "frame_idx": 73, "global_frame_idx": 23393, "task_index": 24}, {"db_idx": 23394, "episode_idx": 122, "frame_idx": 74, "global_frame_idx": 23394, "task_index": 24}, {"db_idx": 23395, "episode_idx": 122, "frame_idx": 75, "global_frame_idx": 23395, "task_index": 24}, {"db_idx": 23396, "episode_idx": 122, "frame_idx": 76, "global_frame_idx": 23396, "task_index": 24}, {"db_idx": 23397, "episode_idx": 122, "frame_idx": 77, "global_frame_idx": 23397, "task_index": 24}, {"db_idx": 23398, "episode_idx": 122, "frame_idx": 78, "global_frame_idx": 23398, "task_index": 24}, {"db_idx": 23399, "episode_idx": 122, "frame_idx": 79, "global_frame_idx": 23399, "task_index": 24}, {"db_idx": 23400, "episode_idx": 122, "frame_idx": 80, "global_frame_idx": 23400, "task_index": 24}, {"db_idx": 23401, "episode_idx": 122, "frame_idx": 81, "global_frame_idx": 23401, "task_index": 24}, {"db_idx": 23402, "episode_idx": 122, "frame_idx": 82, "global_frame_idx": 23402, "task_index": 24}, {"db_idx": 23403, "episode_idx": 122, "frame_idx": 83, "global_frame_idx": 23403, "task_index": 24}, {"db_idx": 23404, "episode_idx": 122, "frame_idx": 84, "global_frame_idx": 23404, 
"task_index": 24}, {"db_idx": 23405, "episode_idx": 122, "frame_idx": 85, "global_frame_idx": 23405, "task_index": 24}, {"db_idx": 23406, "episode_idx": 122, "frame_idx": 86, "global_frame_idx": 23406, "task_index": 24}, {"db_idx": 23407, "episode_idx": 122, "frame_idx": 87, "global_frame_idx": 23407, "task_index": 24}, {"db_idx": 23408, "episode_idx": 122, "frame_idx": 88, "global_frame_idx": 23408, "task_index": 24}, {"db_idx": 23409, "episode_idx": 122, "frame_idx": 89, "global_frame_idx": 23409, "task_index": 24}, {"db_idx": 23410, "episode_idx": 122, "frame_idx": 90, "global_frame_idx": 23410, "task_index": 24}, {"db_idx": 23411, "episode_idx": 122, "frame_idx": 91, "global_frame_idx": 23411, "task_index": 24}, {"db_idx": 23412, "episode_idx": 122, "frame_idx": 92, "global_frame_idx": 23412, "task_index": 24}, {"db_idx": 23413, "episode_idx": 122, "frame_idx": 93, "global_frame_idx": 23413, "task_index": 24}, {"db_idx": 23414, "episode_idx": 122, "frame_idx": 94, "global_frame_idx": 23414, "task_index": 24}, {"db_idx": 23415, "episode_idx": 122, "frame_idx": 95, "global_frame_idx": 23415, "task_index": 24}, {"db_idx": 23416, "episode_idx": 122, "frame_idx": 96, "global_frame_idx": 23416, "task_index": 24}, {"db_idx": 23417, "episode_idx": 122, "frame_idx": 97, "global_frame_idx": 23417, "task_index": 24}, {"db_idx": 23418, "episode_idx": 122, "frame_idx": 98, "global_frame_idx": 23418, "task_index": 24}, {"db_idx": 23419, "episode_idx": 122, "frame_idx": 99, "global_frame_idx": 23419, "task_index": 24}, {"db_idx": 23420, "episode_idx": 122, "frame_idx": 100, "global_frame_idx": 23420, "task_index": 24}, {"db_idx": 23421, "episode_idx": 122, "frame_idx": 101, "global_frame_idx": 23421, "task_index": 24}, {"db_idx": 23422, "episode_idx": 122, "frame_idx": 102, "global_frame_idx": 23422, "task_index": 24}, {"db_idx": 23423, "episode_idx": 122, "frame_idx": 103, "global_frame_idx": 23423, "task_index": 24}, {"db_idx": 23424, "episode_idx": 122, "frame_idx": 104, 
"global_frame_idx": 23424, "task_index": 24}, {"db_idx": 23425, "episode_idx": 122, "frame_idx": 105, "global_frame_idx": 23425, "task_index": 24}, {"db_idx": 23426, "episode_idx": 122, "frame_idx": 106, "global_frame_idx": 23426, "task_index": 24}, {"db_idx": 23427, "episode_idx": 122, "frame_idx": 107, "global_frame_idx": 23427, "task_index": 24}, {"db_idx": 23428, "episode_idx": 122, "frame_idx": 108, "global_frame_idx": 23428, "task_index": 24}, {"db_idx": 23429, "episode_idx": 122, "frame_idx": 109, "global_frame_idx": 23429, "task_index": 24}, {"db_idx": 23430, "episode_idx": 122, "frame_idx": 110, "global_frame_idx": 23430, "task_index": 24}, {"db_idx": 23431, "episode_idx": 122, "frame_idx": 111, "global_frame_idx": 23431, "task_index": 24}, {"db_idx": 23432, "episode_idx": 122, "frame_idx": 112, "global_frame_idx": 23432, "task_index": 24}, {"db_idx": 23433, "episode_idx": 122, "frame_idx": 113, "global_frame_idx": 23433, "task_index": 24}, {"db_idx": 23434, "episode_idx": 122, "frame_idx": 114, "global_frame_idx": 23434, "task_index": 24}, {"db_idx": 23435, "episode_idx": 122, "frame_idx": 115, "global_frame_idx": 23435, "task_index": 24}, {"db_idx": 23436, "episode_idx": 122, "frame_idx": 116, "global_frame_idx": 23436, "task_index": 24}, {"db_idx": 23437, "episode_idx": 122, "frame_idx": 117, "global_frame_idx": 23437, "task_index": 24}, {"db_idx": 23438, "episode_idx": 122, "frame_idx": 118, "global_frame_idx": 23438, "task_index": 24}, {"db_idx": 23439, "episode_idx": 122, "frame_idx": 119, "global_frame_idx": 23439, "task_index": 24}, {"db_idx": 23440, "episode_idx": 122, "frame_idx": 120, "global_frame_idx": 23440, "task_index": 24}, {"db_idx": 23441, "episode_idx": 122, "frame_idx": 121, "global_frame_idx": 23441, "task_index": 24}, {"db_idx": 23442, "episode_idx": 122, "frame_idx": 122, "global_frame_idx": 23442, "task_index": 24}, {"db_idx": 23443, "episode_idx": 122, "frame_idx": 123, "global_frame_idx": 23443, "task_index": 24}, {"db_idx": 
23444, "episode_idx": 122, "frame_idx": 124, "global_frame_idx": 23444, "task_index": 24}, {"db_idx": 23445, "episode_idx": 122, "frame_idx": 125, "global_frame_idx": 23445, "task_index": 24}, {"db_idx": 23446, "episode_idx": 122, "frame_idx": 126, "global_frame_idx": 23446, "task_index": 24}, {"db_idx": 23447, "episode_idx": 122, "frame_idx": 127, "global_frame_idx": 23447, "task_index": 24}, {"db_idx": 23448, "episode_idx": 122, "frame_idx": 128, "global_frame_idx": 23448, "task_index": 24}, {"db_idx": 23449, "episode_idx": 122, "frame_idx": 129, "global_frame_idx": 23449, "task_index": 24}, {"db_idx": 23450, "episode_idx": 122, "frame_idx": 130, "global_frame_idx": 23450, "task_index": 24}, {"db_idx": 23451, "episode_idx": 122, "frame_idx": 131, "global_frame_idx": 23451, "task_index": 24}, {"db_idx": 23452, "episode_idx": 122, "frame_idx": 132, "global_frame_idx": 23452, "task_index": 24}, {"db_idx": 23453, "episode_idx": 122, "frame_idx": 133, "global_frame_idx": 23453, "task_index": 24}, {"db_idx": 23454, "episode_idx": 122, "frame_idx": 134, "global_frame_idx": 23454, "task_index": 24}, {"db_idx": 23455, "episode_idx": 122, "frame_idx": 135, "global_frame_idx": 23455, "task_index": 24}, {"db_idx": 23456, "episode_idx": 122, "frame_idx": 136, "global_frame_idx": 23456, "task_index": 24}, {"db_idx": 23457, "episode_idx": 122, "frame_idx": 137, "global_frame_idx": 23457, "task_index": 24}, {"db_idx": 23458, "episode_idx": 122, "frame_idx": 138, "global_frame_idx": 23458, "task_index": 24}, {"db_idx": 23459, "episode_idx": 123, "frame_idx": 0, "global_frame_idx": 23459, "task_index": 24}, {"db_idx": 23460, "episode_idx": 123, "frame_idx": 1, "global_frame_idx": 23460, "task_index": 24}, {"db_idx": 23461, "episode_idx": 123, "frame_idx": 2, "global_frame_idx": 23461, "task_index": 24}, {"db_idx": 23462, "episode_idx": 123, "frame_idx": 3, "global_frame_idx": 23462, "task_index": 24}, {"db_idx": 23463, "episode_idx": 123, "frame_idx": 4, "global_frame_idx": 23463, 
"task_index": 24}, {"db_idx": 23464, "episode_idx": 123, "frame_idx": 5, "global_frame_idx": 23464, "task_index": 24}, {"db_idx": 23465, "episode_idx": 123, "frame_idx": 6, "global_frame_idx": 23465, "task_index": 24}, {"db_idx": 23466, "episode_idx": 123, "frame_idx": 7, "global_frame_idx": 23466, "task_index": 24}, {"db_idx": 23467, "episode_idx": 123, "frame_idx": 8, "global_frame_idx": 23467, "task_index": 24}, {"db_idx": 23468, "episode_idx": 123, "frame_idx": 9, "global_frame_idx": 23468, "task_index": 24}, {"db_idx": 23469, "episode_idx": 123, "frame_idx": 10, "global_frame_idx": 23469, "task_index": 24}, {"db_idx": 23470, "episode_idx": 123, "frame_idx": 11, "global_frame_idx": 23470, "task_index": 24}, {"db_idx": 23471, "episode_idx": 123, "frame_idx": 12, "global_frame_idx": 23471, "task_index": 24}, {"db_idx": 23472, "episode_idx": 123, "frame_idx": 13, "global_frame_idx": 23472, "task_index": 24}, {"db_idx": 23473, "episode_idx": 123, "frame_idx": 14, "global_frame_idx": 23473, "task_index": 24}, {"db_idx": 23474, "episode_idx": 123, "frame_idx": 15, "global_frame_idx": 23474, "task_index": 24}, {"db_idx": 23475, "episode_idx": 123, "frame_idx": 16, "global_frame_idx": 23475, "task_index": 24}, {"db_idx": 23476, "episode_idx": 123, "frame_idx": 17, "global_frame_idx": 23476, "task_index": 24}, {"db_idx": 23477, "episode_idx": 123, "frame_idx": 18, "global_frame_idx": 23477, "task_index": 24}, {"db_idx": 23478, "episode_idx": 123, "frame_idx": 19, "global_frame_idx": 23478, "task_index": 24}, {"db_idx": 23479, "episode_idx": 123, "frame_idx": 20, "global_frame_idx": 23479, "task_index": 24}, {"db_idx": 23480, "episode_idx": 123, "frame_idx": 21, "global_frame_idx": 23480, "task_index": 24}, {"db_idx": 23481, "episode_idx": 123, "frame_idx": 22, "global_frame_idx": 23481, "task_index": 24}, {"db_idx": 23482, "episode_idx": 123, "frame_idx": 23, "global_frame_idx": 23482, "task_index": 24}, {"db_idx": 23483, "episode_idx": 123, "frame_idx": 24, 
"global_frame_idx": 23483, "task_index": 24}, {"db_idx": 23484, "episode_idx": 123, "frame_idx": 25, "global_frame_idx": 23484, "task_index": 24}, {"db_idx": 23485, "episode_idx": 123, "frame_idx": 26, "global_frame_idx": 23485, "task_index": 24}, {"db_idx": 23486, "episode_idx": 123, "frame_idx": 27, "global_frame_idx": 23486, "task_index": 24}, {"db_idx": 23487, "episode_idx": 123, "frame_idx": 28, "global_frame_idx": 23487, "task_index": 24}, {"db_idx": 23488, "episode_idx": 123, "frame_idx": 29, "global_frame_idx": 23488, "task_index": 24}, {"db_idx": 23489, "episode_idx": 123, "frame_idx": 30, "global_frame_idx": 23489, "task_index": 24}, {"db_idx": 23490, "episode_idx": 123, "frame_idx": 31, "global_frame_idx": 23490, "task_index": 24}, {"db_idx": 23491, "episode_idx": 123, "frame_idx": 32, "global_frame_idx": 23491, "task_index": 24}, {"db_idx": 23492, "episode_idx": 123, "frame_idx": 33, "global_frame_idx": 23492, "task_index": 24}, {"db_idx": 23493, "episode_idx": 123, "frame_idx": 34, "global_frame_idx": 23493, "task_index": 24}, {"db_idx": 23494, "episode_idx": 123, "frame_idx": 35, "global_frame_idx": 23494, "task_index": 24}, {"db_idx": 23495, "episode_idx": 123, "frame_idx": 36, "global_frame_idx": 23495, "task_index": 24}, {"db_idx": 23496, "episode_idx": 123, "frame_idx": 37, "global_frame_idx": 23496, "task_index": 24}, {"db_idx": 23497, "episode_idx": 123, "frame_idx": 38, "global_frame_idx": 23497, "task_index": 24}, {"db_idx": 23498, "episode_idx": 123, "frame_idx": 39, "global_frame_idx": 23498, "task_index": 24}, {"db_idx": 23499, "episode_idx": 123, "frame_idx": 40, "global_frame_idx": 23499, "task_index": 24}, {"db_idx": 23500, "episode_idx": 123, "frame_idx": 41, "global_frame_idx": 23500, "task_index": 24}, {"db_idx": 23501, "episode_idx": 123, "frame_idx": 42, "global_frame_idx": 23501, "task_index": 24}, {"db_idx": 23502, "episode_idx": 123, "frame_idx": 43, "global_frame_idx": 23502, "task_index": 24}, {"db_idx": 23503, "episode_idx": 
123, "frame_idx": 44, "global_frame_idx": 23503, "task_index": 24}, {"db_idx": 23504, "episode_idx": 123, "frame_idx": 45, "global_frame_idx": 23504, "task_index": 24}, {"db_idx": 23505, "episode_idx": 123, "frame_idx": 46, "global_frame_idx": 23505, "task_index": 24}, {"db_idx": 23506, "episode_idx": 123, "frame_idx": 47, "global_frame_idx": 23506, "task_index": 24}, {"db_idx": 23507, "episode_idx": 123, "frame_idx": 48, "global_frame_idx": 23507, "task_index": 24}, {"db_idx": 23508, "episode_idx": 123, "frame_idx": 49, "global_frame_idx": 23508, "task_index": 24}, {"db_idx": 23509, "episode_idx": 123, "frame_idx": 50, "global_frame_idx": 23509, "task_index": 24}, {"db_idx": 23510, "episode_idx": 123, "frame_idx": 51, "global_frame_idx": 23510, "task_index": 24}, {"db_idx": 23511, "episode_idx": 123, "frame_idx": 52, "global_frame_idx": 23511, "task_index": 24}, {"db_idx": 23512, "episode_idx": 123, "frame_idx": 53, "global_frame_idx": 23512, "task_index": 24}, {"db_idx": 23513, "episode_idx": 123, "frame_idx": 54, "global_frame_idx": 23513, "task_index": 24}, {"db_idx": 23514, "episode_idx": 123, "frame_idx": 55, "global_frame_idx": 23514, "task_index": 24}, {"db_idx": 23515, "episode_idx": 123, "frame_idx": 56, "global_frame_idx": 23515, "task_index": 24}, {"db_idx": 23516, "episode_idx": 123, "frame_idx": 57, "global_frame_idx": 23516, "task_index": 24}, {"db_idx": 23517, "episode_idx": 123, "frame_idx": 58, "global_frame_idx": 23517, "task_index": 24}, {"db_idx": 23518, "episode_idx": 123, "frame_idx": 59, "global_frame_idx": 23518, "task_index": 24}, {"db_idx": 23519, "episode_idx": 123, "frame_idx": 60, "global_frame_idx": 23519, "task_index": 24}, {"db_idx": 23520, "episode_idx": 123, "frame_idx": 61, "global_frame_idx": 23520, "task_index": 24}, {"db_idx": 23521, "episode_idx": 123, "frame_idx": 62, "global_frame_idx": 23521, "task_index": 24}, {"db_idx": 23522, "episode_idx": 123, "frame_idx": 63, "global_frame_idx": 23522, "task_index": 24}, {"db_idx": 
23523, "episode_idx": 123, "frame_idx": 64, "global_frame_idx": 23523, "task_index": 24}, {"db_idx": 23524, "episode_idx": 123, "frame_idx": 65, "global_frame_idx": 23524, "task_index": 24}, {"db_idx": 23525, "episode_idx": 123, "frame_idx": 66, "global_frame_idx": 23525, "task_index": 24}, {"db_idx": 23526, "episode_idx": 123, "frame_idx": 67, "global_frame_idx": 23526, "task_index": 24}, {"db_idx": 23527, "episode_idx": 123, "frame_idx": 68, "global_frame_idx": 23527, "task_index": 24}, {"db_idx": 23528, "episode_idx": 123, "frame_idx": 69, "global_frame_idx": 23528, "task_index": 24}, {"db_idx": 23529, "episode_idx": 123, "frame_idx": 70, "global_frame_idx": 23529, "task_index": 24}, {"db_idx": 23530, "episode_idx": 123, "frame_idx": 71, "global_frame_idx": 23530, "task_index": 24}, {"db_idx": 23531, "episode_idx": 123, "frame_idx": 72, "global_frame_idx": 23531, "task_index": 24}, {"db_idx": 23532, "episode_idx": 123, "frame_idx": 73, "global_frame_idx": 23532, "task_index": 24}, {"db_idx": 23533, "episode_idx": 123, "frame_idx": 74, "global_frame_idx": 23533, "task_index": 24}, {"db_idx": 23534, "episode_idx": 123, "frame_idx": 75, "global_frame_idx": 23534, "task_index": 24}, {"db_idx": 23535, "episode_idx": 123, "frame_idx": 76, "global_frame_idx": 23535, "task_index": 24}, {"db_idx": 23536, "episode_idx": 123, "frame_idx": 77, "global_frame_idx": 23536, "task_index": 24}, {"db_idx": 23537, "episode_idx": 123, "frame_idx": 78, "global_frame_idx": 23537, "task_index": 24}, {"db_idx": 23538, "episode_idx": 123, "frame_idx": 79, "global_frame_idx": 23538, "task_index": 24}, {"db_idx": 23539, "episode_idx": 123, "frame_idx": 80, "global_frame_idx": 23539, "task_index": 24}, {"db_idx": 23540, "episode_idx": 123, "frame_idx": 81, "global_frame_idx": 23540, "task_index": 24}, {"db_idx": 23541, "episode_idx": 123, "frame_idx": 82, "global_frame_idx": 23541, "task_index": 24}, {"db_idx": 23542, "episode_idx": 123, "frame_idx": 83, "global_frame_idx": 23542, 
"task_index": 24}, {"db_idx": 23543, "episode_idx": 123, "frame_idx": 84, "global_frame_idx": 23543, "task_index": 24}, {"db_idx": 23544, "episode_idx": 123, "frame_idx": 85, "global_frame_idx": 23544, "task_index": 24}, {"db_idx": 23545, "episode_idx": 123, "frame_idx": 86, "global_frame_idx": 23545, "task_index": 24}, {"db_idx": 23546, "episode_idx": 123, "frame_idx": 87, "global_frame_idx": 23546, "task_index": 24}, {"db_idx": 23547, "episode_idx": 123, "frame_idx": 88, "global_frame_idx": 23547, "task_index": 24}, {"db_idx": 23548, "episode_idx": 123, "frame_idx": 89, "global_frame_idx": 23548, "task_index": 24}, {"db_idx": 23549, "episode_idx": 123, "frame_idx": 90, "global_frame_idx": 23549, "task_index": 24}, {"db_idx": 23550, "episode_idx": 123, "frame_idx": 91, "global_frame_idx": 23550, "task_index": 24}, {"db_idx": 23551, "episode_idx": 123, "frame_idx": 92, "global_frame_idx": 23551, "task_index": 24}, {"db_idx": 23552, "episode_idx": 123, "frame_idx": 93, "global_frame_idx": 23552, "task_index": 24}, {"db_idx": 23553, "episode_idx": 123, "frame_idx": 94, "global_frame_idx": 23553, "task_index": 24}, {"db_idx": 23554, "episode_idx": 123, "frame_idx": 95, "global_frame_idx": 23554, "task_index": 24}, {"db_idx": 23555, "episode_idx": 123, "frame_idx": 96, "global_frame_idx": 23555, "task_index": 24}, {"db_idx": 23556, "episode_idx": 123, "frame_idx": 97, "global_frame_idx": 23556, "task_index": 24}, {"db_idx": 23557, "episode_idx": 123, "frame_idx": 98, "global_frame_idx": 23557, "task_index": 24}, {"db_idx": 23558, "episode_idx": 123, "frame_idx": 99, "global_frame_idx": 23558, "task_index": 24}, {"db_idx": 23559, "episode_idx": 123, "frame_idx": 100, "global_frame_idx": 23559, "task_index": 24}, {"db_idx": 23560, "episode_idx": 123, "frame_idx": 101, "global_frame_idx": 23560, "task_index": 24}, {"db_idx": 23561, "episode_idx": 123, "frame_idx": 102, "global_frame_idx": 23561, "task_index": 24}, {"db_idx": 23562, "episode_idx": 123, "frame_idx": 103, 
"global_frame_idx": 23562, "task_index": 24}, {"db_idx": 23563, "episode_idx": 123, "frame_idx": 104, "global_frame_idx": 23563, "task_index": 24}, {"db_idx": 23564, "episode_idx": 123, "frame_idx": 105, "global_frame_idx": 23564, "task_index": 24}, {"db_idx": 23565, "episode_idx": 123, "frame_idx": 106, "global_frame_idx": 23565, "task_index": 24}, {"db_idx": 23566, "episode_idx": 123, "frame_idx": 107, "global_frame_idx": 23566, "task_index": 24}, {"db_idx": 23567, "episode_idx": 123, "frame_idx": 108, "global_frame_idx": 23567, "task_index": 24}, {"db_idx": 23568, "episode_idx": 123, "frame_idx": 109, "global_frame_idx": 23568, "task_index": 24}, {"db_idx": 23569, "episode_idx": 123, "frame_idx": 110, "global_frame_idx": 23569, "task_index": 24}, {"db_idx": 23570, "episode_idx": 123, "frame_idx": 111, "global_frame_idx": 23570, "task_index": 24}, {"db_idx": 23571, "episode_idx": 123, "frame_idx": 112, "global_frame_idx": 23571, "task_index": 24}, {"db_idx": 23572, "episode_idx": 123, "frame_idx": 113, "global_frame_idx": 23572, "task_index": 24}, {"db_idx": 23573, "episode_idx": 123, "frame_idx": 114, "global_frame_idx": 23573, "task_index": 24}, {"db_idx": 23574, "episode_idx": 123, "frame_idx": 115, "global_frame_idx": 23574, "task_index": 24}, {"db_idx": 23575, "episode_idx": 123, "frame_idx": 116, "global_frame_idx": 23575, "task_index": 24}, {"db_idx": 23576, "episode_idx": 123, "frame_idx": 117, "global_frame_idx": 23576, "task_index": 24}, {"db_idx": 23577, "episode_idx": 123, "frame_idx": 118, "global_frame_idx": 23577, "task_index": 24}, {"db_idx": 23578, "episode_idx": 123, "frame_idx": 119, "global_frame_idx": 23578, "task_index": 24}, {"db_idx": 23579, "episode_idx": 123, "frame_idx": 120, "global_frame_idx": 23579, "task_index": 24}, {"db_idx": 23580, "episode_idx": 123, "frame_idx": 121, "global_frame_idx": 23580, "task_index": 24}, {"db_idx": 23581, "episode_idx": 123, "frame_idx": 122, "global_frame_idx": 23581, "task_index": 24}, {"db_idx": 
23582, "episode_idx": 123, "frame_idx": 123, "global_frame_idx": 23582, "task_index": 24}, {"db_idx": 23583, "episode_idx": 123, "frame_idx": 124, "global_frame_idx": 23583, "task_index": 24}, {"db_idx": 23584, "episode_idx": 123, "frame_idx": 125, "global_frame_idx": 23584, "task_index": 24}, {"db_idx": 23585, "episode_idx": 123, "frame_idx": 126, "global_frame_idx": 23585, "task_index": 24}, {"db_idx": 23586, "episode_idx": 123, "frame_idx": 127, "global_frame_idx": 23586, "task_index": 24}, {"db_idx": 23587, "episode_idx": 123, "frame_idx": 128, "global_frame_idx": 23587, "task_index": 24}, {"db_idx": 23588, "episode_idx": 123, "frame_idx": 129, "global_frame_idx": 23588, "task_index": 24}, {"db_idx": 23589, "episode_idx": 123, "frame_idx": 130, "global_frame_idx": 23589, "task_index": 24}, {"db_idx": 23590, "episode_idx": 123, "frame_idx": 131, "global_frame_idx": 23590, "task_index": 24}, {"db_idx": 23591, "episode_idx": 123, "frame_idx": 132, "global_frame_idx": 23591, "task_index": 24}, {"db_idx": 23592, "episode_idx": 123, "frame_idx": 133, "global_frame_idx": 23592, "task_index": 24}, {"db_idx": 23593, "episode_idx": 123, "frame_idx": 134, "global_frame_idx": 23593, "task_index": 24}, {"db_idx": 23594, "episode_idx": 123, "frame_idx": 135, "global_frame_idx": 23594, "task_index": 24}, {"db_idx": 23595, "episode_idx": 123, "frame_idx": 136, "global_frame_idx": 23595, "task_index": 24}, {"db_idx": 23596, "episode_idx": 123, "frame_idx": 137, "global_frame_idx": 23596, "task_index": 24}, {"db_idx": 23597, "episode_idx": 123, "frame_idx": 138, "global_frame_idx": 23597, "task_index": 24}, {"db_idx": 23598, "episode_idx": 123, "frame_idx": 139, "global_frame_idx": 23598, "task_index": 24}, {"db_idx": 23599, "episode_idx": 123, "frame_idx": 140, "global_frame_idx": 23599, "task_index": 24}, {"db_idx": 23600, "episode_idx": 123, "frame_idx": 141, "global_frame_idx": 23600, "task_index": 24}, {"db_idx": 23601, "episode_idx": 123, "frame_idx": 142, 
"global_frame_idx": 23601, "task_index": 24}, {"db_idx": 23602, "episode_idx": 123, "frame_idx": 143, "global_frame_idx": 23602, "task_index": 24}, {"db_idx": 23603, "episode_idx": 123, "frame_idx": 144, "global_frame_idx": 23603, "task_index": 24}, {"db_idx": 23604, "episode_idx": 123, "frame_idx": 145, "global_frame_idx": 23604, "task_index": 24}, {"db_idx": 23605, "episode_idx": 123, "frame_idx": 146, "global_frame_idx": 23605, "task_index": 24}, {"db_idx": 23606, "episode_idx": 123, "frame_idx": 147, "global_frame_idx": 23606, "task_index": 24}, {"db_idx": 23607, "episode_idx": 123, "frame_idx": 148, "global_frame_idx": 23607, "task_index": 24}, {"db_idx": 23608, "episode_idx": 123, "frame_idx": 149, "global_frame_idx": 23608, "task_index": 24}, {"db_idx": 23609, "episode_idx": 123, "frame_idx": 150, "global_frame_idx": 23609, "task_index": 24}, {"db_idx": 23610, "episode_idx": 123, "frame_idx": 151, "global_frame_idx": 23610, "task_index": 24}, {"db_idx": 23611, "episode_idx": 123, "frame_idx": 152, "global_frame_idx": 23611, "task_index": 24}, {"db_idx": 23612, "episode_idx": 123, "frame_idx": 153, "global_frame_idx": 23612, "task_index": 24}, {"db_idx": 23613, "episode_idx": 123, "frame_idx": 154, "global_frame_idx": 23613, "task_index": 24}, {"db_idx": 23614, "episode_idx": 123, "frame_idx": 155, "global_frame_idx": 23614, "task_index": 24}, {"db_idx": 23615, "episode_idx": 123, "frame_idx": 156, "global_frame_idx": 23615, "task_index": 24}, {"db_idx": 23616, "episode_idx": 123, "frame_idx": 157, "global_frame_idx": 23616, "task_index": 24}, {"db_idx": 23617, "episode_idx": 123, "frame_idx": 158, "global_frame_idx": 23617, "task_index": 24}, {"db_idx": 23618, "episode_idx": 123, "frame_idx": 159, "global_frame_idx": 23618, "task_index": 24}, {"db_idx": 23619, "episode_idx": 123, "frame_idx": 160, "global_frame_idx": 23619, "task_index": 24}, {"db_idx": 23620, "episode_idx": 123, "frame_idx": 161, "global_frame_idx": 23620, "task_index": 24}, {"db_idx": 
23621, "episode_idx": 123, "frame_idx": 162, "global_frame_idx": 23621, "task_index": 24}, {"db_idx": 23622, "episode_idx": 123, "frame_idx": 163, "global_frame_idx": 23622, "task_index": 24}, {"db_idx": 23623, "episode_idx": 123, "frame_idx": 164, "global_frame_idx": 23623, "task_index": 24}, {"db_idx": 23624, "episode_idx": 123, "frame_idx": 165, "global_frame_idx": 23624, "task_index": 24}, {"db_idx": 23625, "episode_idx": 123, "frame_idx": 166, "global_frame_idx": 23625, "task_index": 24}, {"db_idx": 23626, "episode_idx": 123, "frame_idx": 167, "global_frame_idx": 23626, "task_index": 24}, {"db_idx": 23627, "episode_idx": 123, "frame_idx": 168, "global_frame_idx": 23627, "task_index": 24}, {"db_idx": 23628, "episode_idx": 123, "frame_idx": 169, "global_frame_idx": 23628, "task_index": 24}, {"db_idx": 23629, "episode_idx": 123, "frame_idx": 170, "global_frame_idx": 23629, "task_index": 24}, {"db_idx": 23630, "episode_idx": 123, "frame_idx": 171, "global_frame_idx": 23630, "task_index": 24}, {"db_idx": 23631, "episode_idx": 123, "frame_idx": 172, "global_frame_idx": 23631, "task_index": 24}, {"db_idx": 23632, "episode_idx": 123, "frame_idx": 173, "global_frame_idx": 23632, "task_index": 24}, {"db_idx": 23633, "episode_idx": 123, "frame_idx": 174, "global_frame_idx": 23633, "task_index": 24}, {"db_idx": 23634, "episode_idx": 123, "frame_idx": 175, "global_frame_idx": 23634, "task_index": 24}, {"db_idx": 23635, "episode_idx": 123, "frame_idx": 176, "global_frame_idx": 23635, "task_index": 24}, {"db_idx": 23636, "episode_idx": 123, "frame_idx": 177, "global_frame_idx": 23636, "task_index": 24}, {"db_idx": 23637, "episode_idx": 123, "frame_idx": 178, "global_frame_idx": 23637, "task_index": 24}, {"db_idx": 23638, "episode_idx": 123, "frame_idx": 179, "global_frame_idx": 23638, "task_index": 24}, {"db_idx": 23639, "episode_idx": 123, "frame_idx": 180, "global_frame_idx": 23639, "task_index": 24}, {"db_idx": 23640, "episode_idx": 123, "frame_idx": 181, 
"global_frame_idx": 23640, "task_index": 24}, {"db_idx": 23641, "episode_idx": 123, "frame_idx": 182, "global_frame_idx": 23641, "task_index": 24}, {"db_idx": 23642, "episode_idx": 123, "frame_idx": 183, "global_frame_idx": 23642, "task_index": 24}, {"db_idx": 23643, "episode_idx": 123, "frame_idx": 184, "global_frame_idx": 23643, "task_index": 24}, {"db_idx": 23644, "episode_idx": 123, "frame_idx": 185, "global_frame_idx": 23644, "task_index": 24}, {"db_idx": 23645, "episode_idx": 123, "frame_idx": 186, "global_frame_idx": 23645, "task_index": 24}, {"db_idx": 23646, "episode_idx": 123, "frame_idx": 187, "global_frame_idx": 23646, "task_index": 24}, {"db_idx": 23647, "episode_idx": 123, "frame_idx": 188, "global_frame_idx": 23647, "task_index": 24}, {"db_idx": 23648, "episode_idx": 123, "frame_idx": 189, "global_frame_idx": 23648, "task_index": 24}, {"db_idx": 23649, "episode_idx": 123, "frame_idx": 190, "global_frame_idx": 23649, "task_index": 24}, {"db_idx": 23650, "episode_idx": 123, "frame_idx": 191, "global_frame_idx": 23650, "task_index": 24}, {"db_idx": 23651, "episode_idx": 123, "frame_idx": 192, "global_frame_idx": 23651, "task_index": 24}, {"db_idx": 23652, "episode_idx": 123, "frame_idx": 193, "global_frame_idx": 23652, "task_index": 24}, {"db_idx": 23653, "episode_idx": 123, "frame_idx": 194, "global_frame_idx": 23653, "task_index": 24}, {"db_idx": 23654, "episode_idx": 123, "frame_idx": 195, "global_frame_idx": 23654, "task_index": 24}, {"db_idx": 23655, "episode_idx": 123, "frame_idx": 196, "global_frame_idx": 23655, "task_index": 24}, {"db_idx": 23656, "episode_idx": 123, "frame_idx": 197, "global_frame_idx": 23656, "task_index": 24}, {"db_idx": 23657, "episode_idx": 123, "frame_idx": 198, "global_frame_idx": 23657, "task_index": 24}, {"db_idx": 23658, "episode_idx": 123, "frame_idx": 199, "global_frame_idx": 23658, "task_index": 24}, {"db_idx": 23659, "episode_idx": 123, "frame_idx": 200, "global_frame_idx": 23659, "task_index": 24}, {"db_idx": 
23660, "episode_idx": 123, "frame_idx": 201, "global_frame_idx": 23660, "task_index": 24}, {"db_idx": 23661, "episode_idx": 123, "frame_idx": 202, "global_frame_idx": 23661, "task_index": 24}, {"db_idx": 23662, "episode_idx": 123, "frame_idx": 203, "global_frame_idx": 23662, "task_index": 24}, {"db_idx": 23663, "episode_idx": 123, "frame_idx": 204, "global_frame_idx": 23663, "task_index": 24}, {"db_idx": 23664, "episode_idx": 123, "frame_idx": 205, "global_frame_idx": 23664, "task_index": 24}, {"db_idx": 23665, "episode_idx": 123, "frame_idx": 206, "global_frame_idx": 23665, "task_index": 24}, {"db_idx": 23666, "episode_idx": 123, "frame_idx": 207, "global_frame_idx": 23666, "task_index": 24}, {"db_idx": 23667, "episode_idx": 123, "frame_idx": 208, "global_frame_idx": 23667, "task_index": 24}, {"db_idx": 23668, "episode_idx": 123, "frame_idx": 209, "global_frame_idx": 23668, "task_index": 24}, {"db_idx": 23669, "episode_idx": 123, "frame_idx": 210, "global_frame_idx": 23669, "task_index": 24}, {"db_idx": 23670, "episode_idx": 123, "frame_idx": 211, "global_frame_idx": 23670, "task_index": 24}, {"db_idx": 23671, "episode_idx": 123, "frame_idx": 212, "global_frame_idx": 23671, "task_index": 24}, {"db_idx": 23672, "episode_idx": 123, "frame_idx": 213, "global_frame_idx": 23672, "task_index": 24}, {"db_idx": 23673, "episode_idx": 123, "frame_idx": 214, "global_frame_idx": 23673, "task_index": 24}, {"db_idx": 23674, "episode_idx": 124, "frame_idx": 0, "global_frame_idx": 23674, "task_index": 24}, {"db_idx": 23675, "episode_idx": 124, "frame_idx": 1, "global_frame_idx": 23675, "task_index": 24}, {"db_idx": 23676, "episode_idx": 124, "frame_idx": 2, "global_frame_idx": 23676, "task_index": 24}, {"db_idx": 23677, "episode_idx": 124, "frame_idx": 3, "global_frame_idx": 23677, "task_index": 24}, {"db_idx": 23678, "episode_idx": 124, "frame_idx": 4, "global_frame_idx": 23678, "task_index": 24}, {"db_idx": 23679, "episode_idx": 124, "frame_idx": 5, "global_frame_idx": 23679, 
"task_index": 24}, {"db_idx": 23680, "episode_idx": 124, "frame_idx": 6, "global_frame_idx": 23680, "task_index": 24}, {"db_idx": 23681, "episode_idx": 124, "frame_idx": 7, "global_frame_idx": 23681, "task_index": 24}, {"db_idx": 23682, "episode_idx": 124, "frame_idx": 8, "global_frame_idx": 23682, "task_index": 24}, {"db_idx": 23683, "episode_idx": 124, "frame_idx": 9, "global_frame_idx": 23683, "task_index": 24}, {"db_idx": 23684, "episode_idx": 124, "frame_idx": 10, "global_frame_idx": 23684, "task_index": 24}, {"db_idx": 23685, "episode_idx": 124, "frame_idx": 11, "global_frame_idx": 23685, "task_index": 24}, {"db_idx": 23686, "episode_idx": 124, "frame_idx": 12, "global_frame_idx": 23686, "task_index": 24}, {"db_idx": 23687, "episode_idx": 124, "frame_idx": 13, "global_frame_idx": 23687, "task_index": 24}, {"db_idx": 23688, "episode_idx": 124, "frame_idx": 14, "global_frame_idx": 23688, "task_index": 24}, {"db_idx": 23689, "episode_idx": 124, "frame_idx": 15, "global_frame_idx": 23689, "task_index": 24}, {"db_idx": 23690, "episode_idx": 124, "frame_idx": 16, "global_frame_idx": 23690, "task_index": 24}, {"db_idx": 23691, "episode_idx": 124, "frame_idx": 17, "global_frame_idx": 23691, "task_index": 24}, {"db_idx": 23692, "episode_idx": 124, "frame_idx": 18, "global_frame_idx": 23692, "task_index": 24}, {"db_idx": 23693, "episode_idx": 124, "frame_idx": 19, "global_frame_idx": 23693, "task_index": 24}, {"db_idx": 23694, "episode_idx": 124, "frame_idx": 20, "global_frame_idx": 23694, "task_index": 24}, {"db_idx": 23695, "episode_idx": 124, "frame_idx": 21, "global_frame_idx": 23695, "task_index": 24}, {"db_idx": 23696, "episode_idx": 124, "frame_idx": 22, "global_frame_idx": 23696, "task_index": 24}, {"db_idx": 23697, "episode_idx": 124, "frame_idx": 23, "global_frame_idx": 23697, "task_index": 24}, {"db_idx": 23698, "episode_idx": 124, "frame_idx": 24, "global_frame_idx": 23698, "task_index": 24}, {"db_idx": 23699, "episode_idx": 124, "frame_idx": 25, 
"global_frame_idx": 23699, "task_index": 24}, {"db_idx": 23700, "episode_idx": 124, "frame_idx": 26, "global_frame_idx": 23700, "task_index": 24}, {"db_idx": 23701, "episode_idx": 124, "frame_idx": 27, "global_frame_idx": 23701, "task_index": 24}, {"db_idx": 23702, "episode_idx": 124, "frame_idx": 28, "global_frame_idx": 23702, "task_index": 24}, {"db_idx": 23703, "episode_idx": 124, "frame_idx": 29, "global_frame_idx": 23703, "task_index": 24}, {"db_idx": 23704, "episode_idx": 124, "frame_idx": 30, "global_frame_idx": 23704, "task_index": 24}, {"db_idx": 23705, "episode_idx": 124, "frame_idx": 31, "global_frame_idx": 23705, "task_index": 24}, {"db_idx": 23706, "episode_idx": 124, "frame_idx": 32, "global_frame_idx": 23706, "task_index": 24}, {"db_idx": 23707, "episode_idx": 124, "frame_idx": 33, "global_frame_idx": 23707, "task_index": 24}, {"db_idx": 23708, "episode_idx": 124, "frame_idx": 34, "global_frame_idx": 23708, "task_index": 24}, {"db_idx": 23709, "episode_idx": 124, "frame_idx": 35, "global_frame_idx": 23709, "task_index": 24}, {"db_idx": 23710, "episode_idx": 124, "frame_idx": 36, "global_frame_idx": 23710, "task_index": 24}, {"db_idx": 23711, "episode_idx": 124, "frame_idx": 37, "global_frame_idx": 23711, "task_index": 24}, {"db_idx": 23712, "episode_idx": 124, "frame_idx": 38, "global_frame_idx": 23712, "task_index": 24}, {"db_idx": 23713, "episode_idx": 124, "frame_idx": 39, "global_frame_idx": 23713, "task_index": 24}, {"db_idx": 23714, "episode_idx": 124, "frame_idx": 40, "global_frame_idx": 23714, "task_index": 24}, {"db_idx": 23715, "episode_idx": 124, "frame_idx": 41, "global_frame_idx": 23715, "task_index": 24}, {"db_idx": 23716, "episode_idx": 124, "frame_idx": 42, "global_frame_idx": 23716, "task_index": 24}, {"db_idx": 23717, "episode_idx": 124, "frame_idx": 43, "global_frame_idx": 23717, "task_index": 24}, {"db_idx": 23718, "episode_idx": 124, "frame_idx": 44, "global_frame_idx": 23718, "task_index": 24}, {"db_idx": 23719, "episode_idx": 
124, "frame_idx": 45, "global_frame_idx": 23719, "task_index": 24}, {"db_idx": 23720, "episode_idx": 124, "frame_idx": 46, "global_frame_idx": 23720, "task_index": 24}, {"db_idx": 23721, "episode_idx": 124, "frame_idx": 47, "global_frame_idx": 23721, "task_index": 24}, {"db_idx": 23722, "episode_idx": 124, "frame_idx": 48, "global_frame_idx": 23722, "task_index": 24}, {"db_idx": 23723, "episode_idx": 124, "frame_idx": 49, "global_frame_idx": 23723, "task_index": 24}, {"db_idx": 23724, "episode_idx": 124, "frame_idx": 50, "global_frame_idx": 23724, "task_index": 24}, {"db_idx": 23725, "episode_idx": 124, "frame_idx": 51, "global_frame_idx": 23725, "task_index": 24}, {"db_idx": 23726, "episode_idx": 124, "frame_idx": 52, "global_frame_idx": 23726, "task_index": 24}, {"db_idx": 23727, "episode_idx": 124, "frame_idx": 53, "global_frame_idx": 23727, "task_index": 24}, {"db_idx": 23728, "episode_idx": 124, "frame_idx": 54, "global_frame_idx": 23728, "task_index": 24}, {"db_idx": 23729, "episode_idx": 124, "frame_idx": 55, "global_frame_idx": 23729, "task_index": 24}, {"db_idx": 23730, "episode_idx": 124, "frame_idx": 56, "global_frame_idx": 23730, "task_index": 24}, {"db_idx": 23731, "episode_idx": 124, "frame_idx": 57, "global_frame_idx": 23731, "task_index": 24}, {"db_idx": 23732, "episode_idx": 124, "frame_idx": 58, "global_frame_idx": 23732, "task_index": 24}, {"db_idx": 23733, "episode_idx": 124, "frame_idx": 59, "global_frame_idx": 23733, "task_index": 24}, {"db_idx": 23734, "episode_idx": 124, "frame_idx": 60, "global_frame_idx": 23734, "task_index": 24}, {"db_idx": 23735, "episode_idx": 124, "frame_idx": 61, "global_frame_idx": 23735, "task_index": 24}, {"db_idx": 23736, "episode_idx": 124, "frame_idx": 62, "global_frame_idx": 23736, "task_index": 24}, {"db_idx": 23737, "episode_idx": 124, "frame_idx": 63, "global_frame_idx": 23737, "task_index": 24}, {"db_idx": 23738, "episode_idx": 124, "frame_idx": 64, "global_frame_idx": 23738, "task_index": 24}, {"db_idx": 
23739, "episode_idx": 124, "frame_idx": 65, "global_frame_idx": 23739, "task_index": 24}, {"db_idx": 23740, "episode_idx": 124, "frame_idx": 66, "global_frame_idx": 23740, "task_index": 24}, {"db_idx": 23741, "episode_idx": 124, "frame_idx": 67, "global_frame_idx": 23741, "task_index": 24}, {"db_idx": 23742, "episode_idx": 124, "frame_idx": 68, "global_frame_idx": 23742, "task_index": 24}, {"db_idx": 23743, "episode_idx": 124, "frame_idx": 69, "global_frame_idx": 23743, "task_index": 24}, {"db_idx": 23744, "episode_idx": 124, "frame_idx": 70, "global_frame_idx": 23744, "task_index": 24}, {"db_idx": 23745, "episode_idx": 124, "frame_idx": 71, "global_frame_idx": 23745, "task_index": 24}, {"db_idx": 23746, "episode_idx": 124, "frame_idx": 72, "global_frame_idx": 23746, "task_index": 24}, {"db_idx": 23747, "episode_idx": 124, "frame_idx": 73, "global_frame_idx": 23747, "task_index": 24}, {"db_idx": 23748, "episode_idx": 124, "frame_idx": 74, "global_frame_idx": 23748, "task_index": 24}, {"db_idx": 23749, "episode_idx": 124, "frame_idx": 75, "global_frame_idx": 23749, "task_index": 24}, {"db_idx": 23750, "episode_idx": 124, "frame_idx": 76, "global_frame_idx": 23750, "task_index": 24}, {"db_idx": 23751, "episode_idx": 124, "frame_idx": 77, "global_frame_idx": 23751, "task_index": 24}, {"db_idx": 23752, "episode_idx": 124, "frame_idx": 78, "global_frame_idx": 23752, "task_index": 24}, {"db_idx": 23753, "episode_idx": 124, "frame_idx": 79, "global_frame_idx": 23753, "task_index": 24}, {"db_idx": 23754, "episode_idx": 124, "frame_idx": 80, "global_frame_idx": 23754, "task_index": 24}, {"db_idx": 23755, "episode_idx": 124, "frame_idx": 81, "global_frame_idx": 23755, "task_index": 24}, {"db_idx": 23756, "episode_idx": 124, "frame_idx": 82, "global_frame_idx": 23756, "task_index": 24}, {"db_idx": 23757, "episode_idx": 124, "frame_idx": 83, "global_frame_idx": 23757, "task_index": 24}, {"db_idx": 23758, "episode_idx": 124, "frame_idx": 84, "global_frame_idx": 23758, 
"task_index": 24}, {"db_idx": 23759, "episode_idx": 124, "frame_idx": 85, "global_frame_idx": 23759, "task_index": 24}, {"db_idx": 23760, "episode_idx": 124, "frame_idx": 86, "global_frame_idx": 23760, "task_index": 24}, {"db_idx": 23761, "episode_idx": 124, "frame_idx": 87, "global_frame_idx": 23761, "task_index": 24}, {"db_idx": 23762, "episode_idx": 124, "frame_idx": 88, "global_frame_idx": 23762, "task_index": 24}, {"db_idx": 23763, "episode_idx": 124, "frame_idx": 89, "global_frame_idx": 23763, "task_index": 24}, {"db_idx": 23764, "episode_idx": 124, "frame_idx": 90, "global_frame_idx": 23764, "task_index": 24}, {"db_idx": 23765, "episode_idx": 124, "frame_idx": 91, "global_frame_idx": 23765, "task_index": 24}, {"db_idx": 23766, "episode_idx": 124, "frame_idx": 92, "global_frame_idx": 23766, "task_index": 24}, {"db_idx": 23767, "episode_idx": 124, "frame_idx": 93, "global_frame_idx": 23767, "task_index": 24}, {"db_idx": 23768, "episode_idx": 124, "frame_idx": 94, "global_frame_idx": 23768, "task_index": 24}, {"db_idx": 23769, "episode_idx": 124, "frame_idx": 95, "global_frame_idx": 23769, "task_index": 24}, {"db_idx": 23770, "episode_idx": 124, "frame_idx": 96, "global_frame_idx": 23770, "task_index": 24}, {"db_idx": 23771, "episode_idx": 124, "frame_idx": 97, "global_frame_idx": 23771, "task_index": 24}, {"db_idx": 23772, "episode_idx": 124, "frame_idx": 98, "global_frame_idx": 23772, "task_index": 24}, {"db_idx": 23773, "episode_idx": 124, "frame_idx": 99, "global_frame_idx": 23773, "task_index": 24}, {"db_idx": 23774, "episode_idx": 124, "frame_idx": 100, "global_frame_idx": 23774, "task_index": 24}, {"db_idx": 23775, "episode_idx": 124, "frame_idx": 101, "global_frame_idx": 23775, "task_index": 24}, {"db_idx": 23776, "episode_idx": 124, "frame_idx": 102, "global_frame_idx": 23776, "task_index": 24}, {"db_idx": 23777, "episode_idx": 124, "frame_idx": 103, "global_frame_idx": 23777, "task_index": 24}, {"db_idx": 23778, "episode_idx": 124, "frame_idx": 104, 
"global_frame_idx": 23778, "task_index": 24}, {"db_idx": 23779, "episode_idx": 124, "frame_idx": 105, "global_frame_idx": 23779, "task_index": 24}, {"db_idx": 23780, "episode_idx": 124, "frame_idx": 106, "global_frame_idx": 23780, "task_index": 24}, {"db_idx": 23781, "episode_idx": 124, "frame_idx": 107, "global_frame_idx": 23781, "task_index": 24}, {"db_idx": 23782, "episode_idx": 124, "frame_idx": 108, "global_frame_idx": 23782, "task_index": 24}, {"db_idx": 23783, "episode_idx": 124, "frame_idx": 109, "global_frame_idx": 23783, "task_index": 24}, {"db_idx": 23784, "episode_idx": 124, "frame_idx": 110, "global_frame_idx": 23784, "task_index": 24}, {"db_idx": 23785, "episode_idx": 124, "frame_idx": 111, "global_frame_idx": 23785, "task_index": 24}, {"db_idx": 23786, "episode_idx": 124, "frame_idx": 112, "global_frame_idx": 23786, "task_index": 24}, {"db_idx": 23787, "episode_idx": 124, "frame_idx": 113, "global_frame_idx": 23787, "task_index": 24}, {"db_idx": 23788, "episode_idx": 124, "frame_idx": 114, "global_frame_idx": 23788, "task_index": 24}, {"db_idx": 23789, "episode_idx": 124, "frame_idx": 115, "global_frame_idx": 23789, "task_index": 24}, {"db_idx": 23790, "episode_idx": 124, "frame_idx": 116, "global_frame_idx": 23790, "task_index": 24}, {"db_idx": 23791, "episode_idx": 124, "frame_idx": 117, "global_frame_idx": 23791, "task_index": 24}, {"db_idx": 23792, "episode_idx": 124, "frame_idx": 118, "global_frame_idx": 23792, "task_index": 24}, {"db_idx": 23793, "episode_idx": 124, "frame_idx": 119, "global_frame_idx": 23793, "task_index": 24}, {"db_idx": 23794, "episode_idx": 124, "frame_idx": 120, "global_frame_idx": 23794, "task_index": 24}, {"db_idx": 23795, "episode_idx": 124, "frame_idx": 121, "global_frame_idx": 23795, "task_index": 24}, {"db_idx": 23796, "episode_idx": 124, "frame_idx": 122, "global_frame_idx": 23796, "task_index": 24}, {"db_idx": 23797, "episode_idx": 124, "frame_idx": 123, "global_frame_idx": 23797, "task_index": 24}, {"db_idx": 
23798, "episode_idx": 124, "frame_idx": 124, "global_frame_idx": 23798, "task_index": 24}, {"db_idx": 23799, "episode_idx": 124, "frame_idx": 125, "global_frame_idx": 23799, "task_index": 24}, {"db_idx": 23800, "episode_idx": 124, "frame_idx": 126, "global_frame_idx": 23800, "task_index": 24}, {"db_idx": 23801, "episode_idx": 124, "frame_idx": 127, "global_frame_idx": 23801, "task_index": 24}, {"db_idx": 23802, "episode_idx": 124, "frame_idx": 128, "global_frame_idx": 23802, "task_index": 24}, {"db_idx": 23803, "episode_idx": 124, "frame_idx": 129, "global_frame_idx": 23803, "task_index": 24}, {"db_idx": 23804, "episode_idx": 124, "frame_idx": 130, "global_frame_idx": 23804, "task_index": 24}, {"db_idx": 23805, "episode_idx": 124, "frame_idx": 131, "global_frame_idx": 23805, "task_index": 24}, {"db_idx": 23806, "episode_idx": 124, "frame_idx": 132, "global_frame_idx": 23806, "task_index": 24}, {"db_idx": 23807, "episode_idx": 124, "frame_idx": 133, "global_frame_idx": 23807, "task_index": 24}, {"db_idx": 23808, "episode_idx": 124, "frame_idx": 134, "global_frame_idx": 23808, "task_index": 24}, {"db_idx": 23809, "episode_idx": 124, "frame_idx": 135, "global_frame_idx": 23809, "task_index": 24}, {"db_idx": 23810, "episode_idx": 124, "frame_idx": 136, "global_frame_idx": 23810, "task_index": 24}, {"db_idx": 23811, "episode_idx": 124, "frame_idx": 137, "global_frame_idx": 23811, "task_index": 24}, {"db_idx": 23812, "episode_idx": 124, "frame_idx": 138, "global_frame_idx": 23812, "task_index": 24}, {"db_idx": 23813, "episode_idx": 124, "frame_idx": 139, "global_frame_idx": 23813, "task_index": 24}, {"db_idx": 23814, "episode_idx": 124, "frame_idx": 140, "global_frame_idx": 23814, "task_index": 24}, {"db_idx": 23815, "episode_idx": 124, "frame_idx": 141, "global_frame_idx": 23815, "task_index": 24}, {"db_idx": 23816, "episode_idx": 124, "frame_idx": 142, "global_frame_idx": 23816, "task_index": 24}, {"db_idx": 23817, "episode_idx": 124, "frame_idx": 143, 
"global_frame_idx": 23817, "task_index": 24}, {"db_idx": 23818, "episode_idx": 124, "frame_idx": 144, "global_frame_idx": 23818, "task_index": 24}, {"db_idx": 23819, "episode_idx": 124, "frame_idx": 145, "global_frame_idx": 23819, "task_index": 24}, {"db_idx": 23820, "episode_idx": 124, "frame_idx": 146, "global_frame_idx": 23820, "task_index": 24}, {"db_idx": 23821, "episode_idx": 125, "frame_idx": 0, "global_frame_idx": 23821, "task_index": 25}, {"db_idx": 23822, "episode_idx": 125, "frame_idx": 1, "global_frame_idx": 23822, "task_index": 25}, {"db_idx": 23823, "episode_idx": 125, "frame_idx": 2, "global_frame_idx": 23823, "task_index": 25}, {"db_idx": 23824, "episode_idx": 125, "frame_idx": 3, "global_frame_idx": 23824, "task_index": 25}, {"db_idx": 23825, "episode_idx": 125, "frame_idx": 4, "global_frame_idx": 23825, "task_index": 25}, {"db_idx": 23826, "episode_idx": 125, "frame_idx": 5, "global_frame_idx": 23826, "task_index": 25}, {"db_idx": 23827, "episode_idx": 125, "frame_idx": 6, "global_frame_idx": 23827, "task_index": 25}, {"db_idx": 23828, "episode_idx": 125, "frame_idx": 7, "global_frame_idx": 23828, "task_index": 25}, {"db_idx": 23829, "episode_idx": 125, "frame_idx": 8, "global_frame_idx": 23829, "task_index": 25}, {"db_idx": 23830, "episode_idx": 125, "frame_idx": 9, "global_frame_idx": 23830, "task_index": 25}, {"db_idx": 23831, "episode_idx": 125, "frame_idx": 10, "global_frame_idx": 23831, "task_index": 25}, {"db_idx": 23832, "episode_idx": 125, "frame_idx": 11, "global_frame_idx": 23832, "task_index": 25}, {"db_idx": 23833, "episode_idx": 125, "frame_idx": 12, "global_frame_idx": 23833, "task_index": 25}, {"db_idx": 23834, "episode_idx": 125, "frame_idx": 13, "global_frame_idx": 23834, "task_index": 25}, {"db_idx": 23835, "episode_idx": 125, "frame_idx": 14, "global_frame_idx": 23835, "task_index": 25}, {"db_idx": 23836, "episode_idx": 125, "frame_idx": 15, "global_frame_idx": 23836, "task_index": 25}, {"db_idx": 23837, "episode_idx": 125, 
"frame_idx": 16, "global_frame_idx": 23837, "task_index": 25}, {"db_idx": 23838, "episode_idx": 125, "frame_idx": 17, "global_frame_idx": 23838, "task_index": 25}, {"db_idx": 23839, "episode_idx": 125, "frame_idx": 18, "global_frame_idx": 23839, "task_index": 25}, {"db_idx": 23840, "episode_idx": 125, "frame_idx": 19, "global_frame_idx": 23840, "task_index": 25}, {"db_idx": 23841, "episode_idx": 125, "frame_idx": 20, "global_frame_idx": 23841, "task_index": 25}, {"db_idx": 23842, "episode_idx": 125, "frame_idx": 21, "global_frame_idx": 23842, "task_index": 25}, {"db_idx": 23843, "episode_idx": 125, "frame_idx": 22, "global_frame_idx": 23843, "task_index": 25}, {"db_idx": 23844, "episode_idx": 125, "frame_idx": 23, "global_frame_idx": 23844, "task_index": 25}, {"db_idx": 23845, "episode_idx": 125, "frame_idx": 24, "global_frame_idx": 23845, "task_index": 25}, {"db_idx": 23846, "episode_idx": 125, "frame_idx": 25, "global_frame_idx": 23846, "task_index": 25}, {"db_idx": 23847, "episode_idx": 125, "frame_idx": 26, "global_frame_idx": 23847, "task_index": 25}, {"db_idx": 23848, "episode_idx": 125, "frame_idx": 27, "global_frame_idx": 23848, "task_index": 25}, {"db_idx": 23849, "episode_idx": 125, "frame_idx": 28, "global_frame_idx": 23849, "task_index": 25}, {"db_idx": 23850, "episode_idx": 125, "frame_idx": 29, "global_frame_idx": 23850, "task_index": 25}, {"db_idx": 23851, "episode_idx": 125, "frame_idx": 30, "global_frame_idx": 23851, "task_index": 25}, {"db_idx": 23852, "episode_idx": 125, "frame_idx": 31, "global_frame_idx": 23852, "task_index": 25}, {"db_idx": 23853, "episode_idx": 125, "frame_idx": 32, "global_frame_idx": 23853, "task_index": 25}, {"db_idx": 23854, "episode_idx": 125, "frame_idx": 33, "global_frame_idx": 23854, "task_index": 25}, {"db_idx": 23855, "episode_idx": 125, "frame_idx": 34, "global_frame_idx": 23855, "task_index": 25}, {"db_idx": 23856, "episode_idx": 125, "frame_idx": 35, "global_frame_idx": 23856, "task_index": 25}, {"db_idx": 23857, 
"episode_idx": 125, "frame_idx": 36, "global_frame_idx": 23857, "task_index": 25}, {"db_idx": 23858, "episode_idx": 125, "frame_idx": 37, "global_frame_idx": 23858, "task_index": 25}, {"db_idx": 23859, "episode_idx": 125, "frame_idx": 38, "global_frame_idx": 23859, "task_index": 25}, {"db_idx": 23860, "episode_idx": 125, "frame_idx": 39, "global_frame_idx": 23860, "task_index": 25}, {"db_idx": 23861, "episode_idx": 125, "frame_idx": 40, "global_frame_idx": 23861, "task_index": 25}, {"db_idx": 23862, "episode_idx": 125, "frame_idx": 41, "global_frame_idx": 23862, "task_index": 25}, {"db_idx": 23863, "episode_idx": 125, "frame_idx": 42, "global_frame_idx": 23863, "task_index": 25}, {"db_idx": 23864, "episode_idx": 125, "frame_idx": 43, "global_frame_idx": 23864, "task_index": 25}, {"db_idx": 23865, "episode_idx": 125, "frame_idx": 44, "global_frame_idx": 23865, "task_index": 25}, {"db_idx": 23866, "episode_idx": 125, "frame_idx": 45, "global_frame_idx": 23866, "task_index": 25}, {"db_idx": 23867, "episode_idx": 125, "frame_idx": 46, "global_frame_idx": 23867, "task_index": 25}, {"db_idx": 23868, "episode_idx": 125, "frame_idx": 47, "global_frame_idx": 23868, "task_index": 25}, {"db_idx": 23869, "episode_idx": 125, "frame_idx": 48, "global_frame_idx": 23869, "task_index": 25}, {"db_idx": 23870, "episode_idx": 125, "frame_idx": 49, "global_frame_idx": 23870, "task_index": 25}, {"db_idx": 23871, "episode_idx": 125, "frame_idx": 50, "global_frame_idx": 23871, "task_index": 25}, {"db_idx": 23872, "episode_idx": 125, "frame_idx": 51, "global_frame_idx": 23872, "task_index": 25}, {"db_idx": 23873, "episode_idx": 125, "frame_idx": 52, "global_frame_idx": 23873, "task_index": 25}, {"db_idx": 23874, "episode_idx": 125, "frame_idx": 53, "global_frame_idx": 23874, "task_index": 25}, {"db_idx": 23875, "episode_idx": 125, "frame_idx": 54, "global_frame_idx": 23875, "task_index": 25}, {"db_idx": 23876, "episode_idx": 125, "frame_idx": 55, "global_frame_idx": 23876, "task_index": 
25}, {"db_idx": 23877, "episode_idx": 125, "frame_idx": 56, "global_frame_idx": 23877, "task_index": 25}, {"db_idx": 23878, "episode_idx": 125, "frame_idx": 57, "global_frame_idx": 23878, "task_index": 25}, {"db_idx": 23879, "episode_idx": 125, "frame_idx": 58, "global_frame_idx": 23879, "task_index": 25}, {"db_idx": 23880, "episode_idx": 125, "frame_idx": 59, "global_frame_idx": 23880, "task_index": 25}, {"db_idx": 23881, "episode_idx": 125, "frame_idx": 60, "global_frame_idx": 23881, "task_index": 25}, {"db_idx": 23882, "episode_idx": 125, "frame_idx": 61, "global_frame_idx": 23882, "task_index": 25}, {"db_idx": 23883, "episode_idx": 125, "frame_idx": 62, "global_frame_idx": 23883, "task_index": 25}, {"db_idx": 23884, "episode_idx": 125, "frame_idx": 63, "global_frame_idx": 23884, "task_index": 25}, {"db_idx": 23885, "episode_idx": 125, "frame_idx": 64, "global_frame_idx": 23885, "task_index": 25}, {"db_idx": 23886, "episode_idx": 125, "frame_idx": 65, "global_frame_idx": 23886, "task_index": 25}, {"db_idx": 23887, "episode_idx": 125, "frame_idx": 66, "global_frame_idx": 23887, "task_index": 25}, {"db_idx": 23888, "episode_idx": 125, "frame_idx": 67, "global_frame_idx": 23888, "task_index": 25}, {"db_idx": 23889, "episode_idx": 125, "frame_idx": 68, "global_frame_idx": 23889, "task_index": 25}, {"db_idx": 23890, "episode_idx": 125, "frame_idx": 69, "global_frame_idx": 23890, "task_index": 25}, {"db_idx": 23891, "episode_idx": 125, "frame_idx": 70, "global_frame_idx": 23891, "task_index": 25}, {"db_idx": 23892, "episode_idx": 125, "frame_idx": 71, "global_frame_idx": 23892, "task_index": 25}, {"db_idx": 23893, "episode_idx": 125, "frame_idx": 72, "global_frame_idx": 23893, "task_index": 25}, {"db_idx": 23894, "episode_idx": 125, "frame_idx": 73, "global_frame_idx": 23894, "task_index": 25}, {"db_idx": 23895, "episode_idx": 125, "frame_idx": 74, "global_frame_idx": 23895, "task_index": 25}, {"db_idx": 23896, "episode_idx": 125, "frame_idx": 75, "global_frame_idx": 
23896, "task_index": 25}, {"db_idx": 23897, "episode_idx": 125, "frame_idx": 76, "global_frame_idx": 23897, "task_index": 25}, {"db_idx": 23898, "episode_idx": 125, "frame_idx": 77, "global_frame_idx": 23898, "task_index": 25}, {"db_idx": 23899, "episode_idx": 125, "frame_idx": 78, "global_frame_idx": 23899, "task_index": 25}, {"db_idx": 23900, "episode_idx": 125, "frame_idx": 79, "global_frame_idx": 23900, "task_index": 25}, {"db_idx": 23901, "episode_idx": 125, "frame_idx": 80, "global_frame_idx": 23901, "task_index": 25}, {"db_idx": 23902, "episode_idx": 125, "frame_idx": 81, "global_frame_idx": 23902, "task_index": 25}, {"db_idx": 23903, "episode_idx": 125, "frame_idx": 82, "global_frame_idx": 23903, "task_index": 25}, {"db_idx": 23904, "episode_idx": 125, "frame_idx": 83, "global_frame_idx": 23904, "task_index": 25}, {"db_idx": 23905, "episode_idx": 125, "frame_idx": 84, "global_frame_idx": 23905, "task_index": 25}, {"db_idx": 23906, "episode_idx": 125, "frame_idx": 85, "global_frame_idx": 23906, "task_index": 25}, {"db_idx": 23907, "episode_idx": 125, "frame_idx": 86, "global_frame_idx": 23907, "task_index": 25}, {"db_idx": 23908, "episode_idx": 125, "frame_idx": 87, "global_frame_idx": 23908, "task_index": 25}, {"db_idx": 23909, "episode_idx": 125, "frame_idx": 88, "global_frame_idx": 23909, "task_index": 25}, {"db_idx": 23910, "episode_idx": 125, "frame_idx": 89, "global_frame_idx": 23910, "task_index": 25}, {"db_idx": 23911, "episode_idx": 125, "frame_idx": 90, "global_frame_idx": 23911, "task_index": 25}, {"db_idx": 23912, "episode_idx": 126, "frame_idx": 0, "global_frame_idx": 23912, "task_index": 25}, {"db_idx": 23913, "episode_idx": 126, "frame_idx": 1, "global_frame_idx": 23913, "task_index": 25}, {"db_idx": 23914, "episode_idx": 126, "frame_idx": 2, "global_frame_idx": 23914, "task_index": 25}, {"db_idx": 23915, "episode_idx": 126, "frame_idx": 3, "global_frame_idx": 23915, "task_index": 25}, {"db_idx": 23916, "episode_idx": 126, "frame_idx": 4, 
"global_frame_idx": 23916, "task_index": 25}, {"db_idx": 23917, "episode_idx": 126, "frame_idx": 5, "global_frame_idx": 23917, "task_index": 25}, {"db_idx": 23918, "episode_idx": 126, "frame_idx": 6, "global_frame_idx": 23918, "task_index": 25}, {"db_idx": 23919, "episode_idx": 126, "frame_idx": 7, "global_frame_idx": 23919, "task_index": 25}, {"db_idx": 23920, "episode_idx": 126, "frame_idx": 8, "global_frame_idx": 23920, "task_index": 25}, {"db_idx": 23921, "episode_idx": 126, "frame_idx": 9, "global_frame_idx": 23921, "task_index": 25}, {"db_idx": 23922, "episode_idx": 126, "frame_idx": 10, "global_frame_idx": 23922, "task_index": 25}, {"db_idx": 23923, "episode_idx": 126, "frame_idx": 11, "global_frame_idx": 23923, "task_index": 25}, {"db_idx": 23924, "episode_idx": 126, "frame_idx": 12, "global_frame_idx": 23924, "task_index": 25}, {"db_idx": 23925, "episode_idx": 126, "frame_idx": 13, "global_frame_idx": 23925, "task_index": 25}, {"db_idx": 23926, "episode_idx": 126, "frame_idx": 14, "global_frame_idx": 23926, "task_index": 25}, {"db_idx": 23927, "episode_idx": 126, "frame_idx": 15, "global_frame_idx": 23927, "task_index": 25}, {"db_idx": 23928, "episode_idx": 126, "frame_idx": 16, "global_frame_idx": 23928, "task_index": 25}, {"db_idx": 23929, "episode_idx": 126, "frame_idx": 17, "global_frame_idx": 23929, "task_index": 25}, {"db_idx": 23930, "episode_idx": 126, "frame_idx": 18, "global_frame_idx": 23930, "task_index": 25}, {"db_idx": 23931, "episode_idx": 126, "frame_idx": 19, "global_frame_idx": 23931, "task_index": 25}, {"db_idx": 23932, "episode_idx": 126, "frame_idx": 20, "global_frame_idx": 23932, "task_index": 25}, {"db_idx": 23933, "episode_idx": 126, "frame_idx": 21, "global_frame_idx": 23933, "task_index": 25}, {"db_idx": 23934, "episode_idx": 126, "frame_idx": 22, "global_frame_idx": 23934, "task_index": 25}, {"db_idx": 23935, "episode_idx": 126, "frame_idx": 23, "global_frame_idx": 23935, "task_index": 25}, {"db_idx": 23936, "episode_idx": 126, 
"frame_idx": 24, "global_frame_idx": 23936, "task_index": 25}, {"db_idx": 23937, "episode_idx": 126, "frame_idx": 25, "global_frame_idx": 23937, "task_index": 25}, {"db_idx": 23938, "episode_idx": 126, "frame_idx": 26, "global_frame_idx": 23938, "task_index": 25}, {"db_idx": 23939, "episode_idx": 126, "frame_idx": 27, "global_frame_idx": 23939, "task_index": 25}, {"db_idx": 23940, "episode_idx": 126, "frame_idx": 28, "global_frame_idx": 23940, "task_index": 25}, {"db_idx": 23941, "episode_idx": 126, "frame_idx": 29, "global_frame_idx": 23941, "task_index": 25}, {"db_idx": 23942, "episode_idx": 126, "frame_idx": 30, "global_frame_idx": 23942, "task_index": 25}, {"db_idx": 23943, "episode_idx": 126, "frame_idx": 31, "global_frame_idx": 23943, "task_index": 25}, {"db_idx": 23944, "episode_idx": 126, "frame_idx": 32, "global_frame_idx": 23944, "task_index": 25}, {"db_idx": 23945, "episode_idx": 126, "frame_idx": 33, "global_frame_idx": 23945, "task_index": 25}, {"db_idx": 23946, "episode_idx": 126, "frame_idx": 34, "global_frame_idx": 23946, "task_index": 25}, {"db_idx": 23947, "episode_idx": 126, "frame_idx": 35, "global_frame_idx": 23947, "task_index": 25}, {"db_idx": 23948, "episode_idx": 126, "frame_idx": 36, "global_frame_idx": 23948, "task_index": 25}, {"db_idx": 23949, "episode_idx": 126, "frame_idx": 37, "global_frame_idx": 23949, "task_index": 25}, {"db_idx": 23950, "episode_idx": 126, "frame_idx": 38, "global_frame_idx": 23950, "task_index": 25}, {"db_idx": 23951, "episode_idx": 126, "frame_idx": 39, "global_frame_idx": 23951, "task_index": 25}, {"db_idx": 23952, "episode_idx": 126, "frame_idx": 40, "global_frame_idx": 23952, "task_index": 25}, {"db_idx": 23953, "episode_idx": 126, "frame_idx": 41, "global_frame_idx": 23953, "task_index": 25}, {"db_idx": 23954, "episode_idx": 126, "frame_idx": 42, "global_frame_idx": 23954, "task_index": 25}, {"db_idx": 23955, "episode_idx": 126, "frame_idx": 43, "global_frame_idx": 23955, "task_index": 25}, {"db_idx": 23956, 
"episode_idx": 126, "frame_idx": 44, "global_frame_idx": 23956, "task_index": 25}, {"db_idx": 23957, "episode_idx": 126, "frame_idx": 45, "global_frame_idx": 23957, "task_index": 25}, {"db_idx": 23958, "episode_idx": 126, "frame_idx": 46, "global_frame_idx": 23958, "task_index": 25}, {"db_idx": 23959, "episode_idx": 126, "frame_idx": 47, "global_frame_idx": 23959, "task_index": 25}, {"db_idx": 23960, "episode_idx": 126, "frame_idx": 48, "global_frame_idx": 23960, "task_index": 25}, {"db_idx": 23961, "episode_idx": 126, "frame_idx": 49, "global_frame_idx": 23961, "task_index": 25}, {"db_idx": 23962, "episode_idx": 126, "frame_idx": 50, "global_frame_idx": 23962, "task_index": 25}, {"db_idx": 23963, "episode_idx": 126, "frame_idx": 51, "global_frame_idx": 23963, "task_index": 25}, {"db_idx": 23964, "episode_idx": 126, "frame_idx": 52, "global_frame_idx": 23964, "task_index": 25}, {"db_idx": 23965, "episode_idx": 126, "frame_idx": 53, "global_frame_idx": 23965, "task_index": 25}, {"db_idx": 23966, "episode_idx": 126, "frame_idx": 54, "global_frame_idx": 23966, "task_index": 25}, {"db_idx": 23967, "episode_idx": 126, "frame_idx": 55, "global_frame_idx": 23967, "task_index": 25}, {"db_idx": 23968, "episode_idx": 126, "frame_idx": 56, "global_frame_idx": 23968, "task_index": 25}, {"db_idx": 23969, "episode_idx": 126, "frame_idx": 57, "global_frame_idx": 23969, "task_index": 25}, {"db_idx": 23970, "episode_idx": 126, "frame_idx": 58, "global_frame_idx": 23970, "task_index": 25}, {"db_idx": 23971, "episode_idx": 126, "frame_idx": 59, "global_frame_idx": 23971, "task_index": 25}, {"db_idx": 23972, "episode_idx": 126, "frame_idx": 60, "global_frame_idx": 23972, "task_index": 25}, {"db_idx": 23973, "episode_idx": 126, "frame_idx": 61, "global_frame_idx": 23973, "task_index": 25}, {"db_idx": 23974, "episode_idx": 126, "frame_idx": 62, "global_frame_idx": 23974, "task_index": 25}, {"db_idx": 23975, "episode_idx": 126, "frame_idx": 63, "global_frame_idx": 23975, "task_index": 
25}, {"db_idx": 23976, "episode_idx": 126, "frame_idx": 64, "global_frame_idx": 23976, "task_index": 25}, {"db_idx": 23977, "episode_idx": 126, "frame_idx": 65, "global_frame_idx": 23977, "task_index": 25}, {"db_idx": 23978, "episode_idx": 126, "frame_idx": 66, "global_frame_idx": 23978, "task_index": 25}, {"db_idx": 23979, "episode_idx": 126, "frame_idx": 67, "global_frame_idx": 23979, "task_index": 25}, {"db_idx": 23980, "episode_idx": 126, "frame_idx": 68, "global_frame_idx": 23980, "task_index": 25}, {"db_idx": 23981, "episode_idx": 126, "frame_idx": 69, "global_frame_idx": 23981, "task_index": 25}, {"db_idx": 23982, "episode_idx": 126, "frame_idx": 70, "global_frame_idx": 23982, "task_index": 25}, {"db_idx": 23983, "episode_idx": 126, "frame_idx": 71, "global_frame_idx": 23983, "task_index": 25}, {"db_idx": 23984, "episode_idx": 126, "frame_idx": 72, "global_frame_idx": 23984, "task_index": 25}, {"db_idx": 23985, "episode_idx": 126, "frame_idx": 73, "global_frame_idx": 23985, "task_index": 25}, {"db_idx": 23986, "episode_idx": 126, "frame_idx": 74, "global_frame_idx": 23986, "task_index": 25}, {"db_idx": 23987, "episode_idx": 126, "frame_idx": 75, "global_frame_idx": 23987, "task_index": 25}, {"db_idx": 23988, "episode_idx": 126, "frame_idx": 76, "global_frame_idx": 23988, "task_index": 25}, {"db_idx": 23989, "episode_idx": 126, "frame_idx": 77, "global_frame_idx": 23989, "task_index": 25}, {"db_idx": 23990, "episode_idx": 126, "frame_idx": 78, "global_frame_idx": 23990, "task_index": 25}, {"db_idx": 23991, "episode_idx": 126, "frame_idx": 79, "global_frame_idx": 23991, "task_index": 25}, {"db_idx": 23992, "episode_idx": 126, "frame_idx": 80, "global_frame_idx": 23992, "task_index": 25}, {"db_idx": 23993, "episode_idx": 126, "frame_idx": 81, "global_frame_idx": 23993, "task_index": 25}, {"db_idx": 23994, "episode_idx": 126, "frame_idx": 82, "global_frame_idx": 23994, "task_index": 25}, {"db_idx": 23995, "episode_idx": 126, "frame_idx": 83, "global_frame_idx": 
23995, "task_index": 25}, {"db_idx": 23996, "episode_idx": 126, "frame_idx": 84, "global_frame_idx": 23996, "task_index": 25}, {"db_idx": 23997, "episode_idx": 126, "frame_idx": 85, "global_frame_idx": 23997, "task_index": 25}, {"db_idx": 23998, "episode_idx": 126, "frame_idx": 86, "global_frame_idx": 23998, "task_index": 25}, {"db_idx": 23999, "episode_idx": 126, "frame_idx": 87, "global_frame_idx": 23999, "task_index": 25}, {"db_idx": 24000, "episode_idx": 126, "frame_idx": 88, "global_frame_idx": 24000, "task_index": 25}, {"db_idx": 24001, "episode_idx": 126, "frame_idx": 89, "global_frame_idx": 24001, "task_index": 25}, {"db_idx": 24002, "episode_idx": 127, "frame_idx": 0, "global_frame_idx": 24002, "task_index": 25}, {"db_idx": 24003, "episode_idx": 127, "frame_idx": 1, "global_frame_idx": 24003, "task_index": 25}, {"db_idx": 24004, "episode_idx": 127, "frame_idx": 2, "global_frame_idx": 24004, "task_index": 25}, {"db_idx": 24005, "episode_idx": 127, "frame_idx": 3, "global_frame_idx": 24005, "task_index": 25}, {"db_idx": 24006, "episode_idx": 127, "frame_idx": 4, "global_frame_idx": 24006, "task_index": 25}, {"db_idx": 24007, "episode_idx": 127, "frame_idx": 5, "global_frame_idx": 24007, "task_index": 25}, {"db_idx": 24008, "episode_idx": 127, "frame_idx": 6, "global_frame_idx": 24008, "task_index": 25}, {"db_idx": 24009, "episode_idx": 127, "frame_idx": 7, "global_frame_idx": 24009, "task_index": 25}, {"db_idx": 24010, "episode_idx": 127, "frame_idx": 8, "global_frame_idx": 24010, "task_index": 25}, {"db_idx": 24011, "episode_idx": 127, "frame_idx": 9, "global_frame_idx": 24011, "task_index": 25}, {"db_idx": 24012, "episode_idx": 127, "frame_idx": 10, "global_frame_idx": 24012, "task_index": 25}, {"db_idx": 24013, "episode_idx": 127, "frame_idx": 11, "global_frame_idx": 24013, "task_index": 25}, {"db_idx": 24014, "episode_idx": 127, "frame_idx": 12, "global_frame_idx": 24014, "task_index": 25}, {"db_idx": 24015, "episode_idx": 127, "frame_idx": 13, 
"global_frame_idx": 24015, "task_index": 25}, {"db_idx": 24016, "episode_idx": 127, "frame_idx": 14, "global_frame_idx": 24016, "task_index": 25}, {"db_idx": 24017, "episode_idx": 127, "frame_idx": 15, "global_frame_idx": 24017, "task_index": 25}, {"db_idx": 24018, "episode_idx": 127, "frame_idx": 16, "global_frame_idx": 24018, "task_index": 25}, {"db_idx": 24019, "episode_idx": 127, "frame_idx": 17, "global_frame_idx": 24019, "task_index": 25}, {"db_idx": 24020, "episode_idx": 127, "frame_idx": 18, "global_frame_idx": 24020, "task_index": 25}, {"db_idx": 24021, "episode_idx": 127, "frame_idx": 19, "global_frame_idx": 24021, "task_index": 25}, {"db_idx": 24022, "episode_idx": 127, "frame_idx": 20, "global_frame_idx": 24022, "task_index": 25}, {"db_idx": 24023, "episode_idx": 127, "frame_idx": 21, "global_frame_idx": 24023, "task_index": 25}, {"db_idx": 24024, "episode_idx": 127, "frame_idx": 22, "global_frame_idx": 24024, "task_index": 25}, {"db_idx": 24025, "episode_idx": 127, "frame_idx": 23, "global_frame_idx": 24025, "task_index": 25}, {"db_idx": 24026, "episode_idx": 127, "frame_idx": 24, "global_frame_idx": 24026, "task_index": 25}, {"db_idx": 24027, "episode_idx": 127, "frame_idx": 25, "global_frame_idx": 24027, "task_index": 25}, {"db_idx": 24028, "episode_idx": 127, "frame_idx": 26, "global_frame_idx": 24028, "task_index": 25}, {"db_idx": 24029, "episode_idx": 127, "frame_idx": 27, "global_frame_idx": 24029, "task_index": 25}, {"db_idx": 24030, "episode_idx": 127, "frame_idx": 28, "global_frame_idx": 24030, "task_index": 25}, {"db_idx": 24031, "episode_idx": 127, "frame_idx": 29, "global_frame_idx": 24031, "task_index": 25}, {"db_idx": 24032, "episode_idx": 127, "frame_idx": 30, "global_frame_idx": 24032, "task_index": 25}, {"db_idx": 24033, "episode_idx": 127, "frame_idx": 31, "global_frame_idx": 24033, "task_index": 25}, {"db_idx": 24034, "episode_idx": 127, "frame_idx": 32, "global_frame_idx": 24034, "task_index": 25}, {"db_idx": 24035, "episode_idx": 
127, "frame_idx": 33, "global_frame_idx": 24035, "task_index": 25}, {"db_idx": 24036, "episode_idx": 127, "frame_idx": 34, "global_frame_idx": 24036, "task_index": 25}, {"db_idx": 24037, "episode_idx": 127, "frame_idx": 35, "global_frame_idx": 24037, "task_index": 25}, {"db_idx": 24038, "episode_idx": 127, "frame_idx": 36, "global_frame_idx": 24038, "task_index": 25}, {"db_idx": 24039, "episode_idx": 127, "frame_idx": 37, "global_frame_idx": 24039, "task_index": 25}, {"db_idx": 24040, "episode_idx": 127, "frame_idx": 38, "global_frame_idx": 24040, "task_index": 25}, {"db_idx": 24041, "episode_idx": 127, "frame_idx": 39, "global_frame_idx": 24041, "task_index": 25}, {"db_idx": 24042, "episode_idx": 127, "frame_idx": 40, "global_frame_idx": 24042, "task_index": 25}, {"db_idx": 24043, "episode_idx": 127, "frame_idx": 41, "global_frame_idx": 24043, "task_index": 25}, {"db_idx": 24044, "episode_idx": 127, "frame_idx": 42, "global_frame_idx": 24044, "task_index": 25}, {"db_idx": 24045, "episode_idx": 127, "frame_idx": 43, "global_frame_idx": 24045, "task_index": 25}, {"db_idx": 24046, "episode_idx": 127, "frame_idx": 44, "global_frame_idx": 24046, "task_index": 25}, {"db_idx": 24047, "episode_idx": 127, "frame_idx": 45, "global_frame_idx": 24047, "task_index": 25}, {"db_idx": 24048, "episode_idx": 127, "frame_idx": 46, "global_frame_idx": 24048, "task_index": 25}, {"db_idx": 24049, "episode_idx": 127, "frame_idx": 47, "global_frame_idx": 24049, "task_index": 25}, {"db_idx": 24050, "episode_idx": 127, "frame_idx": 48, "global_frame_idx": 24050, "task_index": 25}, {"db_idx": 24051, "episode_idx": 127, "frame_idx": 49, "global_frame_idx": 24051, "task_index": 25}, {"db_idx": 24052, "episode_idx": 127, "frame_idx": 50, "global_frame_idx": 24052, "task_index": 25}, {"db_idx": 24053, "episode_idx": 127, "frame_idx": 51, "global_frame_idx": 24053, "task_index": 25}, {"db_idx": 24054, "episode_idx": 127, "frame_idx": 52, "global_frame_idx": 24054, "task_index": 25}, {"db_idx": 
24055, "episode_idx": 127, "frame_idx": 53, "global_frame_idx": 24055, "task_index": 25}, {"db_idx": 24056, "episode_idx": 127, "frame_idx": 54, "global_frame_idx": 24056, "task_index": 25}, {"db_idx": 24057, "episode_idx": 127, "frame_idx": 55, "global_frame_idx": 24057, "task_index": 25}, {"db_idx": 24058, "episode_idx": 127, "frame_idx": 56, "global_frame_idx": 24058, "task_index": 25}, {"db_idx": 24059, "episode_idx": 127, "frame_idx": 57, "global_frame_idx": 24059, "task_index": 25}, {"db_idx": 24060, "episode_idx": 127, "frame_idx": 58, "global_frame_idx": 24060, "task_index": 25}, {"db_idx": 24061, "episode_idx": 127, "frame_idx": 59, "global_frame_idx": 24061, "task_index": 25}, {"db_idx": 24062, "episode_idx": 127, "frame_idx": 60, "global_frame_idx": 24062, "task_index": 25}, {"db_idx": 24063, "episode_idx": 127, "frame_idx": 61, "global_frame_idx": 24063, "task_index": 25}, {"db_idx": 24064, "episode_idx": 127, "frame_idx": 62, "global_frame_idx": 24064, "task_index": 25}, {"db_idx": 24065, "episode_idx": 127, "frame_idx": 63, "global_frame_idx": 24065, "task_index": 25}, {"db_idx": 24066, "episode_idx": 127, "frame_idx": 64, "global_frame_idx": 24066, "task_index": 25}, {"db_idx": 24067, "episode_idx": 127, "frame_idx": 65, "global_frame_idx": 24067, "task_index": 25}, {"db_idx": 24068, "episode_idx": 127, "frame_idx": 66, "global_frame_idx": 24068, "task_index": 25}, {"db_idx": 24069, "episode_idx": 127, "frame_idx": 67, "global_frame_idx": 24069, "task_index": 25}, {"db_idx": 24070, "episode_idx": 127, "frame_idx": 68, "global_frame_idx": 24070, "task_index": 25}, {"db_idx": 24071, "episode_idx": 127, "frame_idx": 69, "global_frame_idx": 24071, "task_index": 25}, {"db_idx": 24072, "episode_idx": 127, "frame_idx": 70, "global_frame_idx": 24072, "task_index": 25}, {"db_idx": 24073, "episode_idx": 127, "frame_idx": 71, "global_frame_idx": 24073, "task_index": 25}, {"db_idx": 24074, "episode_idx": 127, "frame_idx": 72, "global_frame_idx": 24074, 
"task_index": 25}, {"db_idx": 24075, "episode_idx": 127, "frame_idx": 73, "global_frame_idx": 24075, "task_index": 25}, {"db_idx": 24076, "episode_idx": 127, "frame_idx": 74, "global_frame_idx": 24076, "task_index": 25}, {"db_idx": 24077, "episode_idx": 127, "frame_idx": 75, "global_frame_idx": 24077, "task_index": 25}, {"db_idx": 24078, "episode_idx": 127, "frame_idx": 76, "global_frame_idx": 24078, "task_index": 25}, {"db_idx": 24079, "episode_idx": 127, "frame_idx": 77, "global_frame_idx": 24079, "task_index": 25}, {"db_idx": 24080, "episode_idx": 127, "frame_idx": 78, "global_frame_idx": 24080, "task_index": 25}, {"db_idx": 24081, "episode_idx": 127, "frame_idx": 79, "global_frame_idx": 24081, "task_index": 25}, {"db_idx": 24082, "episode_idx": 127, "frame_idx": 80, "global_frame_idx": 24082, "task_index": 25}, {"db_idx": 24083, "episode_idx": 127, "frame_idx": 81, "global_frame_idx": 24083, "task_index": 25}, {"db_idx": 24084, "episode_idx": 127, "frame_idx": 82, "global_frame_idx": 24084, "task_index": 25}, {"db_idx": 24085, "episode_idx": 127, "frame_idx": 83, "global_frame_idx": 24085, "task_index": 25}, {"db_idx": 24086, "episode_idx": 127, "frame_idx": 84, "global_frame_idx": 24086, "task_index": 25}, {"db_idx": 24087, "episode_idx": 127, "frame_idx": 85, "global_frame_idx": 24087, "task_index": 25}, {"db_idx": 24088, "episode_idx": 127, "frame_idx": 86, "global_frame_idx": 24088, "task_index": 25}, {"db_idx": 24089, "episode_idx": 127, "frame_idx": 87, "global_frame_idx": 24089, "task_index": 25}, {"db_idx": 24090, "episode_idx": 127, "frame_idx": 88, "global_frame_idx": 24090, "task_index": 25}, {"db_idx": 24091, "episode_idx": 127, "frame_idx": 89, "global_frame_idx": 24091, "task_index": 25}, {"db_idx": 24092, "episode_idx": 127, "frame_idx": 90, "global_frame_idx": 24092, "task_index": 25}, {"db_idx": 24093, "episode_idx": 127, "frame_idx": 91, "global_frame_idx": 24093, "task_index": 25}, {"db_idx": 24094, "episode_idx": 127, "frame_idx": 92, 
"global_frame_idx": 24094, "task_index": 25}, {"db_idx": 24095, "episode_idx": 128, "frame_idx": 0, "global_frame_idx": 24095, "task_index": 25}, {"db_idx": 24096, "episode_idx": 128, "frame_idx": 1, "global_frame_idx": 24096, "task_index": 25}, {"db_idx": 24097, "episode_idx": 128, "frame_idx": 2, "global_frame_idx": 24097, "task_index": 25}, {"db_idx": 24098, "episode_idx": 128, "frame_idx": 3, "global_frame_idx": 24098, "task_index": 25}, {"db_idx": 24099, "episode_idx": 128, "frame_idx": 4, "global_frame_idx": 24099, "task_index": 25}, {"db_idx": 24100, "episode_idx": 128, "frame_idx": 5, "global_frame_idx": 24100, "task_index": 25}, {"db_idx": 24101, "episode_idx": 128, "frame_idx": 6, "global_frame_idx": 24101, "task_index": 25}, {"db_idx": 24102, "episode_idx": 128, "frame_idx": 7, "global_frame_idx": 24102, "task_index": 25}, {"db_idx": 24103, "episode_idx": 128, "frame_idx": 8, "global_frame_idx": 24103, "task_index": 25}, {"db_idx": 24104, "episode_idx": 128, "frame_idx": 9, "global_frame_idx": 24104, "task_index": 25}, {"db_idx": 24105, "episode_idx": 128, "frame_idx": 10, "global_frame_idx": 24105, "task_index": 25}, {"db_idx": 24106, "episode_idx": 128, "frame_idx": 11, "global_frame_idx": 24106, "task_index": 25}, {"db_idx": 24107, "episode_idx": 128, "frame_idx": 12, "global_frame_idx": 24107, "task_index": 25}, {"db_idx": 24108, "episode_idx": 128, "frame_idx": 13, "global_frame_idx": 24108, "task_index": 25}, {"db_idx": 24109, "episode_idx": 128, "frame_idx": 14, "global_frame_idx": 24109, "task_index": 25}, {"db_idx": 24110, "episode_idx": 128, "frame_idx": 15, "global_frame_idx": 24110, "task_index": 25}, {"db_idx": 24111, "episode_idx": 128, "frame_idx": 16, "global_frame_idx": 24111, "task_index": 25}, {"db_idx": 24112, "episode_idx": 128, "frame_idx": 17, "global_frame_idx": 24112, "task_index": 25}, {"db_idx": 24113, "episode_idx": 128, "frame_idx": 18, "global_frame_idx": 24113, "task_index": 25}, {"db_idx": 24114, "episode_idx": 128, 
"frame_idx": 19, "global_frame_idx": 24114, "task_index": 25}, {"db_idx": 24115, "episode_idx": 128, "frame_idx": 20, "global_frame_idx": 24115, "task_index": 25}, {"db_idx": 24116, "episode_idx": 128, "frame_idx": 21, "global_frame_idx": 24116, "task_index": 25}, {"db_idx": 24117, "episode_idx": 128, "frame_idx": 22, "global_frame_idx": 24117, "task_index": 25}, {"db_idx": 24118, "episode_idx": 128, "frame_idx": 23, "global_frame_idx": 24118, "task_index": 25}, {"db_idx": 24119, "episode_idx": 128, "frame_idx": 24, "global_frame_idx": 24119, "task_index": 25}, {"db_idx": 24120, "episode_idx": 128, "frame_idx": 25, "global_frame_idx": 24120, "task_index": 25}, {"db_idx": 24121, "episode_idx": 128, "frame_idx": 26, "global_frame_idx": 24121, "task_index": 25}, {"db_idx": 24122, "episode_idx": 128, "frame_idx": 27, "global_frame_idx": 24122, "task_index": 25}, {"db_idx": 24123, "episode_idx": 128, "frame_idx": 28, "global_frame_idx": 24123, "task_index": 25}, {"db_idx": 24124, "episode_idx": 128, "frame_idx": 29, "global_frame_idx": 24124, "task_index": 25}, {"db_idx": 24125, "episode_idx": 128, "frame_idx": 30, "global_frame_idx": 24125, "task_index": 25}, {"db_idx": 24126, "episode_idx": 128, "frame_idx": 31, "global_frame_idx": 24126, "task_index": 25}, {"db_idx": 24127, "episode_idx": 128, "frame_idx": 32, "global_frame_idx": 24127, "task_index": 25}, {"db_idx": 24128, "episode_idx": 128, "frame_idx": 33, "global_frame_idx": 24128, "task_index": 25}, {"db_idx": 24129, "episode_idx": 128, "frame_idx": 34, "global_frame_idx": 24129, "task_index": 25}, {"db_idx": 24130, "episode_idx": 128, "frame_idx": 35, "global_frame_idx": 24130, "task_index": 25}, {"db_idx": 24131, "episode_idx": 128, "frame_idx": 36, "global_frame_idx": 24131, "task_index": 25}, {"db_idx": 24132, "episode_idx": 128, "frame_idx": 37, "global_frame_idx": 24132, "task_index": 25}, {"db_idx": 24133, "episode_idx": 128, "frame_idx": 38, "global_frame_idx": 24133, "task_index": 25}, {"db_idx": 24134, 
"episode_idx": 128, "frame_idx": 39, "global_frame_idx": 24134, "task_index": 25}, {"db_idx": 24135, "episode_idx": 128, "frame_idx": 40, "global_frame_idx": 24135, "task_index": 25}, {"db_idx": 24136, "episode_idx": 128, "frame_idx": 41, "global_frame_idx": 24136, "task_index": 25}, {"db_idx": 24137, "episode_idx": 128, "frame_idx": 42, "global_frame_idx": 24137, "task_index": 25}, {"db_idx": 24138, "episode_idx": 128, "frame_idx": 43, "global_frame_idx": 24138, "task_index": 25}, {"db_idx": 24139, "episode_idx": 128, "frame_idx": 44, "global_frame_idx": 24139, "task_index": 25}, {"db_idx": 24140, "episode_idx": 128, "frame_idx": 45, "global_frame_idx": 24140, "task_index": 25}, {"db_idx": 24141, "episode_idx": 128, "frame_idx": 46, "global_frame_idx": 24141, "task_index": 25}, {"db_idx": 24142, "episode_idx": 128, "frame_idx": 47, "global_frame_idx": 24142, "task_index": 25}, {"db_idx": 24143, "episode_idx": 128, "frame_idx": 48, "global_frame_idx": 24143, "task_index": 25}, {"db_idx": 24144, "episode_idx": 128, "frame_idx": 49, "global_frame_idx": 24144, "task_index": 25}, {"db_idx": 24145, "episode_idx": 128, "frame_idx": 50, "global_frame_idx": 24145, "task_index": 25}, {"db_idx": 24146, "episode_idx": 128, "frame_idx": 51, "global_frame_idx": 24146, "task_index": 25}, {"db_idx": 24147, "episode_idx": 128, "frame_idx": 52, "global_frame_idx": 24147, "task_index": 25}, {"db_idx": 24148, "episode_idx": 128, "frame_idx": 53, "global_frame_idx": 24148, "task_index": 25}, {"db_idx": 24149, "episode_idx": 128, "frame_idx": 54, "global_frame_idx": 24149, "task_index": 25}, {"db_idx": 24150, "episode_idx": 128, "frame_idx": 55, "global_frame_idx": 24150, "task_index": 25}, {"db_idx": 24151, "episode_idx": 128, "frame_idx": 56, "global_frame_idx": 24151, "task_index": 25}, {"db_idx": 24152, "episode_idx": 128, "frame_idx": 57, "global_frame_idx": 24152, "task_index": 25}, {"db_idx": 24153, "episode_idx": 128, "frame_idx": 58, "global_frame_idx": 24153, "task_index": 
25}, {"db_idx": 24154, "episode_idx": 128, "frame_idx": 59, "global_frame_idx": 24154, "task_index": 25}, {"db_idx": 24155, "episode_idx": 128, "frame_idx": 60, "global_frame_idx": 24155, "task_index": 25}, {"db_idx": 24156, "episode_idx": 128, "frame_idx": 61, "global_frame_idx": 24156, "task_index": 25}, {"db_idx": 24157, "episode_idx": 128, "frame_idx": 62, "global_frame_idx": 24157, "task_index": 25}, {"db_idx": 24158, "episode_idx": 128, "frame_idx": 63, "global_frame_idx": 24158, "task_index": 25}, {"db_idx": 24159, "episode_idx": 128, "frame_idx": 64, "global_frame_idx": 24159, "task_index": 25}, {"db_idx": 24160, "episode_idx": 128, "frame_idx": 65, "global_frame_idx": 24160, "task_index": 25}, {"db_idx": 24161, "episode_idx": 128, "frame_idx": 66, "global_frame_idx": 24161, "task_index": 25}, {"db_idx": 24162, "episode_idx": 128, "frame_idx": 67, "global_frame_idx": 24162, "task_index": 25}, {"db_idx": 24163, "episode_idx": 128, "frame_idx": 68, "global_frame_idx": 24163, "task_index": 25}, {"db_idx": 24164, "episode_idx": 128, "frame_idx": 69, "global_frame_idx": 24164, "task_index": 25}, {"db_idx": 24165, "episode_idx": 128, "frame_idx": 70, "global_frame_idx": 24165, "task_index": 25}, {"db_idx": 24166, "episode_idx": 128, "frame_idx": 71, "global_frame_idx": 24166, "task_index": 25}, {"db_idx": 24167, "episode_idx": 128, "frame_idx": 72, "global_frame_idx": 24167, "task_index": 25}, {"db_idx": 24168, "episode_idx": 128, "frame_idx": 73, "global_frame_idx": 24168, "task_index": 25}, {"db_idx": 24169, "episode_idx": 128, "frame_idx": 74, "global_frame_idx": 24169, "task_index": 25}, {"db_idx": 24170, "episode_idx": 128, "frame_idx": 75, "global_frame_idx": 24170, "task_index": 25}, {"db_idx": 24171, "episode_idx": 128, "frame_idx": 76, "global_frame_idx": 24171, "task_index": 25}, {"db_idx": 24172, "episode_idx": 128, "frame_idx": 77, "global_frame_idx": 24172, "task_index": 25}, {"db_idx": 24173, "episode_idx": 128, "frame_idx": 78, "global_frame_idx": 
24173, "task_index": 25}, {"db_idx": 24174, "episode_idx": 128, "frame_idx": 79, "global_frame_idx": 24174, "task_index": 25}, {"db_idx": 24175, "episode_idx": 128, "frame_idx": 80, "global_frame_idx": 24175, "task_index": 25}, {"db_idx": 24176, "episode_idx": 128, "frame_idx": 81, "global_frame_idx": 24176, "task_index": 25}, {"db_idx": 24177, "episode_idx": 128, "frame_idx": 82, "global_frame_idx": 24177, "task_index": 25}, {"db_idx": 24178, "episode_idx": 128, "frame_idx": 83, "global_frame_idx": 24178, "task_index": 25}, {"db_idx": 24179, "episode_idx": 128, "frame_idx": 84, "global_frame_idx": 24179, "task_index": 25}, {"db_idx": 24180, "episode_idx": 128, "frame_idx": 85, "global_frame_idx": 24180, "task_index": 25}, {"db_idx": 24181, "episode_idx": 128, "frame_idx": 86, "global_frame_idx": 24181, "task_index": 25}, {"db_idx": 24182, "episode_idx": 128, "frame_idx": 87, "global_frame_idx": 24182, "task_index": 25}, {"db_idx": 24183, "episode_idx": 128, "frame_idx": 88, "global_frame_idx": 24183, "task_index": 25}, {"db_idx": 24184, "episode_idx": 128, "frame_idx": 89, "global_frame_idx": 24184, "task_index": 25}, {"db_idx": 24185, "episode_idx": 128, "frame_idx": 90, "global_frame_idx": 24185, "task_index": 25}, {"db_idx": 24186, "episode_idx": 128, "frame_idx": 91, "global_frame_idx": 24186, "task_index": 25}, {"db_idx": 24187, "episode_idx": 128, "frame_idx": 92, "global_frame_idx": 24187, "task_index": 25}, {"db_idx": 24188, "episode_idx": 128, "frame_idx": 93, "global_frame_idx": 24188, "task_index": 25}, {"db_idx": 24189, "episode_idx": 128, "frame_idx": 94, "global_frame_idx": 24189, "task_index": 25}, {"db_idx": 24190, "episode_idx": 128, "frame_idx": 95, "global_frame_idx": 24190, "task_index": 25}, {"db_idx": 24191, "episode_idx": 128, "frame_idx": 96, "global_frame_idx": 24191, "task_index": 25}, {"db_idx": 24192, "episode_idx": 128, "frame_idx": 97, "global_frame_idx": 24192, "task_index": 25}, {"db_idx": 24193, "episode_idx": 128, "frame_idx": 98, 
"global_frame_idx": 24193, "task_index": 25}, {"db_idx": 24194, "episode_idx": 128, "frame_idx": 99, "global_frame_idx": 24194, "task_index": 25}, {"db_idx": 24195, "episode_idx": 128, "frame_idx": 100, "global_frame_idx": 24195, "task_index": 25}, {"db_idx": 24196, "episode_idx": 128, "frame_idx": 101, "global_frame_idx": 24196, "task_index": 25}, {"db_idx": 24197, "episode_idx": 128, "frame_idx": 102, "global_frame_idx": 24197, "task_index": 25}, {"db_idx": 24198, "episode_idx": 128, "frame_idx": 103, "global_frame_idx": 24198, "task_index": 25}, {"db_idx": 24199, "episode_idx": 128, "frame_idx": 104, "global_frame_idx": 24199, "task_index": 25}, {"db_idx": 24200, "episode_idx": 128, "frame_idx": 105, "global_frame_idx": 24200, "task_index": 25}, {"db_idx": 24201, "episode_idx": 128, "frame_idx": 106, "global_frame_idx": 24201, "task_index": 25}, {"db_idx": 24202, "episode_idx": 128, "frame_idx": 107, "global_frame_idx": 24202, "task_index": 25}, {"db_idx": 24203, "episode_idx": 128, "frame_idx": 108, "global_frame_idx": 24203, "task_index": 25}, {"db_idx": 24204, "episode_idx": 128, "frame_idx": 109, "global_frame_idx": 24204, "task_index": 25}, {"db_idx": 24205, "episode_idx": 128, "frame_idx": 110, "global_frame_idx": 24205, "task_index": 25}, {"db_idx": 24206, "episode_idx": 128, "frame_idx": 111, "global_frame_idx": 24206, "task_index": 25}, {"db_idx": 24207, "episode_idx": 128, "frame_idx": 112, "global_frame_idx": 24207, "task_index": 25}, {"db_idx": 24208, "episode_idx": 128, "frame_idx": 113, "global_frame_idx": 24208, "task_index": 25}, {"db_idx": 24209, "episode_idx": 129, "frame_idx": 0, "global_frame_idx": 24209, "task_index": 25}, {"db_idx": 24210, "episode_idx": 129, "frame_idx": 1, "global_frame_idx": 24210, "task_index": 25}, {"db_idx": 24211, "episode_idx": 129, "frame_idx": 2, "global_frame_idx": 24211, "task_index": 25}, {"db_idx": 24212, "episode_idx": 129, "frame_idx": 3, "global_frame_idx": 24212, "task_index": 25}, {"db_idx": 24213, 
"episode_idx": 129, "frame_idx": 4, "global_frame_idx": 24213, "task_index": 25}, {"db_idx": 24214, "episode_idx": 129, "frame_idx": 5, "global_frame_idx": 24214, "task_index": 25}, {"db_idx": 24215, "episode_idx": 129, "frame_idx": 6, "global_frame_idx": 24215, "task_index": 25}, {"db_idx": 24216, "episode_idx": 129, "frame_idx": 7, "global_frame_idx": 24216, "task_index": 25}, {"db_idx": 24217, "episode_idx": 129, "frame_idx": 8, "global_frame_idx": 24217, "task_index": 25}, {"db_idx": 24218, "episode_idx": 129, "frame_idx": 9, "global_frame_idx": 24218, "task_index": 25}, {"db_idx": 24219, "episode_idx": 129, "frame_idx": 10, "global_frame_idx": 24219, "task_index": 25}, {"db_idx": 24220, "episode_idx": 129, "frame_idx": 11, "global_frame_idx": 24220, "task_index": 25}, {"db_idx": 24221, "episode_idx": 129, "frame_idx": 12, "global_frame_idx": 24221, "task_index": 25}, {"db_idx": 24222, "episode_idx": 129, "frame_idx": 13, "global_frame_idx": 24222, "task_index": 25}, {"db_idx": 24223, "episode_idx": 129, "frame_idx": 14, "global_frame_idx": 24223, "task_index": 25}, {"db_idx": 24224, "episode_idx": 129, "frame_idx": 15, "global_frame_idx": 24224, "task_index": 25}, {"db_idx": 24225, "episode_idx": 129, "frame_idx": 16, "global_frame_idx": 24225, "task_index": 25}, {"db_idx": 24226, "episode_idx": 129, "frame_idx": 17, "global_frame_idx": 24226, "task_index": 25}, {"db_idx": 24227, "episode_idx": 129, "frame_idx": 18, "global_frame_idx": 24227, "task_index": 25}, {"db_idx": 24228, "episode_idx": 129, "frame_idx": 19, "global_frame_idx": 24228, "task_index": 25}, {"db_idx": 24229, "episode_idx": 129, "frame_idx": 20, "global_frame_idx": 24229, "task_index": 25}, {"db_idx": 24230, "episode_idx": 129, "frame_idx": 21, "global_frame_idx": 24230, "task_index": 25}, {"db_idx": 24231, "episode_idx": 129, "frame_idx": 22, "global_frame_idx": 24231, "task_index": 25}, {"db_idx": 24232, "episode_idx": 129, "frame_idx": 23, "global_frame_idx": 24232, "task_index": 25}, 
{"db_idx": 24233, "episode_idx": 129, "frame_idx": 24, "global_frame_idx": 24233, "task_index": 25}, {"db_idx": 24234, "episode_idx": 129, "frame_idx": 25, "global_frame_idx": 24234, "task_index": 25}, {"db_idx": 24235, "episode_idx": 129, "frame_idx": 26, "global_frame_idx": 24235, "task_index": 25}, {"db_idx": 24236, "episode_idx": 129, "frame_idx": 27, "global_frame_idx": 24236, "task_index": 25}, {"db_idx": 24237, "episode_idx": 129, "frame_idx": 28, "global_frame_idx": 24237, "task_index": 25}, {"db_idx": 24238, "episode_idx": 129, "frame_idx": 29, "global_frame_idx": 24238, "task_index": 25}, {"db_idx": 24239, "episode_idx": 129, "frame_idx": 30, "global_frame_idx": 24239, "task_index": 25}, {"db_idx": 24240, "episode_idx": 129, "frame_idx": 31, "global_frame_idx": 24240, "task_index": 25}, {"db_idx": 24241, "episode_idx": 129, "frame_idx": 32, "global_frame_idx": 24241, "task_index": 25}, {"db_idx": 24242, "episode_idx": 129, "frame_idx": 33, "global_frame_idx": 24242, "task_index": 25}, {"db_idx": 24243, "episode_idx": 129, "frame_idx": 34, "global_frame_idx": 24243, "task_index": 25}, {"db_idx": 24244, "episode_idx": 129, "frame_idx": 35, "global_frame_idx": 24244, "task_index": 25}, {"db_idx": 24245, "episode_idx": 129, "frame_idx": 36, "global_frame_idx": 24245, "task_index": 25}, {"db_idx": 24246, "episode_idx": 129, "frame_idx": 37, "global_frame_idx": 24246, "task_index": 25}, {"db_idx": 24247, "episode_idx": 129, "frame_idx": 38, "global_frame_idx": 24247, "task_index": 25}, {"db_idx": 24248, "episode_idx": 129, "frame_idx": 39, "global_frame_idx": 24248, "task_index": 25}, {"db_idx": 24249, "episode_idx": 129, "frame_idx": 40, "global_frame_idx": 24249, "task_index": 25}, {"db_idx": 24250, "episode_idx": 129, "frame_idx": 41, "global_frame_idx": 24250, "task_index": 25}, {"db_idx": 24251, "episode_idx": 129, "frame_idx": 42, "global_frame_idx": 24251, "task_index": 25}, {"db_idx": 24252, "episode_idx": 129, "frame_idx": 43, "global_frame_idx": 
24252, "task_index": 25}, {"db_idx": 24253, "episode_idx": 129, "frame_idx": 44, "global_frame_idx": 24253, "task_index": 25}, {"db_idx": 24254, "episode_idx": 129, "frame_idx": 45, "global_frame_idx": 24254, "task_index": 25}, {"db_idx": 24255, "episode_idx": 129, "frame_idx": 46, "global_frame_idx": 24255, "task_index": 25}, {"db_idx": 24256, "episode_idx": 129, "frame_idx": 47, "global_frame_idx": 24256, "task_index": 25}, {"db_idx": 24257, "episode_idx": 129, "frame_idx": 48, "global_frame_idx": 24257, "task_index": 25}, {"db_idx": 24258, "episode_idx": 129, "frame_idx": 49, "global_frame_idx": 24258, "task_index": 25}, {"db_idx": 24259, "episode_idx": 129, "frame_idx": 50, "global_frame_idx": 24259, "task_index": 25}, {"db_idx": 24260, "episode_idx": 129, "frame_idx": 51, "global_frame_idx": 24260, "task_index": 25}, {"db_idx": 24261, "episode_idx": 129, "frame_idx": 52, "global_frame_idx": 24261, "task_index": 25}, {"db_idx": 24262, "episode_idx": 129, "frame_idx": 53, "global_frame_idx": 24262, "task_index": 25}, {"db_idx": 24263, "episode_idx": 129, "frame_idx": 54, "global_frame_idx": 24263, "task_index": 25}, {"db_idx": 24264, "episode_idx": 129, "frame_idx": 55, "global_frame_idx": 24264, "task_index": 25}, {"db_idx": 24265, "episode_idx": 129, "frame_idx": 56, "global_frame_idx": 24265, "task_index": 25}, {"db_idx": 24266, "episode_idx": 129, "frame_idx": 57, "global_frame_idx": 24266, "task_index": 25}, {"db_idx": 24267, "episode_idx": 129, "frame_idx": 58, "global_frame_idx": 24267, "task_index": 25}, {"db_idx": 24268, "episode_idx": 129, "frame_idx": 59, "global_frame_idx": 24268, "task_index": 25}, {"db_idx": 24269, "episode_idx": 129, "frame_idx": 60, "global_frame_idx": 24269, "task_index": 25}, {"db_idx": 24270, "episode_idx": 129, "frame_idx": 61, "global_frame_idx": 24270, "task_index": 25}, {"db_idx": 24271, "episode_idx": 129, "frame_idx": 62, "global_frame_idx": 24271, "task_index": 25}, {"db_idx": 24272, "episode_idx": 129, "frame_idx": 63, 
"global_frame_idx": 24272, "task_index": 25}, {"db_idx": 24273, "episode_idx": 129, "frame_idx": 64, "global_frame_idx": 24273, "task_index": 25}, {"db_idx": 24274, "episode_idx": 129, "frame_idx": 65, "global_frame_idx": 24274, "task_index": 25}, {"db_idx": 24275, "episode_idx": 129, "frame_idx": 66, "global_frame_idx": 24275, "task_index": 25}, {"db_idx": 24276, "episode_idx": 129, "frame_idx": 67, "global_frame_idx": 24276, "task_index": 25}, {"db_idx": 24277, "episode_idx": 129, "frame_idx": 68, "global_frame_idx": 24277, "task_index": 25}, {"db_idx": 24278, "episode_idx": 129, "frame_idx": 69, "global_frame_idx": 24278, "task_index": 25}, {"db_idx": 24279, "episode_idx": 129, "frame_idx": 70, "global_frame_idx": 24279, "task_index": 25}, {"db_idx": 24280, "episode_idx": 129, "frame_idx": 71, "global_frame_idx": 24280, "task_index": 25}, {"db_idx": 24281, "episode_idx": 129, "frame_idx": 72, "global_frame_idx": 24281, "task_index": 25}, {"db_idx": 24282, "episode_idx": 129, "frame_idx": 73, "global_frame_idx": 24282, "task_index": 25}, {"db_idx": 24283, "episode_idx": 129, "frame_idx": 74, "global_frame_idx": 24283, "task_index": 25}, {"db_idx": 24284, "episode_idx": 129, "frame_idx": 75, "global_frame_idx": 24284, "task_index": 25}, {"db_idx": 24285, "episode_idx": 129, "frame_idx": 76, "global_frame_idx": 24285, "task_index": 25}, {"db_idx": 24286, "episode_idx": 129, "frame_idx": 77, "global_frame_idx": 24286, "task_index": 25}, {"db_idx": 24287, "episode_idx": 129, "frame_idx": 78, "global_frame_idx": 24287, "task_index": 25}, {"db_idx": 24288, "episode_idx": 129, "frame_idx": 79, "global_frame_idx": 24288, "task_index": 25}, {"db_idx": 24289, "episode_idx": 129, "frame_idx": 80, "global_frame_idx": 24289, "task_index": 25}, {"db_idx": 24290, "episode_idx": 129, "frame_idx": 81, "global_frame_idx": 24290, "task_index": 25}, {"db_idx": 24291, "episode_idx": 129, "frame_idx": 82, "global_frame_idx": 24291, "task_index": 25}, {"db_idx": 24292, "episode_idx": 
129, "frame_idx": 83, "global_frame_idx": 24292, "task_index": 25}, {"db_idx": 24293, "episode_idx": 129, "frame_idx": 84, "global_frame_idx": 24293, "task_index": 25}, {"db_idx": 24294, "episode_idx": 129, "frame_idx": 85, "global_frame_idx": 24294, "task_index": 25}, {"db_idx": 24295, "episode_idx": 129, "frame_idx": 86, "global_frame_idx": 24295, "task_index": 25}, {"db_idx": 24296, "episode_idx": 129, "frame_idx": 87, "global_frame_idx": 24296, "task_index": 25}, {"db_idx": 24297, "episode_idx": 129, "frame_idx": 88, "global_frame_idx": 24297, "task_index": 25}, {"db_idx": 24298, "episode_idx": 129, "frame_idx": 89, "global_frame_idx": 24298, "task_index": 25}, {"db_idx": 24299, "episode_idx": 129, "frame_idx": 90, "global_frame_idx": 24299, "task_index": 25}, {"db_idx": 24300, "episode_idx": 129, "frame_idx": 91, "global_frame_idx": 24300, "task_index": 25}, {"db_idx": 24301, "episode_idx": 129, "frame_idx": 92, "global_frame_idx": 24301, "task_index": 25}, {"db_idx": 24302, "episode_idx": 129, "frame_idx": 93, "global_frame_idx": 24302, "task_index": 25}, {"db_idx": 24303, "episode_idx": 129, "frame_idx": 94, "global_frame_idx": 24303, "task_index": 25}, {"db_idx": 24304, "episode_idx": 129, "frame_idx": 95, "global_frame_idx": 24304, "task_index": 25}, {"db_idx": 24305, "episode_idx": 129, "frame_idx": 96, "global_frame_idx": 24305, "task_index": 25}, {"db_idx": 24306, "episode_idx": 129, "frame_idx": 97, "global_frame_idx": 24306, "task_index": 25}, {"db_idx": 24307, "episode_idx": 129, "frame_idx": 98, "global_frame_idx": 24307, "task_index": 25}, {"db_idx": 24308, "episode_idx": 129, "frame_idx": 99, "global_frame_idx": 24308, "task_index": 25}, {"db_idx": 24309, "episode_idx": 129, "frame_idx": 100, "global_frame_idx": 24309, "task_index": 25}, {"db_idx": 24310, "episode_idx": 129, "frame_idx": 101, "global_frame_idx": 24310, "task_index": 25}, {"db_idx": 24311, "episode_idx": 130, "frame_idx": 0, "global_frame_idx": 24311, "task_index": 26}, {"db_idx": 
24312, "episode_idx": 130, "frame_idx": 1, "global_frame_idx": 24312, "task_index": 26}, {"db_idx": 24313, "episode_idx": 130, "frame_idx": 2, "global_frame_idx": 24313, "task_index": 26}, {"db_idx": 24314, "episode_idx": 130, "frame_idx": 3, "global_frame_idx": 24314, "task_index": 26}, {"db_idx": 24315, "episode_idx": 130, "frame_idx": 4, "global_frame_idx": 24315, "task_index": 26}, {"db_idx": 24316, "episode_idx": 130, "frame_idx": 5, "global_frame_idx": 24316, "task_index": 26}, {"db_idx": 24317, "episode_idx": 130, "frame_idx": 6, "global_frame_idx": 24317, "task_index": 26}, {"db_idx": 24318, "episode_idx": 130, "frame_idx": 7, "global_frame_idx": 24318, "task_index": 26}, {"db_idx": 24319, "episode_idx": 130, "frame_idx": 8, "global_frame_idx": 24319, "task_index": 26}, {"db_idx": 24320, "episode_idx": 130, "frame_idx": 9, "global_frame_idx": 24320, "task_index": 26}, {"db_idx": 24321, "episode_idx": 130, "frame_idx": 10, "global_frame_idx": 24321, "task_index": 26}, {"db_idx": 24322, "episode_idx": 130, "frame_idx": 11, "global_frame_idx": 24322, "task_index": 26}, {"db_idx": 24323, "episode_idx": 130, "frame_idx": 12, "global_frame_idx": 24323, "task_index": 26}, {"db_idx": 24324, "episode_idx": 130, "frame_idx": 13, "global_frame_idx": 24324, "task_index": 26}, {"db_idx": 24325, "episode_idx": 130, "frame_idx": 14, "global_frame_idx": 24325, "task_index": 26}, {"db_idx": 24326, "episode_idx": 130, "frame_idx": 15, "global_frame_idx": 24326, "task_index": 26}, {"db_idx": 24327, "episode_idx": 130, "frame_idx": 16, "global_frame_idx": 24327, "task_index": 26}, {"db_idx": 24328, "episode_idx": 130, "frame_idx": 17, "global_frame_idx": 24328, "task_index": 26}, {"db_idx": 24329, "episode_idx": 130, "frame_idx": 18, "global_frame_idx": 24329, "task_index": 26}, {"db_idx": 24330, "episode_idx": 130, "frame_idx": 19, "global_frame_idx": 24330, "task_index": 26}, {"db_idx": 24331, "episode_idx": 130, "frame_idx": 20, "global_frame_idx": 24331, "task_index": 26}, 
{"db_idx": 24332, "episode_idx": 130, "frame_idx": 21, "global_frame_idx": 24332, "task_index": 26}, {"db_idx": 24333, "episode_idx": 130, "frame_idx": 22, "global_frame_idx": 24333, "task_index": 26}, {"db_idx": 24334, "episode_idx": 130, "frame_idx": 23, "global_frame_idx": 24334, "task_index": 26}, {"db_idx": 24335, "episode_idx": 130, "frame_idx": 24, "global_frame_idx": 24335, "task_index": 26}, {"db_idx": 24336, "episode_idx": 130, "frame_idx": 25, "global_frame_idx": 24336, "task_index": 26}, {"db_idx": 24337, "episode_idx": 130, "frame_idx": 26, "global_frame_idx": 24337, "task_index": 26}, {"db_idx": 24338, "episode_idx": 130, "frame_idx": 27, "global_frame_idx": 24338, "task_index": 26}, {"db_idx": 24339, "episode_idx": 130, "frame_idx": 28, "global_frame_idx": 24339, "task_index": 26}, {"db_idx": 24340, "episode_idx": 130, "frame_idx": 29, "global_frame_idx": 24340, "task_index": 26}, {"db_idx": 24341, "episode_idx": 130, "frame_idx": 30, "global_frame_idx": 24341, "task_index": 26}, {"db_idx": 24342, "episode_idx": 130, "frame_idx": 31, "global_frame_idx": 24342, "task_index": 26}, {"db_idx": 24343, "episode_idx": 130, "frame_idx": 32, "global_frame_idx": 24343, "task_index": 26}, {"db_idx": 24344, "episode_idx": 130, "frame_idx": 33, "global_frame_idx": 24344, "task_index": 26}, {"db_idx": 24345, "episode_idx": 130, "frame_idx": 34, "global_frame_idx": 24345, "task_index": 26}, {"db_idx": 24346, "episode_idx": 130, "frame_idx": 35, "global_frame_idx": 24346, "task_index": 26}, {"db_idx": 24347, "episode_idx": 130, "frame_idx": 36, "global_frame_idx": 24347, "task_index": 26}, {"db_idx": 24348, "episode_idx": 130, "frame_idx": 37, "global_frame_idx": 24348, "task_index": 26}, {"db_idx": 24349, "episode_idx": 130, "frame_idx": 38, "global_frame_idx": 24349, "task_index": 26}, {"db_idx": 24350, "episode_idx": 130, "frame_idx": 39, "global_frame_idx": 24350, "task_index": 26}, {"db_idx": 24351, "episode_idx": 130, "frame_idx": 40, "global_frame_idx": 
24351, "task_index": 26}, {"db_idx": 24352, "episode_idx": 130, "frame_idx": 41, "global_frame_idx": 24352, "task_index": 26}, {"db_idx": 24353, "episode_idx": 130, "frame_idx": 42, "global_frame_idx": 24353, "task_index": 26}, {"db_idx": 24354, "episode_idx": 130, "frame_idx": 43, "global_frame_idx": 24354, "task_index": 26}, {"db_idx": 24355, "episode_idx": 130, "frame_idx": 44, "global_frame_idx": 24355, "task_index": 26}, {"db_idx": 24356, "episode_idx": 130, "frame_idx": 45, "global_frame_idx": 24356, "task_index": 26}, {"db_idx": 24357, "episode_idx": 130, "frame_idx": 46, "global_frame_idx": 24357, "task_index": 26}, {"db_idx": 24358, "episode_idx": 130, "frame_idx": 47, "global_frame_idx": 24358, "task_index": 26}, {"db_idx": 24359, "episode_idx": 130, "frame_idx": 48, "global_frame_idx": 24359, "task_index": 26}, {"db_idx": 24360, "episode_idx": 130, "frame_idx": 49, "global_frame_idx": 24360, "task_index": 26}, {"db_idx": 24361, "episode_idx": 130, "frame_idx": 50, "global_frame_idx": 24361, "task_index": 26}, {"db_idx": 24362, "episode_idx": 130, "frame_idx": 51, "global_frame_idx": 24362, "task_index": 26}, {"db_idx": 24363, "episode_idx": 130, "frame_idx": 52, "global_frame_idx": 24363, "task_index": 26}, {"db_idx": 24364, "episode_idx": 130, "frame_idx": 53, "global_frame_idx": 24364, "task_index": 26}, {"db_idx": 24365, "episode_idx": 130, "frame_idx": 54, "global_frame_idx": 24365, "task_index": 26}, {"db_idx": 24366, "episode_idx": 130, "frame_idx": 55, "global_frame_idx": 24366, "task_index": 26}, {"db_idx": 24367, "episode_idx": 130, "frame_idx": 56, "global_frame_idx": 24367, "task_index": 26}, {"db_idx": 24368, "episode_idx": 130, "frame_idx": 57, "global_frame_idx": 24368, "task_index": 26}, {"db_idx": 24369, "episode_idx": 130, "frame_idx": 58, "global_frame_idx": 24369, "task_index": 26}, {"db_idx": 24370, "episode_idx": 130, "frame_idx": 59, "global_frame_idx": 24370, "task_index": 26}, {"db_idx": 24371, "episode_idx": 130, "frame_idx": 60, 
"global_frame_idx": 24371, "task_index": 26}, {"db_idx": 24372, "episode_idx": 130, "frame_idx": 61, "global_frame_idx": 24372, "task_index": 26}, {"db_idx": 24373, "episode_idx": 130, "frame_idx": 62, "global_frame_idx": 24373, "task_index": 26}, {"db_idx": 24374, "episode_idx": 130, "frame_idx": 63, "global_frame_idx": 24374, "task_index": 26}, {"db_idx": 24375, "episode_idx": 130, "frame_idx": 64, "global_frame_idx": 24375, "task_index": 26}, {"db_idx": 24376, "episode_idx": 130, "frame_idx": 65, "global_frame_idx": 24376, "task_index": 26}, {"db_idx": 24377, "episode_idx": 130, "frame_idx": 66, "global_frame_idx": 24377, "task_index": 26}, {"db_idx": 24378, "episode_idx": 130, "frame_idx": 67, "global_frame_idx": 24378, "task_index": 26}, {"db_idx": 24379, "episode_idx": 130, "frame_idx": 68, "global_frame_idx": 24379, "task_index": 26}, {"db_idx": 24380, "episode_idx": 130, "frame_idx": 69, "global_frame_idx": 24380, "task_index": 26}, {"db_idx": 24381, "episode_idx": 130, "frame_idx": 70, "global_frame_idx": 24381, "task_index": 26}, {"db_idx": 24382, "episode_idx": 130, "frame_idx": 71, "global_frame_idx": 24382, "task_index": 26}, {"db_idx": 24383, "episode_idx": 130, "frame_idx": 72, "global_frame_idx": 24383, "task_index": 26}, {"db_idx": 24384, "episode_idx": 130, "frame_idx": 73, "global_frame_idx": 24384, "task_index": 26}, {"db_idx": 24385, "episode_idx": 130, "frame_idx": 74, "global_frame_idx": 24385, "task_index": 26}, {"db_idx": 24386, "episode_idx": 130, "frame_idx": 75, "global_frame_idx": 24386, "task_index": 26}, {"db_idx": 24387, "episode_idx": 130, "frame_idx": 76, "global_frame_idx": 24387, "task_index": 26}, {"db_idx": 24388, "episode_idx": 130, "frame_idx": 77, "global_frame_idx": 24388, "task_index": 26}, {"db_idx": 24389, "episode_idx": 130, "frame_idx": 78, "global_frame_idx": 24389, "task_index": 26}, {"db_idx": 24390, "episode_idx": 130, "frame_idx": 79, "global_frame_idx": 24390, "task_index": 26}, {"db_idx": 24391, "episode_idx": 
130, "frame_idx": 80, "global_frame_idx": 24391, "task_index": 26}, {"db_idx": 24392, "episode_idx": 130, "frame_idx": 81, "global_frame_idx": 24392, "task_index": 26}, {"db_idx": 24393, "episode_idx": 130, "frame_idx": 82, "global_frame_idx": 24393, "task_index": 26}, {"db_idx": 24394, "episode_idx": 130, "frame_idx": 83, "global_frame_idx": 24394, "task_index": 26}, {"db_idx": 24395, "episode_idx": 130, "frame_idx": 84, "global_frame_idx": 24395, "task_index": 26}, {"db_idx": 24396, "episode_idx": 130, "frame_idx": 85, "global_frame_idx": 24396, "task_index": 26}, {"db_idx": 24397, "episode_idx": 130, "frame_idx": 86, "global_frame_idx": 24397, "task_index": 26}, {"db_idx": 24398, "episode_idx": 130, "frame_idx": 87, "global_frame_idx": 24398, "task_index": 26}, {"db_idx": 24399, "episode_idx": 130, "frame_idx": 88, "global_frame_idx": 24399, "task_index": 26}, {"db_idx": 24400, "episode_idx": 130, "frame_idx": 89, "global_frame_idx": 24400, "task_index": 26}, {"db_idx": 24401, "episode_idx": 130, "frame_idx": 90, "global_frame_idx": 24401, "task_index": 26}, {"db_idx": 24402, "episode_idx": 130, "frame_idx": 91, "global_frame_idx": 24402, "task_index": 26}, {"db_idx": 24403, "episode_idx": 130, "frame_idx": 92, "global_frame_idx": 24403, "task_index": 26}, {"db_idx": 24404, "episode_idx": 130, "frame_idx": 93, "global_frame_idx": 24404, "task_index": 26}, {"db_idx": 24405, "episode_idx": 130, "frame_idx": 94, "global_frame_idx": 24405, "task_index": 26}, {"db_idx": 24406, "episode_idx": 130, "frame_idx": 95, "global_frame_idx": 24406, "task_index": 26}, {"db_idx": 24407, "episode_idx": 130, "frame_idx": 96, "global_frame_idx": 24407, "task_index": 26}, {"db_idx": 24408, "episode_idx": 130, "frame_idx": 97, "global_frame_idx": 24408, "task_index": 26}, {"db_idx": 24409, "episode_idx": 130, "frame_idx": 98, "global_frame_idx": 24409, "task_index": 26}, {"db_idx": 24410, "episode_idx": 130, "frame_idx": 99, "global_frame_idx": 24410, "task_index": 26}, {"db_idx": 
24411, "episode_idx": 130, "frame_idx": 100, "global_frame_idx": 24411, "task_index": 26}, {"db_idx": 24412, "episode_idx": 130, "frame_idx": 101, "global_frame_idx": 24412, "task_index": 26}, {"db_idx": 24413, "episode_idx": 130, "frame_idx": 102, "global_frame_idx": 24413, "task_index": 26}, {"db_idx": 24414, "episode_idx": 130, "frame_idx": 103, "global_frame_idx": 24414, "task_index": 26}, {"db_idx": 24415, "episode_idx": 130, "frame_idx": 104, "global_frame_idx": 24415, "task_index": 26}, {"db_idx": 24416, "episode_idx": 130, "frame_idx": 105, "global_frame_idx": 24416, "task_index": 26}, {"db_idx": 24417, "episode_idx": 130, "frame_idx": 106, "global_frame_idx": 24417, "task_index": 26}, {"db_idx": 24418, "episode_idx": 130, "frame_idx": 107, "global_frame_idx": 24418, "task_index": 26}, {"db_idx": 24419, "episode_idx": 130, "frame_idx": 108, "global_frame_idx": 24419, "task_index": 26}, {"db_idx": 24420, "episode_idx": 130, "frame_idx": 109, "global_frame_idx": 24420, "task_index": 26}, {"db_idx": 24421, "episode_idx": 131, "frame_idx": 0, "global_frame_idx": 24421, "task_index": 26}, {"db_idx": 24422, "episode_idx": 131, "frame_idx": 1, "global_frame_idx": 24422, "task_index": 26}, {"db_idx": 24423, "episode_idx": 131, "frame_idx": 2, "global_frame_idx": 24423, "task_index": 26}, {"db_idx": 24424, "episode_idx": 131, "frame_idx": 3, "global_frame_idx": 24424, "task_index": 26}, {"db_idx": 24425, "episode_idx": 131, "frame_idx": 4, "global_frame_idx": 24425, "task_index": 26}, {"db_idx": 24426, "episode_idx": 131, "frame_idx": 5, "global_frame_idx": 24426, "task_index": 26}, {"db_idx": 24427, "episode_idx": 131, "frame_idx": 6, "global_frame_idx": 24427, "task_index": 26}, {"db_idx": 24428, "episode_idx": 131, "frame_idx": 7, "global_frame_idx": 24428, "task_index": 26}, {"db_idx": 24429, "episode_idx": 131, "frame_idx": 8, "global_frame_idx": 24429, "task_index": 26}, {"db_idx": 24430, "episode_idx": 131, "frame_idx": 9, "global_frame_idx": 24430, 
"task_index": 26}, {"db_idx": 24431, "episode_idx": 131, "frame_idx": 10, "global_frame_idx": 24431, "task_index": 26}, {"db_idx": 24432, "episode_idx": 131, "frame_idx": 11, "global_frame_idx": 24432, "task_index": 26}, {"db_idx": 24433, "episode_idx": 131, "frame_idx": 12, "global_frame_idx": 24433, "task_index": 26}, {"db_idx": 24434, "episode_idx": 131, "frame_idx": 13, "global_frame_idx": 24434, "task_index": 26}, {"db_idx": 24435, "episode_idx": 131, "frame_idx": 14, "global_frame_idx": 24435, "task_index": 26}, {"db_idx": 24436, "episode_idx": 131, "frame_idx": 15, "global_frame_idx": 24436, "task_index": 26}, {"db_idx": 24437, "episode_idx": 131, "frame_idx": 16, "global_frame_idx": 24437, "task_index": 26}, {"db_idx": 24438, "episode_idx": 131, "frame_idx": 17, "global_frame_idx": 24438, "task_index": 26}, {"db_idx": 24439, "episode_idx": 131, "frame_idx": 18, "global_frame_idx": 24439, "task_index": 26}, {"db_idx": 24440, "episode_idx": 131, "frame_idx": 19, "global_frame_idx": 24440, "task_index": 26}, {"db_idx": 24441, "episode_idx": 131, "frame_idx": 20, "global_frame_idx": 24441, "task_index": 26}, {"db_idx": 24442, "episode_idx": 131, "frame_idx": 21, "global_frame_idx": 24442, "task_index": 26}, {"db_idx": 24443, "episode_idx": 131, "frame_idx": 22, "global_frame_idx": 24443, "task_index": 26}, {"db_idx": 24444, "episode_idx": 131, "frame_idx": 23, "global_frame_idx": 24444, "task_index": 26}, {"db_idx": 24445, "episode_idx": 131, "frame_idx": 24, "global_frame_idx": 24445, "task_index": 26}, {"db_idx": 24446, "episode_idx": 131, "frame_idx": 25, "global_frame_idx": 24446, "task_index": 26}, {"db_idx": 24447, "episode_idx": 131, "frame_idx": 26, "global_frame_idx": 24447, "task_index": 26}, {"db_idx": 24448, "episode_idx": 131, "frame_idx": 27, "global_frame_idx": 24448, "task_index": 26}, {"db_idx": 24449, "episode_idx": 131, "frame_idx": 28, "global_frame_idx": 24449, "task_index": 26}, {"db_idx": 24450, "episode_idx": 131, "frame_idx": 29, 
"global_frame_idx": 24450, "task_index": 26}, {"db_idx": 24451, "episode_idx": 131, "frame_idx": 30, "global_frame_idx": 24451, "task_index": 26}, {"db_idx": 24452, "episode_idx": 131, "frame_idx": 31, "global_frame_idx": 24452, "task_index": 26}, {"db_idx": 24453, "episode_idx": 131, "frame_idx": 32, "global_frame_idx": 24453, "task_index": 26}, {"db_idx": 24454, "episode_idx": 131, "frame_idx": 33, "global_frame_idx": 24454, "task_index": 26}, {"db_idx": 24455, "episode_idx": 131, "frame_idx": 34, "global_frame_idx": 24455, "task_index": 26}, {"db_idx": 24456, "episode_idx": 131, "frame_idx": 35, "global_frame_idx": 24456, "task_index": 26}, {"db_idx": 24457, "episode_idx": 131, "frame_idx": 36, "global_frame_idx": 24457, "task_index": 26}, {"db_idx": 24458, "episode_idx": 131, "frame_idx": 37, "global_frame_idx": 24458, "task_index": 26}, {"db_idx": 24459, "episode_idx": 131, "frame_idx": 38, "global_frame_idx": 24459, "task_index": 26}, {"db_idx": 24460, "episode_idx": 131, "frame_idx": 39, "global_frame_idx": 24460, "task_index": 26}, {"db_idx": 24461, "episode_idx": 131, "frame_idx": 40, "global_frame_idx": 24461, "task_index": 26}, {"db_idx": 24462, "episode_idx": 131, "frame_idx": 41, "global_frame_idx": 24462, "task_index": 26}, {"db_idx": 24463, "episode_idx": 131, "frame_idx": 42, "global_frame_idx": 24463, "task_index": 26}, {"db_idx": 24464, "episode_idx": 131, "frame_idx": 43, "global_frame_idx": 24464, "task_index": 26}, {"db_idx": 24465, "episode_idx": 131, "frame_idx": 44, "global_frame_idx": 24465, "task_index": 26}, {"db_idx": 24466, "episode_idx": 131, "frame_idx": 45, "global_frame_idx": 24466, "task_index": 26}, {"db_idx": 24467, "episode_idx": 131, "frame_idx": 46, "global_frame_idx": 24467, "task_index": 26}, {"db_idx": 24468, "episode_idx": 131, "frame_idx": 47, "global_frame_idx": 24468, "task_index": 26}, {"db_idx": 24469, "episode_idx": 131, "frame_idx": 48, "global_frame_idx": 24469, "task_index": 26}, {"db_idx": 24470, "episode_idx": 
131, "frame_idx": 49, "global_frame_idx": 24470, "task_index": 26}, {"db_idx": 24471, "episode_idx": 131, "frame_idx": 50, "global_frame_idx": 24471, "task_index": 26}, {"db_idx": 24472, "episode_idx": 131, "frame_idx": 51, "global_frame_idx": 24472, "task_index": 26}, {"db_idx": 24473, "episode_idx": 131, "frame_idx": 52, "global_frame_idx": 24473, "task_index": 26}, {"db_idx": 24474, "episode_idx": 131, "frame_idx": 53, "global_frame_idx": 24474, "task_index": 26}, {"db_idx": 24475, "episode_idx": 131, "frame_idx": 54, "global_frame_idx": 24475, "task_index": 26}, {"db_idx": 24476, "episode_idx": 131, "frame_idx": 55, "global_frame_idx": 24476, "task_index": 26}, {"db_idx": 24477, "episode_idx": 131, "frame_idx": 56, "global_frame_idx": 24477, "task_index": 26}, {"db_idx": 24478, "episode_idx": 131, "frame_idx": 57, "global_frame_idx": 24478, "task_index": 26}, {"db_idx": 24479, "episode_idx": 131, "frame_idx": 58, "global_frame_idx": 24479, "task_index": 26}, {"db_idx": 24480, "episode_idx": 131, "frame_idx": 59, "global_frame_idx": 24480, "task_index": 26}, {"db_idx": 24481, "episode_idx": 131, "frame_idx": 60, "global_frame_idx": 24481, "task_index": 26}, {"db_idx": 24482, "episode_idx": 131, "frame_idx": 61, "global_frame_idx": 24482, "task_index": 26}, {"db_idx": 24483, "episode_idx": 131, "frame_idx": 62, "global_frame_idx": 24483, "task_index": 26}, {"db_idx": 24484, "episode_idx": 131, "frame_idx": 63, "global_frame_idx": 24484, "task_index": 26}, {"db_idx": 24485, "episode_idx": 131, "frame_idx": 64, "global_frame_idx": 24485, "task_index": 26}, {"db_idx": 24486, "episode_idx": 131, "frame_idx": 65, "global_frame_idx": 24486, "task_index": 26}, {"db_idx": 24487, "episode_idx": 131, "frame_idx": 66, "global_frame_idx": 24487, "task_index": 26}, {"db_idx": 24488, "episode_idx": 131, "frame_idx": 67, "global_frame_idx": 24488, "task_index": 26}, {"db_idx": 24489, "episode_idx": 131, "frame_idx": 68, "global_frame_idx": 24489, "task_index": 26}, {"db_idx": 
24490, "episode_idx": 131, "frame_idx": 69, "global_frame_idx": 24490, "task_index": 26}, {"db_idx": 24491, "episode_idx": 131, "frame_idx": 70, "global_frame_idx": 24491, "task_index": 26}, {"db_idx": 24492, "episode_idx": 131, "frame_idx": 71, "global_frame_idx": 24492, "task_index": 26}, {"db_idx": 24493, "episode_idx": 131, "frame_idx": 72, "global_frame_idx": 24493, "task_index": 26}, {"db_idx": 24494, "episode_idx": 131, "frame_idx": 73, "global_frame_idx": 24494, "task_index": 26}, {"db_idx": 24495, "episode_idx": 131, "frame_idx": 74, "global_frame_idx": 24495, "task_index": 26}, {"db_idx": 24496, "episode_idx": 131, "frame_idx": 75, "global_frame_idx": 24496, "task_index": 26}, {"db_idx": 24497, "episode_idx": 131, "frame_idx": 76, "global_frame_idx": 24497, "task_index": 26}, {"db_idx": 24498, "episode_idx": 131, "frame_idx": 77, "global_frame_idx": 24498, "task_index": 26}, {"db_idx": 24499, "episode_idx": 131, "frame_idx": 78, "global_frame_idx": 24499, "task_index": 26}, {"db_idx": 24500, "episode_idx": 131, "frame_idx": 79, "global_frame_idx": 24500, "task_index": 26}, {"db_idx": 24501, "episode_idx": 131, "frame_idx": 80, "global_frame_idx": 24501, "task_index": 26}, {"db_idx": 24502, "episode_idx": 131, "frame_idx": 81, "global_frame_idx": 24502, "task_index": 26}, {"db_idx": 24503, "episode_idx": 131, "frame_idx": 82, "global_frame_idx": 24503, "task_index": 26}, {"db_idx": 24504, "episode_idx": 131, "frame_idx": 83, "global_frame_idx": 24504, "task_index": 26}, {"db_idx": 24505, "episode_idx": 131, "frame_idx": 84, "global_frame_idx": 24505, "task_index": 26}, {"db_idx": 24506, "episode_idx": 131, "frame_idx": 85, "global_frame_idx": 24506, "task_index": 26}, {"db_idx": 24507, "episode_idx": 131, "frame_idx": 86, "global_frame_idx": 24507, "task_index": 26}, {"db_idx": 24508, "episode_idx": 131, "frame_idx": 87, "global_frame_idx": 24508, "task_index": 26}, {"db_idx": 24509, "episode_idx": 131, "frame_idx": 88, "global_frame_idx": 24509, 
"task_index": 26}, {"db_idx": 24510, "episode_idx": 131, "frame_idx": 89, "global_frame_idx": 24510, "task_index": 26}, {"db_idx": 24511, "episode_idx": 131, "frame_idx": 90, "global_frame_idx": 24511, "task_index": 26}, {"db_idx": 24512, "episode_idx": 131, "frame_idx": 91, "global_frame_idx": 24512, "task_index": 26}, {"db_idx": 24513, "episode_idx": 131, "frame_idx": 92, "global_frame_idx": 24513, "task_index": 26}, {"db_idx": 24514, "episode_idx": 131, "frame_idx": 93, "global_frame_idx": 24514, "task_index": 26}, {"db_idx": 24515, "episode_idx": 131, "frame_idx": 94, "global_frame_idx": 24515, "task_index": 26}, {"db_idx": 24516, "episode_idx": 132, "frame_idx": 0, "global_frame_idx": 24516, "task_index": 26}, {"db_idx": 24517, "episode_idx": 132, "frame_idx": 1, "global_frame_idx": 24517, "task_index": 26}, {"db_idx": 24518, "episode_idx": 132, "frame_idx": 2, "global_frame_idx": 24518, "task_index": 26}, {"db_idx": 24519, "episode_idx": 132, "frame_idx": 3, "global_frame_idx": 24519, "task_index": 26}, {"db_idx": 24520, "episode_idx": 132, "frame_idx": 4, "global_frame_idx": 24520, "task_index": 26}, {"db_idx": 24521, "episode_idx": 132, "frame_idx": 5, "global_frame_idx": 24521, "task_index": 26}, {"db_idx": 24522, "episode_idx": 132, "frame_idx": 6, "global_frame_idx": 24522, "task_index": 26}, {"db_idx": 24523, "episode_idx": 132, "frame_idx": 7, "global_frame_idx": 24523, "task_index": 26}, {"db_idx": 24524, "episode_idx": 132, "frame_idx": 8, "global_frame_idx": 24524, "task_index": 26}, {"db_idx": 24525, "episode_idx": 132, "frame_idx": 9, "global_frame_idx": 24525, "task_index": 26}, {"db_idx": 24526, "episode_idx": 132, "frame_idx": 10, "global_frame_idx": 24526, "task_index": 26}, {"db_idx": 24527, "episode_idx": 132, "frame_idx": 11, "global_frame_idx": 24527, "task_index": 26}, {"db_idx": 24528, "episode_idx": 132, "frame_idx": 12, "global_frame_idx": 24528, "task_index": 26}, {"db_idx": 24529, "episode_idx": 132, "frame_idx": 13, 
"global_frame_idx": 24529, "task_index": 26}, {"db_idx": 24530, "episode_idx": 132, "frame_idx": 14, "global_frame_idx": 24530, "task_index": 26}, {"db_idx": 24531, "episode_idx": 132, "frame_idx": 15, "global_frame_idx": 24531, "task_index": 26}, {"db_idx": 24532, "episode_idx": 132, "frame_idx": 16, "global_frame_idx": 24532, "task_index": 26}, {"db_idx": 24533, "episode_idx": 132, "frame_idx": 17, "global_frame_idx": 24533, "task_index": 26}, {"db_idx": 24534, "episode_idx": 132, "frame_idx": 18, "global_frame_idx": 24534, "task_index": 26}, {"db_idx": 24535, "episode_idx": 132, "frame_idx": 19, "global_frame_idx": 24535, "task_index": 26}, {"db_idx": 24536, "episode_idx": 132, "frame_idx": 20, "global_frame_idx": 24536, "task_index": 26}, {"db_idx": 24537, "episode_idx": 132, "frame_idx": 21, "global_frame_idx": 24537, "task_index": 26}, {"db_idx": 24538, "episode_idx": 132, "frame_idx": 22, "global_frame_idx": 24538, "task_index": 26}, {"db_idx": 24539, "episode_idx": 132, "frame_idx": 23, "global_frame_idx": 24539, "task_index": 26}, {"db_idx": 24540, "episode_idx": 132, "frame_idx": 24, "global_frame_idx": 24540, "task_index": 26}, {"db_idx": 24541, "episode_idx": 132, "frame_idx": 25, "global_frame_idx": 24541, "task_index": 26}, {"db_idx": 24542, "episode_idx": 132, "frame_idx": 26, "global_frame_idx": 24542, "task_index": 26}, {"db_idx": 24543, "episode_idx": 132, "frame_idx": 27, "global_frame_idx": 24543, "task_index": 26}, {"db_idx": 24544, "episode_idx": 132, "frame_idx": 28, "global_frame_idx": 24544, "task_index": 26}, {"db_idx": 24545, "episode_idx": 132, "frame_idx": 29, "global_frame_idx": 24545, "task_index": 26}, {"db_idx": 24546, "episode_idx": 132, "frame_idx": 30, "global_frame_idx": 24546, "task_index": 26}, {"db_idx": 24547, "episode_idx": 132, "frame_idx": 31, "global_frame_idx": 24547, "task_index": 26}, {"db_idx": 24548, "episode_idx": 132, "frame_idx": 32, "global_frame_idx": 24548, "task_index": 26}, {"db_idx": 24549, "episode_idx": 
132, "frame_idx": 33, "global_frame_idx": 24549, "task_index": 26}, {"db_idx": 24550, "episode_idx": 132, "frame_idx": 34, "global_frame_idx": 24550, "task_index": 26}, {"db_idx": 24551, "episode_idx": 132, "frame_idx": 35, "global_frame_idx": 24551, "task_index": 26}, {"db_idx": 24552, "episode_idx": 132, "frame_idx": 36, "global_frame_idx": 24552, "task_index": 26}, {"db_idx": 24553, "episode_idx": 132, "frame_idx": 37, "global_frame_idx": 24553, "task_index": 26}, {"db_idx": 24554, "episode_idx": 132, "frame_idx": 38, "global_frame_idx": 24554, "task_index": 26}, {"db_idx": 24555, "episode_idx": 132, "frame_idx": 39, "global_frame_idx": 24555, "task_index": 26}, {"db_idx": 24556, "episode_idx": 132, "frame_idx": 40, "global_frame_idx": 24556, "task_index": 26}, {"db_idx": 24557, "episode_idx": 132, "frame_idx": 41, "global_frame_idx": 24557, "task_index": 26}, {"db_idx": 24558, "episode_idx": 132, "frame_idx": 42, "global_frame_idx": 24558, "task_index": 26}, {"db_idx": 24559, "episode_idx": 132, "frame_idx": 43, "global_frame_idx": 24559, "task_index": 26}, {"db_idx": 24560, "episode_idx": 132, "frame_idx": 44, "global_frame_idx": 24560, "task_index": 26}, {"db_idx": 24561, "episode_idx": 132, "frame_idx": 45, "global_frame_idx": 24561, "task_index": 26}, {"db_idx": 24562, "episode_idx": 132, "frame_idx": 46, "global_frame_idx": 24562, "task_index": 26}, {"db_idx": 24563, "episode_idx": 132, "frame_idx": 47, "global_frame_idx": 24563, "task_index": 26}, {"db_idx": 24564, "episode_idx": 132, "frame_idx": 48, "global_frame_idx": 24564, "task_index": 26}, {"db_idx": 24565, "episode_idx": 132, "frame_idx": 49, "global_frame_idx": 24565, "task_index": 26}, {"db_idx": 24566, "episode_idx": 132, "frame_idx": 50, "global_frame_idx": 24566, "task_index": 26}, {"db_idx": 24567, "episode_idx": 132, "frame_idx": 51, "global_frame_idx": 24567, "task_index": 26}, {"db_idx": 24568, "episode_idx": 132, "frame_idx": 52, "global_frame_idx": 24568, "task_index": 26}, {"db_idx": 
24569, "episode_idx": 132, "frame_idx": 53, "global_frame_idx": 24569, "task_index": 26}, {"db_idx": 24570, "episode_idx": 132, "frame_idx": 54, "global_frame_idx": 24570, "task_index": 26}, {"db_idx": 24571, "episode_idx": 132, "frame_idx": 55, "global_frame_idx": 24571, "task_index": 26}, {"db_idx": 24572, "episode_idx": 132, "frame_idx": 56, "global_frame_idx": 24572, "task_index": 26}, {"db_idx": 24573, "episode_idx": 132, "frame_idx": 57, "global_frame_idx": 24573, "task_index": 26}, {"db_idx": 24574, "episode_idx": 132, "frame_idx": 58, "global_frame_idx": 24574, "task_index": 26}, {"db_idx": 24575, "episode_idx": 132, "frame_idx": 59, "global_frame_idx": 24575, "task_index": 26}, {"db_idx": 24576, "episode_idx": 132, "frame_idx": 60, "global_frame_idx": 24576, "task_index": 26}, {"db_idx": 24577, "episode_idx": 132, "frame_idx": 61, "global_frame_idx": 24577, "task_index": 26}, {"db_idx": 24578, "episode_idx": 132, "frame_idx": 62, "global_frame_idx": 24578, "task_index": 26}, {"db_idx": 24579, "episode_idx": 132, "frame_idx": 63, "global_frame_idx": 24579, "task_index": 26}, {"db_idx": 24580, "episode_idx": 132, "frame_idx": 64, "global_frame_idx": 24580, "task_index": 26}, {"db_idx": 24581, "episode_idx": 132, "frame_idx": 65, "global_frame_idx": 24581, "task_index": 26}, {"db_idx": 24582, "episode_idx": 132, "frame_idx": 66, "global_frame_idx": 24582, "task_index": 26}, {"db_idx": 24583, "episode_idx": 132, "frame_idx": 67, "global_frame_idx": 24583, "task_index": 26}, {"db_idx": 24584, "episode_idx": 132, "frame_idx": 68, "global_frame_idx": 24584, "task_index": 26}, {"db_idx": 24585, "episode_idx": 132, "frame_idx": 69, "global_frame_idx": 24585, "task_index": 26}, {"db_idx": 24586, "episode_idx": 132, "frame_idx": 70, "global_frame_idx": 24586, "task_index": 26}, {"db_idx": 24587, "episode_idx": 132, "frame_idx": 71, "global_frame_idx": 24587, "task_index": 26}, {"db_idx": 24588, "episode_idx": 132, "frame_idx": 72, "global_frame_idx": 24588, 
"task_index": 26}, {"db_idx": 24589, "episode_idx": 132, "frame_idx": 73, "global_frame_idx": 24589, "task_index": 26}, {"db_idx": 24590, "episode_idx": 132, "frame_idx": 74, "global_frame_idx": 24590, "task_index": 26}, {"db_idx": 24591, "episode_idx": 132, "frame_idx": 75, "global_frame_idx": 24591, "task_index": 26}, {"db_idx": 24592, "episode_idx": 132, "frame_idx": 76, "global_frame_idx": 24592, "task_index": 26}, {"db_idx": 24593, "episode_idx": 132, "frame_idx": 77, "global_frame_idx": 24593, "task_index": 26}, {"db_idx": 24594, "episode_idx": 132, "frame_idx": 78, "global_frame_idx": 24594, "task_index": 26}, {"db_idx": 24595, "episode_idx": 132, "frame_idx": 79, "global_frame_idx": 24595, "task_index": 26}, {"db_idx": 24596, "episode_idx": 132, "frame_idx": 80, "global_frame_idx": 24596, "task_index": 26}, {"db_idx": 24597, "episode_idx": 132, "frame_idx": 81, "global_frame_idx": 24597, "task_index": 26}, {"db_idx": 24598, "episode_idx": 132, "frame_idx": 82, "global_frame_idx": 24598, "task_index": 26}, {"db_idx": 24599, "episode_idx": 132, "frame_idx": 83, "global_frame_idx": 24599, "task_index": 26}, {"db_idx": 24600, "episode_idx": 132, "frame_idx": 84, "global_frame_idx": 24600, "task_index": 26}, {"db_idx": 24601, "episode_idx": 132, "frame_idx": 85, "global_frame_idx": 24601, "task_index": 26}, {"db_idx": 24602, "episode_idx": 132, "frame_idx": 86, "global_frame_idx": 24602, "task_index": 26}, {"db_idx": 24603, "episode_idx": 132, "frame_idx": 87, "global_frame_idx": 24603, "task_index": 26}, {"db_idx": 24604, "episode_idx": 132, "frame_idx": 88, "global_frame_idx": 24604, "task_index": 26}, {"db_idx": 24605, "episode_idx": 132, "frame_idx": 89, "global_frame_idx": 24605, "task_index": 26}, {"db_idx": 24606, "episode_idx": 132, "frame_idx": 90, "global_frame_idx": 24606, "task_index": 26}, {"db_idx": 24607, "episode_idx": 132, "frame_idx": 91, "global_frame_idx": 24607, "task_index": 26}, {"db_idx": 24608, "episode_idx": 132, "frame_idx": 92, 
"global_frame_idx": 24608, "task_index": 26}, {"db_idx": 24609, "episode_idx": 132, "frame_idx": 93, "global_frame_idx": 24609, "task_index": 26}, {"db_idx": 24610, "episode_idx": 132, "frame_idx": 94, "global_frame_idx": 24610, "task_index": 26}, {"db_idx": 24611, "episode_idx": 132, "frame_idx": 95, "global_frame_idx": 24611, "task_index": 26}, {"db_idx": 24612, "episode_idx": 132, "frame_idx": 96, "global_frame_idx": 24612, "task_index": 26}, {"db_idx": 24613, "episode_idx": 132, "frame_idx": 97, "global_frame_idx": 24613, "task_index": 26}, {"db_idx": 24614, "episode_idx": 132, "frame_idx": 98, "global_frame_idx": 24614, "task_index": 26}, {"db_idx": 24615, "episode_idx": 132, "frame_idx": 99, "global_frame_idx": 24615, "task_index": 26}, {"db_idx": 24616, "episode_idx": 132, "frame_idx": 100, "global_frame_idx": 24616, "task_index": 26}, {"db_idx": 24617, "episode_idx": 132, "frame_idx": 101, "global_frame_idx": 24617, "task_index": 26}, {"db_idx": 24618, "episode_idx": 132, "frame_idx": 102, "global_frame_idx": 24618, "task_index": 26}, {"db_idx": 24619, "episode_idx": 132, "frame_idx": 103, "global_frame_idx": 24619, "task_index": 26}, {"db_idx": 24620, "episode_idx": 132, "frame_idx": 104, "global_frame_idx": 24620, "task_index": 26}, {"db_idx": 24621, "episode_idx": 132, "frame_idx": 105, "global_frame_idx": 24621, "task_index": 26}, {"db_idx": 24622, "episode_idx": 132, "frame_idx": 106, "global_frame_idx": 24622, "task_index": 26}, {"db_idx": 24623, "episode_idx": 132, "frame_idx": 107, "global_frame_idx": 24623, "task_index": 26}, {"db_idx": 24624, "episode_idx": 132, "frame_idx": 108, "global_frame_idx": 24624, "task_index": 26}, {"db_idx": 24625, "episode_idx": 132, "frame_idx": 109, "global_frame_idx": 24625, "task_index": 26}, {"db_idx": 24626, "episode_idx": 132, "frame_idx": 110, "global_frame_idx": 24626, "task_index": 26}, {"db_idx": 24627, "episode_idx": 132, "frame_idx": 111, "global_frame_idx": 24627, "task_index": 26}, {"db_idx": 24628, 
"episode_idx": 132, "frame_idx": 112, "global_frame_idx": 24628, "task_index": 26}, {"db_idx": 24629, "episode_idx": 133, "frame_idx": 0, "global_frame_idx": 24629, "task_index": 26}, {"db_idx": 24630, "episode_idx": 133, "frame_idx": 1, "global_frame_idx": 24630, "task_index": 26}, {"db_idx": 24631, "episode_idx": 133, "frame_idx": 2, "global_frame_idx": 24631, "task_index": 26}, {"db_idx": 24632, "episode_idx": 133, "frame_idx": 3, "global_frame_idx": 24632, "task_index": 26}, {"db_idx": 24633, "episode_idx": 133, "frame_idx": 4, "global_frame_idx": 24633, "task_index": 26}, {"db_idx": 24634, "episode_idx": 133, "frame_idx": 5, "global_frame_idx": 24634, "task_index": 26}, {"db_idx": 24635, "episode_idx": 133, "frame_idx": 6, "global_frame_idx": 24635, "task_index": 26}, {"db_idx": 24636, "episode_idx": 133, "frame_idx": 7, "global_frame_idx": 24636, "task_index": 26}, {"db_idx": 24637, "episode_idx": 133, "frame_idx": 8, "global_frame_idx": 24637, "task_index": 26}, {"db_idx": 24638, "episode_idx": 133, "frame_idx": 9, "global_frame_idx": 24638, "task_index": 26}, {"db_idx": 24639, "episode_idx": 133, "frame_idx": 10, "global_frame_idx": 24639, "task_index": 26}, {"db_idx": 24640, "episode_idx": 133, "frame_idx": 11, "global_frame_idx": 24640, "task_index": 26}, {"db_idx": 24641, "episode_idx": 133, "frame_idx": 12, "global_frame_idx": 24641, "task_index": 26}, {"db_idx": 24642, "episode_idx": 133, "frame_idx": 13, "global_frame_idx": 24642, "task_index": 26}, {"db_idx": 24643, "episode_idx": 133, "frame_idx": 14, "global_frame_idx": 24643, "task_index": 26}, {"db_idx": 24644, "episode_idx": 133, "frame_idx": 15, "global_frame_idx": 24644, "task_index": 26}, {"db_idx": 24645, "episode_idx": 133, "frame_idx": 16, "global_frame_idx": 24645, "task_index": 26}, {"db_idx": 24646, "episode_idx": 133, "frame_idx": 17, "global_frame_idx": 24646, "task_index": 26}, {"db_idx": 24647, "episode_idx": 133, "frame_idx": 18, "global_frame_idx": 24647, "task_index": 26}, 
{"db_idx": 24648, "episode_idx": 133, "frame_idx": 19, "global_frame_idx": 24648, "task_index": 26}, {"db_idx": 24649, "episode_idx": 133, "frame_idx": 20, "global_frame_idx": 24649, "task_index": 26}, {"db_idx": 24650, "episode_idx": 133, "frame_idx": 21, "global_frame_idx": 24650, "task_index": 26}, {"db_idx": 24651, "episode_idx": 133, "frame_idx": 22, "global_frame_idx": 24651, "task_index": 26}, {"db_idx": 24652, "episode_idx": 133, "frame_idx": 23, "global_frame_idx": 24652, "task_index": 26}, {"db_idx": 24653, "episode_idx": 133, "frame_idx": 24, "global_frame_idx": 24653, "task_index": 26}, {"db_idx": 24654, "episode_idx": 133, "frame_idx": 25, "global_frame_idx": 24654, "task_index": 26}, {"db_idx": 24655, "episode_idx": 133, "frame_idx": 26, "global_frame_idx": 24655, "task_index": 26}, {"db_idx": 24656, "episode_idx": 133, "frame_idx": 27, "global_frame_idx": 24656, "task_index": 26}, {"db_idx": 24657, "episode_idx": 133, "frame_idx": 28, "global_frame_idx": 24657, "task_index": 26}, {"db_idx": 24658, "episode_idx": 133, "frame_idx": 29, "global_frame_idx": 24658, "task_index": 26}, {"db_idx": 24659, "episode_idx": 133, "frame_idx": 30, "global_frame_idx": 24659, "task_index": 26}, {"db_idx": 24660, "episode_idx": 133, "frame_idx": 31, "global_frame_idx": 24660, "task_index": 26}, {"db_idx": 24661, "episode_idx": 133, "frame_idx": 32, "global_frame_idx": 24661, "task_index": 26}, {"db_idx": 24662, "episode_idx": 133, "frame_idx": 33, "global_frame_idx": 24662, "task_index": 26}, {"db_idx": 24663, "episode_idx": 133, "frame_idx": 34, "global_frame_idx": 24663, "task_index": 26}, {"db_idx": 24664, "episode_idx": 133, "frame_idx": 35, "global_frame_idx": 24664, "task_index": 26}, {"db_idx": 24665, "episode_idx": 133, "frame_idx": 36, "global_frame_idx": 24665, "task_index": 26}, {"db_idx": 24666, "episode_idx": 133, "frame_idx": 37, "global_frame_idx": 24666, "task_index": 26}, {"db_idx": 24667, "episode_idx": 133, "frame_idx": 38, "global_frame_idx": 
24667, "task_index": 26}, {"db_idx": 24668, "episode_idx": 133, "frame_idx": 39, "global_frame_idx": 24668, "task_index": 26}, {"db_idx": 24669, "episode_idx": 133, "frame_idx": 40, "global_frame_idx": 24669, "task_index": 26}, {"db_idx": 24670, "episode_idx": 133, "frame_idx": 41, "global_frame_idx": 24670, "task_index": 26}, {"db_idx": 24671, "episode_idx": 133, "frame_idx": 42, "global_frame_idx": 24671, "task_index": 26}, {"db_idx": 24672, "episode_idx": 133, "frame_idx": 43, "global_frame_idx": 24672, "task_index": 26}, {"db_idx": 24673, "episode_idx": 133, "frame_idx": 44, "global_frame_idx": 24673, "task_index": 26}, {"db_idx": 24674, "episode_idx": 133, "frame_idx": 45, "global_frame_idx": 24674, "task_index": 26}, {"db_idx": 24675, "episode_idx": 133, "frame_idx": 46, "global_frame_idx": 24675, "task_index": 26}, {"db_idx": 24676, "episode_idx": 133, "frame_idx": 47, "global_frame_idx": 24676, "task_index": 26}, {"db_idx": 24677, "episode_idx": 133, "frame_idx": 48, "global_frame_idx": 24677, "task_index": 26}, {"db_idx": 24678, "episode_idx": 133, "frame_idx": 49, "global_frame_idx": 24678, "task_index": 26}, {"db_idx": 24679, "episode_idx": 133, "frame_idx": 50, "global_frame_idx": 24679, "task_index": 26}, {"db_idx": 24680, "episode_idx": 133, "frame_idx": 51, "global_frame_idx": 24680, "task_index": 26}, {"db_idx": 24681, "episode_idx": 133, "frame_idx": 52, "global_frame_idx": 24681, "task_index": 26}, {"db_idx": 24682, "episode_idx": 133, "frame_idx": 53, "global_frame_idx": 24682, "task_index": 26}, {"db_idx": 24683, "episode_idx": 133, "frame_idx": 54, "global_frame_idx": 24683, "task_index": 26}, {"db_idx": 24684, "episode_idx": 133, "frame_idx": 55, "global_frame_idx": 24684, "task_index": 26}, {"db_idx": 24685, "episode_idx": 133, "frame_idx": 56, "global_frame_idx": 24685, "task_index": 26}, {"db_idx": 24686, "episode_idx": 133, "frame_idx": 57, "global_frame_idx": 24686, "task_index": 26}, {"db_idx": 24687, "episode_idx": 133, "frame_idx": 58, 
"global_frame_idx": 24687, "task_index": 26}, {"db_idx": 24688, "episode_idx": 133, "frame_idx": 59, "global_frame_idx": 24688, "task_index": 26}, {"db_idx": 24689, "episode_idx": 133, "frame_idx": 60, "global_frame_idx": 24689, "task_index": 26}, {"db_idx": 24690, "episode_idx": 133, "frame_idx": 61, "global_frame_idx": 24690, "task_index": 26}, {"db_idx": 24691, "episode_idx": 133, "frame_idx": 62, "global_frame_idx": 24691, "task_index": 26}, {"db_idx": 24692, "episode_idx": 133, "frame_idx": 63, "global_frame_idx": 24692, "task_index": 26}, {"db_idx": 24693, "episode_idx": 133, "frame_idx": 64, "global_frame_idx": 24693, "task_index": 26}, {"db_idx": 24694, "episode_idx": 133, "frame_idx": 65, "global_frame_idx": 24694, "task_index": 26}, {"db_idx": 24695, "episode_idx": 133, "frame_idx": 66, "global_frame_idx": 24695, "task_index": 26}, {"db_idx": 24696, "episode_idx": 133, "frame_idx": 67, "global_frame_idx": 24696, "task_index": 26}, {"db_idx": 24697, "episode_idx": 133, "frame_idx": 68, "global_frame_idx": 24697, "task_index": 26}, {"db_idx": 24698, "episode_idx": 133, "frame_idx": 69, "global_frame_idx": 24698, "task_index": 26}, {"db_idx": 24699, "episode_idx": 133, "frame_idx": 70, "global_frame_idx": 24699, "task_index": 26}, {"db_idx": 24700, "episode_idx": 133, "frame_idx": 71, "global_frame_idx": 24700, "task_index": 26}, {"db_idx": 24701, "episode_idx": 133, "frame_idx": 72, "global_frame_idx": 24701, "task_index": 26}, {"db_idx": 24702, "episode_idx": 133, "frame_idx": 73, "global_frame_idx": 24702, "task_index": 26}, {"db_idx": 24703, "episode_idx": 133, "frame_idx": 74, "global_frame_idx": 24703, "task_index": 26}, {"db_idx": 24704, "episode_idx": 133, "frame_idx": 75, "global_frame_idx": 24704, "task_index": 26}, {"db_idx": 24705, "episode_idx": 133, "frame_idx": 76, "global_frame_idx": 24705, "task_index": 26}, {"db_idx": 24706, "episode_idx": 133, "frame_idx": 77, "global_frame_idx": 24706, "task_index": 26}, {"db_idx": 24707, "episode_idx": 
133, "frame_idx": 78, "global_frame_idx": 24707, "task_index": 26}, {"db_idx": 24708, "episode_idx": 133, "frame_idx": 79, "global_frame_idx": 24708, "task_index": 26}, {"db_idx": 24709, "episode_idx": 133, "frame_idx": 80, "global_frame_idx": 24709, "task_index": 26}, {"db_idx": 24710, "episode_idx": 133, "frame_idx": 81, "global_frame_idx": 24710, "task_index": 26}, {"db_idx": 24711, "episode_idx": 133, "frame_idx": 82, "global_frame_idx": 24711, "task_index": 26}, {"db_idx": 24712, "episode_idx": 133, "frame_idx": 83, "global_frame_idx": 24712, "task_index": 26}, {"db_idx": 24713, "episode_idx": 133, "frame_idx": 84, "global_frame_idx": 24713, "task_index": 26}, {"db_idx": 24714, "episode_idx": 133, "frame_idx": 85, "global_frame_idx": 24714, "task_index": 26}, {"db_idx": 24715, "episode_idx": 133, "frame_idx": 86, "global_frame_idx": 24715, "task_index": 26}, {"db_idx": 24716, "episode_idx": 133, "frame_idx": 87, "global_frame_idx": 24716, "task_index": 26}, {"db_idx": 24717, "episode_idx": 133, "frame_idx": 88, "global_frame_idx": 24717, "task_index": 26}, {"db_idx": 24718, "episode_idx": 133, "frame_idx": 89, "global_frame_idx": 24718, "task_index": 26}, {"db_idx": 24719, "episode_idx": 133, "frame_idx": 90, "global_frame_idx": 24719, "task_index": 26}, {"db_idx": 24720, "episode_idx": 133, "frame_idx": 91, "global_frame_idx": 24720, "task_index": 26}, {"db_idx": 24721, "episode_idx": 133, "frame_idx": 92, "global_frame_idx": 24721, "task_index": 26}, {"db_idx": 24722, "episode_idx": 133, "frame_idx": 93, "global_frame_idx": 24722, "task_index": 26}, {"db_idx": 24723, "episode_idx": 133, "frame_idx": 94, "global_frame_idx": 24723, "task_index": 26}, {"db_idx": 24724, "episode_idx": 133, "frame_idx": 95, "global_frame_idx": 24724, "task_index": 26}, {"db_idx": 24725, "episode_idx": 133, "frame_idx": 96, "global_frame_idx": 24725, "task_index": 26}, {"db_idx": 24726, "episode_idx": 133, "frame_idx": 97, "global_frame_idx": 24726, "task_index": 26}, {"db_idx": 
24727, "episode_idx": 133, "frame_idx": 98, "global_frame_idx": 24727, "task_index": 26}, {"db_idx": 24728, "episode_idx": 133, "frame_idx": 99, "global_frame_idx": 24728, "task_index": 26}, {"db_idx": 24729, "episode_idx": 133, "frame_idx": 100, "global_frame_idx": 24729, "task_index": 26}, {"db_idx": 24730, "episode_idx": 133, "frame_idx": 101, "global_frame_idx": 24730, "task_index": 26}, {"db_idx": 24731, "episode_idx": 133, "frame_idx": 102, "global_frame_idx": 24731, "task_index": 26}, {"db_idx": 24732, "episode_idx": 133, "frame_idx": 103, "global_frame_idx": 24732, "task_index": 26}, {"db_idx": 24733, "episode_idx": 133, "frame_idx": 104, "global_frame_idx": 24733, "task_index": 26}, {"db_idx": 24734, "episode_idx": 133, "frame_idx": 105, "global_frame_idx": 24734, "task_index": 26}, {"db_idx": 24735, "episode_idx": 133, "frame_idx": 106, "global_frame_idx": 24735, "task_index": 26}, {"db_idx": 24736, "episode_idx": 133, "frame_idx": 107, "global_frame_idx": 24736, "task_index": 26}, {"db_idx": 24737, "episode_idx": 133, "frame_idx": 108, "global_frame_idx": 24737, "task_index": 26}, {"db_idx": 24738, "episode_idx": 133, "frame_idx": 109, "global_frame_idx": 24738, "task_index": 26}, {"db_idx": 24739, "episode_idx": 134, "frame_idx": 0, "global_frame_idx": 24739, "task_index": 26}, {"db_idx": 24740, "episode_idx": 134, "frame_idx": 1, "global_frame_idx": 24740, "task_index": 26}, {"db_idx": 24741, "episode_idx": 134, "frame_idx": 2, "global_frame_idx": 24741, "task_index": 26}, {"db_idx": 24742, "episode_idx": 134, "frame_idx": 3, "global_frame_idx": 24742, "task_index": 26}, {"db_idx": 24743, "episode_idx": 134, "frame_idx": 4, "global_frame_idx": 24743, "task_index": 26}, {"db_idx": 24744, "episode_idx": 134, "frame_idx": 5, "global_frame_idx": 24744, "task_index": 26}, {"db_idx": 24745, "episode_idx": 134, "frame_idx": 6, "global_frame_idx": 24745, "task_index": 26}, {"db_idx": 24746, "episode_idx": 134, "frame_idx": 7, "global_frame_idx": 24746, 
"task_index": 26}, {"db_idx": 24747, "episode_idx": 134, "frame_idx": 8, "global_frame_idx": 24747, "task_index": 26}, {"db_idx": 24748, "episode_idx": 134, "frame_idx": 9, "global_frame_idx": 24748, "task_index": 26}, {"db_idx": 24749, "episode_idx": 134, "frame_idx": 10, "global_frame_idx": 24749, "task_index": 26}, {"db_idx": 24750, "episode_idx": 134, "frame_idx": 11, "global_frame_idx": 24750, "task_index": 26}, {"db_idx": 24751, "episode_idx": 134, "frame_idx": 12, "global_frame_idx": 24751, "task_index": 26}, {"db_idx": 24752, "episode_idx": 134, "frame_idx": 13, "global_frame_idx": 24752, "task_index": 26}, {"db_idx": 24753, "episode_idx": 134, "frame_idx": 14, "global_frame_idx": 24753, "task_index": 26}, {"db_idx": 24754, "episode_idx": 134, "frame_idx": 15, "global_frame_idx": 24754, "task_index": 26}, {"db_idx": 24755, "episode_idx": 134, "frame_idx": 16, "global_frame_idx": 24755, "task_index": 26}, {"db_idx": 24756, "episode_idx": 134, "frame_idx": 17, "global_frame_idx": 24756, "task_index": 26}, {"db_idx": 24757, "episode_idx": 134, "frame_idx": 18, "global_frame_idx": 24757, "task_index": 26}, {"db_idx": 24758, "episode_idx": 134, "frame_idx": 19, "global_frame_idx": 24758, "task_index": 26}, {"db_idx": 24759, "episode_idx": 134, "frame_idx": 20, "global_frame_idx": 24759, "task_index": 26}, {"db_idx": 24760, "episode_idx": 134, "frame_idx": 21, "global_frame_idx": 24760, "task_index": 26}, {"db_idx": 24761, "episode_idx": 134, "frame_idx": 22, "global_frame_idx": 24761, "task_index": 26}, {"db_idx": 24762, "episode_idx": 134, "frame_idx": 23, "global_frame_idx": 24762, "task_index": 26}, {"db_idx": 24763, "episode_idx": 134, "frame_idx": 24, "global_frame_idx": 24763, "task_index": 26}, {"db_idx": 24764, "episode_idx": 134, "frame_idx": 25, "global_frame_idx": 24764, "task_index": 26}, {"db_idx": 24765, "episode_idx": 134, "frame_idx": 26, "global_frame_idx": 24765, "task_index": 26}, {"db_idx": 24766, "episode_idx": 134, "frame_idx": 27, 
"global_frame_idx": 24766, "task_index": 26}, {"db_idx": 24767, "episode_idx": 134, "frame_idx": 28, "global_frame_idx": 24767, "task_index": 26}, {"db_idx": 24768, "episode_idx": 134, "frame_idx": 29, "global_frame_idx": 24768, "task_index": 26}, {"db_idx": 24769, "episode_idx": 134, "frame_idx": 30, "global_frame_idx": 24769, "task_index": 26}, {"db_idx": 24770, "episode_idx": 134, "frame_idx": 31, "global_frame_idx": 24770, "task_index": 26}, {"db_idx": 24771, "episode_idx": 134, "frame_idx": 32, "global_frame_idx": 24771, "task_index": 26}, {"db_idx": 24772, "episode_idx": 134, "frame_idx": 33, "global_frame_idx": 24772, "task_index": 26}, {"db_idx": 24773, "episode_idx": 134, "frame_idx": 34, "global_frame_idx": 24773, "task_index": 26}, {"db_idx": 24774, "episode_idx": 134, "frame_idx": 35, "global_frame_idx": 24774, "task_index": 26}, {"db_idx": 24775, "episode_idx": 134, "frame_idx": 36, "global_frame_idx": 24775, "task_index": 26}, {"db_idx": 24776, "episode_idx": 134, "frame_idx": 37, "global_frame_idx": 24776, "task_index": 26}, {"db_idx": 24777, "episode_idx": 134, "frame_idx": 38, "global_frame_idx": 24777, "task_index": 26}, {"db_idx": 24778, "episode_idx": 134, "frame_idx": 39, "global_frame_idx": 24778, "task_index": 26}, {"db_idx": 24779, "episode_idx": 134, "frame_idx": 40, "global_frame_idx": 24779, "task_index": 26}, {"db_idx": 24780, "episode_idx": 134, "frame_idx": 41, "global_frame_idx": 24780, "task_index": 26}, {"db_idx": 24781, "episode_idx": 134, "frame_idx": 42, "global_frame_idx": 24781, "task_index": 26}, {"db_idx": 24782, "episode_idx": 134, "frame_idx": 43, "global_frame_idx": 24782, "task_index": 26}, {"db_idx": 24783, "episode_idx": 134, "frame_idx": 44, "global_frame_idx": 24783, "task_index": 26}, {"db_idx": 24784, "episode_idx": 134, "frame_idx": 45, "global_frame_idx": 24784, "task_index": 26}, {"db_idx": 24785, "episode_idx": 134, "frame_idx": 46, "global_frame_idx": 24785, "task_index": 26}, {"db_idx": 24786, "episode_idx": 
134, "frame_idx": 47, "global_frame_idx": 24786, "task_index": 26}, {"db_idx": 24787, "episode_idx": 134, "frame_idx": 48, "global_frame_idx": 24787, "task_index": 26}, {"db_idx": 24788, "episode_idx": 134, "frame_idx": 49, "global_frame_idx": 24788, "task_index": 26}, {"db_idx": 24789, "episode_idx": 134, "frame_idx": 50, "global_frame_idx": 24789, "task_index": 26}, {"db_idx": 24790, "episode_idx": 134, "frame_idx": 51, "global_frame_idx": 24790, "task_index": 26}, {"db_idx": 24791, "episode_idx": 134, "frame_idx": 52, "global_frame_idx": 24791, "task_index": 26}, {"db_idx": 24792, "episode_idx": 134, "frame_idx": 53, "global_frame_idx": 24792, "task_index": 26}, {"db_idx": 24793, "episode_idx": 134, "frame_idx": 54, "global_frame_idx": 24793, "task_index": 26}, {"db_idx": 24794, "episode_idx": 134, "frame_idx": 55, "global_frame_idx": 24794, "task_index": 26}, {"db_idx": 24795, "episode_idx": 134, "frame_idx": 56, "global_frame_idx": 24795, "task_index": 26}, {"db_idx": 24796, "episode_idx": 134, "frame_idx": 57, "global_frame_idx": 24796, "task_index": 26}, {"db_idx": 24797, "episode_idx": 134, "frame_idx": 58, "global_frame_idx": 24797, "task_index": 26}, {"db_idx": 24798, "episode_idx": 134, "frame_idx": 59, "global_frame_idx": 24798, "task_index": 26}, {"db_idx": 24799, "episode_idx": 134, "frame_idx": 60, "global_frame_idx": 24799, "task_index": 26}, {"db_idx": 24800, "episode_idx": 134, "frame_idx": 61, "global_frame_idx": 24800, "task_index": 26}, {"db_idx": 24801, "episode_idx": 134, "frame_idx": 62, "global_frame_idx": 24801, "task_index": 26}, {"db_idx": 24802, "episode_idx": 134, "frame_idx": 63, "global_frame_idx": 24802, "task_index": 26}, {"db_idx": 24803, "episode_idx": 134, "frame_idx": 64, "global_frame_idx": 24803, "task_index": 26}, {"db_idx": 24804, "episode_idx": 134, "frame_idx": 65, "global_frame_idx": 24804, "task_index": 26}, {"db_idx": 24805, "episode_idx": 134, "frame_idx": 66, "global_frame_idx": 24805, "task_index": 26}, {"db_idx": 
24806, "episode_idx": 134, "frame_idx": 67, "global_frame_idx": 24806, "task_index": 26}, {"db_idx": 24807, "episode_idx": 134, "frame_idx": 68, "global_frame_idx": 24807, "task_index": 26}, {"db_idx": 24808, "episode_idx": 134, "frame_idx": 69, "global_frame_idx": 24808, "task_index": 26}, {"db_idx": 24809, "episode_idx": 134, "frame_idx": 70, "global_frame_idx": 24809, "task_index": 26}, {"db_idx": 24810, "episode_idx": 134, "frame_idx": 71, "global_frame_idx": 24810, "task_index": 26}, {"db_idx": 24811, "episode_idx": 134, "frame_idx": 72, "global_frame_idx": 24811, "task_index": 26}, {"db_idx": 24812, "episode_idx": 134, "frame_idx": 73, "global_frame_idx": 24812, "task_index": 26}, {"db_idx": 24813, "episode_idx": 134, "frame_idx": 74, "global_frame_idx": 24813, "task_index": 26}, {"db_idx": 24814, "episode_idx": 134, "frame_idx": 75, "global_frame_idx": 24814, "task_index": 26}, {"db_idx": 24815, "episode_idx": 134, "frame_idx": 76, "global_frame_idx": 24815, "task_index": 26}, {"db_idx": 24816, "episode_idx": 134, "frame_idx": 77, "global_frame_idx": 24816, "task_index": 26}, {"db_idx": 24817, "episode_idx": 134, "frame_idx": 78, "global_frame_idx": 24817, "task_index": 26}, {"db_idx": 24818, "episode_idx": 134, "frame_idx": 79, "global_frame_idx": 24818, "task_index": 26}, {"db_idx": 24819, "episode_idx": 134, "frame_idx": 80, "global_frame_idx": 24819, "task_index": 26}, {"db_idx": 24820, "episode_idx": 134, "frame_idx": 81, "global_frame_idx": 24820, "task_index": 26}, {"db_idx": 24821, "episode_idx": 134, "frame_idx": 82, "global_frame_idx": 24821, "task_index": 26}, {"db_idx": 24822, "episode_idx": 134, "frame_idx": 83, "global_frame_idx": 24822, "task_index": 26}, {"db_idx": 24823, "episode_idx": 134, "frame_idx": 84, "global_frame_idx": 24823, "task_index": 26}, {"db_idx": 24824, "episode_idx": 134, "frame_idx": 85, "global_frame_idx": 24824, "task_index": 26}, {"db_idx": 24825, "episode_idx": 134, "frame_idx": 86, "global_frame_idx": 24825, 
"task_index": 26}, {"db_idx": 24826, "episode_idx": 134, "frame_idx": 87, "global_frame_idx": 24826, "task_index": 26}, {"db_idx": 24827, "episode_idx": 134, "frame_idx": 88, "global_frame_idx": 24827, "task_index": 26}, {"db_idx": 24828, "episode_idx": 134, "frame_idx": 89, "global_frame_idx": 24828, "task_index": 26}, {"db_idx": 24829, "episode_idx": 134, "frame_idx": 90, "global_frame_idx": 24829, "task_index": 26}, {"db_idx": 24830, "episode_idx": 134, "frame_idx": 91, "global_frame_idx": 24830, "task_index": 26}, {"db_idx": 24831, "episode_idx": 134, "frame_idx": 92, "global_frame_idx": 24831, "task_index": 26}, {"db_idx": 24832, "episode_idx": 134, "frame_idx": 93, "global_frame_idx": 24832, "task_index": 26}, {"db_idx": 24833, "episode_idx": 134, "frame_idx": 94, "global_frame_idx": 24833, "task_index": 26}, {"db_idx": 24834, "episode_idx": 134, "frame_idx": 95, "global_frame_idx": 24834, "task_index": 26}, {"db_idx": 24835, "episode_idx": 134, "frame_idx": 96, "global_frame_idx": 24835, "task_index": 26}, {"db_idx": 24836, "episode_idx": 134, "frame_idx": 97, "global_frame_idx": 24836, "task_index": 26}, {"db_idx": 24837, "episode_idx": 134, "frame_idx": 98, "global_frame_idx": 24837, "task_index": 26}, {"db_idx": 24838, "episode_idx": 134, "frame_idx": 99, "global_frame_idx": 24838, "task_index": 26}, {"db_idx": 24839, "episode_idx": 134, "frame_idx": 100, "global_frame_idx": 24839, "task_index": 26}, {"db_idx": 24840, "episode_idx": 134, "frame_idx": 101, "global_frame_idx": 24840, "task_index": 26}, {"db_idx": 24841, "episode_idx": 134, "frame_idx": 102, "global_frame_idx": 24841, "task_index": 26}, {"db_idx": 24842, "episode_idx": 134, "frame_idx": 103, "global_frame_idx": 24842, "task_index": 26}, {"db_idx": 24843, "episode_idx": 134, "frame_idx": 104, "global_frame_idx": 24843, "task_index": 26}, {"db_idx": 24844, "episode_idx": 134, "frame_idx": 105, "global_frame_idx": 24844, "task_index": 26}, {"db_idx": 24845, "episode_idx": 134, "frame_idx": 106, 
"global_frame_idx": 24845, "task_index": 26}, {"db_idx": 24846, "episode_idx": 134, "frame_idx": 107, "global_frame_idx": 24846, "task_index": 26}, {"db_idx": 24847, "episode_idx": 134, "frame_idx": 108, "global_frame_idx": 24847, "task_index": 26}, {"db_idx": 24848, "episode_idx": 134, "frame_idx": 109, "global_frame_idx": 24848, "task_index": 26}, {"db_idx": 24849, "episode_idx": 134, "frame_idx": 110, "global_frame_idx": 24849, "task_index": 26}, {"db_idx": 24850, "episode_idx": 135, "frame_idx": 0, "global_frame_idx": 24850, "task_index": 27}, {"db_idx": 24851, "episode_idx": 135, "frame_idx": 1, "global_frame_idx": 24851, "task_index": 27}, {"db_idx": 24852, "episode_idx": 135, "frame_idx": 2, "global_frame_idx": 24852, "task_index": 27}, {"db_idx": 24853, "episode_idx": 135, "frame_idx": 3, "global_frame_idx": 24853, "task_index": 27}, {"db_idx": 24854, "episode_idx": 135, "frame_idx": 4, "global_frame_idx": 24854, "task_index": 27}, {"db_idx": 24855, "episode_idx": 135, "frame_idx": 5, "global_frame_idx": 24855, "task_index": 27}, {"db_idx": 24856, "episode_idx": 135, "frame_idx": 6, "global_frame_idx": 24856, "task_index": 27}, {"db_idx": 24857, "episode_idx": 135, "frame_idx": 7, "global_frame_idx": 24857, "task_index": 27}, {"db_idx": 24858, "episode_idx": 135, "frame_idx": 8, "global_frame_idx": 24858, "task_index": 27}, {"db_idx": 24859, "episode_idx": 135, "frame_idx": 9, "global_frame_idx": 24859, "task_index": 27}, {"db_idx": 24860, "episode_idx": 135, "frame_idx": 10, "global_frame_idx": 24860, "task_index": 27}, {"db_idx": 24861, "episode_idx": 135, "frame_idx": 11, "global_frame_idx": 24861, "task_index": 27}, {"db_idx": 24862, "episode_idx": 135, "frame_idx": 12, "global_frame_idx": 24862, "task_index": 27}, {"db_idx": 24863, "episode_idx": 135, "frame_idx": 13, "global_frame_idx": 24863, "task_index": 27}, {"db_idx": 24864, "episode_idx": 135, "frame_idx": 14, "global_frame_idx": 24864, "task_index": 27}, {"db_idx": 24865, "episode_idx": 135, 
"frame_idx": 15, "global_frame_idx": 24865, "task_index": 27}, {"db_idx": 24866, "episode_idx": 135, "frame_idx": 16, "global_frame_idx": 24866, "task_index": 27}, {"db_idx": 24867, "episode_idx": 135, "frame_idx": 17, "global_frame_idx": 24867, "task_index": 27}, {"db_idx": 24868, "episode_idx": 135, "frame_idx": 18, "global_frame_idx": 24868, "task_index": 27}, {"db_idx": 24869, "episode_idx": 135, "frame_idx": 19, "global_frame_idx": 24869, "task_index": 27}, {"db_idx": 24870, "episode_idx": 135, "frame_idx": 20, "global_frame_idx": 24870, "task_index": 27}, {"db_idx": 24871, "episode_idx": 135, "frame_idx": 21, "global_frame_idx": 24871, "task_index": 27}, {"db_idx": 24872, "episode_idx": 135, "frame_idx": 22, "global_frame_idx": 24872, "task_index": 27}, {"db_idx": 24873, "episode_idx": 135, "frame_idx": 23, "global_frame_idx": 24873, "task_index": 27}, {"db_idx": 24874, "episode_idx": 135, "frame_idx": 24, "global_frame_idx": 24874, "task_index": 27}, {"db_idx": 24875, "episode_idx": 135, "frame_idx": 25, "global_frame_idx": 24875, "task_index": 27}, {"db_idx": 24876, "episode_idx": 135, "frame_idx": 26, "global_frame_idx": 24876, "task_index": 27}, {"db_idx": 24877, "episode_idx": 135, "frame_idx": 27, "global_frame_idx": 24877, "task_index": 27}, {"db_idx": 24878, "episode_idx": 135, "frame_idx": 28, "global_frame_idx": 24878, "task_index": 27}, {"db_idx": 24879, "episode_idx": 135, "frame_idx": 29, "global_frame_idx": 24879, "task_index": 27}, {"db_idx": 24880, "episode_idx": 135, "frame_idx": 30, "global_frame_idx": 24880, "task_index": 27}, {"db_idx": 24881, "episode_idx": 135, "frame_idx": 31, "global_frame_idx": 24881, "task_index": 27}, {"db_idx": 24882, "episode_idx": 135, "frame_idx": 32, "global_frame_idx": 24882, "task_index": 27}, {"db_idx": 24883, "episode_idx": 135, "frame_idx": 33, "global_frame_idx": 24883, "task_index": 27}, {"db_idx": 24884, "episode_idx": 135, "frame_idx": 34, "global_frame_idx": 24884, "task_index": 27}, {"db_idx": 24885, 
"episode_idx": 135, "frame_idx": 35, "global_frame_idx": 24885, "task_index": 27}, {"db_idx": 24886, "episode_idx": 135, "frame_idx": 36, "global_frame_idx": 24886, "task_index": 27}, {"db_idx": 24887, "episode_idx": 135, "frame_idx": 37, "global_frame_idx": 24887, "task_index": 27}, {"db_idx": 24888, "episode_idx": 135, "frame_idx": 38, "global_frame_idx": 24888, "task_index": 27}, {"db_idx": 24889, "episode_idx": 135, "frame_idx": 39, "global_frame_idx": 24889, "task_index": 27}, {"db_idx": 24890, "episode_idx": 135, "frame_idx": 40, "global_frame_idx": 24890, "task_index": 27}, {"db_idx": 24891, "episode_idx": 135, "frame_idx": 41, "global_frame_idx": 24891, "task_index": 27}, {"db_idx": 24892, "episode_idx": 135, "frame_idx": 42, "global_frame_idx": 24892, "task_index": 27}, {"db_idx": 24893, "episode_idx": 135, "frame_idx": 43, "global_frame_idx": 24893, "task_index": 27}, {"db_idx": 24894, "episode_idx": 135, "frame_idx": 44, "global_frame_idx": 24894, "task_index": 27}, {"db_idx": 24895, "episode_idx": 135, "frame_idx": 45, "global_frame_idx": 24895, "task_index": 27}, {"db_idx": 24896, "episode_idx": 135, "frame_idx": 46, "global_frame_idx": 24896, "task_index": 27}, {"db_idx": 24897, "episode_idx": 135, "frame_idx": 47, "global_frame_idx": 24897, "task_index": 27}, {"db_idx": 24898, "episode_idx": 135, "frame_idx": 48, "global_frame_idx": 24898, "task_index": 27}, {"db_idx": 24899, "episode_idx": 135, "frame_idx": 49, "global_frame_idx": 24899, "task_index": 27}, {"db_idx": 24900, "episode_idx": 135, "frame_idx": 50, "global_frame_idx": 24900, "task_index": 27}, {"db_idx": 24901, "episode_idx": 135, "frame_idx": 51, "global_frame_idx": 24901, "task_index": 27}, {"db_idx": 24902, "episode_idx": 135, "frame_idx": 52, "global_frame_idx": 24902, "task_index": 27}, {"db_idx": 24903, "episode_idx": 135, "frame_idx": 53, "global_frame_idx": 24903, "task_index": 27}, {"db_idx": 24904, "episode_idx": 135, "frame_idx": 54, "global_frame_idx": 24904, "task_index": 
27}, {"db_idx": 24905, "episode_idx": 135, "frame_idx": 55, "global_frame_idx": 24905, "task_index": 27}, {"db_idx": 24906, "episode_idx": 135, "frame_idx": 56, "global_frame_idx": 24906, "task_index": 27}, {"db_idx": 24907, "episode_idx": 135, "frame_idx": 57, "global_frame_idx": 24907, "task_index": 27}, {"db_idx": 24908, "episode_idx": 135, "frame_idx": 58, "global_frame_idx": 24908, "task_index": 27}, {"db_idx": 24909, "episode_idx": 135, "frame_idx": 59, "global_frame_idx": 24909, "task_index": 27}, {"db_idx": 24910, "episode_idx": 135, "frame_idx": 60, "global_frame_idx": 24910, "task_index": 27}, {"db_idx": 24911, "episode_idx": 135, "frame_idx": 61, "global_frame_idx": 24911, "task_index": 27}, {"db_idx": 24912, "episode_idx": 135, "frame_idx": 62, "global_frame_idx": 24912, "task_index": 27}, {"db_idx": 24913, "episode_idx": 135, "frame_idx": 63, "global_frame_idx": 24913, "task_index": 27}, {"db_idx": 24914, "episode_idx": 135, "frame_idx": 64, "global_frame_idx": 24914, "task_index": 27}, {"db_idx": 24915, "episode_idx": 135, "frame_idx": 65, "global_frame_idx": 24915, "task_index": 27}, {"db_idx": 24916, "episode_idx": 135, "frame_idx": 66, "global_frame_idx": 24916, "task_index": 27}, {"db_idx": 24917, "episode_idx": 135, "frame_idx": 67, "global_frame_idx": 24917, "task_index": 27}, {"db_idx": 24918, "episode_idx": 135, "frame_idx": 68, "global_frame_idx": 24918, "task_index": 27}, {"db_idx": 24919, "episode_idx": 135, "frame_idx": 69, "global_frame_idx": 24919, "task_index": 27}, {"db_idx": 24920, "episode_idx": 135, "frame_idx": 70, "global_frame_idx": 24920, "task_index": 27}, {"db_idx": 24921, "episode_idx": 135, "frame_idx": 71, "global_frame_idx": 24921, "task_index": 27}, {"db_idx": 24922, "episode_idx": 135, "frame_idx": 72, "global_frame_idx": 24922, "task_index": 27}, {"db_idx": 24923, "episode_idx": 135, "frame_idx": 73, "global_frame_idx": 24923, "task_index": 27}, {"db_idx": 24924, "episode_idx": 135, "frame_idx": 74, "global_frame_idx": 
24924, "task_index": 27}, {"db_idx": 24925, "episode_idx": 135, "frame_idx": 75, "global_frame_idx": 24925, "task_index": 27}, {"db_idx": 24926, "episode_idx": 135, "frame_idx": 76, "global_frame_idx": 24926, "task_index": 27}, {"db_idx": 24927, "episode_idx": 135, "frame_idx": 77, "global_frame_idx": 24927, "task_index": 27}, {"db_idx": 24928, "episode_idx": 135, "frame_idx": 78, "global_frame_idx": 24928, "task_index": 27}, {"db_idx": 24929, "episode_idx": 135, "frame_idx": 79, "global_frame_idx": 24929, "task_index": 27}, {"db_idx": 24930, "episode_idx": 135, "frame_idx": 80, "global_frame_idx": 24930, "task_index": 27}, {"db_idx": 24931, "episode_idx": 135, "frame_idx": 81, "global_frame_idx": 24931, "task_index": 27}, {"db_idx": 24932, "episode_idx": 135, "frame_idx": 82, "global_frame_idx": 24932, "task_index": 27}, {"db_idx": 24933, "episode_idx": 135, "frame_idx": 83, "global_frame_idx": 24933, "task_index": 27}, {"db_idx": 24934, "episode_idx": 135, "frame_idx": 84, "global_frame_idx": 24934, "task_index": 27}, {"db_idx": 24935, "episode_idx": 135, "frame_idx": 85, "global_frame_idx": 24935, "task_index": 27}, {"db_idx": 24936, "episode_idx": 135, "frame_idx": 86, "global_frame_idx": 24936, "task_index": 27}, {"db_idx": 24937, "episode_idx": 135, "frame_idx": 87, "global_frame_idx": 24937, "task_index": 27}, {"db_idx": 24938, "episode_idx": 135, "frame_idx": 88, "global_frame_idx": 24938, "task_index": 27}, {"db_idx": 24939, "episode_idx": 135, "frame_idx": 89, "global_frame_idx": 24939, "task_index": 27}, {"db_idx": 24940, "episode_idx": 135, "frame_idx": 90, "global_frame_idx": 24940, "task_index": 27}, {"db_idx": 24941, "episode_idx": 135, "frame_idx": 91, "global_frame_idx": 24941, "task_index": 27}, {"db_idx": 24942, "episode_idx": 135, "frame_idx": 92, "global_frame_idx": 24942, "task_index": 27}, {"db_idx": 24943, "episode_idx": 135, "frame_idx": 93, "global_frame_idx": 24943, "task_index": 27}, {"db_idx": 24944, "episode_idx": 135, "frame_idx": 94, 
"global_frame_idx": 24944, "task_index": 27}, {"db_idx": 24945, "episode_idx": 135, "frame_idx": 95, "global_frame_idx": 24945, "task_index": 27}, {"db_idx": 24946, "episode_idx": 135, "frame_idx": 96, "global_frame_idx": 24946, "task_index": 27}, {"db_idx": 24947, "episode_idx": 135, "frame_idx": 97, "global_frame_idx": 24947, "task_index": 27}, {"db_idx": 24948, "episode_idx": 135, "frame_idx": 98, "global_frame_idx": 24948, "task_index": 27}, {"db_idx": 24949, "episode_idx": 135, "frame_idx": 99, "global_frame_idx": 24949, "task_index": 27}, {"db_idx": 24950, "episode_idx": 135, "frame_idx": 100, "global_frame_idx": 24950, "task_index": 27}, {"db_idx": 24951, "episode_idx": 135, "frame_idx": 101, "global_frame_idx": 24951, "task_index": 27}, {"db_idx": 24952, "episode_idx": 135, "frame_idx": 102, "global_frame_idx": 24952, "task_index": 27}, {"db_idx": 24953, "episode_idx": 135, "frame_idx": 103, "global_frame_idx": 24953, "task_index": 27}, {"db_idx": 24954, "episode_idx": 135, "frame_idx": 104, "global_frame_idx": 24954, "task_index": 27}, {"db_idx": 24955, "episode_idx": 135, "frame_idx": 105, "global_frame_idx": 24955, "task_index": 27}, {"db_idx": 24956, "episode_idx": 135, "frame_idx": 106, "global_frame_idx": 24956, "task_index": 27}, {"db_idx": 24957, "episode_idx": 135, "frame_idx": 107, "global_frame_idx": 24957, "task_index": 27}, {"db_idx": 24958, "episode_idx": 135, "frame_idx": 108, "global_frame_idx": 24958, "task_index": 27}, {"db_idx": 24959, "episode_idx": 135, "frame_idx": 109, "global_frame_idx": 24959, "task_index": 27}, {"db_idx": 24960, "episode_idx": 135, "frame_idx": 110, "global_frame_idx": 24960, "task_index": 27}, {"db_idx": 24961, "episode_idx": 135, "frame_idx": 111, "global_frame_idx": 24961, "task_index": 27}, {"db_idx": 24962, "episode_idx": 135, "frame_idx": 112, "global_frame_idx": 24962, "task_index": 27}, {"db_idx": 24963, "episode_idx": 135, "frame_idx": 113, "global_frame_idx": 24963, "task_index": 27}, {"db_idx": 24964, 
"episode_idx": 135, "frame_idx": 114, "global_frame_idx": 24964, "task_index": 27}, {"db_idx": 24965, "episode_idx": 135, "frame_idx": 115, "global_frame_idx": 24965, "task_index": 27}, {"db_idx": 24966, "episode_idx": 135, "frame_idx": 116, "global_frame_idx": 24966, "task_index": 27}, {"db_idx": 24967, "episode_idx": 135, "frame_idx": 117, "global_frame_idx": 24967, "task_index": 27}, {"db_idx": 24968, "episode_idx": 135, "frame_idx": 118, "global_frame_idx": 24968, "task_index": 27}, {"db_idx": 24969, "episode_idx": 135, "frame_idx": 119, "global_frame_idx": 24969, "task_index": 27}, {"db_idx": 24970, "episode_idx": 135, "frame_idx": 120, "global_frame_idx": 24970, "task_index": 27}, {"db_idx": 24971, "episode_idx": 135, "frame_idx": 121, "global_frame_idx": 24971, "task_index": 27}, {"db_idx": 24972, "episode_idx": 135, "frame_idx": 122, "global_frame_idx": 24972, "task_index": 27}, {"db_idx": 24973, "episode_idx": 135, "frame_idx": 123, "global_frame_idx": 24973, "task_index": 27}, {"db_idx": 24974, "episode_idx": 135, "frame_idx": 124, "global_frame_idx": 24974, "task_index": 27}, {"db_idx": 24975, "episode_idx": 135, "frame_idx": 125, "global_frame_idx": 24975, "task_index": 27}, {"db_idx": 24976, "episode_idx": 135, "frame_idx": 126, "global_frame_idx": 24976, "task_index": 27}, {"db_idx": 24977, "episode_idx": 135, "frame_idx": 127, "global_frame_idx": 24977, "task_index": 27}, {"db_idx": 24978, "episode_idx": 135, "frame_idx": 128, "global_frame_idx": 24978, "task_index": 27}, {"db_idx": 24979, "episode_idx": 135, "frame_idx": 129, "global_frame_idx": 24979, "task_index": 27}, {"db_idx": 24980, "episode_idx": 135, "frame_idx": 130, "global_frame_idx": 24980, "task_index": 27}, {"db_idx": 24981, "episode_idx": 135, "frame_idx": 131, "global_frame_idx": 24981, "task_index": 27}, {"db_idx": 24982, "episode_idx": 135, "frame_idx": 132, "global_frame_idx": 24982, "task_index": 27}, {"db_idx": 24983, "episode_idx": 135, "frame_idx": 133, "global_frame_idx": 
24983, "task_index": 27}, {"db_idx": 24984, "episode_idx": 135, "frame_idx": 134, "global_frame_idx": 24984, "task_index": 27}, {"db_idx": 24985, "episode_idx": 135, "frame_idx": 135, "global_frame_idx": 24985, "task_index": 27}, {"db_idx": 24986, "episode_idx": 135, "frame_idx": 136, "global_frame_idx": 24986, "task_index": 27}, {"db_idx": 24987, "episode_idx": 135, "frame_idx": 137, "global_frame_idx": 24987, "task_index": 27}, {"db_idx": 24988, "episode_idx": 135, "frame_idx": 138, "global_frame_idx": 24988, "task_index": 27}, {"db_idx": 24989, "episode_idx": 135, "frame_idx": 139, "global_frame_idx": 24989, "task_index": 27}, {"db_idx": 24990, "episode_idx": 135, "frame_idx": 140, "global_frame_idx": 24990, "task_index": 27}, {"db_idx": 24991, "episode_idx": 135, "frame_idx": 141, "global_frame_idx": 24991, "task_index": 27}, {"db_idx": 24992, "episode_idx": 135, "frame_idx": 142, "global_frame_idx": 24992, "task_index": 27}, {"db_idx": 24993, "episode_idx": 135, "frame_idx": 143, "global_frame_idx": 24993, "task_index": 27}, {"db_idx": 24994, "episode_idx": 135, "frame_idx": 144, "global_frame_idx": 24994, "task_index": 27}, {"db_idx": 24995, "episode_idx": 135, "frame_idx": 145, "global_frame_idx": 24995, "task_index": 27}, {"db_idx": 24996, "episode_idx": 135, "frame_idx": 146, "global_frame_idx": 24996, "task_index": 27}, {"db_idx": 24997, "episode_idx": 135, "frame_idx": 147, "global_frame_idx": 24997, "task_index": 27}, {"db_idx": 24998, "episode_idx": 135, "frame_idx": 148, "global_frame_idx": 24998, "task_index": 27}, {"db_idx": 24999, "episode_idx": 135, "frame_idx": 149, "global_frame_idx": 24999, "task_index": 27}, {"db_idx": 25000, "episode_idx": 135, "frame_idx": 150, "global_frame_idx": 25000, "task_index": 27}, {"db_idx": 25001, "episode_idx": 135, "frame_idx": 151, "global_frame_idx": 25001, "task_index": 27}, {"db_idx": 25002, "episode_idx": 135, "frame_idx": 152, "global_frame_idx": 25002, "task_index": 27}, {"db_idx": 25003, "episode_idx": 
135, "frame_idx": 153, "global_frame_idx": 25003, "task_index": 27}, {"db_idx": 25004, "episode_idx": 135, "frame_idx": 154, "global_frame_idx": 25004, "task_index": 27}, {"db_idx": 25005, "episode_idx": 135, "frame_idx": 155, "global_frame_idx": 25005, "task_index": 27}, {"db_idx": 25006, "episode_idx": 135, "frame_idx": 156, "global_frame_idx": 25006, "task_index": 27}, {"db_idx": 25007, "episode_idx": 135, "frame_idx": 157, "global_frame_idx": 25007, "task_index": 27}, {"db_idx": 25008, "episode_idx": 135, "frame_idx": 158, "global_frame_idx": 25008, "task_index": 27}, {"db_idx": 25009, "episode_idx": 135, "frame_idx": 159, "global_frame_idx": 25009, "task_index": 27}, {"db_idx": 25010, "episode_idx": 135, "frame_idx": 160, "global_frame_idx": 25010, "task_index": 27}, {"db_idx": 25011, "episode_idx": 135, "frame_idx": 161, "global_frame_idx": 25011, "task_index": 27}, {"db_idx": 25012, "episode_idx": 135, "frame_idx": 162, "global_frame_idx": 25012, "task_index": 27}, {"db_idx": 25013, "episode_idx": 135, "frame_idx": 163, "global_frame_idx": 25013, "task_index": 27}, {"db_idx": 25014, "episode_idx": 135, "frame_idx": 164, "global_frame_idx": 25014, "task_index": 27}, {"db_idx": 25015, "episode_idx": 135, "frame_idx": 165, "global_frame_idx": 25015, "task_index": 27}, {"db_idx": 25016, "episode_idx": 135, "frame_idx": 166, "global_frame_idx": 25016, "task_index": 27}, {"db_idx": 25017, "episode_idx": 135, "frame_idx": 167, "global_frame_idx": 25017, "task_index": 27}, {"db_idx": 25018, "episode_idx": 135, "frame_idx": 168, "global_frame_idx": 25018, "task_index": 27}, {"db_idx": 25019, "episode_idx": 135, "frame_idx": 169, "global_frame_idx": 25019, "task_index": 27}, {"db_idx": 25020, "episode_idx": 135, "frame_idx": 170, "global_frame_idx": 25020, "task_index": 27}, {"db_idx": 25021, "episode_idx": 135, "frame_idx": 171, "global_frame_idx": 25021, "task_index": 27}, {"db_idx": 25022, "episode_idx": 135, "frame_idx": 172, "global_frame_idx": 25022, 
"task_index": 27}, {"db_idx": 25023, "episode_idx": 135, "frame_idx": 173, "global_frame_idx": 25023, "task_index": 27}, {"db_idx": 25024, "episode_idx": 135, "frame_idx": 174, "global_frame_idx": 25024, "task_index": 27}, {"db_idx": 25025, "episode_idx": 135, "frame_idx": 175, "global_frame_idx": 25025, "task_index": 27}, {"db_idx": 25026, "episode_idx": 135, "frame_idx": 176, "global_frame_idx": 25026, "task_index": 27}, {"db_idx": 25027, "episode_idx": 135, "frame_idx": 177, "global_frame_idx": 25027, "task_index": 27}, {"db_idx": 25028, "episode_idx": 135, "frame_idx": 178, "global_frame_idx": 25028, "task_index": 27}, {"db_idx": 25029, "episode_idx": 135, "frame_idx": 179, "global_frame_idx": 25029, "task_index": 27}, {"db_idx": 25030, "episode_idx": 135, "frame_idx": 180, "global_frame_idx": 25030, "task_index": 27}, {"db_idx": 25031, "episode_idx": 135, "frame_idx": 181, "global_frame_idx": 25031, "task_index": 27}, {"db_idx": 25032, "episode_idx": 135, "frame_idx": 182, "global_frame_idx": 25032, "task_index": 27}, {"db_idx": 25033, "episode_idx": 135, "frame_idx": 183, "global_frame_idx": 25033, "task_index": 27}, {"db_idx": 25034, "episode_idx": 135, "frame_idx": 184, "global_frame_idx": 25034, "task_index": 27}, {"db_idx": 25035, "episode_idx": 135, "frame_idx": 185, "global_frame_idx": 25035, "task_index": 27}, {"db_idx": 25036, "episode_idx": 135, "frame_idx": 186, "global_frame_idx": 25036, "task_index": 27}, {"db_idx": 25037, "episode_idx": 135, "frame_idx": 187, "global_frame_idx": 25037, "task_index": 27}, {"db_idx": 25038, "episode_idx": 135, "frame_idx": 188, "global_frame_idx": 25038, "task_index": 27}, {"db_idx": 25039, "episode_idx": 135, "frame_idx": 189, "global_frame_idx": 25039, "task_index": 27}, {"db_idx": 25040, "episode_idx": 135, "frame_idx": 190, "global_frame_idx": 25040, "task_index": 27}, {"db_idx": 25041, "episode_idx": 135, "frame_idx": 191, "global_frame_idx": 25041, "task_index": 27}, {"db_idx": 25042, "episode_idx": 135, 
"frame_idx": 192, "global_frame_idx": 25042, "task_index": 27}, {"db_idx": 25043, "episode_idx": 135, "frame_idx": 193, "global_frame_idx": 25043, "task_index": 27}, {"db_idx": 25044, "episode_idx": 135, "frame_idx": 194, "global_frame_idx": 25044, "task_index": 27}, {"db_idx": 25045, "episode_idx": 135, "frame_idx": 195, "global_frame_idx": 25045, "task_index": 27}, {"db_idx": 25046, "episode_idx": 135, "frame_idx": 196, "global_frame_idx": 25046, "task_index": 27}, {"db_idx": 25047, "episode_idx": 135, "frame_idx": 197, "global_frame_idx": 25047, "task_index": 27}, {"db_idx": 25048, "episode_idx": 135, "frame_idx": 198, "global_frame_idx": 25048, "task_index": 27}, {"db_idx": 25049, "episode_idx": 135, "frame_idx": 199, "global_frame_idx": 25049, "task_index": 27}, {"db_idx": 25050, "episode_idx": 135, "frame_idx": 200, "global_frame_idx": 25050, "task_index": 27}, {"db_idx": 25051, "episode_idx": 135, "frame_idx": 201, "global_frame_idx": 25051, "task_index": 27}, {"db_idx": 25052, "episode_idx": 135, "frame_idx": 202, "global_frame_idx": 25052, "task_index": 27}, {"db_idx": 25053, "episode_idx": 135, "frame_idx": 203, "global_frame_idx": 25053, "task_index": 27}, {"db_idx": 25054, "episode_idx": 135, "frame_idx": 204, "global_frame_idx": 25054, "task_index": 27}, {"db_idx": 25055, "episode_idx": 135, "frame_idx": 205, "global_frame_idx": 25055, "task_index": 27}, {"db_idx": 25056, "episode_idx": 135, "frame_idx": 206, "global_frame_idx": 25056, "task_index": 27}, {"db_idx": 25057, "episode_idx": 135, "frame_idx": 207, "global_frame_idx": 25057, "task_index": 27}, {"db_idx": 25058, "episode_idx": 135, "frame_idx": 208, "global_frame_idx": 25058, "task_index": 27}, {"db_idx": 25059, "episode_idx": 135, "frame_idx": 209, "global_frame_idx": 25059, "task_index": 27}, {"db_idx": 25060, "episode_idx": 135, "frame_idx": 210, "global_frame_idx": 25060, "task_index": 27}, {"db_idx": 25061, "episode_idx": 135, "frame_idx": 211, "global_frame_idx": 25061, "task_index": 
27}, {"db_idx": 25062, "episode_idx": 135, "frame_idx": 212, "global_frame_idx": 25062, "task_index": 27}, {"db_idx": 25063, "episode_idx": 135, "frame_idx": 213, "global_frame_idx": 25063, "task_index": 27}, {"db_idx": 25064, "episode_idx": 135, "frame_idx": 214, "global_frame_idx": 25064, "task_index": 27}, {"db_idx": 25065, "episode_idx": 135, "frame_idx": 215, "global_frame_idx": 25065, "task_index": 27}, {"db_idx": 25066, "episode_idx": 135, "frame_idx": 216, "global_frame_idx": 25066, "task_index": 27}, {"db_idx": 25067, "episode_idx": 135, "frame_idx": 217, "global_frame_idx": 25067, "task_index": 27}, {"db_idx": 25068, "episode_idx": 135, "frame_idx": 218, "global_frame_idx": 25068, "task_index": 27}, {"db_idx": 25069, "episode_idx": 136, "frame_idx": 0, "global_frame_idx": 25069, "task_index": 27}, {"db_idx": 25070, "episode_idx": 136, "frame_idx": 1, "global_frame_idx": 25070, "task_index": 27}, {"db_idx": 25071, "episode_idx": 136, "frame_idx": 2, "global_frame_idx": 25071, "task_index": 27}, {"db_idx": 25072, "episode_idx": 136, "frame_idx": 3, "global_frame_idx": 25072, "task_index": 27}, {"db_idx": 25073, "episode_idx": 136, "frame_idx": 4, "global_frame_idx": 25073, "task_index": 27}, {"db_idx": 25074, "episode_idx": 136, "frame_idx": 5, "global_frame_idx": 25074, "task_index": 27}, {"db_idx": 25075, "episode_idx": 136, "frame_idx": 6, "global_frame_idx": 25075, "task_index": 27}, {"db_idx": 25076, "episode_idx": 136, "frame_idx": 7, "global_frame_idx": 25076, "task_index": 27}, {"db_idx": 25077, "episode_idx": 136, "frame_idx": 8, "global_frame_idx": 25077, "task_index": 27}, {"db_idx": 25078, "episode_idx": 136, "frame_idx": 9, "global_frame_idx": 25078, "task_index": 27}, {"db_idx": 25079, "episode_idx": 136, "frame_idx": 10, "global_frame_idx": 25079, "task_index": 27}, {"db_idx": 25080, "episode_idx": 136, "frame_idx": 11, "global_frame_idx": 25080, "task_index": 27}, {"db_idx": 25081, "episode_idx": 136, "frame_idx": 12, "global_frame_idx": 
25081, "task_index": 27}, {"db_idx": 25082, "episode_idx": 136, "frame_idx": 13, "global_frame_idx": 25082, "task_index": 27}, {"db_idx": 25083, "episode_idx": 136, "frame_idx": 14, "global_frame_idx": 25083, "task_index": 27}, {"db_idx": 25084, "episode_idx": 136, "frame_idx": 15, "global_frame_idx": 25084, "task_index": 27}, {"db_idx": 25085, "episode_idx": 136, "frame_idx": 16, "global_frame_idx": 25085, "task_index": 27}, {"db_idx": 25086, "episode_idx": 136, "frame_idx": 17, "global_frame_idx": 25086, "task_index": 27}, {"db_idx": 25087, "episode_idx": 136, "frame_idx": 18, "global_frame_idx": 25087, "task_index": 27}, {"db_idx": 25088, "episode_idx": 136, "frame_idx": 19, "global_frame_idx": 25088, "task_index": 27}, {"db_idx": 25089, "episode_idx": 136, "frame_idx": 20, "global_frame_idx": 25089, "task_index": 27}, {"db_idx": 25090, "episode_idx": 136, "frame_idx": 21, "global_frame_idx": 25090, "task_index": 27}, {"db_idx": 25091, "episode_idx": 136, "frame_idx": 22, "global_frame_idx": 25091, "task_index": 27}, {"db_idx": 25092, "episode_idx": 136, "frame_idx": 23, "global_frame_idx": 25092, "task_index": 27}, {"db_idx": 25093, "episode_idx": 136, "frame_idx": 24, "global_frame_idx": 25093, "task_index": 27}, {"db_idx": 25094, "episode_idx": 136, "frame_idx": 25, "global_frame_idx": 25094, "task_index": 27}, {"db_idx": 25095, "episode_idx": 136, "frame_idx": 26, "global_frame_idx": 25095, "task_index": 27}, {"db_idx": 25096, "episode_idx": 136, "frame_idx": 27, "global_frame_idx": 25096, "task_index": 27}, {"db_idx": 25097, "episode_idx": 136, "frame_idx": 28, "global_frame_idx": 25097, "task_index": 27}, {"db_idx": 25098, "episode_idx": 136, "frame_idx": 29, "global_frame_idx": 25098, "task_index": 27}, {"db_idx": 25099, "episode_idx": 136, "frame_idx": 30, "global_frame_idx": 25099, "task_index": 27}, {"db_idx": 25100, "episode_idx": 136, "frame_idx": 31, "global_frame_idx": 25100, "task_index": 27}, {"db_idx": 25101, "episode_idx": 136, "frame_idx": 32, 
"global_frame_idx": 25101, "task_index": 27}, {"db_idx": 25102, "episode_idx": 136, "frame_idx": 33, "global_frame_idx": 25102, "task_index": 27}, {"db_idx": 25103, "episode_idx": 136, "frame_idx": 34, "global_frame_idx": 25103, "task_index": 27}, {"db_idx": 25104, "episode_idx": 136, "frame_idx": 35, "global_frame_idx": 25104, "task_index": 27}, {"db_idx": 25105, "episode_idx": 136, "frame_idx": 36, "global_frame_idx": 25105, "task_index": 27}, {"db_idx": 25106, "episode_idx": 136, "frame_idx": 37, "global_frame_idx": 25106, "task_index": 27}, {"db_idx": 25107, "episode_idx": 136, "frame_idx": 38, "global_frame_idx": 25107, "task_index": 27}, {"db_idx": 25108, "episode_idx": 136, "frame_idx": 39, "global_frame_idx": 25108, "task_index": 27}, {"db_idx": 25109, "episode_idx": 136, "frame_idx": 40, "global_frame_idx": 25109, "task_index": 27}, {"db_idx": 25110, "episode_idx": 136, "frame_idx": 41, "global_frame_idx": 25110, "task_index": 27}, {"db_idx": 25111, "episode_idx": 136, "frame_idx": 42, "global_frame_idx": 25111, "task_index": 27}, {"db_idx": 25112, "episode_idx": 136, "frame_idx": 43, "global_frame_idx": 25112, "task_index": 27}, {"db_idx": 25113, "episode_idx": 136, "frame_idx": 44, "global_frame_idx": 25113, "task_index": 27}, {"db_idx": 25114, "episode_idx": 136, "frame_idx": 45, "global_frame_idx": 25114, "task_index": 27}, {"db_idx": 25115, "episode_idx": 136, "frame_idx": 46, "global_frame_idx": 25115, "task_index": 27}, {"db_idx": 25116, "episode_idx": 136, "frame_idx": 47, "global_frame_idx": 25116, "task_index": 27}, {"db_idx": 25117, "episode_idx": 136, "frame_idx": 48, "global_frame_idx": 25117, "task_index": 27}, {"db_idx": 25118, "episode_idx": 136, "frame_idx": 49, "global_frame_idx": 25118, "task_index": 27}, {"db_idx": 25119, "episode_idx": 136, "frame_idx": 50, "global_frame_idx": 25119, "task_index": 27}, {"db_idx": 25120, "episode_idx": 136, "frame_idx": 51, "global_frame_idx": 25120, "task_index": 27}, {"db_idx": 25121, "episode_idx": 
136, "frame_idx": 52, "global_frame_idx": 25121, "task_index": 27}, {"db_idx": 25122, "episode_idx": 136, "frame_idx": 53, "global_frame_idx": 25122, "task_index": 27}, {"db_idx": 25123, "episode_idx": 136, "frame_idx": 54, "global_frame_idx": 25123, "task_index": 27}, {"db_idx": 25124, "episode_idx": 136, "frame_idx": 55, "global_frame_idx": 25124, "task_index": 27}, {"db_idx": 25125, "episode_idx": 136, "frame_idx": 56, "global_frame_idx": 25125, "task_index": 27}, {"db_idx": 25126, "episode_idx": 136, "frame_idx": 57, "global_frame_idx": 25126, "task_index": 27}, {"db_idx": 25127, "episode_idx": 136, "frame_idx": 58, "global_frame_idx": 25127, "task_index": 27}, {"db_idx": 25128, "episode_idx": 136, "frame_idx": 59, "global_frame_idx": 25128, "task_index": 27}, {"db_idx": 25129, "episode_idx": 136, "frame_idx": 60, "global_frame_idx": 25129, "task_index": 27}, {"db_idx": 25130, "episode_idx": 136, "frame_idx": 61, "global_frame_idx": 25130, "task_index": 27}, {"db_idx": 25131, "episode_idx": 136, "frame_idx": 62, "global_frame_idx": 25131, "task_index": 27}, {"db_idx": 25132, "episode_idx": 136, "frame_idx": 63, "global_frame_idx": 25132, "task_index": 27}, {"db_idx": 25133, "episode_idx": 136, "frame_idx": 64, "global_frame_idx": 25133, "task_index": 27}, {"db_idx": 25134, "episode_idx": 136, "frame_idx": 65, "global_frame_idx": 25134, "task_index": 27}, {"db_idx": 25135, "episode_idx": 136, "frame_idx": 66, "global_frame_idx": 25135, "task_index": 27}, {"db_idx": 25136, "episode_idx": 136, "frame_idx": 67, "global_frame_idx": 25136, "task_index": 27}, {"db_idx": 25137, "episode_idx": 136, "frame_idx": 68, "global_frame_idx": 25137, "task_index": 27}, {"db_idx": 25138, "episode_idx": 136, "frame_idx": 69, "global_frame_idx": 25138, "task_index": 27}, {"db_idx": 25139, "episode_idx": 136, "frame_idx": 70, "global_frame_idx": 25139, "task_index": 27}, {"db_idx": 25140, "episode_idx": 136, "frame_idx": 71, "global_frame_idx": 25140, "task_index": 27}, {"db_idx": 
25141, "episode_idx": 136, "frame_idx": 72, "global_frame_idx": 25141, "task_index": 27}, {"db_idx": 25142, "episode_idx": 136, "frame_idx": 73, "global_frame_idx": 25142, "task_index": 27}, {"db_idx": 25143, "episode_idx": 136, "frame_idx": 74, "global_frame_idx": 25143, "task_index": 27}, {"db_idx": 25144, "episode_idx": 136, "frame_idx": 75, "global_frame_idx": 25144, "task_index": 27}, {"db_idx": 25145, "episode_idx": 136, "frame_idx": 76, "global_frame_idx": 25145, "task_index": 27}, {"db_idx": 25146, "episode_idx": 136, "frame_idx": 77, "global_frame_idx": 25146, "task_index": 27}, {"db_idx": 25147, "episode_idx": 136, "frame_idx": 78, "global_frame_idx": 25147, "task_index": 27}, {"db_idx": 25148, "episode_idx": 136, "frame_idx": 79, "global_frame_idx": 25148, "task_index": 27}, {"db_idx": 25149, "episode_idx": 136, "frame_idx": 80, "global_frame_idx": 25149, "task_index": 27}, {"db_idx": 25150, "episode_idx": 136, "frame_idx": 81, "global_frame_idx": 25150, "task_index": 27}, {"db_idx": 25151, "episode_idx": 136, "frame_idx": 82, "global_frame_idx": 25151, "task_index": 27}, {"db_idx": 25152, "episode_idx": 136, "frame_idx": 83, "global_frame_idx": 25152, "task_index": 27}, {"db_idx": 25153, "episode_idx": 136, "frame_idx": 84, "global_frame_idx": 25153, "task_index": 27}, {"db_idx": 25154, "episode_idx": 136, "frame_idx": 85, "global_frame_idx": 25154, "task_index": 27}, {"db_idx": 25155, "episode_idx": 136, "frame_idx": 86, "global_frame_idx": 25155, "task_index": 27}, {"db_idx": 25156, "episode_idx": 136, "frame_idx": 87, "global_frame_idx": 25156, "task_index": 27}, {"db_idx": 25157, "episode_idx": 136, "frame_idx": 88, "global_frame_idx": 25157, "task_index": 27}, {"db_idx": 25158, "episode_idx": 136, "frame_idx": 89, "global_frame_idx": 25158, "task_index": 27}, {"db_idx": 25159, "episode_idx": 136, "frame_idx": 90, "global_frame_idx": 25159, "task_index": 27}, {"db_idx": 25160, "episode_idx": 136, "frame_idx": 91, "global_frame_idx": 25160, 
"task_index": 27}, {"db_idx": 25161, "episode_idx": 136, "frame_idx": 92, "global_frame_idx": 25161, "task_index": 27}, {"db_idx": 25162, "episode_idx": 136, "frame_idx": 93, "global_frame_idx": 25162, "task_index": 27}, {"db_idx": 25163, "episode_idx": 136, "frame_idx": 94, "global_frame_idx": 25163, "task_index": 27}, {"db_idx": 25164, "episode_idx": 136, "frame_idx": 95, "global_frame_idx": 25164, "task_index": 27}, {"db_idx": 25165, "episode_idx": 136, "frame_idx": 96, "global_frame_idx": 25165, "task_index": 27}, {"db_idx": 25166, "episode_idx": 136, "frame_idx": 97, "global_frame_idx": 25166, "task_index": 27}, {"db_idx": 25167, "episode_idx": 136, "frame_idx": 98, "global_frame_idx": 25167, "task_index": 27}, {"db_idx": 25168, "episode_idx": 136, "frame_idx": 99, "global_frame_idx": 25168, "task_index": 27}, {"db_idx": 25169, "episode_idx": 136, "frame_idx": 100, "global_frame_idx": 25169, "task_index": 27}, {"db_idx": 25170, "episode_idx": 136, "frame_idx": 101, "global_frame_idx": 25170, "task_index": 27}, {"db_idx": 25171, "episode_idx": 136, "frame_idx": 102, "global_frame_idx": 25171, "task_index": 27}, {"db_idx": 25172, "episode_idx": 136, "frame_idx": 103, "global_frame_idx": 25172, "task_index": 27}, {"db_idx": 25173, "episode_idx": 136, "frame_idx": 104, "global_frame_idx": 25173, "task_index": 27}, {"db_idx": 25174, "episode_idx": 136, "frame_idx": 105, "global_frame_idx": 25174, "task_index": 27}, {"db_idx": 25175, "episode_idx": 136, "frame_idx": 106, "global_frame_idx": 25175, "task_index": 27}, {"db_idx": 25176, "episode_idx": 136, "frame_idx": 107, "global_frame_idx": 25176, "task_index": 27}, {"db_idx": 25177, "episode_idx": 136, "frame_idx": 108, "global_frame_idx": 25177, "task_index": 27}, {"db_idx": 25178, "episode_idx": 136, "frame_idx": 109, "global_frame_idx": 25178, "task_index": 27}, {"db_idx": 25179, "episode_idx": 136, "frame_idx": 110, "global_frame_idx": 25179, "task_index": 27}, {"db_idx": 25180, "episode_idx": 136, "frame_idx": 
111, "global_frame_idx": 25180, "task_index": 27}, {"db_idx": 25181, "episode_idx": 136, "frame_idx": 112, "global_frame_idx": 25181, "task_index": 27}, {"db_idx": 25182, "episode_idx": 136, "frame_idx": 113, "global_frame_idx": 25182, "task_index": 27}, {"db_idx": 25183, "episode_idx": 136, "frame_idx": 114, "global_frame_idx": 25183, "task_index": 27}, {"db_idx": 25184, "episode_idx": 136, "frame_idx": 115, "global_frame_idx": 25184, "task_index": 27}, {"db_idx": 25185, "episode_idx": 136, "frame_idx": 116, "global_frame_idx": 25185, "task_index": 27}, {"db_idx": 25186, "episode_idx": 136, "frame_idx": 117, "global_frame_idx": 25186, "task_index": 27}, {"db_idx": 25187, "episode_idx": 136, "frame_idx": 118, "global_frame_idx": 25187, "task_index": 27}, {"db_idx": 25188, "episode_idx": 136, "frame_idx": 119, "global_frame_idx": 25188, "task_index": 27}, {"db_idx": 25189, "episode_idx": 136, "frame_idx": 120, "global_frame_idx": 25189, "task_index": 27}, {"db_idx": 25190, "episode_idx": 136, "frame_idx": 121, "global_frame_idx": 25190, "task_index": 27}, {"db_idx": 25191, "episode_idx": 136, "frame_idx": 122, "global_frame_idx": 25191, "task_index": 27}, {"db_idx": 25192, "episode_idx": 136, "frame_idx": 123, "global_frame_idx": 25192, "task_index": 27}, {"db_idx": 25193, "episode_idx": 136, "frame_idx": 124, "global_frame_idx": 25193, "task_index": 27}, {"db_idx": 25194, "episode_idx": 136, "frame_idx": 125, "global_frame_idx": 25194, "task_index": 27}, {"db_idx": 25195, "episode_idx": 136, "frame_idx": 126, "global_frame_idx": 25195, "task_index": 27}, {"db_idx": 25196, "episode_idx": 136, "frame_idx": 127, "global_frame_idx": 25196, "task_index": 27}, {"db_idx": 25197, "episode_idx": 136, "frame_idx": 128, "global_frame_idx": 25197, "task_index": 27}, {"db_idx": 25198, "episode_idx": 136, "frame_idx": 129, "global_frame_idx": 25198, "task_index": 27}, {"db_idx": 25199, "episode_idx": 136, "frame_idx": 130, "global_frame_idx": 25199, "task_index": 27}, {"db_idx": 
25200, "episode_idx": 136, "frame_idx": 131, "global_frame_idx": 25200, "task_index": 27}, {"db_idx": 25201, "episode_idx": 136, "frame_idx": 132, "global_frame_idx": 25201, "task_index": 27}, {"db_idx": 25202, "episode_idx": 136, "frame_idx": 133, "global_frame_idx": 25202, "task_index": 27}, {"db_idx": 25203, "episode_idx": 136, "frame_idx": 134, "global_frame_idx": 25203, "task_index": 27}, {"db_idx": 25204, "episode_idx": 136, "frame_idx": 135, "global_frame_idx": 25204, "task_index": 27}, {"db_idx": 25205, "episode_idx": 136, "frame_idx": 136, "global_frame_idx": 25205, "task_index": 27}, {"db_idx": 25206, "episode_idx": 136, "frame_idx": 137, "global_frame_idx": 25206, "task_index": 27}, {"db_idx": 25207, "episode_idx": 136, "frame_idx": 138, "global_frame_idx": 25207, "task_index": 27}, {"db_idx": 25208, "episode_idx": 136, "frame_idx": 139, "global_frame_idx": 25208, "task_index": 27}, {"db_idx": 25209, "episode_idx": 136, "frame_idx": 140, "global_frame_idx": 25209, "task_index": 27}, {"db_idx": 25210, "episode_idx": 136, "frame_idx": 141, "global_frame_idx": 25210, "task_index": 27}, {"db_idx": 25211, "episode_idx": 136, "frame_idx": 142, "global_frame_idx": 25211, "task_index": 27}, {"db_idx": 25212, "episode_idx": 136, "frame_idx": 143, "global_frame_idx": 25212, "task_index": 27}, {"db_idx": 25213, "episode_idx": 136, "frame_idx": 144, "global_frame_idx": 25213, "task_index": 27}, {"db_idx": 25214, "episode_idx": 136, "frame_idx": 145, "global_frame_idx": 25214, "task_index": 27}, {"db_idx": 25215, "episode_idx": 136, "frame_idx": 146, "global_frame_idx": 25215, "task_index": 27}, {"db_idx": 25216, "episode_idx": 136, "frame_idx": 147, "global_frame_idx": 25216, "task_index": 27}, {"db_idx": 25217, "episode_idx": 136, "frame_idx": 148, "global_frame_idx": 25217, "task_index": 27}, {"db_idx": 25218, "episode_idx": 136, "frame_idx": 149, "global_frame_idx": 25218, "task_index": 27}, {"db_idx": 25219, "episode_idx": 136, "frame_idx": 150, 
"global_frame_idx": 25219, "task_index": 27}, {"db_idx": 25220, "episode_idx": 136, "frame_idx": 151, "global_frame_idx": 25220, "task_index": 27}, {"db_idx": 25221, "episode_idx": 136, "frame_idx": 152, "global_frame_idx": 25221, "task_index": 27}, {"db_idx": 25222, "episode_idx": 136, "frame_idx": 153, "global_frame_idx": 25222, "task_index": 27}, {"db_idx": 25223, "episode_idx": 136, "frame_idx": 154, "global_frame_idx": 25223, "task_index": 27}, {"db_idx": 25224, "episode_idx": 136, "frame_idx": 155, "global_frame_idx": 25224, "task_index": 27}, {"db_idx": 25225, "episode_idx": 136, "frame_idx": 156, "global_frame_idx": 25225, "task_index": 27}, {"db_idx": 25226, "episode_idx": 136, "frame_idx": 157, "global_frame_idx": 25226, "task_index": 27}, {"db_idx": 25227, "episode_idx": 136, "frame_idx": 158, "global_frame_idx": 25227, "task_index": 27}, {"db_idx": 25228, "episode_idx": 136, "frame_idx": 159, "global_frame_idx": 25228, "task_index": 27}, {"db_idx": 25229, "episode_idx": 136, "frame_idx": 160, "global_frame_idx": 25229, "task_index": 27}, {"db_idx": 25230, "episode_idx": 136, "frame_idx": 161, "global_frame_idx": 25230, "task_index": 27}, {"db_idx": 25231, "episode_idx": 136, "frame_idx": 162, "global_frame_idx": 25231, "task_index": 27}, {"db_idx": 25232, "episode_idx": 136, "frame_idx": 163, "global_frame_idx": 25232, "task_index": 27}, {"db_idx": 25233, "episode_idx": 136, "frame_idx": 164, "global_frame_idx": 25233, "task_index": 27}, {"db_idx": 25234, "episode_idx": 136, "frame_idx": 165, "global_frame_idx": 25234, "task_index": 27}, {"db_idx": 25235, "episode_idx": 136, "frame_idx": 166, "global_frame_idx": 25235, "task_index": 27}, {"db_idx": 25236, "episode_idx": 136, "frame_idx": 167, "global_frame_idx": 25236, "task_index": 27}, {"db_idx": 25237, "episode_idx": 136, "frame_idx": 168, "global_frame_idx": 25237, "task_index": 27}, {"db_idx": 25238, "episode_idx": 136, "frame_idx": 169, "global_frame_idx": 25238, "task_index": 27}, {"db_idx": 
25239, "episode_idx": 136, "frame_idx": 170, "global_frame_idx": 25239, "task_index": 27}, {"db_idx": 25240, "episode_idx": 136, "frame_idx": 171, "global_frame_idx": 25240, "task_index": 27}, {"db_idx": 25241, "episode_idx": 136, "frame_idx": 172, "global_frame_idx": 25241, "task_index": 27}, {"db_idx": 25242, "episode_idx": 136, "frame_idx": 173, "global_frame_idx": 25242, "task_index": 27}, {"db_idx": 25243, "episode_idx": 136, "frame_idx": 174, "global_frame_idx": 25243, "task_index": 27}, {"db_idx": 25244, "episode_idx": 136, "frame_idx": 175, "global_frame_idx": 25244, "task_index": 27}, {"db_idx": 25245, "episode_idx": 136, "frame_idx": 176, "global_frame_idx": 25245, "task_index": 27}, {"db_idx": 25246, "episode_idx": 136, "frame_idx": 177, "global_frame_idx": 25246, "task_index": 27}, {"db_idx": 25247, "episode_idx": 136, "frame_idx": 178, "global_frame_idx": 25247, "task_index": 27}, {"db_idx": 25248, "episode_idx": 136, "frame_idx": 179, "global_frame_idx": 25248, "task_index": 27}, {"db_idx": 25249, "episode_idx": 136, "frame_idx": 180, "global_frame_idx": 25249, "task_index": 27}, {"db_idx": 25250, "episode_idx": 136, "frame_idx": 181, "global_frame_idx": 25250, "task_index": 27}, {"db_idx": 25251, "episode_idx": 136, "frame_idx": 182, "global_frame_idx": 25251, "task_index": 27}, {"db_idx": 25252, "episode_idx": 136, "frame_idx": 183, "global_frame_idx": 25252, "task_index": 27}, {"db_idx": 25253, "episode_idx": 136, "frame_idx": 184, "global_frame_idx": 25253, "task_index": 27}, {"db_idx": 25254, "episode_idx": 136, "frame_idx": 185, "global_frame_idx": 25254, "task_index": 27}, {"db_idx": 25255, "episode_idx": 136, "frame_idx": 186, "global_frame_idx": 25255, "task_index": 27}, {"db_idx": 25256, "episode_idx": 136, "frame_idx": 187, "global_frame_idx": 25256, "task_index": 27}, {"db_idx": 25257, "episode_idx": 137, "frame_idx": 0, "global_frame_idx": 25257, "task_index": 27}, {"db_idx": 25258, "episode_idx": 137, "frame_idx": 1, "global_frame_idx": 
25258, "task_index": 27}, {"db_idx": 25259, "episode_idx": 137, "frame_idx": 2, "global_frame_idx": 25259, "task_index": 27}, {"db_idx": 25260, "episode_idx": 137, "frame_idx": 3, "global_frame_idx": 25260, "task_index": 27}, {"db_idx": 25261, "episode_idx": 137, "frame_idx": 4, "global_frame_idx": 25261, "task_index": 27}, {"db_idx": 25262, "episode_idx": 137, "frame_idx": 5, "global_frame_idx": 25262, "task_index": 27}, {"db_idx": 25263, "episode_idx": 137, "frame_idx": 6, "global_frame_idx": 25263, "task_index": 27}, {"db_idx": 25264, "episode_idx": 137, "frame_idx": 7, "global_frame_idx": 25264, "task_index": 27}, {"db_idx": 25265, "episode_idx": 137, "frame_idx": 8, "global_frame_idx": 25265, "task_index": 27}, {"db_idx": 25266, "episode_idx": 137, "frame_idx": 9, "global_frame_idx": 25266, "task_index": 27}, {"db_idx": 25267, "episode_idx": 137, "frame_idx": 10, "global_frame_idx": 25267, "task_index": 27}, {"db_idx": 25268, "episode_idx": 137, "frame_idx": 11, "global_frame_idx": 25268, "task_index": 27}, {"db_idx": 25269, "episode_idx": 137, "frame_idx": 12, "global_frame_idx": 25269, "task_index": 27}, {"db_idx": 25270, "episode_idx": 137, "frame_idx": 13, "global_frame_idx": 25270, "task_index": 27}, {"db_idx": 25271, "episode_idx": 137, "frame_idx": 14, "global_frame_idx": 25271, "task_index": 27}, {"db_idx": 25272, "episode_idx": 137, "frame_idx": 15, "global_frame_idx": 25272, "task_index": 27}, {"db_idx": 25273, "episode_idx": 137, "frame_idx": 16, "global_frame_idx": 25273, "task_index": 27}, {"db_idx": 25274, "episode_idx": 137, "frame_idx": 17, "global_frame_idx": 25274, "task_index": 27}, {"db_idx": 25275, "episode_idx": 137, "frame_idx": 18, "global_frame_idx": 25275, "task_index": 27}, {"db_idx": 25276, "episode_idx": 137, "frame_idx": 19, "global_frame_idx": 25276, "task_index": 27}, {"db_idx": 25277, "episode_idx": 137, "frame_idx": 20, "global_frame_idx": 25277, "task_index": 27}, {"db_idx": 25278, "episode_idx": 137, "frame_idx": 21, 
"global_frame_idx": 25278, "task_index": 27}, {"db_idx": 25279, "episode_idx": 137, "frame_idx": 22, "global_frame_idx": 25279, "task_index": 27}, {"db_idx": 25280, "episode_idx": 137, "frame_idx": 23, "global_frame_idx": 25280, "task_index": 27}, {"db_idx": 25281, "episode_idx": 137, "frame_idx": 24, "global_frame_idx": 25281, "task_index": 27}, {"db_idx": 25282, "episode_idx": 137, "frame_idx": 25, "global_frame_idx": 25282, "task_index": 27}, {"db_idx": 25283, "episode_idx": 137, "frame_idx": 26, "global_frame_idx": 25283, "task_index": 27}, {"db_idx": 25284, "episode_idx": 137, "frame_idx": 27, "global_frame_idx": 25284, "task_index": 27}, {"db_idx": 25285, "episode_idx": 137, "frame_idx": 28, "global_frame_idx": 25285, "task_index": 27}, {"db_idx": 25286, "episode_idx": 137, "frame_idx": 29, "global_frame_idx": 25286, "task_index": 27}, {"db_idx": 25287, "episode_idx": 137, "frame_idx": 30, "global_frame_idx": 25287, "task_index": 27}, {"db_idx": 25288, "episode_idx": 137, "frame_idx": 31, "global_frame_idx": 25288, "task_index": 27}, {"db_idx": 25289, "episode_idx": 137, "frame_idx": 32, "global_frame_idx": 25289, "task_index": 27}, {"db_idx": 25290, "episode_idx": 137, "frame_idx": 33, "global_frame_idx": 25290, "task_index": 27}, {"db_idx": 25291, "episode_idx": 137, "frame_idx": 34, "global_frame_idx": 25291, "task_index": 27}, {"db_idx": 25292, "episode_idx": 137, "frame_idx": 35, "global_frame_idx": 25292, "task_index": 27}, {"db_idx": 25293, "episode_idx": 137, "frame_idx": 36, "global_frame_idx": 25293, "task_index": 27}, {"db_idx": 25294, "episode_idx": 137, "frame_idx": 37, "global_frame_idx": 25294, "task_index": 27}, {"db_idx": 25295, "episode_idx": 137, "frame_idx": 38, "global_frame_idx": 25295, "task_index": 27}, {"db_idx": 25296, "episode_idx": 137, "frame_idx": 39, "global_frame_idx": 25296, "task_index": 27}, {"db_idx": 25297, "episode_idx": 137, "frame_idx": 40, "global_frame_idx": 25297, "task_index": 27}, {"db_idx": 25298, "episode_idx": 
137, "frame_idx": 41, "global_frame_idx": 25298, "task_index": 27}, {"db_idx": 25299, "episode_idx": 137, "frame_idx": 42, "global_frame_idx": 25299, "task_index": 27}, {"db_idx": 25300, "episode_idx": 137, "frame_idx": 43, "global_frame_idx": 25300, "task_index": 27}, {"db_idx": 25301, "episode_idx": 137, "frame_idx": 44, "global_frame_idx": 25301, "task_index": 27}, {"db_idx": 25302, "episode_idx": 137, "frame_idx": 45, "global_frame_idx": 25302, "task_index": 27}, {"db_idx": 25303, "episode_idx": 137, "frame_idx": 46, "global_frame_idx": 25303, "task_index": 27}, {"db_idx": 25304, "episode_idx": 137, "frame_idx": 47, "global_frame_idx": 25304, "task_index": 27}, {"db_idx": 25305, "episode_idx": 137, "frame_idx": 48, "global_frame_idx": 25305, "task_index": 27}, {"db_idx": 25306, "episode_idx": 137, "frame_idx": 49, "global_frame_idx": 25306, "task_index": 27}, {"db_idx": 25307, "episode_idx": 137, "frame_idx": 50, "global_frame_idx": 25307, "task_index": 27}, {"db_idx": 25308, "episode_idx": 137, "frame_idx": 51, "global_frame_idx": 25308, "task_index": 27}, {"db_idx": 25309, "episode_idx": 137, "frame_idx": 52, "global_frame_idx": 25309, "task_index": 27}, {"db_idx": 25310, "episode_idx": 137, "frame_idx": 53, "global_frame_idx": 25310, "task_index": 27}, {"db_idx": 25311, "episode_idx": 137, "frame_idx": 54, "global_frame_idx": 25311, "task_index": 27}, {"db_idx": 25312, "episode_idx": 137, "frame_idx": 55, "global_frame_idx": 25312, "task_index": 27}, {"db_idx": 25313, "episode_idx": 137, "frame_idx": 56, "global_frame_idx": 25313, "task_index": 27}, {"db_idx": 25314, "episode_idx": 137, "frame_idx": 57, "global_frame_idx": 25314, "task_index": 27}, {"db_idx": 25315, "episode_idx": 137, "frame_idx": 58, "global_frame_idx": 25315, "task_index": 27}, {"db_idx": 25316, "episode_idx": 137, "frame_idx": 59, "global_frame_idx": 25316, "task_index": 27}, {"db_idx": 25317, "episode_idx": 137, "frame_idx": 60, "global_frame_idx": 25317, "task_index": 27}, {"db_idx": 
25318, "episode_idx": 137, "frame_idx": 61, "global_frame_idx": 25318, "task_index": 27}, {"db_idx": 25319, "episode_idx": 137, "frame_idx": 62, "global_frame_idx": 25319, "task_index": 27}, {"db_idx": 25320, "episode_idx": 137, "frame_idx": 63, "global_frame_idx": 25320, "task_index": 27}, {"db_idx": 25321, "episode_idx": 137, "frame_idx": 64, "global_frame_idx": 25321, "task_index": 27}, {"db_idx": 25322, "episode_idx": 137, "frame_idx": 65, "global_frame_idx": 25322, "task_index": 27}, {"db_idx": 25323, "episode_idx": 137, "frame_idx": 66, "global_frame_idx": 25323, "task_index": 27}, {"db_idx": 25324, "episode_idx": 137, "frame_idx": 67, "global_frame_idx": 25324, "task_index": 27}, {"db_idx": 25325, "episode_idx": 137, "frame_idx": 68, "global_frame_idx": 25325, "task_index": 27}, {"db_idx": 25326, "episode_idx": 137, "frame_idx": 69, "global_frame_idx": 25326, "task_index": 27}, {"db_idx": 25327, "episode_idx": 137, "frame_idx": 70, "global_frame_idx": 25327, "task_index": 27}, {"db_idx": 25328, "episode_idx": 137, "frame_idx": 71, "global_frame_idx": 25328, "task_index": 27}, {"db_idx": 25329, "episode_idx": 137, "frame_idx": 72, "global_frame_idx": 25329, "task_index": 27}, {"db_idx": 25330, "episode_idx": 137, "frame_idx": 73, "global_frame_idx": 25330, "task_index": 27}, {"db_idx": 25331, "episode_idx": 137, "frame_idx": 74, "global_frame_idx": 25331, "task_index": 27}, {"db_idx": 25332, "episode_idx": 137, "frame_idx": 75, "global_frame_idx": 25332, "task_index": 27}, {"db_idx": 25333, "episode_idx": 137, "frame_idx": 76, "global_frame_idx": 25333, "task_index": 27}, {"db_idx": 25334, "episode_idx": 137, "frame_idx": 77, "global_frame_idx": 25334, "task_index": 27}, {"db_idx": 25335, "episode_idx": 137, "frame_idx": 78, "global_frame_idx": 25335, "task_index": 27}, {"db_idx": 25336, "episode_idx": 137, "frame_idx": 79, "global_frame_idx": 25336, "task_index": 27}, {"db_idx": 25337, "episode_idx": 137, "frame_idx": 80, "global_frame_idx": 25337, 
"task_index": 27}, {"db_idx": 25338, "episode_idx": 137, "frame_idx": 81, "global_frame_idx": 25338, "task_index": 27}, {"db_idx": 25339, "episode_idx": 137, "frame_idx": 82, "global_frame_idx": 25339, "task_index": 27}, {"db_idx": 25340, "episode_idx": 137, "frame_idx": 83, "global_frame_idx": 25340, "task_index": 27}, {"db_idx": 25341, "episode_idx": 137, "frame_idx": 84, "global_frame_idx": 25341, "task_index": 27}, {"db_idx": 25342, "episode_idx": 137, "frame_idx": 85, "global_frame_idx": 25342, "task_index": 27}, {"db_idx": 25343, "episode_idx": 137, "frame_idx": 86, "global_frame_idx": 25343, "task_index": 27}, {"db_idx": 25344, "episode_idx": 137, "frame_idx": 87, "global_frame_idx": 25344, "task_index": 27}, {"db_idx": 25345, "episode_idx": 137, "frame_idx": 88, "global_frame_idx": 25345, "task_index": 27}, {"db_idx": 25346, "episode_idx": 137, "frame_idx": 89, "global_frame_idx": 25346, "task_index": 27}, {"db_idx": 25347, "episode_idx": 137, "frame_idx": 90, "global_frame_idx": 25347, "task_index": 27}, {"db_idx": 25348, "episode_idx": 137, "frame_idx": 91, "global_frame_idx": 25348, "task_index": 27}, {"db_idx": 25349, "episode_idx": 137, "frame_idx": 92, "global_frame_idx": 25349, "task_index": 27}, {"db_idx": 25350, "episode_idx": 137, "frame_idx": 93, "global_frame_idx": 25350, "task_index": 27}, {"db_idx": 25351, "episode_idx": 137, "frame_idx": 94, "global_frame_idx": 25351, "task_index": 27}, {"db_idx": 25352, "episode_idx": 137, "frame_idx": 95, "global_frame_idx": 25352, "task_index": 27}, {"db_idx": 25353, "episode_idx": 137, "frame_idx": 96, "global_frame_idx": 25353, "task_index": 27}, {"db_idx": 25354, "episode_idx": 137, "frame_idx": 97, "global_frame_idx": 25354, "task_index": 27}, {"db_idx": 25355, "episode_idx": 137, "frame_idx": 98, "global_frame_idx": 25355, "task_index": 27}, {"db_idx": 25356, "episode_idx": 137, "frame_idx": 99, "global_frame_idx": 25356, "task_index": 27}, {"db_idx": 25357, "episode_idx": 137, "frame_idx": 100, 
"global_frame_idx": 25357, "task_index": 27}, {"db_idx": 25358, "episode_idx": 137, "frame_idx": 101, "global_frame_idx": 25358, "task_index": 27}, {"db_idx": 25359, "episode_idx": 137, "frame_idx": 102, "global_frame_idx": 25359, "task_index": 27}, {"db_idx": 25360, "episode_idx": 137, "frame_idx": 103, "global_frame_idx": 25360, "task_index": 27}, {"db_idx": 25361, "episode_idx": 137, "frame_idx": 104, "global_frame_idx": 25361, "task_index": 27}, {"db_idx": 25362, "episode_idx": 137, "frame_idx": 105, "global_frame_idx": 25362, "task_index": 27}, {"db_idx": 25363, "episode_idx": 137, "frame_idx": 106, "global_frame_idx": 25363, "task_index": 27}, {"db_idx": 25364, "episode_idx": 137, "frame_idx": 107, "global_frame_idx": 25364, "task_index": 27}, {"db_idx": 25365, "episode_idx": 137, "frame_idx": 108, "global_frame_idx": 25365, "task_index": 27}, {"db_idx": 25366, "episode_idx": 137, "frame_idx": 109, "global_frame_idx": 25366, "task_index": 27}, {"db_idx": 25367, "episode_idx": 137, "frame_idx": 110, "global_frame_idx": 25367, "task_index": 27}, {"db_idx": 25368, "episode_idx": 137, "frame_idx": 111, "global_frame_idx": 25368, "task_index": 27}, {"db_idx": 25369, "episode_idx": 137, "frame_idx": 112, "global_frame_idx": 25369, "task_index": 27}, {"db_idx": 25370, "episode_idx": 137, "frame_idx": 113, "global_frame_idx": 25370, "task_index": 27}, {"db_idx": 25371, "episode_idx": 137, "frame_idx": 114, "global_frame_idx": 25371, "task_index": 27}, {"db_idx": 25372, "episode_idx": 137, "frame_idx": 115, "global_frame_idx": 25372, "task_index": 27}, {"db_idx": 25373, "episode_idx": 137, "frame_idx": 116, "global_frame_idx": 25373, "task_index": 27}, {"db_idx": 25374, "episode_idx": 137, "frame_idx": 117, "global_frame_idx": 25374, "task_index": 27}, {"db_idx": 25375, "episode_idx": 137, "frame_idx": 118, "global_frame_idx": 25375, "task_index": 27}, {"db_idx": 25376, "episode_idx": 137, "frame_idx": 119, "global_frame_idx": 25376, "task_index": 27}, {"db_idx": 
25377, "episode_idx": 137, "frame_idx": 120, "global_frame_idx": 25377, "task_index": 27}, {"db_idx": 25378, "episode_idx": 137, "frame_idx": 121, "global_frame_idx": 25378, "task_index": 27}, {"db_idx": 25379, "episode_idx": 137, "frame_idx": 122, "global_frame_idx": 25379, "task_index": 27}, {"db_idx": 25380, "episode_idx": 137, "frame_idx": 123, "global_frame_idx": 25380, "task_index": 27}, {"db_idx": 25381, "episode_idx": 137, "frame_idx": 124, "global_frame_idx": 25381, "task_index": 27}, {"db_idx": 25382, "episode_idx": 137, "frame_idx": 125, "global_frame_idx": 25382, "task_index": 27}, {"db_idx": 25383, "episode_idx": 137, "frame_idx": 126, "global_frame_idx": 25383, "task_index": 27}, {"db_idx": 25384, "episode_idx": 137, "frame_idx": 127, "global_frame_idx": 25384, "task_index": 27}, {"db_idx": 25385, "episode_idx": 137, "frame_idx": 128, "global_frame_idx": 25385, "task_index": 27}, {"db_idx": 25386, "episode_idx": 137, "frame_idx": 129, "global_frame_idx": 25386, "task_index": 27}, {"db_idx": 25387, "episode_idx": 137, "frame_idx": 130, "global_frame_idx": 25387, "task_index": 27}, {"db_idx": 25388, "episode_idx": 137, "frame_idx": 131, "global_frame_idx": 25388, "task_index": 27}, {"db_idx": 25389, "episode_idx": 137, "frame_idx": 132, "global_frame_idx": 25389, "task_index": 27}, {"db_idx": 25390, "episode_idx": 137, "frame_idx": 133, "global_frame_idx": 25390, "task_index": 27}, {"db_idx": 25391, "episode_idx": 137, "frame_idx": 134, "global_frame_idx": 25391, "task_index": 27}, {"db_idx": 25392, "episode_idx": 137, "frame_idx": 135, "global_frame_idx": 25392, "task_index": 27}, {"db_idx": 25393, "episode_idx": 137, "frame_idx": 136, "global_frame_idx": 25393, "task_index": 27}, {"db_idx": 25394, "episode_idx": 137, "frame_idx": 137, "global_frame_idx": 25394, "task_index": 27}, {"db_idx": 25395, "episode_idx": 137, "frame_idx": 138, "global_frame_idx": 25395, "task_index": 27}, {"db_idx": 25396, "episode_idx": 137, "frame_idx": 139, 
"global_frame_idx": 25396, "task_index": 27}, {"db_idx": 25397, "episode_idx": 137, "frame_idx": 140, "global_frame_idx": 25397, "task_index": 27}, {"db_idx": 25398, "episode_idx": 137, "frame_idx": 141, "global_frame_idx": 25398, "task_index": 27}, {"db_idx": 25399, "episode_idx": 137, "frame_idx": 142, "global_frame_idx": 25399, "task_index": 27}, {"db_idx": 25400, "episode_idx": 137, "frame_idx": 143, "global_frame_idx": 25400, "task_index": 27}, {"db_idx": 25401, "episode_idx": 137, "frame_idx": 144, "global_frame_idx": 25401, "task_index": 27}, {"db_idx": 25402, "episode_idx": 137, "frame_idx": 145, "global_frame_idx": 25402, "task_index": 27}, {"db_idx": 25403, "episode_idx": 137, "frame_idx": 146, "global_frame_idx": 25403, "task_index": 27}, {"db_idx": 25404, "episode_idx": 137, "frame_idx": 147, "global_frame_idx": 25404, "task_index": 27}, {"db_idx": 25405, "episode_idx": 137, "frame_idx": 148, "global_frame_idx": 25405, "task_index": 27}, {"db_idx": 25406, "episode_idx": 137, "frame_idx": 149, "global_frame_idx": 25406, "task_index": 27}, {"db_idx": 25407, "episode_idx": 137, "frame_idx": 150, "global_frame_idx": 25407, "task_index": 27}, {"db_idx": 25408, "episode_idx": 137, "frame_idx": 151, "global_frame_idx": 25408, "task_index": 27}, {"db_idx": 25409, "episode_idx": 137, "frame_idx": 152, "global_frame_idx": 25409, "task_index": 27}, {"db_idx": 25410, "episode_idx": 137, "frame_idx": 153, "global_frame_idx": 25410, "task_index": 27}, {"db_idx": 25411, "episode_idx": 137, "frame_idx": 154, "global_frame_idx": 25411, "task_index": 27}, {"db_idx": 25412, "episode_idx": 137, "frame_idx": 155, "global_frame_idx": 25412, "task_index": 27}, {"db_idx": 25413, "episode_idx": 137, "frame_idx": 156, "global_frame_idx": 25413, "task_index": 27}, {"db_idx": 25414, "episode_idx": 137, "frame_idx": 157, "global_frame_idx": 25414, "task_index": 27}, {"db_idx": 25415, "episode_idx": 137, "frame_idx": 158, "global_frame_idx": 25415, "task_index": 27}, {"db_idx": 
25416, "episode_idx": 137, "frame_idx": 159, "global_frame_idx": 25416, "task_index": 27}, {"db_idx": 25417, "episode_idx": 137, "frame_idx": 160, "global_frame_idx": 25417, "task_index": 27}, {"db_idx": 25418, "episode_idx": 137, "frame_idx": 161, "global_frame_idx": 25418, "task_index": 27}, {"db_idx": 25419, "episode_idx": 137, "frame_idx": 162, "global_frame_idx": 25419, "task_index": 27}, {"db_idx": 25420, "episode_idx": 137, "frame_idx": 163, "global_frame_idx": 25420, "task_index": 27}, {"db_idx": 25421, "episode_idx": 137, "frame_idx": 164, "global_frame_idx": 25421, "task_index": 27}, {"db_idx": 25422, "episode_idx": 137, "frame_idx": 165, "global_frame_idx": 25422, "task_index": 27}, {"db_idx": 25423, "episode_idx": 137, "frame_idx": 166, "global_frame_idx": 25423, "task_index": 27}, {"db_idx": 25424, "episode_idx": 137, "frame_idx": 167, "global_frame_idx": 25424, "task_index": 27}, {"db_idx": 25425, "episode_idx": 137, "frame_idx": 168, "global_frame_idx": 25425, "task_index": 27}, {"db_idx": 25426, "episode_idx": 137, "frame_idx": 169, "global_frame_idx": 25426, "task_index": 27}, {"db_idx": 25427, "episode_idx": 137, "frame_idx": 170, "global_frame_idx": 25427, "task_index": 27}, {"db_idx": 25428, "episode_idx": 137, "frame_idx": 171, "global_frame_idx": 25428, "task_index": 27}, {"db_idx": 25429, "episode_idx": 137, "frame_idx": 172, "global_frame_idx": 25429, "task_index": 27}, {"db_idx": 25430, "episode_idx": 137, "frame_idx": 173, "global_frame_idx": 25430, "task_index": 27}, {"db_idx": 25431, "episode_idx": 137, "frame_idx": 174, "global_frame_idx": 25431, "task_index": 27}, {"db_idx": 25432, "episode_idx": 137, "frame_idx": 175, "global_frame_idx": 25432, "task_index": 27}, {"db_idx": 25433, "episode_idx": 137, "frame_idx": 176, "global_frame_idx": 25433, "task_index": 27}, {"db_idx": 25434, "episode_idx": 138, "frame_idx": 0, "global_frame_idx": 25434, "task_index": 27}, {"db_idx": 25435, "episode_idx": 138, "frame_idx": 1, "global_frame_idx": 
25435, "task_index": 27}, {"db_idx": 25436, "episode_idx": 138, "frame_idx": 2, "global_frame_idx": 25436, "task_index": 27}, {"db_idx": 25437, "episode_idx": 138, "frame_idx": 3, "global_frame_idx": 25437, "task_index": 27}, {"db_idx": 25438, "episode_idx": 138, "frame_idx": 4, "global_frame_idx": 25438, "task_index": 27}, {"db_idx": 25439, "episode_idx": 138, "frame_idx": 5, "global_frame_idx": 25439, "task_index": 27}, {"db_idx": 25440, "episode_idx": 138, "frame_idx": 6, "global_frame_idx": 25440, "task_index": 27}, {"db_idx": 25441, "episode_idx": 138, "frame_idx": 7, "global_frame_idx": 25441, "task_index": 27}, {"db_idx": 25442, "episode_idx": 138, "frame_idx": 8, "global_frame_idx": 25442, "task_index": 27}, {"db_idx": 25443, "episode_idx": 138, "frame_idx": 9, "global_frame_idx": 25443, "task_index": 27}, {"db_idx": 25444, "episode_idx": 138, "frame_idx": 10, "global_frame_idx": 25444, "task_index": 27}, {"db_idx": 25445, "episode_idx": 138, "frame_idx": 11, "global_frame_idx": 25445, "task_index": 27}, {"db_idx": 25446, "episode_idx": 138, "frame_idx": 12, "global_frame_idx": 25446, "task_index": 27}, {"db_idx": 25447, "episode_idx": 138, "frame_idx": 13, "global_frame_idx": 25447, "task_index": 27}, {"db_idx": 25448, "episode_idx": 138, "frame_idx": 14, "global_frame_idx": 25448, "task_index": 27}, {"db_idx": 25449, "episode_idx": 138, "frame_idx": 15, "global_frame_idx": 25449, "task_index": 27}, {"db_idx": 25450, "episode_idx": 138, "frame_idx": 16, "global_frame_idx": 25450, "task_index": 27}, {"db_idx": 25451, "episode_idx": 138, "frame_idx": 17, "global_frame_idx": 25451, "task_index": 27}, {"db_idx": 25452, "episode_idx": 138, "frame_idx": 18, "global_frame_idx": 25452, "task_index": 27}, {"db_idx": 25453, "episode_idx": 138, "frame_idx": 19, "global_frame_idx": 25453, "task_index": 27}, {"db_idx": 25454, "episode_idx": 138, "frame_idx": 20, "global_frame_idx": 25454, "task_index": 27}, {"db_idx": 25455, "episode_idx": 138, "frame_idx": 21, 
"global_frame_idx": 25455, "task_index": 27}, {"db_idx": 25456, "episode_idx": 138, "frame_idx": 22, "global_frame_idx": 25456, "task_index": 27}, {"db_idx": 25457, "episode_idx": 138, "frame_idx": 23, "global_frame_idx": 25457, "task_index": 27}, {"db_idx": 25458, "episode_idx": 138, "frame_idx": 24, "global_frame_idx": 25458, "task_index": 27}, {"db_idx": 25459, "episode_idx": 138, "frame_idx": 25, "global_frame_idx": 25459, "task_index": 27}, {"db_idx": 25460, "episode_idx": 138, "frame_idx": 26, "global_frame_idx": 25460, "task_index": 27}, {"db_idx": 25461, "episode_idx": 138, "frame_idx": 27, "global_frame_idx": 25461, "task_index": 27}, {"db_idx": 25462, "episode_idx": 138, "frame_idx": 28, "global_frame_idx": 25462, "task_index": 27}, {"db_idx": 25463, "episode_idx": 138, "frame_idx": 29, "global_frame_idx": 25463, "task_index": 27}, {"db_idx": 25464, "episode_idx": 138, "frame_idx": 30, "global_frame_idx": 25464, "task_index": 27}, {"db_idx": 25465, "episode_idx": 138, "frame_idx": 31, "global_frame_idx": 25465, "task_index": 27}, {"db_idx": 25466, "episode_idx": 138, "frame_idx": 32, "global_frame_idx": 25466, "task_index": 27}, {"db_idx": 25467, "episode_idx": 138, "frame_idx": 33, "global_frame_idx": 25467, "task_index": 27}, {"db_idx": 25468, "episode_idx": 138, "frame_idx": 34, "global_frame_idx": 25468, "task_index": 27}, {"db_idx": 25469, "episode_idx": 138, "frame_idx": 35, "global_frame_idx": 25469, "task_index": 27}, {"db_idx": 25470, "episode_idx": 138, "frame_idx": 36, "global_frame_idx": 25470, "task_index": 27}, {"db_idx": 25471, "episode_idx": 138, "frame_idx": 37, "global_frame_idx": 25471, "task_index": 27}, {"db_idx": 25472, "episode_idx": 138, "frame_idx": 38, "global_frame_idx": 25472, "task_index": 27}, {"db_idx": 25473, "episode_idx": 138, "frame_idx": 39, "global_frame_idx": 25473, "task_index": 27}, {"db_idx": 25474, "episode_idx": 138, "frame_idx": 40, "global_frame_idx": 25474, "task_index": 27}, {"db_idx": 25475, "episode_idx": 
138, "frame_idx": 41, "global_frame_idx": 25475, "task_index": 27}, {"db_idx": 25476, "episode_idx": 138, "frame_idx": 42, "global_frame_idx": 25476, "task_index": 27}, {"db_idx": 25477, "episode_idx": 138, "frame_idx": 43, "global_frame_idx": 25477, "task_index": 27}, {"db_idx": 25478, "episode_idx": 138, "frame_idx": 44, "global_frame_idx": 25478, "task_index": 27}, {"db_idx": 25479, "episode_idx": 138, "frame_idx": 45, "global_frame_idx": 25479, "task_index": 27}, {"db_idx": 25480, "episode_idx": 138, "frame_idx": 46, "global_frame_idx": 25480, "task_index": 27}, {"db_idx": 25481, "episode_idx": 138, "frame_idx": 47, "global_frame_idx": 25481, "task_index": 27}, {"db_idx": 25482, "episode_idx": 138, "frame_idx": 48, "global_frame_idx": 25482, "task_index": 27}, {"db_idx": 25483, "episode_idx": 138, "frame_idx": 49, "global_frame_idx": 25483, "task_index": 27}, {"db_idx": 25484, "episode_idx": 138, "frame_idx": 50, "global_frame_idx": 25484, "task_index": 27}, {"db_idx": 25485, "episode_idx": 138, "frame_idx": 51, "global_frame_idx": 25485, "task_index": 27}, {"db_idx": 25486, "episode_idx": 138, "frame_idx": 52, "global_frame_idx": 25486, "task_index": 27}, {"db_idx": 25487, "episode_idx": 138, "frame_idx": 53, "global_frame_idx": 25487, "task_index": 27}, {"db_idx": 25488, "episode_idx": 138, "frame_idx": 54, "global_frame_idx": 25488, "task_index": 27}, {"db_idx": 25489, "episode_idx": 138, "frame_idx": 55, "global_frame_idx": 25489, "task_index": 27}, {"db_idx": 25490, "episode_idx": 138, "frame_idx": 56, "global_frame_idx": 25490, "task_index": 27}, {"db_idx": 25491, "episode_idx": 138, "frame_idx": 57, "global_frame_idx": 25491, "task_index": 27}, {"db_idx": 25492, "episode_idx": 138, "frame_idx": 58, "global_frame_idx": 25492, "task_index": 27}, {"db_idx": 25493, "episode_idx": 138, "frame_idx": 59, "global_frame_idx": 25493, "task_index": 27}, {"db_idx": 25494, "episode_idx": 138, "frame_idx": 60, "global_frame_idx": 25494, "task_index": 27}, {"db_idx": 
25495, "episode_idx": 138, "frame_idx": 61, "global_frame_idx": 25495, "task_index": 27}, {"db_idx": 25496, "episode_idx": 138, "frame_idx": 62, "global_frame_idx": 25496, "task_index": 27}, {"db_idx": 25497, "episode_idx": 138, "frame_idx": 63, "global_frame_idx": 25497, "task_index": 27}, {"db_idx": 25498, "episode_idx": 138, "frame_idx": 64, "global_frame_idx": 25498, "task_index": 27}, {"db_idx": 25499, "episode_idx": 138, "frame_idx": 65, "global_frame_idx": 25499, "task_index": 27}, {"db_idx": 25500, "episode_idx": 138, "frame_idx": 66, "global_frame_idx": 25500, "task_index": 27}, {"db_idx": 25501, "episode_idx": 138, "frame_idx": 67, "global_frame_idx": 25501, "task_index": 27}, {"db_idx": 25502, "episode_idx": 138, "frame_idx": 68, "global_frame_idx": 25502, "task_index": 27}, {"db_idx": 25503, "episode_idx": 138, "frame_idx": 69, "global_frame_idx": 25503, "task_index": 27}, {"db_idx": 25504, "episode_idx": 138, "frame_idx": 70, "global_frame_idx": 25504, "task_index": 27}, {"db_idx": 25505, "episode_idx": 138, "frame_idx": 71, "global_frame_idx": 25505, "task_index": 27}, {"db_idx": 25506, "episode_idx": 138, "frame_idx": 72, "global_frame_idx": 25506, "task_index": 27}, {"db_idx": 25507, "episode_idx": 138, "frame_idx": 73, "global_frame_idx": 25507, "task_index": 27}, {"db_idx": 25508, "episode_idx": 138, "frame_idx": 74, "global_frame_idx": 25508, "task_index": 27}, {"db_idx": 25509, "episode_idx": 138, "frame_idx": 75, "global_frame_idx": 25509, "task_index": 27}, {"db_idx": 25510, "episode_idx": 138, "frame_idx": 76, "global_frame_idx": 25510, "task_index": 27}, {"db_idx": 25511, "episode_idx": 138, "frame_idx": 77, "global_frame_idx": 25511, "task_index": 27}, {"db_idx": 25512, "episode_idx": 138, "frame_idx": 78, "global_frame_idx": 25512, "task_index": 27}, {"db_idx": 25513, "episode_idx": 138, "frame_idx": 79, "global_frame_idx": 25513, "task_index": 27}, {"db_idx": 25514, "episode_idx": 138, "frame_idx": 80, "global_frame_idx": 25514, 
"task_index": 27}, {"db_idx": 25515, "episode_idx": 138, "frame_idx": 81, "global_frame_idx": 25515, "task_index": 27}, {"db_idx": 25516, "episode_idx": 138, "frame_idx": 82, "global_frame_idx": 25516, "task_index": 27}, {"db_idx": 25517, "episode_idx": 138, "frame_idx": 83, "global_frame_idx": 25517, "task_index": 27}, {"db_idx": 25518, "episode_idx": 138, "frame_idx": 84, "global_frame_idx": 25518, "task_index": 27}, {"db_idx": 25519, "episode_idx": 138, "frame_idx": 85, "global_frame_idx": 25519, "task_index": 27}, {"db_idx": 25520, "episode_idx": 138, "frame_idx": 86, "global_frame_idx": 25520, "task_index": 27}, {"db_idx": 25521, "episode_idx": 138, "frame_idx": 87, "global_frame_idx": 25521, "task_index": 27}, {"db_idx": 25522, "episode_idx": 138, "frame_idx": 88, "global_frame_idx": 25522, "task_index": 27}, {"db_idx": 25523, "episode_idx": 138, "frame_idx": 89, "global_frame_idx": 25523, "task_index": 27}, {"db_idx": 25524, "episode_idx": 138, "frame_idx": 90, "global_frame_idx": 25524, "task_index": 27}, {"db_idx": 25525, "episode_idx": 138, "frame_idx": 91, "global_frame_idx": 25525, "task_index": 27}, {"db_idx": 25526, "episode_idx": 138, "frame_idx": 92, "global_frame_idx": 25526, "task_index": 27}, {"db_idx": 25527, "episode_idx": 138, "frame_idx": 93, "global_frame_idx": 25527, "task_index": 27}, {"db_idx": 25528, "episode_idx": 138, "frame_idx": 94, "global_frame_idx": 25528, "task_index": 27}, {"db_idx": 25529, "episode_idx": 138, "frame_idx": 95, "global_frame_idx": 25529, "task_index": 27}, {"db_idx": 25530, "episode_idx": 138, "frame_idx": 96, "global_frame_idx": 25530, "task_index": 27}, {"db_idx": 25531, "episode_idx": 138, "frame_idx": 97, "global_frame_idx": 25531, "task_index": 27}, {"db_idx": 25532, "episode_idx": 138, "frame_idx": 98, "global_frame_idx": 25532, "task_index": 27}, {"db_idx": 25533, "episode_idx": 138, "frame_idx": 99, "global_frame_idx": 25533, "task_index": 27}, {"db_idx": 25534, "episode_idx": 138, "frame_idx": 100, 
"global_frame_idx": 25534, "task_index": 27}, {"db_idx": 25535, "episode_idx": 138, "frame_idx": 101, "global_frame_idx": 25535, "task_index": 27}, {"db_idx": 25536, "episode_idx": 138, "frame_idx": 102, "global_frame_idx": 25536, "task_index": 27}, {"db_idx": 25537, "episode_idx": 138, "frame_idx": 103, "global_frame_idx": 25537, "task_index": 27}, {"db_idx": 25538, "episode_idx": 138, "frame_idx": 104, "global_frame_idx": 25538, "task_index": 27}, {"db_idx": 25539, "episode_idx": 138, "frame_idx": 105, "global_frame_idx": 25539, "task_index": 27}, {"db_idx": 25540, "episode_idx": 138, "frame_idx": 106, "global_frame_idx": 25540, "task_index": 27}, {"db_idx": 25541, "episode_idx": 138, "frame_idx": 107, "global_frame_idx": 25541, "task_index": 27}, {"db_idx": 25542, "episode_idx": 138, "frame_idx": 108, "global_frame_idx": 25542, "task_index": 27}, {"db_idx": 25543, "episode_idx": 138, "frame_idx": 109, "global_frame_idx": 25543, "task_index": 27}, {"db_idx": 25544, "episode_idx": 138, "frame_idx": 110, "global_frame_idx": 25544, "task_index": 27}, {"db_idx": 25545, "episode_idx": 138, "frame_idx": 111, "global_frame_idx": 25545, "task_index": 27}, {"db_idx": 25546, "episode_idx": 138, "frame_idx": 112, "global_frame_idx": 25546, "task_index": 27}, {"db_idx": 25547, "episode_idx": 138, "frame_idx": 113, "global_frame_idx": 25547, "task_index": 27}, {"db_idx": 25548, "episode_idx": 138, "frame_idx": 114, "global_frame_idx": 25548, "task_index": 27}, {"db_idx": 25549, "episode_idx": 138, "frame_idx": 115, "global_frame_idx": 25549, "task_index": 27}, {"db_idx": 25550, "episode_idx": 138, "frame_idx": 116, "global_frame_idx": 25550, "task_index": 27}, {"db_idx": 25551, "episode_idx": 138, "frame_idx": 117, "global_frame_idx": 25551, "task_index": 27}, {"db_idx": 25552, "episode_idx": 138, "frame_idx": 118, "global_frame_idx": 25552, "task_index": 27}, {"db_idx": 25553, "episode_idx": 138, "frame_idx": 119, "global_frame_idx": 25553, "task_index": 27}, {"db_idx": 
25554, "episode_idx": 138, "frame_idx": 120, "global_frame_idx": 25554, "task_index": 27}, {"db_idx": 25555, "episode_idx": 138, "frame_idx": 121, "global_frame_idx": 25555, "task_index": 27}, {"db_idx": 25556, "episode_idx": 138, "frame_idx": 122, "global_frame_idx": 25556, "task_index": 27}, {"db_idx": 25557, "episode_idx": 138, "frame_idx": 123, "global_frame_idx": 25557, "task_index": 27}, {"db_idx": 25558, "episode_idx": 138, "frame_idx": 124, "global_frame_idx": 25558, "task_index": 27}, {"db_idx": 25559, "episode_idx": 138, "frame_idx": 125, "global_frame_idx": 25559, "task_index": 27}, {"db_idx": 25560, "episode_idx": 138, "frame_idx": 126, "global_frame_idx": 25560, "task_index": 27}, {"db_idx": 25561, "episode_idx": 138, "frame_idx": 127, "global_frame_idx": 25561, "task_index": 27}, {"db_idx": 25562, "episode_idx": 138, "frame_idx": 128, "global_frame_idx": 25562, "task_index": 27}, {"db_idx": 25563, "episode_idx": 138, "frame_idx": 129, "global_frame_idx": 25563, "task_index": 27}, {"db_idx": 25564, "episode_idx": 138, "frame_idx": 130, "global_frame_idx": 25564, "task_index": 27}, {"db_idx": 25565, "episode_idx": 138, "frame_idx": 131, "global_frame_idx": 25565, "task_index": 27}, {"db_idx": 25566, "episode_idx": 138, "frame_idx": 132, "global_frame_idx": 25566, "task_index": 27}, {"db_idx": 25567, "episode_idx": 138, "frame_idx": 133, "global_frame_idx": 25567, "task_index": 27}, {"db_idx": 25568, "episode_idx": 138, "frame_idx": 134, "global_frame_idx": 25568, "task_index": 27}, {"db_idx": 25569, "episode_idx": 138, "frame_idx": 135, "global_frame_idx": 25569, "task_index": 27}, {"db_idx": 25570, "episode_idx": 138, "frame_idx": 136, "global_frame_idx": 25570, "task_index": 27}, {"db_idx": 25571, "episode_idx": 138, "frame_idx": 137, "global_frame_idx": 25571, "task_index": 27}, {"db_idx": 25572, "episode_idx": 138, "frame_idx": 138, "global_frame_idx": 25572, "task_index": 27}, {"db_idx": 25573, "episode_idx": 138, "frame_idx": 139, 
"global_frame_idx": 25573, "task_index": 27}, {"db_idx": 25574, "episode_idx": 138, "frame_idx": 140, "global_frame_idx": 25574, "task_index": 27}, {"db_idx": 25575, "episode_idx": 138, "frame_idx": 141, "global_frame_idx": 25575, "task_index": 27}, {"db_idx": 25576, "episode_idx": 138, "frame_idx": 142, "global_frame_idx": 25576, "task_index": 27}, {"db_idx": 25577, "episode_idx": 138, "frame_idx": 143, "global_frame_idx": 25577, "task_index": 27}, {"db_idx": 25578, "episode_idx": 138, "frame_idx": 144, "global_frame_idx": 25578, "task_index": 27}, {"db_idx": 25579, "episode_idx": 138, "frame_idx": 145, "global_frame_idx": 25579, "task_index": 27}, {"db_idx": 25580, "episode_idx": 138, "frame_idx": 146, "global_frame_idx": 25580, "task_index": 27}, {"db_idx": 25581, "episode_idx": 138, "frame_idx": 147, "global_frame_idx": 25581, "task_index": 27}, {"db_idx": 25582, "episode_idx": 138, "frame_idx": 148, "global_frame_idx": 25582, "task_index": 27}, {"db_idx": 25583, "episode_idx": 138, "frame_idx": 149, "global_frame_idx": 25583, "task_index": 27}, {"db_idx": 25584, "episode_idx": 138, "frame_idx": 150, "global_frame_idx": 25584, "task_index": 27}, {"db_idx": 25585, "episode_idx": 138, "frame_idx": 151, "global_frame_idx": 25585, "task_index": 27}, {"db_idx": 25586, "episode_idx": 138, "frame_idx": 152, "global_frame_idx": 25586, "task_index": 27}, {"db_idx": 25587, "episode_idx": 138, "frame_idx": 153, "global_frame_idx": 25587, "task_index": 27}, {"db_idx": 25588, "episode_idx": 138, "frame_idx": 154, "global_frame_idx": 25588, "task_index": 27}, {"db_idx": 25589, "episode_idx": 138, "frame_idx": 155, "global_frame_idx": 25589, "task_index": 27}, {"db_idx": 25590, "episode_idx": 138, "frame_idx": 156, "global_frame_idx": 25590, "task_index": 27}, {"db_idx": 25591, "episode_idx": 138, "frame_idx": 157, "global_frame_idx": 25591, "task_index": 27}, {"db_idx": 25592, "episode_idx": 138, "frame_idx": 158, "global_frame_idx": 25592, "task_index": 27}, {"db_idx": 
25593, "episode_idx": 138, "frame_idx": 159, "global_frame_idx": 25593, "task_index": 27}, {"db_idx": 25594, "episode_idx": 138, "frame_idx": 160, "global_frame_idx": 25594, "task_index": 27}, {"db_idx": 25595, "episode_idx": 138, "frame_idx": 161, "global_frame_idx": 25595, "task_index": 27}, {"db_idx": 25596, "episode_idx": 138, "frame_idx": 162, "global_frame_idx": 25596, "task_index": 27}, {"db_idx": 25597, "episode_idx": 138, "frame_idx": 163, "global_frame_idx": 25597, "task_index": 27}, {"db_idx": 25598, "episode_idx": 138, "frame_idx": 164, "global_frame_idx": 25598, "task_index": 27}, {"db_idx": 25599, "episode_idx": 138, "frame_idx": 165, "global_frame_idx": 25599, "task_index": 27}, {"db_idx": 25600, "episode_idx": 138, "frame_idx": 166, "global_frame_idx": 25600, "task_index": 27}, {"db_idx": 25601, "episode_idx": 138, "frame_idx": 167, "global_frame_idx": 25601, "task_index": 27}, {"db_idx": 25602, "episode_idx": 138, "frame_idx": 168, "global_frame_idx": 25602, "task_index": 27}, {"db_idx": 25603, "episode_idx": 138, "frame_idx": 169, "global_frame_idx": 25603, "task_index": 27}, {"db_idx": 25604, "episode_idx": 138, "frame_idx": 170, "global_frame_idx": 25604, "task_index": 27}, {"db_idx": 25605, "episode_idx": 138, "frame_idx": 171, "global_frame_idx": 25605, "task_index": 27}, {"db_idx": 25606, "episode_idx": 138, "frame_idx": 172, "global_frame_idx": 25606, "task_index": 27}, {"db_idx": 25607, "episode_idx": 138, "frame_idx": 173, "global_frame_idx": 25607, "task_index": 27}, {"db_idx": 25608, "episode_idx": 138, "frame_idx": 174, "global_frame_idx": 25608, "task_index": 27}, {"db_idx": 25609, "episode_idx": 138, "frame_idx": 175, "global_frame_idx": 25609, "task_index": 27}, {"db_idx": 25610, "episode_idx": 138, "frame_idx": 176, "global_frame_idx": 25610, "task_index": 27}, {"db_idx": 25611, "episode_idx": 138, "frame_idx": 177, "global_frame_idx": 25611, "task_index": 27}, {"db_idx": 25612, "episode_idx": 138, "frame_idx": 178, 
"global_frame_idx": 25612, "task_index": 27}, {"db_idx": 25613, "episode_idx": 138, "frame_idx": 179, "global_frame_idx": 25613, "task_index": 27}, {"db_idx": 25614, "episode_idx": 138, "frame_idx": 180, "global_frame_idx": 25614, "task_index": 27}, {"db_idx": 25615, "episode_idx": 138, "frame_idx": 181, "global_frame_idx": 25615, "task_index": 27}, {"db_idx": 25616, "episode_idx": 138, "frame_idx": 182, "global_frame_idx": 25616, "task_index": 27}, {"db_idx": 25617, "episode_idx": 138, "frame_idx": 183, "global_frame_idx": 25617, "task_index": 27}, {"db_idx": 25618, "episode_idx": 138, "frame_idx": 184, "global_frame_idx": 25618, "task_index": 27}, {"db_idx": 25619, "episode_idx": 139, "frame_idx": 0, "global_frame_idx": 25619, "task_index": 27}, {"db_idx": 25620, "episode_idx": 139, "frame_idx": 1, "global_frame_idx": 25620, "task_index": 27}, {"db_idx": 25621, "episode_idx": 139, "frame_idx": 2, "global_frame_idx": 25621, "task_index": 27}, {"db_idx": 25622, "episode_idx": 139, "frame_idx": 3, "global_frame_idx": 25622, "task_index": 27}, {"db_idx": 25623, "episode_idx": 139, "frame_idx": 4, "global_frame_idx": 25623, "task_index": 27}, {"db_idx": 25624, "episode_idx": 139, "frame_idx": 5, "global_frame_idx": 25624, "task_index": 27}, {"db_idx": 25625, "episode_idx": 139, "frame_idx": 6, "global_frame_idx": 25625, "task_index": 27}, {"db_idx": 25626, "episode_idx": 139, "frame_idx": 7, "global_frame_idx": 25626, "task_index": 27}, {"db_idx": 25627, "episode_idx": 139, "frame_idx": 8, "global_frame_idx": 25627, "task_index": 27}, {"db_idx": 25628, "episode_idx": 139, "frame_idx": 9, "global_frame_idx": 25628, "task_index": 27}, {"db_idx": 25629, "episode_idx": 139, "frame_idx": 10, "global_frame_idx": 25629, "task_index": 27}, {"db_idx": 25630, "episode_idx": 139, "frame_idx": 11, "global_frame_idx": 25630, "task_index": 27}, {"db_idx": 25631, "episode_idx": 139, "frame_idx": 12, "global_frame_idx": 25631, "task_index": 27}, {"db_idx": 25632, "episode_idx": 139, 
"frame_idx": 13, "global_frame_idx": 25632, "task_index": 27}, {"db_idx": 25633, "episode_idx": 139, "frame_idx": 14, "global_frame_idx": 25633, "task_index": 27}, {"db_idx": 25634, "episode_idx": 139, "frame_idx": 15, "global_frame_idx": 25634, "task_index": 27}, {"db_idx": 25635, "episode_idx": 139, "frame_idx": 16, "global_frame_idx": 25635, "task_index": 27}, {"db_idx": 25636, "episode_idx": 139, "frame_idx": 17, "global_frame_idx": 25636, "task_index": 27}, {"db_idx": 25637, "episode_idx": 139, "frame_idx": 18, "global_frame_idx": 25637, "task_index": 27}, {"db_idx": 25638, "episode_idx": 139, "frame_idx": 19, "global_frame_idx": 25638, "task_index": 27}, {"db_idx": 25639, "episode_idx": 139, "frame_idx": 20, "global_frame_idx": 25639, "task_index": 27}, {"db_idx": 25640, "episode_idx": 139, "frame_idx": 21, "global_frame_idx": 25640, "task_index": 27}, {"db_idx": 25641, "episode_idx": 139, "frame_idx": 22, "global_frame_idx": 25641, "task_index": 27}, {"db_idx": 25642, "episode_idx": 139, "frame_idx": 23, "global_frame_idx": 25642, "task_index": 27}, {"db_idx": 25643, "episode_idx": 139, "frame_idx": 24, "global_frame_idx": 25643, "task_index": 27}, {"db_idx": 25644, "episode_idx": 139, "frame_idx": 25, "global_frame_idx": 25644, "task_index": 27}, {"db_idx": 25645, "episode_idx": 139, "frame_idx": 26, "global_frame_idx": 25645, "task_index": 27}, {"db_idx": 25646, "episode_idx": 139, "frame_idx": 27, "global_frame_idx": 25646, "task_index": 27}, {"db_idx": 25647, "episode_idx": 139, "frame_idx": 28, "global_frame_idx": 25647, "task_index": 27}, {"db_idx": 25648, "episode_idx": 139, "frame_idx": 29, "global_frame_idx": 25648, "task_index": 27}, {"db_idx": 25649, "episode_idx": 139, "frame_idx": 30, "global_frame_idx": 25649, "task_index": 27}, {"db_idx": 25650, "episode_idx": 139, "frame_idx": 31, "global_frame_idx": 25650, "task_index": 27}, {"db_idx": 25651, "episode_idx": 139, "frame_idx": 32, "global_frame_idx": 25651, "task_index": 27}, {"db_idx": 25652, 
"episode_idx": 139, "frame_idx": 33, "global_frame_idx": 25652, "task_index": 27}, {"db_idx": 25653, "episode_idx": 139, "frame_idx": 34, "global_frame_idx": 25653, "task_index": 27}, {"db_idx": 25654, "episode_idx": 139, "frame_idx": 35, "global_frame_idx": 25654, "task_index": 27}, {"db_idx": 25655, "episode_idx": 139, "frame_idx": 36, "global_frame_idx": 25655, "task_index": 27}, {"db_idx": 25656, "episode_idx": 139, "frame_idx": 37, "global_frame_idx": 25656, "task_index": 27}, {"db_idx": 25657, "episode_idx": 139, "frame_idx": 38, "global_frame_idx": 25657, "task_index": 27}, {"db_idx": 25658, "episode_idx": 139, "frame_idx": 39, "global_frame_idx": 25658, "task_index": 27}, {"db_idx": 25659, "episode_idx": 139, "frame_idx": 40, "global_frame_idx": 25659, "task_index": 27}, {"db_idx": 25660, "episode_idx": 139, "frame_idx": 41, "global_frame_idx": 25660, "task_index": 27}, {"db_idx": 25661, "episode_idx": 139, "frame_idx": 42, "global_frame_idx": 25661, "task_index": 27}, {"db_idx": 25662, "episode_idx": 139, "frame_idx": 43, "global_frame_idx": 25662, "task_index": 27}, {"db_idx": 25663, "episode_idx": 139, "frame_idx": 44, "global_frame_idx": 25663, "task_index": 27}, {"db_idx": 25664, "episode_idx": 139, "frame_idx": 45, "global_frame_idx": 25664, "task_index": 27}, {"db_idx": 25665, "episode_idx": 139, "frame_idx": 46, "global_frame_idx": 25665, "task_index": 27}, {"db_idx": 25666, "episode_idx": 139, "frame_idx": 47, "global_frame_idx": 25666, "task_index": 27}, {"db_idx": 25667, "episode_idx": 139, "frame_idx": 48, "global_frame_idx": 25667, "task_index": 27}, {"db_idx": 25668, "episode_idx": 139, "frame_idx": 49, "global_frame_idx": 25668, "task_index": 27}, {"db_idx": 25669, "episode_idx": 139, "frame_idx": 50, "global_frame_idx": 25669, "task_index": 27}, {"db_idx": 25670, "episode_idx": 139, "frame_idx": 51, "global_frame_idx": 25670, "task_index": 27}, {"db_idx": 25671, "episode_idx": 139, "frame_idx": 52, "global_frame_idx": 25671, "task_index": 
27}, {"db_idx": 25672, "episode_idx": 139, "frame_idx": 53, "global_frame_idx": 25672, "task_index": 27}, {"db_idx": 25673, "episode_idx": 139, "frame_idx": 54, "global_frame_idx": 25673, "task_index": 27}, {"db_idx": 25674, "episode_idx": 139, "frame_idx": 55, "global_frame_idx": 25674, "task_index": 27}, {"db_idx": 25675, "episode_idx": 139, "frame_idx": 56, "global_frame_idx": 25675, "task_index": 27}, {"db_idx": 25676, "episode_idx": 139, "frame_idx": 57, "global_frame_idx": 25676, "task_index": 27}, {"db_idx": 25677, "episode_idx": 139, "frame_idx": 58, "global_frame_idx": 25677, "task_index": 27}, {"db_idx": 25678, "episode_idx": 139, "frame_idx": 59, "global_frame_idx": 25678, "task_index": 27}, {"db_idx": 25679, "episode_idx": 139, "frame_idx": 60, "global_frame_idx": 25679, "task_index": 27}, {"db_idx": 25680, "episode_idx": 139, "frame_idx": 61, "global_frame_idx": 25680, "task_index": 27}, {"db_idx": 25681, "episode_idx": 139, "frame_idx": 62, "global_frame_idx": 25681, "task_index": 27}, {"db_idx": 25682, "episode_idx": 139, "frame_idx": 63, "global_frame_idx": 25682, "task_index": 27}, {"db_idx": 25683, "episode_idx": 139, "frame_idx": 64, "global_frame_idx": 25683, "task_index": 27}, {"db_idx": 25684, "episode_idx": 139, "frame_idx": 65, "global_frame_idx": 25684, "task_index": 27}, {"db_idx": 25685, "episode_idx": 139, "frame_idx": 66, "global_frame_idx": 25685, "task_index": 27}, {"db_idx": 25686, "episode_idx": 139, "frame_idx": 67, "global_frame_idx": 25686, "task_index": 27}, {"db_idx": 25687, "episode_idx": 139, "frame_idx": 68, "global_frame_idx": 25687, "task_index": 27}, {"db_idx": 25688, "episode_idx": 139, "frame_idx": 69, "global_frame_idx": 25688, "task_index": 27}, {"db_idx": 25689, "episode_idx": 139, "frame_idx": 70, "global_frame_idx": 25689, "task_index": 27}, {"db_idx": 25690, "episode_idx": 139, "frame_idx": 71, "global_frame_idx": 25690, "task_index": 27}, {"db_idx": 25691, "episode_idx": 139, "frame_idx": 72, "global_frame_idx": 
25691, "task_index": 27}, {"db_idx": 25692, "episode_idx": 139, "frame_idx": 73, "global_frame_idx": 25692, "task_index": 27}, {"db_idx": 25693, "episode_idx": 139, "frame_idx": 74, "global_frame_idx": 25693, "task_index": 27}, {"db_idx": 25694, "episode_idx": 139, "frame_idx": 75, "global_frame_idx": 25694, "task_index": 27}, {"db_idx": 25695, "episode_idx": 139, "frame_idx": 76, "global_frame_idx": 25695, "task_index": 27}, {"db_idx": 25696, "episode_idx": 139, "frame_idx": 77, "global_frame_idx": 25696, "task_index": 27}, {"db_idx": 25697, "episode_idx": 139, "frame_idx": 78, "global_frame_idx": 25697, "task_index": 27}, {"db_idx": 25698, "episode_idx": 139, "frame_idx": 79, "global_frame_idx": 25698, "task_index": 27}, {"db_idx": 25699, "episode_idx": 139, "frame_idx": 80, "global_frame_idx": 25699, "task_index": 27}, {"db_idx": 25700, "episode_idx": 139, "frame_idx": 81, "global_frame_idx": 25700, "task_index": 27}, {"db_idx": 25701, "episode_idx": 139, "frame_idx": 82, "global_frame_idx": 25701, "task_index": 27}, {"db_idx": 25702, "episode_idx": 139, "frame_idx": 83, "global_frame_idx": 25702, "task_index": 27}, {"db_idx": 25703, "episode_idx": 139, "frame_idx": 84, "global_frame_idx": 25703, "task_index": 27}, {"db_idx": 25704, "episode_idx": 139, "frame_idx": 85, "global_frame_idx": 25704, "task_index": 27}, {"db_idx": 25705, "episode_idx": 139, "frame_idx": 86, "global_frame_idx": 25705, "task_index": 27}, {"db_idx": 25706, "episode_idx": 139, "frame_idx": 87, "global_frame_idx": 25706, "task_index": 27}, {"db_idx": 25707, "episode_idx": 139, "frame_idx": 88, "global_frame_idx": 25707, "task_index": 27}, {"db_idx": 25708, "episode_idx": 139, "frame_idx": 89, "global_frame_idx": 25708, "task_index": 27}, {"db_idx": 25709, "episode_idx": 139, "frame_idx": 90, "global_frame_idx": 25709, "task_index": 27}, {"db_idx": 25710, "episode_idx": 139, "frame_idx": 91, "global_frame_idx": 25710, "task_index": 27}, {"db_idx": 25711, "episode_idx": 139, "frame_idx": 92, 
"global_frame_idx": 25711, "task_index": 27}, {"db_idx": 25712, "episode_idx": 139, "frame_idx": 93, "global_frame_idx": 25712, "task_index": 27}, {"db_idx": 25713, "episode_idx": 139, "frame_idx": 94, "global_frame_idx": 25713, "task_index": 27}, {"db_idx": 25714, "episode_idx": 139, "frame_idx": 95, "global_frame_idx": 25714, "task_index": 27}, {"db_idx": 25715, "episode_idx": 139, "frame_idx": 96, "global_frame_idx": 25715, "task_index": 27}, {"db_idx": 25716, "episode_idx": 139, "frame_idx": 97, "global_frame_idx": 25716, "task_index": 27}, {"db_idx": 25717, "episode_idx": 139, "frame_idx": 98, "global_frame_idx": 25717, "task_index": 27}, {"db_idx": 25718, "episode_idx": 139, "frame_idx": 99, "global_frame_idx": 25718, "task_index": 27}, {"db_idx": 25719, "episode_idx": 139, "frame_idx": 100, "global_frame_idx": 25719, "task_index": 27}, {"db_idx": 25720, "episode_idx": 139, "frame_idx": 101, "global_frame_idx": 25720, "task_index": 27}, {"db_idx": 25721, "episode_idx": 139, "frame_idx": 102, "global_frame_idx": 25721, "task_index": 27}, {"db_idx": 25722, "episode_idx": 139, "frame_idx": 103, "global_frame_idx": 25722, "task_index": 27}, {"db_idx": 25723, "episode_idx": 139, "frame_idx": 104, "global_frame_idx": 25723, "task_index": 27}, {"db_idx": 25724, "episode_idx": 139, "frame_idx": 105, "global_frame_idx": 25724, "task_index": 27}, {"db_idx": 25725, "episode_idx": 139, "frame_idx": 106, "global_frame_idx": 25725, "task_index": 27}, {"db_idx": 25726, "episode_idx": 139, "frame_idx": 107, "global_frame_idx": 25726, "task_index": 27}, {"db_idx": 25727, "episode_idx": 139, "frame_idx": 108, "global_frame_idx": 25727, "task_index": 27}, {"db_idx": 25728, "episode_idx": 139, "frame_idx": 109, "global_frame_idx": 25728, "task_index": 27}, {"db_idx": 25729, "episode_idx": 139, "frame_idx": 110, "global_frame_idx": 25729, "task_index": 27}, {"db_idx": 25730, "episode_idx": 139, "frame_idx": 111, "global_frame_idx": 25730, "task_index": 27}, {"db_idx": 25731, 
"episode_idx": 139, "frame_idx": 112, "global_frame_idx": 25731, "task_index": 27}, {"db_idx": 25732, "episode_idx": 139, "frame_idx": 113, "global_frame_idx": 25732, "task_index": 27}, {"db_idx": 25733, "episode_idx": 139, "frame_idx": 114, "global_frame_idx": 25733, "task_index": 27}, {"db_idx": 25734, "episode_idx": 139, "frame_idx": 115, "global_frame_idx": 25734, "task_index": 27}, {"db_idx": 25735, "episode_idx": 139, "frame_idx": 116, "global_frame_idx": 25735, "task_index": 27}, {"db_idx": 25736, "episode_idx": 139, "frame_idx": 117, "global_frame_idx": 25736, "task_index": 27}, {"db_idx": 25737, "episode_idx": 139, "frame_idx": 118, "global_frame_idx": 25737, "task_index": 27}, {"db_idx": 25738, "episode_idx": 139, "frame_idx": 119, "global_frame_idx": 25738, "task_index": 27}, {"db_idx": 25739, "episode_idx": 139, "frame_idx": 120, "global_frame_idx": 25739, "task_index": 27}, {"db_idx": 25740, "episode_idx": 139, "frame_idx": 121, "global_frame_idx": 25740, "task_index": 27}, {"db_idx": 25741, "episode_idx": 139, "frame_idx": 122, "global_frame_idx": 25741, "task_index": 27}, {"db_idx": 25742, "episode_idx": 139, "frame_idx": 123, "global_frame_idx": 25742, "task_index": 27}, {"db_idx": 25743, "episode_idx": 139, "frame_idx": 124, "global_frame_idx": 25743, "task_index": 27}, {"db_idx": 25744, "episode_idx": 139, "frame_idx": 125, "global_frame_idx": 25744, "task_index": 27}, {"db_idx": 25745, "episode_idx": 139, "frame_idx": 126, "global_frame_idx": 25745, "task_index": 27}, {"db_idx": 25746, "episode_idx": 139, "frame_idx": 127, "global_frame_idx": 25746, "task_index": 27}, {"db_idx": 25747, "episode_idx": 139, "frame_idx": 128, "global_frame_idx": 25747, "task_index": 27}, {"db_idx": 25748, "episode_idx": 139, "frame_idx": 129, "global_frame_idx": 25748, "task_index": 27}, {"db_idx": 25749, "episode_idx": 139, "frame_idx": 130, "global_frame_idx": 25749, "task_index": 27}, {"db_idx": 25750, "episode_idx": 139, "frame_idx": 131, "global_frame_idx": 
25750, "task_index": 27}, {"db_idx": 25751, "episode_idx": 139, "frame_idx": 132, "global_frame_idx": 25751, "task_index": 27}, {"db_idx": 25752, "episode_idx": 139, "frame_idx": 133, "global_frame_idx": 25752, "task_index": 27}, {"db_idx": 25753, "episode_idx": 139, "frame_idx": 134, "global_frame_idx": 25753, "task_index": 27}, {"db_idx": 25754, "episode_idx": 139, "frame_idx": 135, "global_frame_idx": 25754, "task_index": 27}, {"db_idx": 25755, "episode_idx": 139, "frame_idx": 136, "global_frame_idx": 25755, "task_index": 27}, {"db_idx": 25756, "episode_idx": 139, "frame_idx": 137, "global_frame_idx": 25756, "task_index": 27}, {"db_idx": 25757, "episode_idx": 139, "frame_idx": 138, "global_frame_idx": 25757, "task_index": 27}, {"db_idx": 25758, "episode_idx": 139, "frame_idx": 139, "global_frame_idx": 25758, "task_index": 27}, {"db_idx": 25759, "episode_idx": 139, "frame_idx": 140, "global_frame_idx": 25759, "task_index": 27}, {"db_idx": 25760, "episode_idx": 139, "frame_idx": 141, "global_frame_idx": 25760, "task_index": 27}, {"db_idx": 25761, "episode_idx": 139, "frame_idx": 142, "global_frame_idx": 25761, "task_index": 27}, {"db_idx": 25762, "episode_idx": 139, "frame_idx": 143, "global_frame_idx": 25762, "task_index": 27}, {"db_idx": 25763, "episode_idx": 139, "frame_idx": 144, "global_frame_idx": 25763, "task_index": 27}, {"db_idx": 25764, "episode_idx": 139, "frame_idx": 145, "global_frame_idx": 25764, "task_index": 27}, {"db_idx": 25765, "episode_idx": 139, "frame_idx": 146, "global_frame_idx": 25765, "task_index": 27}, {"db_idx": 25766, "episode_idx": 139, "frame_idx": 147, "global_frame_idx": 25766, "task_index": 27}, {"db_idx": 25767, "episode_idx": 139, "frame_idx": 148, "global_frame_idx": 25767, "task_index": 27}, {"db_idx": 25768, "episode_idx": 139, "frame_idx": 149, "global_frame_idx": 25768, "task_index": 27}, {"db_idx": 25769, "episode_idx": 139, "frame_idx": 150, "global_frame_idx": 25769, "task_index": 27}, {"db_idx": 25770, "episode_idx": 
139, "frame_idx": 151, "global_frame_idx": 25770, "task_index": 27}, {"db_idx": 25771, "episode_idx": 139, "frame_idx": 152, "global_frame_idx": 25771, "task_index": 27}, {"db_idx": 25772, "episode_idx": 139, "frame_idx": 153, "global_frame_idx": 25772, "task_index": 27}, {"db_idx": 25773, "episode_idx": 139, "frame_idx": 154, "global_frame_idx": 25773, "task_index": 27}, {"db_idx": 25774, "episode_idx": 139, "frame_idx": 155, "global_frame_idx": 25774, "task_index": 27}, {"db_idx": 25775, "episode_idx": 139, "frame_idx": 156, "global_frame_idx": 25775, "task_index": 27}, {"db_idx": 25776, "episode_idx": 139, "frame_idx": 157, "global_frame_idx": 25776, "task_index": 27}, {"db_idx": 25777, "episode_idx": 139, "frame_idx": 158, "global_frame_idx": 25777, "task_index": 27}, {"db_idx": 25778, "episode_idx": 139, "frame_idx": 159, "global_frame_idx": 25778, "task_index": 27}, {"db_idx": 25779, "episode_idx": 139, "frame_idx": 160, "global_frame_idx": 25779, "task_index": 27}, {"db_idx": 25780, "episode_idx": 139, "frame_idx": 161, "global_frame_idx": 25780, "task_index": 27}, {"db_idx": 25781, "episode_idx": 139, "frame_idx": 162, "global_frame_idx": 25781, "task_index": 27}, {"db_idx": 25782, "episode_idx": 139, "frame_idx": 163, "global_frame_idx": 25782, "task_index": 27}, {"db_idx": 25783, "episode_idx": 139, "frame_idx": 164, "global_frame_idx": 25783, "task_index": 27}, {"db_idx": 25784, "episode_idx": 139, "frame_idx": 165, "global_frame_idx": 25784, "task_index": 27}, {"db_idx": 25785, "episode_idx": 139, "frame_idx": 166, "global_frame_idx": 25785, "task_index": 27}, {"db_idx": 25786, "episode_idx": 139, "frame_idx": 167, "global_frame_idx": 25786, "task_index": 27}, {"db_idx": 25787, "episode_idx": 139, "frame_idx": 168, "global_frame_idx": 25787, "task_index": 27}, {"db_idx": 25788, "episode_idx": 139, "frame_idx": 169, "global_frame_idx": 25788, "task_index": 27}, {"db_idx": 25789, "episode_idx": 139, "frame_idx": 170, "global_frame_idx": 25789, 
"task_index": 27}, {"db_idx": 25790, "episode_idx": 139, "frame_idx": 171, "global_frame_idx": 25790, "task_index": 27}, {"db_idx": 25791, "episode_idx": 139, "frame_idx": 172, "global_frame_idx": 25791, "task_index": 27}, {"db_idx": 25792, "episode_idx": 139, "frame_idx": 173, "global_frame_idx": 25792, "task_index": 27}, {"db_idx": 25793, "episode_idx": 139, "frame_idx": 174, "global_frame_idx": 25793, "task_index": 27}, {"db_idx": 25794, "episode_idx": 139, "frame_idx": 175, "global_frame_idx": 25794, "task_index": 27}, {"db_idx": 25795, "episode_idx": 139, "frame_idx": 176, "global_frame_idx": 25795, "task_index": 27}, {"db_idx": 25796, "episode_idx": 139, "frame_idx": 177, "global_frame_idx": 25796, "task_index": 27}, {"db_idx": 25797, "episode_idx": 139, "frame_idx": 178, "global_frame_idx": 25797, "task_index": 27}, {"db_idx": 25798, "episode_idx": 139, "frame_idx": 179, "global_frame_idx": 25798, "task_index": 27}, {"db_idx": 25799, "episode_idx": 139, "frame_idx": 180, "global_frame_idx": 25799, "task_index": 27}, {"db_idx": 25800, "episode_idx": 139, "frame_idx": 181, "global_frame_idx": 25800, "task_index": 27}, {"db_idx": 25801, "episode_idx": 139, "frame_idx": 182, "global_frame_idx": 25801, "task_index": 27}, {"db_idx": 25802, "episode_idx": 139, "frame_idx": 183, "global_frame_idx": 25802, "task_index": 27}, {"db_idx": 25803, "episode_idx": 139, "frame_idx": 184, "global_frame_idx": 25803, "task_index": 27}, {"db_idx": 25804, "episode_idx": 139, "frame_idx": 185, "global_frame_idx": 25804, "task_index": 27}, {"db_idx": 25805, "episode_idx": 139, "frame_idx": 186, "global_frame_idx": 25805, "task_index": 27}, {"db_idx": 25806, "episode_idx": 139, "frame_idx": 187, "global_frame_idx": 25806, "task_index": 27}, {"db_idx": 25807, "episode_idx": 139, "frame_idx": 188, "global_frame_idx": 25807, "task_index": 27}, {"db_idx": 25808, "episode_idx": 139, "frame_idx": 189, "global_frame_idx": 25808, "task_index": 27}, {"db_idx": 25809, "episode_idx": 140, 
"frame_idx": 0, "global_frame_idx": 25809, "task_index": 28}, {"db_idx": 25810, "episode_idx": 140, "frame_idx": 1, "global_frame_idx": 25810, "task_index": 28}, {"db_idx": 25811, "episode_idx": 140, "frame_idx": 2, "global_frame_idx": 25811, "task_index": 28}, {"db_idx": 25812, "episode_idx": 140, "frame_idx": 3, "global_frame_idx": 25812, "task_index": 28}, {"db_idx": 25813, "episode_idx": 140, "frame_idx": 4, "global_frame_idx": 25813, "task_index": 28}, {"db_idx": 25814, "episode_idx": 140, "frame_idx": 5, "global_frame_idx": 25814, "task_index": 28}, {"db_idx": 25815, "episode_idx": 140, "frame_idx": 6, "global_frame_idx": 25815, "task_index": 28}, {"db_idx": 25816, "episode_idx": 140, "frame_idx": 7, "global_frame_idx": 25816, "task_index": 28}, {"db_idx": 25817, "episode_idx": 140, "frame_idx": 8, "global_frame_idx": 25817, "task_index": 28}, {"db_idx": 25818, "episode_idx": 140, "frame_idx": 9, "global_frame_idx": 25818, "task_index": 28}, {"db_idx": 25819, "episode_idx": 140, "frame_idx": 10, "global_frame_idx": 25819, "task_index": 28}, {"db_idx": 25820, "episode_idx": 140, "frame_idx": 11, "global_frame_idx": 25820, "task_index": 28}, {"db_idx": 25821, "episode_idx": 140, "frame_idx": 12, "global_frame_idx": 25821, "task_index": 28}, {"db_idx": 25822, "episode_idx": 140, "frame_idx": 13, "global_frame_idx": 25822, "task_index": 28}, {"db_idx": 25823, "episode_idx": 140, "frame_idx": 14, "global_frame_idx": 25823, "task_index": 28}, {"db_idx": 25824, "episode_idx": 140, "frame_idx": 15, "global_frame_idx": 25824, "task_index": 28}, {"db_idx": 25825, "episode_idx": 140, "frame_idx": 16, "global_frame_idx": 25825, "task_index": 28}, {"db_idx": 25826, "episode_idx": 140, "frame_idx": 17, "global_frame_idx": 25826, "task_index": 28}, {"db_idx": 25827, "episode_idx": 140, "frame_idx": 18, "global_frame_idx": 25827, "task_index": 28}, {"db_idx": 25828, "episode_idx": 140, "frame_idx": 19, "global_frame_idx": 25828, "task_index": 28}, {"db_idx": 25829, 
"episode_idx": 140, "frame_idx": 20, "global_frame_idx": 25829, "task_index": 28}, {"db_idx": 25830, "episode_idx": 140, "frame_idx": 21, "global_frame_idx": 25830, "task_index": 28}, {"db_idx": 25831, "episode_idx": 140, "frame_idx": 22, "global_frame_idx": 25831, "task_index": 28}, {"db_idx": 25832, "episode_idx": 140, "frame_idx": 23, "global_frame_idx": 25832, "task_index": 28}, {"db_idx": 25833, "episode_idx": 140, "frame_idx": 24, "global_frame_idx": 25833, "task_index": 28}, {"db_idx": 25834, "episode_idx": 140, "frame_idx": 25, "global_frame_idx": 25834, "task_index": 28}, {"db_idx": 25835, "episode_idx": 140, "frame_idx": 26, "global_frame_idx": 25835, "task_index": 28}, {"db_idx": 25836, "episode_idx": 140, "frame_idx": 27, "global_frame_idx": 25836, "task_index": 28}, {"db_idx": 25837, "episode_idx": 140, "frame_idx": 28, "global_frame_idx": 25837, "task_index": 28}, {"db_idx": 25838, "episode_idx": 140, "frame_idx": 29, "global_frame_idx": 25838, "task_index": 28}, {"db_idx": 25839, "episode_idx": 140, "frame_idx": 30, "global_frame_idx": 25839, "task_index": 28}, {"db_idx": 25840, "episode_idx": 140, "frame_idx": 31, "global_frame_idx": 25840, "task_index": 28}, {"db_idx": 25841, "episode_idx": 140, "frame_idx": 32, "global_frame_idx": 25841, "task_index": 28}, {"db_idx": 25842, "episode_idx": 140, "frame_idx": 33, "global_frame_idx": 25842, "task_index": 28}, {"db_idx": 25843, "episode_idx": 140, "frame_idx": 34, "global_frame_idx": 25843, "task_index": 28}, {"db_idx": 25844, "episode_idx": 140, "frame_idx": 35, "global_frame_idx": 25844, "task_index": 28}, {"db_idx": 25845, "episode_idx": 140, "frame_idx": 36, "global_frame_idx": 25845, "task_index": 28}, {"db_idx": 25846, "episode_idx": 140, "frame_idx": 37, "global_frame_idx": 25846, "task_index": 28}, {"db_idx": 25847, "episode_idx": 140, "frame_idx": 38, "global_frame_idx": 25847, "task_index": 28}, {"db_idx": 25848, "episode_idx": 140, "frame_idx": 39, "global_frame_idx": 25848, "task_index": 
28}, {"db_idx": 25849, "episode_idx": 140, "frame_idx": 40, "global_frame_idx": 25849, "task_index": 28}, {"db_idx": 25850, "episode_idx": 140, "frame_idx": 41, "global_frame_idx": 25850, "task_index": 28}, {"db_idx": 25851, "episode_idx": 140, "frame_idx": 42, "global_frame_idx": 25851, "task_index": 28}, {"db_idx": 25852, "episode_idx": 140, "frame_idx": 43, "global_frame_idx": 25852, "task_index": 28}, {"db_idx": 25853, "episode_idx": 140, "frame_idx": 44, "global_frame_idx": 25853, "task_index": 28}, {"db_idx": 25854, "episode_idx": 140, "frame_idx": 45, "global_frame_idx": 25854, "task_index": 28}, {"db_idx": 25855, "episode_idx": 140, "frame_idx": 46, "global_frame_idx": 25855, "task_index": 28}, {"db_idx": 25856, "episode_idx": 140, "frame_idx": 47, "global_frame_idx": 25856, "task_index": 28}, {"db_idx": 25857, "episode_idx": 140, "frame_idx": 48, "global_frame_idx": 25857, "task_index": 28}, {"db_idx": 25858, "episode_idx": 140, "frame_idx": 49, "global_frame_idx": 25858, "task_index": 28}, {"db_idx": 25859, "episode_idx": 140, "frame_idx": 50, "global_frame_idx": 25859, "task_index": 28}, {"db_idx": 25860, "episode_idx": 140, "frame_idx": 51, "global_frame_idx": 25860, "task_index": 28}, {"db_idx": 25861, "episode_idx": 140, "frame_idx": 52, "global_frame_idx": 25861, "task_index": 28}, {"db_idx": 25862, "episode_idx": 140, "frame_idx": 53, "global_frame_idx": 25862, "task_index": 28}, {"db_idx": 25863, "episode_idx": 140, "frame_idx": 54, "global_frame_idx": 25863, "task_index": 28}, {"db_idx": 25864, "episode_idx": 140, "frame_idx": 55, "global_frame_idx": 25864, "task_index": 28}, {"db_idx": 25865, "episode_idx": 140, "frame_idx": 56, "global_frame_idx": 25865, "task_index": 28}, {"db_idx": 25866, "episode_idx": 140, "frame_idx": 57, "global_frame_idx": 25866, "task_index": 28}, {"db_idx": 25867, "episode_idx": 140, "frame_idx": 58, "global_frame_idx": 25867, "task_index": 28}, {"db_idx": 25868, "episode_idx": 140, "frame_idx": 59, "global_frame_idx": 
25868, "task_index": 28}, {"db_idx": 25869, "episode_idx": 140, "frame_idx": 60, "global_frame_idx": 25869, "task_index": 28}, {"db_idx": 25870, "episode_idx": 140, "frame_idx": 61, "global_frame_idx": 25870, "task_index": 28}, {"db_idx": 25871, "episode_idx": 140, "frame_idx": 62, "global_frame_idx": 25871, "task_index": 28}, {"db_idx": 25872, "episode_idx": 140, "frame_idx": 63, "global_frame_idx": 25872, "task_index": 28}, {"db_idx": 25873, "episode_idx": 140, "frame_idx": 64, "global_frame_idx": 25873, "task_index": 28}, {"db_idx": 25874, "episode_idx": 140, "frame_idx": 65, "global_frame_idx": 25874, "task_index": 28}, {"db_idx": 25875, "episode_idx": 140, "frame_idx": 66, "global_frame_idx": 25875, "task_index": 28}, {"db_idx": 25876, "episode_idx": 140, "frame_idx": 67, "global_frame_idx": 25876, "task_index": 28}, {"db_idx": 25877, "episode_idx": 140, "frame_idx": 68, "global_frame_idx": 25877, "task_index": 28}, {"db_idx": 25878, "episode_idx": 140, "frame_idx": 69, "global_frame_idx": 25878, "task_index": 28}, {"db_idx": 25879, "episode_idx": 140, "frame_idx": 70, "global_frame_idx": 25879, "task_index": 28}, {"db_idx": 25880, "episode_idx": 140, "frame_idx": 71, "global_frame_idx": 25880, "task_index": 28}, {"db_idx": 25881, "episode_idx": 140, "frame_idx": 72, "global_frame_idx": 25881, "task_index": 28}, {"db_idx": 25882, "episode_idx": 140, "frame_idx": 73, "global_frame_idx": 25882, "task_index": 28}, {"db_idx": 25883, "episode_idx": 140, "frame_idx": 74, "global_frame_idx": 25883, "task_index": 28}, {"db_idx": 25884, "episode_idx": 140, "frame_idx": 75, "global_frame_idx": 25884, "task_index": 28}, {"db_idx": 25885, "episode_idx": 140, "frame_idx": 76, "global_frame_idx": 25885, "task_index": 28}, {"db_idx": 25886, "episode_idx": 140, "frame_idx": 77, "global_frame_idx": 25886, "task_index": 28}, {"db_idx": 25887, "episode_idx": 140, "frame_idx": 78, "global_frame_idx": 25887, "task_index": 28}, {"db_idx": 25888, "episode_idx": 140, "frame_idx": 79, 
"global_frame_idx": 25888, "task_index": 28}, {"db_idx": 25889, "episode_idx": 140, "frame_idx": 80, "global_frame_idx": 25889, "task_index": 28}, {"db_idx": 25890, "episode_idx": 140, "frame_idx": 81, "global_frame_idx": 25890, "task_index": 28}, {"db_idx": 25891, "episode_idx": 140, "frame_idx": 82, "global_frame_idx": 25891, "task_index": 28}, {"db_idx": 25892, "episode_idx": 140, "frame_idx": 83, "global_frame_idx": 25892, "task_index": 28}, {"db_idx": 25893, "episode_idx": 140, "frame_idx": 84, "global_frame_idx": 25893, "task_index": 28}, {"db_idx": 25894, "episode_idx": 140, "frame_idx": 85, "global_frame_idx": 25894, "task_index": 28}, {"db_idx": 25895, "episode_idx": 140, "frame_idx": 86, "global_frame_idx": 25895, "task_index": 28}, {"db_idx": 25896, "episode_idx": 140, "frame_idx": 87, "global_frame_idx": 25896, "task_index": 28}, {"db_idx": 25897, "episode_idx": 140, "frame_idx": 88, "global_frame_idx": 25897, "task_index": 28}, {"db_idx": 25898, "episode_idx": 140, "frame_idx": 89, "global_frame_idx": 25898, "task_index": 28}, {"db_idx": 25899, "episode_idx": 140, "frame_idx": 90, "global_frame_idx": 25899, "task_index": 28}, {"db_idx": 25900, "episode_idx": 140, "frame_idx": 91, "global_frame_idx": 25900, "task_index": 28}, {"db_idx": 25901, "episode_idx": 140, "frame_idx": 92, "global_frame_idx": 25901, "task_index": 28}, {"db_idx": 25902, "episode_idx": 140, "frame_idx": 93, "global_frame_idx": 25902, "task_index": 28}, {"db_idx": 25903, "episode_idx": 140, "frame_idx": 94, "global_frame_idx": 25903, "task_index": 28}, {"db_idx": 25904, "episode_idx": 140, "frame_idx": 95, "global_frame_idx": 25904, "task_index": 28}, {"db_idx": 25905, "episode_idx": 140, "frame_idx": 96, "global_frame_idx": 25905, "task_index": 28}, {"db_idx": 25906, "episode_idx": 140, "frame_idx": 97, "global_frame_idx": 25906, "task_index": 28}, {"db_idx": 25907, "episode_idx": 140, "frame_idx": 98, "global_frame_idx": 25907, "task_index": 28}, {"db_idx": 25908, "episode_idx": 
140, "frame_idx": 99, "global_frame_idx": 25908, "task_index": 28}, {"db_idx": 25909, "episode_idx": 140, "frame_idx": 100, "global_frame_idx": 25909, "task_index": 28}, {"db_idx": 25910, "episode_idx": 140, "frame_idx": 101, "global_frame_idx": 25910, "task_index": 28}, {"db_idx": 25911, "episode_idx": 140, "frame_idx": 102, "global_frame_idx": 25911, "task_index": 28}, {"db_idx": 25912, "episode_idx": 140, "frame_idx": 103, "global_frame_idx": 25912, "task_index": 28}, {"db_idx": 25913, "episode_idx": 140, "frame_idx": 104, "global_frame_idx": 25913, "task_index": 28}, {"db_idx": 25914, "episode_idx": 140, "frame_idx": 105, "global_frame_idx": 25914, "task_index": 28}, {"db_idx": 25915, "episode_idx": 140, "frame_idx": 106, "global_frame_idx": 25915, "task_index": 28}, {"db_idx": 25916, "episode_idx": 140, "frame_idx": 107, "global_frame_idx": 25916, "task_index": 28}, {"db_idx": 25917, "episode_idx": 140, "frame_idx": 108, "global_frame_idx": 25917, "task_index": 28}, {"db_idx": 25918, "episode_idx": 140, "frame_idx": 109, "global_frame_idx": 25918, "task_index": 28}, {"db_idx": 25919, "episode_idx": 140, "frame_idx": 110, "global_frame_idx": 25919, "task_index": 28}, {"db_idx": 25920, "episode_idx": 140, "frame_idx": 111, "global_frame_idx": 25920, "task_index": 28}, {"db_idx": 25921, "episode_idx": 140, "frame_idx": 112, "global_frame_idx": 25921, "task_index": 28}, {"db_idx": 25922, "episode_idx": 140, "frame_idx": 113, "global_frame_idx": 25922, "task_index": 28}, {"db_idx": 25923, "episode_idx": 140, "frame_idx": 114, "global_frame_idx": 25923, "task_index": 28}, {"db_idx": 25924, "episode_idx": 140, "frame_idx": 115, "global_frame_idx": 25924, "task_index": 28}, {"db_idx": 25925, "episode_idx": 140, "frame_idx": 116, "global_frame_idx": 25925, "task_index": 28}, {"db_idx": 25926, "episode_idx": 140, "frame_idx": 117, "global_frame_idx": 25926, "task_index": 28}, {"db_idx": 25927, "episode_idx": 140, "frame_idx": 118, "global_frame_idx": 25927, 
"task_index": 28}, {"db_idx": 25928, "episode_idx": 140, "frame_idx": 119, "global_frame_idx": 25928, "task_index": 28}, {"db_idx": 25929, "episode_idx": 140, "frame_idx": 120, "global_frame_idx": 25929, "task_index": 28}, {"db_idx": 25930, "episode_idx": 140, "frame_idx": 121, "global_frame_idx": 25930, "task_index": 28}, {"db_idx": 25931, "episode_idx": 140, "frame_idx": 122, "global_frame_idx": 25931, "task_index": 28}, {"db_idx": 25932, "episode_idx": 140, "frame_idx": 123, "global_frame_idx": 25932, "task_index": 28}, {"db_idx": 25933, "episode_idx": 140, "frame_idx": 124, "global_frame_idx": 25933, "task_index": 28}, {"db_idx": 25934, "episode_idx": 140, "frame_idx": 125, "global_frame_idx": 25934, "task_index": 28}, {"db_idx": 25935, "episode_idx": 140, "frame_idx": 126, "global_frame_idx": 25935, "task_index": 28}, {"db_idx": 25936, "episode_idx": 140, "frame_idx": 127, "global_frame_idx": 25936, "task_index": 28}, {"db_idx": 25937, "episode_idx": 141, "frame_idx": 0, "global_frame_idx": 25937, "task_index": 28}, {"db_idx": 25938, "episode_idx": 141, "frame_idx": 1, "global_frame_idx": 25938, "task_index": 28}, {"db_idx": 25939, "episode_idx": 141, "frame_idx": 2, "global_frame_idx": 25939, "task_index": 28}, {"db_idx": 25940, "episode_idx": 141, "frame_idx": 3, "global_frame_idx": 25940, "task_index": 28}, {"db_idx": 25941, "episode_idx": 141, "frame_idx": 4, "global_frame_idx": 25941, "task_index": 28}, {"db_idx": 25942, "episode_idx": 141, "frame_idx": 5, "global_frame_idx": 25942, "task_index": 28}, {"db_idx": 25943, "episode_idx": 141, "frame_idx": 6, "global_frame_idx": 25943, "task_index": 28}, {"db_idx": 25944, "episode_idx": 141, "frame_idx": 7, "global_frame_idx": 25944, "task_index": 28}, {"db_idx": 25945, "episode_idx": 141, "frame_idx": 8, "global_frame_idx": 25945, "task_index": 28}, {"db_idx": 25946, "episode_idx": 141, "frame_idx": 9, "global_frame_idx": 25946, "task_index": 28}, {"db_idx": 25947, "episode_idx": 141, "frame_idx": 10, 
"global_frame_idx": 25947, "task_index": 28}, {"db_idx": 25948, "episode_idx": 141, "frame_idx": 11, "global_frame_idx": 25948, "task_index": 28}, {"db_idx": 25949, "episode_idx": 141, "frame_idx": 12, "global_frame_idx": 25949, "task_index": 28}, {"db_idx": 25950, "episode_idx": 141, "frame_idx": 13, "global_frame_idx": 25950, "task_index": 28}, {"db_idx": 25951, "episode_idx": 141, "frame_idx": 14, "global_frame_idx": 25951, "task_index": 28}, {"db_idx": 25952, "episode_idx": 141, "frame_idx": 15, "global_frame_idx": 25952, "task_index": 28}, {"db_idx": 25953, "episode_idx": 141, "frame_idx": 16, "global_frame_idx": 25953, "task_index": 28}, {"db_idx": 25954, "episode_idx": 141, "frame_idx": 17, "global_frame_idx": 25954, "task_index": 28}, {"db_idx": 25955, "episode_idx": 141, "frame_idx": 18, "global_frame_idx": 25955, "task_index": 28}, {"db_idx": 25956, "episode_idx": 141, "frame_idx": 19, "global_frame_idx": 25956, "task_index": 28}, {"db_idx": 25957, "episode_idx": 141, "frame_idx": 20, "global_frame_idx": 25957, "task_index": 28}, {"db_idx": 25958, "episode_idx": 141, "frame_idx": 21, "global_frame_idx": 25958, "task_index": 28}, {"db_idx": 25959, "episode_idx": 141, "frame_idx": 22, "global_frame_idx": 25959, "task_index": 28}, {"db_idx": 25960, "episode_idx": 141, "frame_idx": 23, "global_frame_idx": 25960, "task_index": 28}, {"db_idx": 25961, "episode_idx": 141, "frame_idx": 24, "global_frame_idx": 25961, "task_index": 28}, {"db_idx": 25962, "episode_idx": 141, "frame_idx": 25, "global_frame_idx": 25962, "task_index": 28}, {"db_idx": 25963, "episode_idx": 141, "frame_idx": 26, "global_frame_idx": 25963, "task_index": 28}, {"db_idx": 25964, "episode_idx": 141, "frame_idx": 27, "global_frame_idx": 25964, "task_index": 28}, {"db_idx": 25965, "episode_idx": 141, "frame_idx": 28, "global_frame_idx": 25965, "task_index": 28}, {"db_idx": 25966, "episode_idx": 141, "frame_idx": 29, "global_frame_idx": 25966, "task_index": 28}, {"db_idx": 25967, "episode_idx": 
141, "frame_idx": 30, "global_frame_idx": 25967, "task_index": 28}, {"db_idx": 25968, "episode_idx": 141, "frame_idx": 31, "global_frame_idx": 25968, "task_index": 28}, {"db_idx": 25969, "episode_idx": 141, "frame_idx": 32, "global_frame_idx": 25969, "task_index": 28}, {"db_idx": 25970, "episode_idx": 141, "frame_idx": 33, "global_frame_idx": 25970, "task_index": 28}, {"db_idx": 25971, "episode_idx": 141, "frame_idx": 34, "global_frame_idx": 25971, "task_index": 28}, {"db_idx": 25972, "episode_idx": 141, "frame_idx": 35, "global_frame_idx": 25972, "task_index": 28}, {"db_idx": 25973, "episode_idx": 141, "frame_idx": 36, "global_frame_idx": 25973, "task_index": 28}, {"db_idx": 25974, "episode_idx": 141, "frame_idx": 37, "global_frame_idx": 25974, "task_index": 28}, {"db_idx": 25975, "episode_idx": 141, "frame_idx": 38, "global_frame_idx": 25975, "task_index": 28}, {"db_idx": 25976, "episode_idx": 141, "frame_idx": 39, "global_frame_idx": 25976, "task_index": 28}, {"db_idx": 25977, "episode_idx": 141, "frame_idx": 40, "global_frame_idx": 25977, "task_index": 28}, {"db_idx": 25978, "episode_idx": 141, "frame_idx": 41, "global_frame_idx": 25978, "task_index": 28}, {"db_idx": 25979, "episode_idx": 141, "frame_idx": 42, "global_frame_idx": 25979, "task_index": 28}, {"db_idx": 25980, "episode_idx": 141, "frame_idx": 43, "global_frame_idx": 25980, "task_index": 28}, {"db_idx": 25981, "episode_idx": 141, "frame_idx": 44, "global_frame_idx": 25981, "task_index": 28}, {"db_idx": 25982, "episode_idx": 141, "frame_idx": 45, "global_frame_idx": 25982, "task_index": 28}, {"db_idx": 25983, "episode_idx": 141, "frame_idx": 46, "global_frame_idx": 25983, "task_index": 28}, {"db_idx": 25984, "episode_idx": 141, "frame_idx": 47, "global_frame_idx": 25984, "task_index": 28}, {"db_idx": 25985, "episode_idx": 141, "frame_idx": 48, "global_frame_idx": 25985, "task_index": 28}, {"db_idx": 25986, "episode_idx": 141, "frame_idx": 49, "global_frame_idx": 25986, "task_index": 28}, {"db_idx": 
25987, "episode_idx": 141, "frame_idx": 50, "global_frame_idx": 25987, "task_index": 28}, {"db_idx": 25988, "episode_idx": 141, "frame_idx": 51, "global_frame_idx": 25988, "task_index": 28}, {"db_idx": 25989, "episode_idx": 141, "frame_idx": 52, "global_frame_idx": 25989, "task_index": 28}, {"db_idx": 25990, "episode_idx": 141, "frame_idx": 53, "global_frame_idx": 25990, "task_index": 28}, {"db_idx": 25991, "episode_idx": 141, "frame_idx": 54, "global_frame_idx": 25991, "task_index": 28}, {"db_idx": 25992, "episode_idx": 141, "frame_idx": 55, "global_frame_idx": 25992, "task_index": 28}, {"db_idx": 25993, "episode_idx": 141, "frame_idx": 56, "global_frame_idx": 25993, "task_index": 28}, {"db_idx": 25994, "episode_idx": 141, "frame_idx": 57, "global_frame_idx": 25994, "task_index": 28}, {"db_idx": 25995, "episode_idx": 141, "frame_idx": 58, "global_frame_idx": 25995, "task_index": 28}, {"db_idx": 25996, "episode_idx": 141, "frame_idx": 59, "global_frame_idx": 25996, "task_index": 28}, {"db_idx": 25997, "episode_idx": 141, "frame_idx": 60, "global_frame_idx": 25997, "task_index": 28}, {"db_idx": 25998, "episode_idx": 141, "frame_idx": 61, "global_frame_idx": 25998, "task_index": 28}, {"db_idx": 25999, "episode_idx": 141, "frame_idx": 62, "global_frame_idx": 25999, "task_index": 28}, {"db_idx": 26000, "episode_idx": 141, "frame_idx": 63, "global_frame_idx": 26000, "task_index": 28}, {"db_idx": 26001, "episode_idx": 141, "frame_idx": 64, "global_frame_idx": 26001, "task_index": 28}, {"db_idx": 26002, "episode_idx": 141, "frame_idx": 65, "global_frame_idx": 26002, "task_index": 28}, {"db_idx": 26003, "episode_idx": 141, "frame_idx": 66, "global_frame_idx": 26003, "task_index": 28}, {"db_idx": 26004, "episode_idx": 141, "frame_idx": 67, "global_frame_idx": 26004, "task_index": 28}, {"db_idx": 26005, "episode_idx": 141, "frame_idx": 68, "global_frame_idx": 26005, "task_index": 28}, {"db_idx": 26006, "episode_idx": 141, "frame_idx": 69, "global_frame_idx": 26006, 
"task_index": 28}, {"db_idx": 26007, "episode_idx": 141, "frame_idx": 70, "global_frame_idx": 26007, "task_index": 28}, {"db_idx": 26008, "episode_idx": 141, "frame_idx": 71, "global_frame_idx": 26008, "task_index": 28}, {"db_idx": 26009, "episode_idx": 141, "frame_idx": 72, "global_frame_idx": 26009, "task_index": 28}, {"db_idx": 26010, "episode_idx": 141, "frame_idx": 73, "global_frame_idx": 26010, "task_index": 28}, {"db_idx": 26011, "episode_idx": 141, "frame_idx": 74, "global_frame_idx": 26011, "task_index": 28}, {"db_idx": 26012, "episode_idx": 141, "frame_idx": 75, "global_frame_idx": 26012, "task_index": 28}, {"db_idx": 26013, "episode_idx": 141, "frame_idx": 76, "global_frame_idx": 26013, "task_index": 28}, {"db_idx": 26014, "episode_idx": 141, "frame_idx": 77, "global_frame_idx": 26014, "task_index": 28}, {"db_idx": 26015, "episode_idx": 141, "frame_idx": 78, "global_frame_idx": 26015, "task_index": 28}, {"db_idx": 26016, "episode_idx": 141, "frame_idx": 79, "global_frame_idx": 26016, "task_index": 28}, {"db_idx": 26017, "episode_idx": 141, "frame_idx": 80, "global_frame_idx": 26017, "task_index": 28}, {"db_idx": 26018, "episode_idx": 141, "frame_idx": 81, "global_frame_idx": 26018, "task_index": 28}, {"db_idx": 26019, "episode_idx": 141, "frame_idx": 82, "global_frame_idx": 26019, "task_index": 28}, {"db_idx": 26020, "episode_idx": 141, "frame_idx": 83, "global_frame_idx": 26020, "task_index": 28}, {"db_idx": 26021, "episode_idx": 141, "frame_idx": 84, "global_frame_idx": 26021, "task_index": 28}, {"db_idx": 26022, "episode_idx": 141, "frame_idx": 85, "global_frame_idx": 26022, "task_index": 28}, {"db_idx": 26023, "episode_idx": 141, "frame_idx": 86, "global_frame_idx": 26023, "task_index": 28}, {"db_idx": 26024, "episode_idx": 141, "frame_idx": 87, "global_frame_idx": 26024, "task_index": 28}, {"db_idx": 26025, "episode_idx": 141, "frame_idx": 88, "global_frame_idx": 26025, "task_index": 28}, {"db_idx": 26026, "episode_idx": 141, "frame_idx": 89, 
"global_frame_idx": 26026, "task_index": 28}, {"db_idx": 26027, "episode_idx": 141, "frame_idx": 90, "global_frame_idx": 26027, "task_index": 28}, {"db_idx": 26028, "episode_idx": 141, "frame_idx": 91, "global_frame_idx": 26028, "task_index": 28}, {"db_idx": 26029, "episode_idx": 141, "frame_idx": 92, "global_frame_idx": 26029, "task_index": 28}, {"db_idx": 26030, "episode_idx": 141, "frame_idx": 93, "global_frame_idx": 26030, "task_index": 28}, {"db_idx": 26031, "episode_idx": 141, "frame_idx": 94, "global_frame_idx": 26031, "task_index": 28}, {"db_idx": 26032, "episode_idx": 141, "frame_idx": 95, "global_frame_idx": 26032, "task_index": 28}, {"db_idx": 26033, "episode_idx": 141, "frame_idx": 96, "global_frame_idx": 26033, "task_index": 28}, {"db_idx": 26034, "episode_idx": 141, "frame_idx": 97, "global_frame_idx": 26034, "task_index": 28}, {"db_idx": 26035, "episode_idx": 141, "frame_idx": 98, "global_frame_idx": 26035, "task_index": 28}, {"db_idx": 26036, "episode_idx": 141, "frame_idx": 99, "global_frame_idx": 26036, "task_index": 28}, {"db_idx": 26037, "episode_idx": 141, "frame_idx": 100, "global_frame_idx": 26037, "task_index": 28}, {"db_idx": 26038, "episode_idx": 141, "frame_idx": 101, "global_frame_idx": 26038, "task_index": 28}, {"db_idx": 26039, "episode_idx": 141, "frame_idx": 102, "global_frame_idx": 26039, "task_index": 28}, {"db_idx": 26040, "episode_idx": 141, "frame_idx": 103, "global_frame_idx": 26040, "task_index": 28}, {"db_idx": 26041, "episode_idx": 141, "frame_idx": 104, "global_frame_idx": 26041, "task_index": 28}, {"db_idx": 26042, "episode_idx": 141, "frame_idx": 105, "global_frame_idx": 26042, "task_index": 28}, {"db_idx": 26043, "episode_idx": 141, "frame_idx": 106, "global_frame_idx": 26043, "task_index": 28}, {"db_idx": 26044, "episode_idx": 141, "frame_idx": 107, "global_frame_idx": 26044, "task_index": 28}, {"db_idx": 26045, "episode_idx": 141, "frame_idx": 108, "global_frame_idx": 26045, "task_index": 28}, {"db_idx": 26046, 
"episode_idx": 141, "frame_idx": 109, "global_frame_idx": 26046, "task_index": 28}, {"db_idx": 26047, "episode_idx": 141, "frame_idx": 110, "global_frame_idx": 26047, "task_index": 28}, {"db_idx": 26048, "episode_idx": 141, "frame_idx": 111, "global_frame_idx": 26048, "task_index": 28}, {"db_idx": 26049, "episode_idx": 141, "frame_idx": 112, "global_frame_idx": 26049, "task_index": 28}, {"db_idx": 26050, "episode_idx": 141, "frame_idx": 113, "global_frame_idx": 26050, "task_index": 28}, {"db_idx": 26051, "episode_idx": 141, "frame_idx": 114, "global_frame_idx": 26051, "task_index": 28}, {"db_idx": 26052, "episode_idx": 141, "frame_idx": 115, "global_frame_idx": 26052, "task_index": 28}, {"db_idx": 26053, "episode_idx": 141, "frame_idx": 116, "global_frame_idx": 26053, "task_index": 28}, {"db_idx": 26054, "episode_idx": 141, "frame_idx": 117, "global_frame_idx": 26054, "task_index": 28}, {"db_idx": 26055, "episode_idx": 141, "frame_idx": 118, "global_frame_idx": 26055, "task_index": 28}, {"db_idx": 26056, "episode_idx": 141, "frame_idx": 119, "global_frame_idx": 26056, "task_index": 28}, {"db_idx": 26057, "episode_idx": 141, "frame_idx": 120, "global_frame_idx": 26057, "task_index": 28}, {"db_idx": 26058, "episode_idx": 141, "frame_idx": 121, "global_frame_idx": 26058, "task_index": 28}, {"db_idx": 26059, "episode_idx": 141, "frame_idx": 122, "global_frame_idx": 26059, "task_index": 28}, {"db_idx": 26060, "episode_idx": 141, "frame_idx": 123, "global_frame_idx": 26060, "task_index": 28}, {"db_idx": 26061, "episode_idx": 141, "frame_idx": 124, "global_frame_idx": 26061, "task_index": 28}, {"db_idx": 26062, "episode_idx": 141, "frame_idx": 125, "global_frame_idx": 26062, "task_index": 28}, {"db_idx": 26063, "episode_idx": 141, "frame_idx": 126, "global_frame_idx": 26063, "task_index": 28}, {"db_idx": 26064, "episode_idx": 142, "frame_idx": 0, "global_frame_idx": 26064, "task_index": 28}, {"db_idx": 26065, "episode_idx": 142, "frame_idx": 1, "global_frame_idx": 26065, 
"task_index": 28}, {"db_idx": 26066, "episode_idx": 142, "frame_idx": 2, "global_frame_idx": 26066, "task_index": 28}, {"db_idx": 26067, "episode_idx": 142, "frame_idx": 3, "global_frame_idx": 26067, "task_index": 28}, {"db_idx": 26068, "episode_idx": 142, "frame_idx": 4, "global_frame_idx": 26068, "task_index": 28}, {"db_idx": 26069, "episode_idx": 142, "frame_idx": 5, "global_frame_idx": 26069, "task_index": 28}, {"db_idx": 26070, "episode_idx": 142, "frame_idx": 6, "global_frame_idx": 26070, "task_index": 28}, {"db_idx": 26071, "episode_idx": 142, "frame_idx": 7, "global_frame_idx": 26071, "task_index": 28}, {"db_idx": 26072, "episode_idx": 142, "frame_idx": 8, "global_frame_idx": 26072, "task_index": 28}, {"db_idx": 26073, "episode_idx": 142, "frame_idx": 9, "global_frame_idx": 26073, "task_index": 28}, {"db_idx": 26074, "episode_idx": 142, "frame_idx": 10, "global_frame_idx": 26074, "task_index": 28}, {"db_idx": 26075, "episode_idx": 142, "frame_idx": 11, "global_frame_idx": 26075, "task_index": 28}, {"db_idx": 26076, "episode_idx": 142, "frame_idx": 12, "global_frame_idx": 26076, "task_index": 28}, {"db_idx": 26077, "episode_idx": 142, "frame_idx": 13, "global_frame_idx": 26077, "task_index": 28}, {"db_idx": 26078, "episode_idx": 142, "frame_idx": 14, "global_frame_idx": 26078, "task_index": 28}, {"db_idx": 26079, "episode_idx": 142, "frame_idx": 15, "global_frame_idx": 26079, "task_index": 28}, {"db_idx": 26080, "episode_idx": 142, "frame_idx": 16, "global_frame_idx": 26080, "task_index": 28}, {"db_idx": 26081, "episode_idx": 142, "frame_idx": 17, "global_frame_idx": 26081, "task_index": 28}, {"db_idx": 26082, "episode_idx": 142, "frame_idx": 18, "global_frame_idx": 26082, "task_index": 28}, {"db_idx": 26083, "episode_idx": 142, "frame_idx": 19, "global_frame_idx": 26083, "task_index": 28}, {"db_idx": 26084, "episode_idx": 142, "frame_idx": 20, "global_frame_idx": 26084, "task_index": 28}, {"db_idx": 26085, "episode_idx": 142, "frame_idx": 21, 
"global_frame_idx": 26085, "task_index": 28}, {"db_idx": 26086, "episode_idx": 142, "frame_idx": 22, "global_frame_idx": 26086, "task_index": 28}, {"db_idx": 26087, "episode_idx": 142, "frame_idx": 23, "global_frame_idx": 26087, "task_index": 28}, {"db_idx": 26088, "episode_idx": 142, "frame_idx": 24, "global_frame_idx": 26088, "task_index": 28}, {"db_idx": 26089, "episode_idx": 142, "frame_idx": 25, "global_frame_idx": 26089, "task_index": 28}, {"db_idx": 26090, "episode_idx": 142, "frame_idx": 26, "global_frame_idx": 26090, "task_index": 28}, {"db_idx": 26091, "episode_idx": 142, "frame_idx": 27, "global_frame_idx": 26091, "task_index": 28}, {"db_idx": 26092, "episode_idx": 142, "frame_idx": 28, "global_frame_idx": 26092, "task_index": 28}, {"db_idx": 26093, "episode_idx": 142, "frame_idx": 29, "global_frame_idx": 26093, "task_index": 28}, {"db_idx": 26094, "episode_idx": 142, "frame_idx": 30, "global_frame_idx": 26094, "task_index": 28}, {"db_idx": 26095, "episode_idx": 142, "frame_idx": 31, "global_frame_idx": 26095, "task_index": 28}, {"db_idx": 26096, "episode_idx": 142, "frame_idx": 32, "global_frame_idx": 26096, "task_index": 28}, {"db_idx": 26097, "episode_idx": 142, "frame_idx": 33, "global_frame_idx": 26097, "task_index": 28}, {"db_idx": 26098, "episode_idx": 142, "frame_idx": 34, "global_frame_idx": 26098, "task_index": 28}, {"db_idx": 26099, "episode_idx": 142, "frame_idx": 35, "global_frame_idx": 26099, "task_index": 28}, {"db_idx": 26100, "episode_idx": 142, "frame_idx": 36, "global_frame_idx": 26100, "task_index": 28}, {"db_idx": 26101, "episode_idx": 142, "frame_idx": 37, "global_frame_idx": 26101, "task_index": 28}, {"db_idx": 26102, "episode_idx": 142, "frame_idx": 38, "global_frame_idx": 26102, "task_index": 28}, {"db_idx": 26103, "episode_idx": 142, "frame_idx": 39, "global_frame_idx": 26103, "task_index": 28}, {"db_idx": 26104, "episode_idx": 142, "frame_idx": 40, "global_frame_idx": 26104, "task_index": 28}, {"db_idx": 26105, "episode_idx": 
142, "frame_idx": 41, "global_frame_idx": 26105, "task_index": 28}, {"db_idx": 26106, "episode_idx": 142, "frame_idx": 42, "global_frame_idx": 26106, "task_index": 28}, {"db_idx": 26107, "episode_idx": 142, "frame_idx": 43, "global_frame_idx": 26107, "task_index": 28}, {"db_idx": 26108, "episode_idx": 142, "frame_idx": 44, "global_frame_idx": 26108, "task_index": 28}, {"db_idx": 26109, "episode_idx": 142, "frame_idx": 45, "global_frame_idx": 26109, "task_index": 28}, {"db_idx": 26110, "episode_idx": 142, "frame_idx": 46, "global_frame_idx": 26110, "task_index": 28}, {"db_idx": 26111, "episode_idx": 142, "frame_idx": 47, "global_frame_idx": 26111, "task_index": 28}, {"db_idx": 26112, "episode_idx": 142, "frame_idx": 48, "global_frame_idx": 26112, "task_index": 28}, {"db_idx": 26113, "episode_idx": 142, "frame_idx": 49, "global_frame_idx": 26113, "task_index": 28}, {"db_idx": 26114, "episode_idx": 142, "frame_idx": 50, "global_frame_idx": 26114, "task_index": 28}, {"db_idx": 26115, "episode_idx": 142, "frame_idx": 51, "global_frame_idx": 26115, "task_index": 28}, {"db_idx": 26116, "episode_idx": 142, "frame_idx": 52, "global_frame_idx": 26116, "task_index": 28}, {"db_idx": 26117, "episode_idx": 142, "frame_idx": 53, "global_frame_idx": 26117, "task_index": 28}, {"db_idx": 26118, "episode_idx": 142, "frame_idx": 54, "global_frame_idx": 26118, "task_index": 28}, {"db_idx": 26119, "episode_idx": 142, "frame_idx": 55, "global_frame_idx": 26119, "task_index": 28}, {"db_idx": 26120, "episode_idx": 142, "frame_idx": 56, "global_frame_idx": 26120, "task_index": 28}, {"db_idx": 26121, "episode_idx": 142, "frame_idx": 57, "global_frame_idx": 26121, "task_index": 28}, {"db_idx": 26122, "episode_idx": 142, "frame_idx": 58, "global_frame_idx": 26122, "task_index": 28}, {"db_idx": 26123, "episode_idx": 142, "frame_idx": 59, "global_frame_idx": 26123, "task_index": 28}, {"db_idx": 26124, "episode_idx": 142, "frame_idx": 60, "global_frame_idx": 26124, "task_index": 28}, {"db_idx": 
26125, "episode_idx": 142, "frame_idx": 61, "global_frame_idx": 26125, "task_index": 28}, {"db_idx": 26126, "episode_idx": 142, "frame_idx": 62, "global_frame_idx": 26126, "task_index": 28}, {"db_idx": 26127, "episode_idx": 142, "frame_idx": 63, "global_frame_idx": 26127, "task_index": 28}, {"db_idx": 26128, "episode_idx": 142, "frame_idx": 64, "global_frame_idx": 26128, "task_index": 28}, {"db_idx": 26129, "episode_idx": 142, "frame_idx": 65, "global_frame_idx": 26129, "task_index": 28}, {"db_idx": 26130, "episode_idx": 142, "frame_idx": 66, "global_frame_idx": 26130, "task_index": 28}, {"db_idx": 26131, "episode_idx": 142, "frame_idx": 67, "global_frame_idx": 26131, "task_index": 28}, {"db_idx": 26132, "episode_idx": 142, "frame_idx": 68, "global_frame_idx": 26132, "task_index": 28}, {"db_idx": 26133, "episode_idx": 142, "frame_idx": 69, "global_frame_idx": 26133, "task_index": 28}, {"db_idx": 26134, "episode_idx": 142, "frame_idx": 70, "global_frame_idx": 26134, "task_index": 28}, {"db_idx": 26135, "episode_idx": 142, "frame_idx": 71, "global_frame_idx": 26135, "task_index": 28}, {"db_idx": 26136, "episode_idx": 142, "frame_idx": 72, "global_frame_idx": 26136, "task_index": 28}, {"db_idx": 26137, "episode_idx": 142, "frame_idx": 73, "global_frame_idx": 26137, "task_index": 28}, {"db_idx": 26138, "episode_idx": 142, "frame_idx": 74, "global_frame_idx": 26138, "task_index": 28}, {"db_idx": 26139, "episode_idx": 142, "frame_idx": 75, "global_frame_idx": 26139, "task_index": 28}, {"db_idx": 26140, "episode_idx": 142, "frame_idx": 76, "global_frame_idx": 26140, "task_index": 28}, {"db_idx": 26141, "episode_idx": 142, "frame_idx": 77, "global_frame_idx": 26141, "task_index": 28}, {"db_idx": 26142, "episode_idx": 142, "frame_idx": 78, "global_frame_idx": 26142, "task_index": 28}, {"db_idx": 26143, "episode_idx": 142, "frame_idx": 79, "global_frame_idx": 26143, "task_index": 28}, {"db_idx": 26144, "episode_idx": 142, "frame_idx": 80, "global_frame_idx": 26144, 
"task_index": 28}, {"db_idx": 26145, "episode_idx": 142, "frame_idx": 81, "global_frame_idx": 26145, "task_index": 28}, {"db_idx": 26146, "episode_idx": 142, "frame_idx": 82, "global_frame_idx": 26146, "task_index": 28}, {"db_idx": 26147, "episode_idx": 142, "frame_idx": 83, "global_frame_idx": 26147, "task_index": 28}, {"db_idx": 26148, "episode_idx": 142, "frame_idx": 84, "global_frame_idx": 26148, "task_index": 28}, {"db_idx": 26149, "episode_idx": 142, "frame_idx": 85, "global_frame_idx": 26149, "task_index": 28}, {"db_idx": 26150, "episode_idx": 142, "frame_idx": 86, "global_frame_idx": 26150, "task_index": 28}, {"db_idx": 26151, "episode_idx": 142, "frame_idx": 87, "global_frame_idx": 26151, "task_index": 28}, {"db_idx": 26152, "episode_idx": 142, "frame_idx": 88, "global_frame_idx": 26152, "task_index": 28}, {"db_idx": 26153, "episode_idx": 142, "frame_idx": 89, "global_frame_idx": 26153, "task_index": 28}, {"db_idx": 26154, "episode_idx": 142, "frame_idx": 90, "global_frame_idx": 26154, "task_index": 28}, {"db_idx": 26155, "episode_idx": 142, "frame_idx": 91, "global_frame_idx": 26155, "task_index": 28}, {"db_idx": 26156, "episode_idx": 142, "frame_idx": 92, "global_frame_idx": 26156, "task_index": 28}, {"db_idx": 26157, "episode_idx": 142, "frame_idx": 93, "global_frame_idx": 26157, "task_index": 28}, {"db_idx": 26158, "episode_idx": 142, "frame_idx": 94, "global_frame_idx": 26158, "task_index": 28}, {"db_idx": 26159, "episode_idx": 142, "frame_idx": 95, "global_frame_idx": 26159, "task_index": 28}, {"db_idx": 26160, "episode_idx": 142, "frame_idx": 96, "global_frame_idx": 26160, "task_index": 28}, {"db_idx": 26161, "episode_idx": 142, "frame_idx": 97, "global_frame_idx": 26161, "task_index": 28}, {"db_idx": 26162, "episode_idx": 142, "frame_idx": 98, "global_frame_idx": 26162, "task_index": 28}, {"db_idx": 26163, "episode_idx": 142, "frame_idx": 99, "global_frame_idx": 26163, "task_index": 28}, {"db_idx": 26164, "episode_idx": 142, "frame_idx": 100, 
"global_frame_idx": 26164, "task_index": 28}, {"db_idx": 26165, "episode_idx": 142, "frame_idx": 101, "global_frame_idx": 26165, "task_index": 28}, {"db_idx": 26166, "episode_idx": 142, "frame_idx": 102, "global_frame_idx": 26166, "task_index": 28}, {"db_idx": 26167, "episode_idx": 142, "frame_idx": 103, "global_frame_idx": 26167, "task_index": 28}, {"db_idx": 26168, "episode_idx": 142, "frame_idx": 104, "global_frame_idx": 26168, "task_index": 28}, {"db_idx": 26169, "episode_idx": 142, "frame_idx": 105, "global_frame_idx": 26169, "task_index": 28}, {"db_idx": 26170, "episode_idx": 142, "frame_idx": 106, "global_frame_idx": 26170, "task_index": 28}, {"db_idx": 26171, "episode_idx": 142, "frame_idx": 107, "global_frame_idx": 26171, "task_index": 28}, {"db_idx": 26172, "episode_idx": 142, "frame_idx": 108, "global_frame_idx": 26172, "task_index": 28}, {"db_idx": 26173, "episode_idx": 142, "frame_idx": 109, "global_frame_idx": 26173, "task_index": 28}, {"db_idx": 26174, "episode_idx": 142, "frame_idx": 110, "global_frame_idx": 26174, "task_index": 28}, {"db_idx": 26175, "episode_idx": 142, "frame_idx": 111, "global_frame_idx": 26175, "task_index": 28}, {"db_idx": 26176, "episode_idx": 142, "frame_idx": 112, "global_frame_idx": 26176, "task_index": 28}, {"db_idx": 26177, "episode_idx": 142, "frame_idx": 113, "global_frame_idx": 26177, "task_index": 28}, {"db_idx": 26178, "episode_idx": 142, "frame_idx": 114, "global_frame_idx": 26178, "task_index": 28}, {"db_idx": 26179, "episode_idx": 142, "frame_idx": 115, "global_frame_idx": 26179, "task_index": 28}, {"db_idx": 26180, "episode_idx": 142, "frame_idx": 116, "global_frame_idx": 26180, "task_index": 28}, {"db_idx": 26181, "episode_idx": 142, "frame_idx": 117, "global_frame_idx": 26181, "task_index": 28}, {"db_idx": 26182, "episode_idx": 142, "frame_idx": 118, "global_frame_idx": 26182, "task_index": 28}, {"db_idx": 26183, "episode_idx": 142, "frame_idx": 119, "global_frame_idx": 26183, "task_index": 28}, {"db_idx": 
26184, "episode_idx": 142, "frame_idx": 120, "global_frame_idx": 26184, "task_index": 28}, {"db_idx": 26185, "episode_idx": 142, "frame_idx": 121, "global_frame_idx": 26185, "task_index": 28}, {"db_idx": 26186, "episode_idx": 142, "frame_idx": 122, "global_frame_idx": 26186, "task_index": 28}, {"db_idx": 26187, "episode_idx": 142, "frame_idx": 123, "global_frame_idx": 26187, "task_index": 28}, {"db_idx": 26188, "episode_idx": 142, "frame_idx": 124, "global_frame_idx": 26188, "task_index": 28}, {"db_idx": 26189, "episode_idx": 142, "frame_idx": 125, "global_frame_idx": 26189, "task_index": 28}, {"db_idx": 26190, "episode_idx": 143, "frame_idx": 0, "global_frame_idx": 26190, "task_index": 28}, {"db_idx": 26191, "episode_idx": 143, "frame_idx": 1, "global_frame_idx": 26191, "task_index": 28}, {"db_idx": 26192, "episode_idx": 143, "frame_idx": 2, "global_frame_idx": 26192, "task_index": 28}, {"db_idx": 26193, "episode_idx": 143, "frame_idx": 3, "global_frame_idx": 26193, "task_index": 28}, {"db_idx": 26194, "episode_idx": 143, "frame_idx": 4, "global_frame_idx": 26194, "task_index": 28}, {"db_idx": 26195, "episode_idx": 143, "frame_idx": 5, "global_frame_idx": 26195, "task_index": 28}, {"db_idx": 26196, "episode_idx": 143, "frame_idx": 6, "global_frame_idx": 26196, "task_index": 28}, {"db_idx": 26197, "episode_idx": 143, "frame_idx": 7, "global_frame_idx": 26197, "task_index": 28}, {"db_idx": 26198, "episode_idx": 143, "frame_idx": 8, "global_frame_idx": 26198, "task_index": 28}, {"db_idx": 26199, "episode_idx": 143, "frame_idx": 9, "global_frame_idx": 26199, "task_index": 28}, {"db_idx": 26200, "episode_idx": 143, "frame_idx": 10, "global_frame_idx": 26200, "task_index": 28}, {"db_idx": 26201, "episode_idx": 143, "frame_idx": 11, "global_frame_idx": 26201, "task_index": 28}, {"db_idx": 26202, "episode_idx": 143, "frame_idx": 12, "global_frame_idx": 26202, "task_index": 28}, {"db_idx": 26203, "episode_idx": 143, "frame_idx": 13, "global_frame_idx": 26203, "task_index": 
28}, {"db_idx": 26204, "episode_idx": 143, "frame_idx": 14, "global_frame_idx": 26204, "task_index": 28}, {"db_idx": 26205, "episode_idx": 143, "frame_idx": 15, "global_frame_idx": 26205, "task_index": 28}, {"db_idx": 26206, "episode_idx": 143, "frame_idx": 16, "global_frame_idx": 26206, "task_index": 28}, {"db_idx": 26207, "episode_idx": 143, "frame_idx": 17, "global_frame_idx": 26207, "task_index": 28}, {"db_idx": 26208, "episode_idx": 143, "frame_idx": 18, "global_frame_idx": 26208, "task_index": 28}, {"db_idx": 26209, "episode_idx": 143, "frame_idx": 19, "global_frame_idx": 26209, "task_index": 28}, {"db_idx": 26210, "episode_idx": 143, "frame_idx": 20, "global_frame_idx": 26210, "task_index": 28}, {"db_idx": 26211, "episode_idx": 143, "frame_idx": 21, "global_frame_idx": 26211, "task_index": 28}, {"db_idx": 26212, "episode_idx": 143, "frame_idx": 22, "global_frame_idx": 26212, "task_index": 28}, {"db_idx": 26213, "episode_idx": 143, "frame_idx": 23, "global_frame_idx": 26213, "task_index": 28}, {"db_idx": 26214, "episode_idx": 143, "frame_idx": 24, "global_frame_idx": 26214, "task_index": 28}, {"db_idx": 26215, "episode_idx": 143, "frame_idx": 25, "global_frame_idx": 26215, "task_index": 28}, {"db_idx": 26216, "episode_idx": 143, "frame_idx": 26, "global_frame_idx": 26216, "task_index": 28}, {"db_idx": 26217, "episode_idx": 143, "frame_idx": 27, "global_frame_idx": 26217, "task_index": 28}, {"db_idx": 26218, "episode_idx": 143, "frame_idx": 28, "global_frame_idx": 26218, "task_index": 28}, {"db_idx": 26219, "episode_idx": 143, "frame_idx": 29, "global_frame_idx": 26219, "task_index": 28}, {"db_idx": 26220, "episode_idx": 143, "frame_idx": 30, "global_frame_idx": 26220, "task_index": 28}, {"db_idx": 26221, "episode_idx": 143, "frame_idx": 31, "global_frame_idx": 26221, "task_index": 28}, {"db_idx": 26222, "episode_idx": 143, "frame_idx": 32, "global_frame_idx": 26222, "task_index": 28}, {"db_idx": 26223, "episode_idx": 143, "frame_idx": 33, "global_frame_idx": 
26223, "task_index": 28}, {"db_idx": 26224, "episode_idx": 143, "frame_idx": 34, "global_frame_idx": 26224, "task_index": 28}, {"db_idx": 26225, "episode_idx": 143, "frame_idx": 35, "global_frame_idx": 26225, "task_index": 28}, {"db_idx": 26226, "episode_idx": 143, "frame_idx": 36, "global_frame_idx": 26226, "task_index": 28}, {"db_idx": 26227, "episode_idx": 143, "frame_idx": 37, "global_frame_idx": 26227, "task_index": 28}, {"db_idx": 26228, "episode_idx": 143, "frame_idx": 38, "global_frame_idx": 26228, "task_index": 28}, {"db_idx": 26229, "episode_idx": 143, "frame_idx": 39, "global_frame_idx": 26229, "task_index": 28}, {"db_idx": 26230, "episode_idx": 143, "frame_idx": 40, "global_frame_idx": 26230, "task_index": 28}, {"db_idx": 26231, "episode_idx": 143, "frame_idx": 41, "global_frame_idx": 26231, "task_index": 28}, {"db_idx": 26232, "episode_idx": 143, "frame_idx": 42, "global_frame_idx": 26232, "task_index": 28}, {"db_idx": 26233, "episode_idx": 143, "frame_idx": 43, "global_frame_idx": 26233, "task_index": 28}, {"db_idx": 26234, "episode_idx": 143, "frame_idx": 44, "global_frame_idx": 26234, "task_index": 28}, {"db_idx": 26235, "episode_idx": 143, "frame_idx": 45, "global_frame_idx": 26235, "task_index": 28}, {"db_idx": 26236, "episode_idx": 143, "frame_idx": 46, "global_frame_idx": 26236, "task_index": 28}, {"db_idx": 26237, "episode_idx": 143, "frame_idx": 47, "global_frame_idx": 26237, "task_index": 28}, {"db_idx": 26238, "episode_idx": 143, "frame_idx": 48, "global_frame_idx": 26238, "task_index": 28}, {"db_idx": 26239, "episode_idx": 143, "frame_idx": 49, "global_frame_idx": 26239, "task_index": 28}, {"db_idx": 26240, "episode_idx": 143, "frame_idx": 50, "global_frame_idx": 26240, "task_index": 28}, {"db_idx": 26241, "episode_idx": 143, "frame_idx": 51, "global_frame_idx": 26241, "task_index": 28}, {"db_idx": 26242, "episode_idx": 143, "frame_idx": 52, "global_frame_idx": 26242, "task_index": 28}, {"db_idx": 26243, "episode_idx": 143, "frame_idx": 53, 
"global_frame_idx": 26243, "task_index": 28}, {"db_idx": 26244, "episode_idx": 143, "frame_idx": 54, "global_frame_idx": 26244, "task_index": 28}, {"db_idx": 26245, "episode_idx": 143, "frame_idx": 55, "global_frame_idx": 26245, "task_index": 28}, {"db_idx": 26246, "episode_idx": 143, "frame_idx": 56, "global_frame_idx": 26246, "task_index": 28}, {"db_idx": 26247, "episode_idx": 143, "frame_idx": 57, "global_frame_idx": 26247, "task_index": 28}, {"db_idx": 26248, "episode_idx": 143, "frame_idx": 58, "global_frame_idx": 26248, "task_index": 28}, {"db_idx": 26249, "episode_idx": 143, "frame_idx": 59, "global_frame_idx": 26249, "task_index": 28}, {"db_idx": 26250, "episode_idx": 143, "frame_idx": 60, "global_frame_idx": 26250, "task_index": 28}, {"db_idx": 26251, "episode_idx": 143, "frame_idx": 61, "global_frame_idx": 26251, "task_index": 28}, {"db_idx": 26252, "episode_idx": 143, "frame_idx": 62, "global_frame_idx": 26252, "task_index": 28}, {"db_idx": 26253, "episode_idx": 143, "frame_idx": 63, "global_frame_idx": 26253, "task_index": 28}, {"db_idx": 26254, "episode_idx": 143, "frame_idx": 64, "global_frame_idx": 26254, "task_index": 28}, {"db_idx": 26255, "episode_idx": 143, "frame_idx": 65, "global_frame_idx": 26255, "task_index": 28}, {"db_idx": 26256, "episode_idx": 143, "frame_idx": 66, "global_frame_idx": 26256, "task_index": 28}, {"db_idx": 26257, "episode_idx": 143, "frame_idx": 67, "global_frame_idx": 26257, "task_index": 28}, {"db_idx": 26258, "episode_idx": 143, "frame_idx": 68, "global_frame_idx": 26258, "task_index": 28}, {"db_idx": 26259, "episode_idx": 143, "frame_idx": 69, "global_frame_idx": 26259, "task_index": 28}, {"db_idx": 26260, "episode_idx": 143, "frame_idx": 70, "global_frame_idx": 26260, "task_index": 28}, {"db_idx": 26261, "episode_idx": 143, "frame_idx": 71, "global_frame_idx": 26261, "task_index": 28}, {"db_idx": 26262, "episode_idx": 143, "frame_idx": 72, "global_frame_idx": 26262, "task_index": 28}, {"db_idx": 26263, "episode_idx": 
143, "frame_idx": 73, "global_frame_idx": 26263, "task_index": 28}, {"db_idx": 26264, "episode_idx": 143, "frame_idx": 74, "global_frame_idx": 26264, "task_index": 28}, {"db_idx": 26265, "episode_idx": 143, "frame_idx": 75, "global_frame_idx": 26265, "task_index": 28}, {"db_idx": 26266, "episode_idx": 143, "frame_idx": 76, "global_frame_idx": 26266, "task_index": 28}, {"db_idx": 26267, "episode_idx": 143, "frame_idx": 77, "global_frame_idx": 26267, "task_index": 28}, {"db_idx": 26268, "episode_idx": 143, "frame_idx": 78, "global_frame_idx": 26268, "task_index": 28}, {"db_idx": 26269, "episode_idx": 143, "frame_idx": 79, "global_frame_idx": 26269, "task_index": 28}, {"db_idx": 26270, "episode_idx": 143, "frame_idx": 80, "global_frame_idx": 26270, "task_index": 28}, {"db_idx": 26271, "episode_idx": 143, "frame_idx": 81, "global_frame_idx": 26271, "task_index": 28}, {"db_idx": 26272, "episode_idx": 143, "frame_idx": 82, "global_frame_idx": 26272, "task_index": 28}, {"db_idx": 26273, "episode_idx": 143, "frame_idx": 83, "global_frame_idx": 26273, "task_index": 28}, {"db_idx": 26274, "episode_idx": 143, "frame_idx": 84, "global_frame_idx": 26274, "task_index": 28}, {"db_idx": 26275, "episode_idx": 143, "frame_idx": 85, "global_frame_idx": 26275, "task_index": 28}, {"db_idx": 26276, "episode_idx": 143, "frame_idx": 86, "global_frame_idx": 26276, "task_index": 28}, {"db_idx": 26277, "episode_idx": 143, "frame_idx": 87, "global_frame_idx": 26277, "task_index": 28}, {"db_idx": 26278, "episode_idx": 143, "frame_idx": 88, "global_frame_idx": 26278, "task_index": 28}, {"db_idx": 26279, "episode_idx": 143, "frame_idx": 89, "global_frame_idx": 26279, "task_index": 28}, {"db_idx": 26280, "episode_idx": 143, "frame_idx": 90, "global_frame_idx": 26280, "task_index": 28}, {"db_idx": 26281, "episode_idx": 143, "frame_idx": 91, "global_frame_idx": 26281, "task_index": 28}, {"db_idx": 26282, "episode_idx": 143, "frame_idx": 92, "global_frame_idx": 26282, "task_index": 28}, {"db_idx": 
26283, "episode_idx": 143, "frame_idx": 93, "global_frame_idx": 26283, "task_index": 28}, {"db_idx": 26284, "episode_idx": 143, "frame_idx": 94, "global_frame_idx": 26284, "task_index": 28}, {"db_idx": 26285, "episode_idx": 143, "frame_idx": 95, "global_frame_idx": 26285, "task_index": 28}, {"db_idx": 26286, "episode_idx": 143, "frame_idx": 96, "global_frame_idx": 26286, "task_index": 28}, {"db_idx": 26287, "episode_idx": 143, "frame_idx": 97, "global_frame_idx": 26287, "task_index": 28}, {"db_idx": 26288, "episode_idx": 143, "frame_idx": 98, "global_frame_idx": 26288, "task_index": 28}, {"db_idx": 26289, "episode_idx": 143, "frame_idx": 99, "global_frame_idx": 26289, "task_index": 28}, {"db_idx": 26290, "episode_idx": 143, "frame_idx": 100, "global_frame_idx": 26290, "task_index": 28}, {"db_idx": 26291, "episode_idx": 143, "frame_idx": 101, "global_frame_idx": 26291, "task_index": 28}, {"db_idx": 26292, "episode_idx": 143, "frame_idx": 102, "global_frame_idx": 26292, "task_index": 28}, {"db_idx": 26293, "episode_idx": 143, "frame_idx": 103, "global_frame_idx": 26293, "task_index": 28}, {"db_idx": 26294, "episode_idx": 143, "frame_idx": 104, "global_frame_idx": 26294, "task_index": 28}, {"db_idx": 26295, "episode_idx": 143, "frame_idx": 105, "global_frame_idx": 26295, "task_index": 28}, {"db_idx": 26296, "episode_idx": 143, "frame_idx": 106, "global_frame_idx": 26296, "task_index": 28}, {"db_idx": 26297, "episode_idx": 143, "frame_idx": 107, "global_frame_idx": 26297, "task_index": 28}, {"db_idx": 26298, "episode_idx": 143, "frame_idx": 108, "global_frame_idx": 26298, "task_index": 28}, {"db_idx": 26299, "episode_idx": 143, "frame_idx": 109, "global_frame_idx": 26299, "task_index": 28}, {"db_idx": 26300, "episode_idx": 143, "frame_idx": 110, "global_frame_idx": 26300, "task_index": 28}, {"db_idx": 26301, "episode_idx": 143, "frame_idx": 111, "global_frame_idx": 26301, "task_index": 28}, {"db_idx": 26302, "episode_idx": 143, "frame_idx": 112, "global_frame_idx": 
26302, "task_index": 28}, {"db_idx": 26303, "episode_idx": 143, "frame_idx": 113, "global_frame_idx": 26303, "task_index": 28}, {"db_idx": 26304, "episode_idx": 143, "frame_idx": 114, "global_frame_idx": 26304, "task_index": 28}, {"db_idx": 26305, "episode_idx": 143, "frame_idx": 115, "global_frame_idx": 26305, "task_index": 28}, {"db_idx": 26306, "episode_idx": 143, "frame_idx": 116, "global_frame_idx": 26306, "task_index": 28}, {"db_idx": 26307, "episode_idx": 143, "frame_idx": 117, "global_frame_idx": 26307, "task_index": 28}, {"db_idx": 26308, "episode_idx": 143, "frame_idx": 118, "global_frame_idx": 26308, "task_index": 28}, {"db_idx": 26309, "episode_idx": 143, "frame_idx": 119, "global_frame_idx": 26309, "task_index": 28}, {"db_idx": 26310, "episode_idx": 143, "frame_idx": 120, "global_frame_idx": 26310, "task_index": 28}, {"db_idx": 26311, "episode_idx": 143, "frame_idx": 121, "global_frame_idx": 26311, "task_index": 28}, {"db_idx": 26312, "episode_idx": 143, "frame_idx": 122, "global_frame_idx": 26312, "task_index": 28}, {"db_idx": 26313, "episode_idx": 143, "frame_idx": 123, "global_frame_idx": 26313, "task_index": 28}, {"db_idx": 26314, "episode_idx": 143, "frame_idx": 124, "global_frame_idx": 26314, "task_index": 28}, {"db_idx": 26315, "episode_idx": 143, "frame_idx": 125, "global_frame_idx": 26315, "task_index": 28}, {"db_idx": 26316, "episode_idx": 143, "frame_idx": 126, "global_frame_idx": 26316, "task_index": 28}, {"db_idx": 26317, "episode_idx": 143, "frame_idx": 127, "global_frame_idx": 26317, "task_index": 28}, {"db_idx": 26318, "episode_idx": 143, "frame_idx": 128, "global_frame_idx": 26318, "task_index": 28}, {"db_idx": 26319, "episode_idx": 143, "frame_idx": 129, "global_frame_idx": 26319, "task_index": 28}, {"db_idx": 26320, "episode_idx": 143, "frame_idx": 130, "global_frame_idx": 26320, "task_index": 28}, {"db_idx": 26321, "episode_idx": 143, "frame_idx": 131, "global_frame_idx": 26321, "task_index": 28}, {"db_idx": 26322, "episode_idx": 
143, "frame_idx": 132, "global_frame_idx": 26322, "task_index": 28}, {"db_idx": 26323, "episode_idx": 143, "frame_idx": 133, "global_frame_idx": 26323, "task_index": 28}, {"db_idx": 26324, "episode_idx": 143, "frame_idx": 134, "global_frame_idx": 26324, "task_index": 28}, {"db_idx": 26325, "episode_idx": 143, "frame_idx": 135, "global_frame_idx": 26325, "task_index": 28}, {"db_idx": 26326, "episode_idx": 143, "frame_idx": 136, "global_frame_idx": 26326, "task_index": 28}, {"db_idx": 26327, "episode_idx": 143, "frame_idx": 137, "global_frame_idx": 26327, "task_index": 28}, {"db_idx": 26328, "episode_idx": 143, "frame_idx": 138, "global_frame_idx": 26328, "task_index": 28}, {"db_idx": 26329, "episode_idx": 143, "frame_idx": 139, "global_frame_idx": 26329, "task_index": 28}, {"db_idx": 26330, "episode_idx": 143, "frame_idx": 140, "global_frame_idx": 26330, "task_index": 28}, {"db_idx": 26331, "episode_idx": 143, "frame_idx": 141, "global_frame_idx": 26331, "task_index": 28}, {"db_idx": 26332, "episode_idx": 144, "frame_idx": 0, "global_frame_idx": 26332, "task_index": 28}, {"db_idx": 26333, "episode_idx": 144, "frame_idx": 1, "global_frame_idx": 26333, "task_index": 28}, {"db_idx": 26334, "episode_idx": 144, "frame_idx": 2, "global_frame_idx": 26334, "task_index": 28}, {"db_idx": 26335, "episode_idx": 144, "frame_idx": 3, "global_frame_idx": 26335, "task_index": 28}, {"db_idx": 26336, "episode_idx": 144, "frame_idx": 4, "global_frame_idx": 26336, "task_index": 28}, {"db_idx": 26337, "episode_idx": 144, "frame_idx": 5, "global_frame_idx": 26337, "task_index": 28}, {"db_idx": 26338, "episode_idx": 144, "frame_idx": 6, "global_frame_idx": 26338, "task_index": 28}, {"db_idx": 26339, "episode_idx": 144, "frame_idx": 7, "global_frame_idx": 26339, "task_index": 28}, {"db_idx": 26340, "episode_idx": 144, "frame_idx": 8, "global_frame_idx": 26340, "task_index": 28}, {"db_idx": 26341, "episode_idx": 144, "frame_idx": 9, "global_frame_idx": 26341, "task_index": 28}, {"db_idx": 
26342, "episode_idx": 144, "frame_idx": 10, "global_frame_idx": 26342, "task_index": 28}, {"db_idx": 26343, "episode_idx": 144, "frame_idx": 11, "global_frame_idx": 26343, "task_index": 28}, {"db_idx": 26344, "episode_idx": 144, "frame_idx": 12, "global_frame_idx": 26344, "task_index": 28}, {"db_idx": 26345, "episode_idx": 144, "frame_idx": 13, "global_frame_idx": 26345, "task_index": 28}, {"db_idx": 26346, "episode_idx": 144, "frame_idx": 14, "global_frame_idx": 26346, "task_index": 28}, {"db_idx": 26347, "episode_idx": 144, "frame_idx": 15, "global_frame_idx": 26347, "task_index": 28}, {"db_idx": 26348, "episode_idx": 144, "frame_idx": 16, "global_frame_idx": 26348, "task_index": 28}, {"db_idx": 26349, "episode_idx": 144, "frame_idx": 17, "global_frame_idx": 26349, "task_index": 28}, {"db_idx": 26350, "episode_idx": 144, "frame_idx": 18, "global_frame_idx": 26350, "task_index": 28}, {"db_idx": 26351, "episode_idx": 144, "frame_idx": 19, "global_frame_idx": 26351, "task_index": 28}, {"db_idx": 26352, "episode_idx": 144, "frame_idx": 20, "global_frame_idx": 26352, "task_index": 28}, {"db_idx": 26353, "episode_idx": 144, "frame_idx": 21, "global_frame_idx": 26353, "task_index": 28}, {"db_idx": 26354, "episode_idx": 144, "frame_idx": 22, "global_frame_idx": 26354, "task_index": 28}, {"db_idx": 26355, "episode_idx": 144, "frame_idx": 23, "global_frame_idx": 26355, "task_index": 28}, {"db_idx": 26356, "episode_idx": 144, "frame_idx": 24, "global_frame_idx": 26356, "task_index": 28}, {"db_idx": 26357, "episode_idx": 144, "frame_idx": 25, "global_frame_idx": 26357, "task_index": 28}, {"db_idx": 26358, "episode_idx": 144, "frame_idx": 26, "global_frame_idx": 26358, "task_index": 28}, {"db_idx": 26359, "episode_idx": 144, "frame_idx": 27, "global_frame_idx": 26359, "task_index": 28}, {"db_idx": 26360, "episode_idx": 144, "frame_idx": 28, "global_frame_idx": 26360, "task_index": 28}, {"db_idx": 26361, "episode_idx": 144, "frame_idx": 29, "global_frame_idx": 26361, 
"task_index": 28}, {"db_idx": 26362, "episode_idx": 144, "frame_idx": 30, "global_frame_idx": 26362, "task_index": 28}, {"db_idx": 26363, "episode_idx": 144, "frame_idx": 31, "global_frame_idx": 26363, "task_index": 28}, {"db_idx": 26364, "episode_idx": 144, "frame_idx": 32, "global_frame_idx": 26364, "task_index": 28}, {"db_idx": 26365, "episode_idx": 144, "frame_idx": 33, "global_frame_idx": 26365, "task_index": 28}, {"db_idx": 26366, "episode_idx": 144, "frame_idx": 34, "global_frame_idx": 26366, "task_index": 28}, {"db_idx": 26367, "episode_idx": 144, "frame_idx": 35, "global_frame_idx": 26367, "task_index": 28}, {"db_idx": 26368, "episode_idx": 144, "frame_idx": 36, "global_frame_idx": 26368, "task_index": 28}, {"db_idx": 26369, "episode_idx": 144, "frame_idx": 37, "global_frame_idx": 26369, "task_index": 28}, {"db_idx": 26370, "episode_idx": 144, "frame_idx": 38, "global_frame_idx": 26370, "task_index": 28}, {"db_idx": 26371, "episode_idx": 144, "frame_idx": 39, "global_frame_idx": 26371, "task_index": 28}, {"db_idx": 26372, "episode_idx": 144, "frame_idx": 40, "global_frame_idx": 26372, "task_index": 28}, {"db_idx": 26373, "episode_idx": 144, "frame_idx": 41, "global_frame_idx": 26373, "task_index": 28}, {"db_idx": 26374, "episode_idx": 144, "frame_idx": 42, "global_frame_idx": 26374, "task_index": 28}, {"db_idx": 26375, "episode_idx": 144, "frame_idx": 43, "global_frame_idx": 26375, "task_index": 28}, {"db_idx": 26376, "episode_idx": 144, "frame_idx": 44, "global_frame_idx": 26376, "task_index": 28}, {"db_idx": 26377, "episode_idx": 144, "frame_idx": 45, "global_frame_idx": 26377, "task_index": 28}, {"db_idx": 26378, "episode_idx": 144, "frame_idx": 46, "global_frame_idx": 26378, "task_index": 28}, {"db_idx": 26379, "episode_idx": 144, "frame_idx": 47, "global_frame_idx": 26379, "task_index": 28}, {"db_idx": 26380, "episode_idx": 144, "frame_idx": 48, "global_frame_idx": 26380, "task_index": 28}, {"db_idx": 26381, "episode_idx": 144, "frame_idx": 49, 
"global_frame_idx": 26381, "task_index": 28}, {"db_idx": 26382, "episode_idx": 144, "frame_idx": 50, "global_frame_idx": 26382, "task_index": 28}, {"db_idx": 26383, "episode_idx": 144, "frame_idx": 51, "global_frame_idx": 26383, "task_index": 28}, {"db_idx": 26384, "episode_idx": 144, "frame_idx": 52, "global_frame_idx": 26384, "task_index": 28}, {"db_idx": 26385, "episode_idx": 144, "frame_idx": 53, "global_frame_idx": 26385, "task_index": 28}, {"db_idx": 26386, "episode_idx": 144, "frame_idx": 54, "global_frame_idx": 26386, "task_index": 28}, {"db_idx": 26387, "episode_idx": 144, "frame_idx": 55, "global_frame_idx": 26387, "task_index": 28}, {"db_idx": 26388, "episode_idx": 144, "frame_idx": 56, "global_frame_idx": 26388, "task_index": 28}, {"db_idx": 26389, "episode_idx": 144, "frame_idx": 57, "global_frame_idx": 26389, "task_index": 28}, {"db_idx": 26390, "episode_idx": 144, "frame_idx": 58, "global_frame_idx": 26390, "task_index": 28}, {"db_idx": 26391, "episode_idx": 144, "frame_idx": 59, "global_frame_idx": 26391, "task_index": 28}, {"db_idx": 26392, "episode_idx": 144, "frame_idx": 60, "global_frame_idx": 26392, "task_index": 28}, {"db_idx": 26393, "episode_idx": 144, "frame_idx": 61, "global_frame_idx": 26393, "task_index": 28}, {"db_idx": 26394, "episode_idx": 144, "frame_idx": 62, "global_frame_idx": 26394, "task_index": 28}, {"db_idx": 26395, "episode_idx": 144, "frame_idx": 63, "global_frame_idx": 26395, "task_index": 28}, {"db_idx": 26396, "episode_idx": 144, "frame_idx": 64, "global_frame_idx": 26396, "task_index": 28}, {"db_idx": 26397, "episode_idx": 144, "frame_idx": 65, "global_frame_idx": 26397, "task_index": 28}, {"db_idx": 26398, "episode_idx": 144, "frame_idx": 66, "global_frame_idx": 26398, "task_index": 28}, {"db_idx": 26399, "episode_idx": 144, "frame_idx": 67, "global_frame_idx": 26399, "task_index": 28}, {"db_idx": 26400, "episode_idx": 144, "frame_idx": 68, "global_frame_idx": 26400, "task_index": 28}, {"db_idx": 26401, "episode_idx": 
144, "frame_idx": 69, "global_frame_idx": 26401, "task_index": 28}, {"db_idx": 26402, "episode_idx": 144, "frame_idx": 70, "global_frame_idx": 26402, "task_index": 28}, {"db_idx": 26403, "episode_idx": 144, "frame_idx": 71, "global_frame_idx": 26403, "task_index": 28}, {"db_idx": 26404, "episode_idx": 144, "frame_idx": 72, "global_frame_idx": 26404, "task_index": 28}, {"db_idx": 26405, "episode_idx": 144, "frame_idx": 73, "global_frame_idx": 26405, "task_index": 28}, {"db_idx": 26406, "episode_idx": 144, "frame_idx": 74, "global_frame_idx": 26406, "task_index": 28}, {"db_idx": 26407, "episode_idx": 144, "frame_idx": 75, "global_frame_idx": 26407, "task_index": 28}, {"db_idx": 26408, "episode_idx": 144, "frame_idx": 76, "global_frame_idx": 26408, "task_index": 28}, {"db_idx": 26409, "episode_idx": 144, "frame_idx": 77, "global_frame_idx": 26409, "task_index": 28}, {"db_idx": 26410, "episode_idx": 144, "frame_idx": 78, "global_frame_idx": 26410, "task_index": 28}, {"db_idx": 26411, "episode_idx": 144, "frame_idx": 79, "global_frame_idx": 26411, "task_index": 28}, {"db_idx": 26412, "episode_idx": 144, "frame_idx": 80, "global_frame_idx": 26412, "task_index": 28}, {"db_idx": 26413, "episode_idx": 144, "frame_idx": 81, "global_frame_idx": 26413, "task_index": 28}, {"db_idx": 26414, "episode_idx": 144, "frame_idx": 82, "global_frame_idx": 26414, "task_index": 28}, {"db_idx": 26415, "episode_idx": 144, "frame_idx": 83, "global_frame_idx": 26415, "task_index": 28}, {"db_idx": 26416, "episode_idx": 144, "frame_idx": 84, "global_frame_idx": 26416, "task_index": 28}, {"db_idx": 26417, "episode_idx": 144, "frame_idx": 85, "global_frame_idx": 26417, "task_index": 28}, {"db_idx": 26418, "episode_idx": 144, "frame_idx": 86, "global_frame_idx": 26418, "task_index": 28}, {"db_idx": 26419, "episode_idx": 144, "frame_idx": 87, "global_frame_idx": 26419, "task_index": 28}, {"db_idx": 26420, "episode_idx": 144, "frame_idx": 88, "global_frame_idx": 26420, "task_index": 28}, {"db_idx": 
26421, "episode_idx": 144, "frame_idx": 89, "global_frame_idx": 26421, "task_index": 28}, {"db_idx": 26422, "episode_idx": 144, "frame_idx": 90, "global_frame_idx": 26422, "task_index": 28}, {"db_idx": 26423, "episode_idx": 144, "frame_idx": 91, "global_frame_idx": 26423, "task_index": 28}, {"db_idx": 26424, "episode_idx": 144, "frame_idx": 92, "global_frame_idx": 26424, "task_index": 28}, {"db_idx": 26425, "episode_idx": 144, "frame_idx": 93, "global_frame_idx": 26425, "task_index": 28}, {"db_idx": 26426, "episode_idx": 144, "frame_idx": 94, "global_frame_idx": 26426, "task_index": 28}, {"db_idx": 26427, "episode_idx": 144, "frame_idx": 95, "global_frame_idx": 26427, "task_index": 28}, {"db_idx": 26428, "episode_idx": 144, "frame_idx": 96, "global_frame_idx": 26428, "task_index": 28}, {"db_idx": 26429, "episode_idx": 144, "frame_idx": 97, "global_frame_idx": 26429, "task_index": 28}, {"db_idx": 26430, "episode_idx": 144, "frame_idx": 98, "global_frame_idx": 26430, "task_index": 28}, {"db_idx": 26431, "episode_idx": 144, "frame_idx": 99, "global_frame_idx": 26431, "task_index": 28}, {"db_idx": 26432, "episode_idx": 144, "frame_idx": 100, "global_frame_idx": 26432, "task_index": 28}, {"db_idx": 26433, "episode_idx": 144, "frame_idx": 101, "global_frame_idx": 26433, "task_index": 28}, {"db_idx": 26434, "episode_idx": 144, "frame_idx": 102, "global_frame_idx": 26434, "task_index": 28}, {"db_idx": 26435, "episode_idx": 144, "frame_idx": 103, "global_frame_idx": 26435, "task_index": 28}, {"db_idx": 26436, "episode_idx": 144, "frame_idx": 104, "global_frame_idx": 26436, "task_index": 28}, {"db_idx": 26437, "episode_idx": 144, "frame_idx": 105, "global_frame_idx": 26437, "task_index": 28}, {"db_idx": 26438, "episode_idx": 144, "frame_idx": 106, "global_frame_idx": 26438, "task_index": 28}, {"db_idx": 26439, "episode_idx": 144, "frame_idx": 107, "global_frame_idx": 26439, "task_index": 28}, {"db_idx": 26440, "episode_idx": 144, "frame_idx": 108, "global_frame_idx": 26440, 
"task_index": 28}, {"db_idx": 26441, "episode_idx": 144, "frame_idx": 109, "global_frame_idx": 26441, "task_index": 28}, {"db_idx": 26442, "episode_idx": 144, "frame_idx": 110, "global_frame_idx": 26442, "task_index": 28}, {"db_idx": 26443, "episode_idx": 144, "frame_idx": 111, "global_frame_idx": 26443, "task_index": 28}, {"db_idx": 26444, "episode_idx": 144, "frame_idx": 112, "global_frame_idx": 26444, "task_index": 28}, {"db_idx": 26445, "episode_idx": 144, "frame_idx": 113, "global_frame_idx": 26445, "task_index": 28}, {"db_idx": 26446, "episode_idx": 144, "frame_idx": 114, "global_frame_idx": 26446, "task_index": 28}, {"db_idx": 26447, "episode_idx": 144, "frame_idx": 115, "global_frame_idx": 26447, "task_index": 28}, {"db_idx": 26448, "episode_idx": 144, "frame_idx": 116, "global_frame_idx": 26448, "task_index": 28}, {"db_idx": 26449, "episode_idx": 144, "frame_idx": 117, "global_frame_idx": 26449, "task_index": 28}, {"db_idx": 26450, "episode_idx": 144, "frame_idx": 118, "global_frame_idx": 26450, "task_index": 28}, {"db_idx": 26451, "episode_idx": 144, "frame_idx": 119, "global_frame_idx": 26451, "task_index": 28}, {"db_idx": 26452, "episode_idx": 144, "frame_idx": 120, "global_frame_idx": 26452, "task_index": 28}, {"db_idx": 26453, "episode_idx": 144, "frame_idx": 121, "global_frame_idx": 26453, "task_index": 28}, {"db_idx": 26454, "episode_idx": 144, "frame_idx": 122, "global_frame_idx": 26454, "task_index": 28}, {"db_idx": 26455, "episode_idx": 144, "frame_idx": 123, "global_frame_idx": 26455, "task_index": 28}, {"db_idx": 26456, "episode_idx": 144, "frame_idx": 124, "global_frame_idx": 26456, "task_index": 28}, {"db_idx": 26457, "episode_idx": 144, "frame_idx": 125, "global_frame_idx": 26457, "task_index": 28}, {"db_idx": 26458, "episode_idx": 144, "frame_idx": 126, "global_frame_idx": 26458, "task_index": 28}, {"db_idx": 26459, "episode_idx": 144, "frame_idx": 127, "global_frame_idx": 26459, "task_index": 28}, {"db_idx": 26460, "episode_idx": 145, 
"frame_idx": 0, "global_frame_idx": 26460, "task_index": 29}, {"db_idx": 26461, "episode_idx": 145, "frame_idx": 1, "global_frame_idx": 26461, "task_index": 29}, {"db_idx": 26462, "episode_idx": 145, "frame_idx": 2, "global_frame_idx": 26462, "task_index": 29}, {"db_idx": 26463, "episode_idx": 145, "frame_idx": 3, "global_frame_idx": 26463, "task_index": 29}, {"db_idx": 26464, "episode_idx": 145, "frame_idx": 4, "global_frame_idx": 26464, "task_index": 29}, {"db_idx": 26465, "episode_idx": 145, "frame_idx": 5, "global_frame_idx": 26465, "task_index": 29}, {"db_idx": 26466, "episode_idx": 145, "frame_idx": 6, "global_frame_idx": 26466, "task_index": 29}, {"db_idx": 26467, "episode_idx": 145, "frame_idx": 7, "global_frame_idx": 26467, "task_index": 29}, {"db_idx": 26468, "episode_idx": 145, "frame_idx": 8, "global_frame_idx": 26468, "task_index": 29}, {"db_idx": 26469, "episode_idx": 145, "frame_idx": 9, "global_frame_idx": 26469, "task_index": 29}, {"db_idx": 26470, "episode_idx": 145, "frame_idx": 10, "global_frame_idx": 26470, "task_index": 29}, {"db_idx": 26471, "episode_idx": 145, "frame_idx": 11, "global_frame_idx": 26471, "task_index": 29}, {"db_idx": 26472, "episode_idx": 145, "frame_idx": 12, "global_frame_idx": 26472, "task_index": 29}, {"db_idx": 26473, "episode_idx": 145, "frame_idx": 13, "global_frame_idx": 26473, "task_index": 29}, {"db_idx": 26474, "episode_idx": 145, "frame_idx": 14, "global_frame_idx": 26474, "task_index": 29}, {"db_idx": 26475, "episode_idx": 145, "frame_idx": 15, "global_frame_idx": 26475, "task_index": 29}, {"db_idx": 26476, "episode_idx": 145, "frame_idx": 16, "global_frame_idx": 26476, "task_index": 29}, {"db_idx": 26477, "episode_idx": 145, "frame_idx": 17, "global_frame_idx": 26477, "task_index": 29}, {"db_idx": 26478, "episode_idx": 145, "frame_idx": 18, "global_frame_idx": 26478, "task_index": 29}, {"db_idx": 26479, "episode_idx": 145, "frame_idx": 19, "global_frame_idx": 26479, "task_index": 29}, {"db_idx": 26480, 
"episode_idx": 145, "frame_idx": 20, "global_frame_idx": 26480, "task_index": 29}, {"db_idx": 26481, "episode_idx": 145, "frame_idx": 21, "global_frame_idx": 26481, "task_index": 29}, {"db_idx": 26482, "episode_idx": 145, "frame_idx": 22, "global_frame_idx": 26482, "task_index": 29}, {"db_idx": 26483, "episode_idx": 145, "frame_idx": 23, "global_frame_idx": 26483, "task_index": 29}, {"db_idx": 26484, "episode_idx": 145, "frame_idx": 24, "global_frame_idx": 26484, "task_index": 29}, {"db_idx": 26485, "episode_idx": 145, "frame_idx": 25, "global_frame_idx": 26485, "task_index": 29}, {"db_idx": 26486, "episode_idx": 145, "frame_idx": 26, "global_frame_idx": 26486, "task_index": 29}, {"db_idx": 26487, "episode_idx": 145, "frame_idx": 27, "global_frame_idx": 26487, "task_index": 29}, {"db_idx": 26488, "episode_idx": 145, "frame_idx": 28, "global_frame_idx": 26488, "task_index": 29}, {"db_idx": 26489, "episode_idx": 145, "frame_idx": 29, "global_frame_idx": 26489, "task_index": 29}, {"db_idx": 26490, "episode_idx": 145, "frame_idx": 30, "global_frame_idx": 26490, "task_index": 29}, {"db_idx": 26491, "episode_idx": 145, "frame_idx": 31, "global_frame_idx": 26491, "task_index": 29}, {"db_idx": 26492, "episode_idx": 145, "frame_idx": 32, "global_frame_idx": 26492, "task_index": 29}, {"db_idx": 26493, "episode_idx": 145, "frame_idx": 33, "global_frame_idx": 26493, "task_index": 29}, {"db_idx": 26494, "episode_idx": 145, "frame_idx": 34, "global_frame_idx": 26494, "task_index": 29}, {"db_idx": 26495, "episode_idx": 145, "frame_idx": 35, "global_frame_idx": 26495, "task_index": 29}, {"db_idx": 26496, "episode_idx": 145, "frame_idx": 36, "global_frame_idx": 26496, "task_index": 29}, {"db_idx": 26497, "episode_idx": 145, "frame_idx": 37, "global_frame_idx": 26497, "task_index": 29}, {"db_idx": 26498, "episode_idx": 145, "frame_idx": 38, "global_frame_idx": 26498, "task_index": 29}, {"db_idx": 26499, "episode_idx": 145, "frame_idx": 39, "global_frame_idx": 26499, "task_index": 
29}, {"db_idx": 26500, "episode_idx": 145, "frame_idx": 40, "global_frame_idx": 26500, "task_index": 29}, {"db_idx": 26501, "episode_idx": 145, "frame_idx": 41, "global_frame_idx": 26501, "task_index": 29}, {"db_idx": 26502, "episode_idx": 145, "frame_idx": 42, "global_frame_idx": 26502, "task_index": 29}, {"db_idx": 26503, "episode_idx": 145, "frame_idx": 43, "global_frame_idx": 26503, "task_index": 29}, {"db_idx": 26504, "episode_idx": 145, "frame_idx": 44, "global_frame_idx": 26504, "task_index": 29}, {"db_idx": 26505, "episode_idx": 145, "frame_idx": 45, "global_frame_idx": 26505, "task_index": 29}, {"db_idx": 26506, "episode_idx": 145, "frame_idx": 46, "global_frame_idx": 26506, "task_index": 29}, {"db_idx": 26507, "episode_idx": 145, "frame_idx": 47, "global_frame_idx": 26507, "task_index": 29}, {"db_idx": 26508, "episode_idx": 145, "frame_idx": 48, "global_frame_idx": 26508, "task_index": 29}, {"db_idx": 26509, "episode_idx": 145, "frame_idx": 49, "global_frame_idx": 26509, "task_index": 29}, {"db_idx": 26510, "episode_idx": 145, "frame_idx": 50, "global_frame_idx": 26510, "task_index": 29}, {"db_idx": 26511, "episode_idx": 145, "frame_idx": 51, "global_frame_idx": 26511, "task_index": 29}, {"db_idx": 26512, "episode_idx": 145, "frame_idx": 52, "global_frame_idx": 26512, "task_index": 29}, {"db_idx": 26513, "episode_idx": 145, "frame_idx": 53, "global_frame_idx": 26513, "task_index": 29}, {"db_idx": 26514, "episode_idx": 145, "frame_idx": 54, "global_frame_idx": 26514, "task_index": 29}, {"db_idx": 26515, "episode_idx": 145, "frame_idx": 55, "global_frame_idx": 26515, "task_index": 29}, {"db_idx": 26516, "episode_idx": 145, "frame_idx": 56, "global_frame_idx": 26516, "task_index": 29}, {"db_idx": 26517, "episode_idx": 145, "frame_idx": 57, "global_frame_idx": 26517, "task_index": 29}, {"db_idx": 26518, "episode_idx": 145, "frame_idx": 58, "global_frame_idx": 26518, "task_index": 29}, {"db_idx": 26519, "episode_idx": 145, "frame_idx": 59, "global_frame_idx": 
26519, "task_index": 29}, {"db_idx": 26520, "episode_idx": 145, "frame_idx": 60, "global_frame_idx": 26520, "task_index": 29}, {"db_idx": 26521, "episode_idx": 145, "frame_idx": 61, "global_frame_idx": 26521, "task_index": 29}, {"db_idx": 26522, "episode_idx": 145, "frame_idx": 62, "global_frame_idx": 26522, "task_index": 29}, {"db_idx": 26523, "episode_idx": 145, "frame_idx": 63, "global_frame_idx": 26523, "task_index": 29}, {"db_idx": 26524, "episode_idx": 145, "frame_idx": 64, "global_frame_idx": 26524, "task_index": 29}, {"db_idx": 26525, "episode_idx": 145, "frame_idx": 65, "global_frame_idx": 26525, "task_index": 29}, {"db_idx": 26526, "episode_idx": 145, "frame_idx": 66, "global_frame_idx": 26526, "task_index": 29}, {"db_idx": 26527, "episode_idx": 145, "frame_idx": 67, "global_frame_idx": 26527, "task_index": 29}, {"db_idx": 26528, "episode_idx": 145, "frame_idx": 68, "global_frame_idx": 26528, "task_index": 29}, {"db_idx": 26529, "episode_idx": 145, "frame_idx": 69, "global_frame_idx": 26529, "task_index": 29}, {"db_idx": 26530, "episode_idx": 145, "frame_idx": 70, "global_frame_idx": 26530, "task_index": 29}, {"db_idx": 26531, "episode_idx": 145, "frame_idx": 71, "global_frame_idx": 26531, "task_index": 29}, {"db_idx": 26532, "episode_idx": 145, "frame_idx": 72, "global_frame_idx": 26532, "task_index": 29}, {"db_idx": 26533, "episode_idx": 145, "frame_idx": 73, "global_frame_idx": 26533, "task_index": 29}, {"db_idx": 26534, "episode_idx": 145, "frame_idx": 74, "global_frame_idx": 26534, "task_index": 29}, {"db_idx": 26535, "episode_idx": 145, "frame_idx": 75, "global_frame_idx": 26535, "task_index": 29}, {"db_idx": 26536, "episode_idx": 145, "frame_idx": 76, "global_frame_idx": 26536, "task_index": 29}, {"db_idx": 26537, "episode_idx": 145, "frame_idx": 77, "global_frame_idx": 26537, "task_index": 29}, {"db_idx": 26538, "episode_idx": 145, "frame_idx": 78, "global_frame_idx": 26538, "task_index": 29}, {"db_idx": 26539, "episode_idx": 145, "frame_idx": 79, 
"global_frame_idx": 26539, "task_index": 29}, {"db_idx": 26540, "episode_idx": 145, "frame_idx": 80, "global_frame_idx": 26540, "task_index": 29}, {"db_idx": 26541, "episode_idx": 145, "frame_idx": 81, "global_frame_idx": 26541, "task_index": 29}, {"db_idx": 26542, "episode_idx": 145, "frame_idx": 82, "global_frame_idx": 26542, "task_index": 29}, {"db_idx": 26543, "episode_idx": 145, "frame_idx": 83, "global_frame_idx": 26543, "task_index": 29}, {"db_idx": 26544, "episode_idx": 145, "frame_idx": 84, "global_frame_idx": 26544, "task_index": 29}, {"db_idx": 26545, "episode_idx": 145, "frame_idx": 85, "global_frame_idx": 26545, "task_index": 29}, {"db_idx": 26546, "episode_idx": 145, "frame_idx": 86, "global_frame_idx": 26546, "task_index": 29}, {"db_idx": 26547, "episode_idx": 145, "frame_idx": 87, "global_frame_idx": 26547, "task_index": 29}, {"db_idx": 26548, "episode_idx": 145, "frame_idx": 88, "global_frame_idx": 26548, "task_index": 29}, {"db_idx": 26549, "episode_idx": 145, "frame_idx": 89, "global_frame_idx": 26549, "task_index": 29}, {"db_idx": 26550, "episode_idx": 145, "frame_idx": 90, "global_frame_idx": 26550, "task_index": 29}, {"db_idx": 26551, "episode_idx": 145, "frame_idx": 91, "global_frame_idx": 26551, "task_index": 29}, {"db_idx": 26552, "episode_idx": 145, "frame_idx": 92, "global_frame_idx": 26552, "task_index": 29}, {"db_idx": 26553, "episode_idx": 145, "frame_idx": 93, "global_frame_idx": 26553, "task_index": 29}, {"db_idx": 26554, "episode_idx": 145, "frame_idx": 94, "global_frame_idx": 26554, "task_index": 29}, {"db_idx": 26555, "episode_idx": 145, "frame_idx": 95, "global_frame_idx": 26555, "task_index": 29}, {"db_idx": 26556, "episode_idx": 145, "frame_idx": 96, "global_frame_idx": 26556, "task_index": 29}, {"db_idx": 26557, "episode_idx": 145, "frame_idx": 97, "global_frame_idx": 26557, "task_index": 29}, {"db_idx": 26558, "episode_idx": 145, "frame_idx": 98, "global_frame_idx": 26558, "task_index": 29}, {"db_idx": 26559, "episode_idx": 
145, "frame_idx": 99, "global_frame_idx": 26559, "task_index": 29}, {"db_idx": 26560, "episode_idx": 145, "frame_idx": 100, "global_frame_idx": 26560, "task_index": 29}, {"db_idx": 26561, "episode_idx": 145, "frame_idx": 101, "global_frame_idx": 26561, "task_index": 29}, {"db_idx": 26562, "episode_idx": 145, "frame_idx": 102, "global_frame_idx": 26562, "task_index": 29}, {"db_idx": 26563, "episode_idx": 145, "frame_idx": 103, "global_frame_idx": 26563, "task_index": 29}, {"db_idx": 26564, "episode_idx": 145, "frame_idx": 104, "global_frame_idx": 26564, "task_index": 29}, {"db_idx": 26565, "episode_idx": 145, "frame_idx": 105, "global_frame_idx": 26565, "task_index": 29}, {"db_idx": 26566, "episode_idx": 145, "frame_idx": 106, "global_frame_idx": 26566, "task_index": 29}, {"db_idx": 26567, "episode_idx": 145, "frame_idx": 107, "global_frame_idx": 26567, "task_index": 29}, {"db_idx": 26568, "episode_idx": 145, "frame_idx": 108, "global_frame_idx": 26568, "task_index": 29}, {"db_idx": 26569, "episode_idx": 145, "frame_idx": 109, "global_frame_idx": 26569, "task_index": 29}, {"db_idx": 26570, "episode_idx": 145, "frame_idx": 110, "global_frame_idx": 26570, "task_index": 29}, {"db_idx": 26571, "episode_idx": 145, "frame_idx": 111, "global_frame_idx": 26571, "task_index": 29}, {"db_idx": 26572, "episode_idx": 145, "frame_idx": 112, "global_frame_idx": 26572, "task_index": 29}, {"db_idx": 26573, "episode_idx": 145, "frame_idx": 113, "global_frame_idx": 26573, "task_index": 29}, {"db_idx": 26574, "episode_idx": 145, "frame_idx": 114, "global_frame_idx": 26574, "task_index": 29}, {"db_idx": 26575, "episode_idx": 145, "frame_idx": 115, "global_frame_idx": 26575, "task_index": 29}, {"db_idx": 26576, "episode_idx": 145, "frame_idx": 116, "global_frame_idx": 26576, "task_index": 29}, {"db_idx": 26577, "episode_idx": 145, "frame_idx": 117, "global_frame_idx": 26577, "task_index": 29}, {"db_idx": 26578, "episode_idx": 145, "frame_idx": 118, "global_frame_idx": 26578, 
"task_index": 29}, {"db_idx": 26579, "episode_idx": 145, "frame_idx": 119, "global_frame_idx": 26579, "task_index": 29}, {"db_idx": 26580, "episode_idx": 145, "frame_idx": 120, "global_frame_idx": 26580, "task_index": 29}, {"db_idx": 26581, "episode_idx": 145, "frame_idx": 121, "global_frame_idx": 26581, "task_index": 29}, {"db_idx": 26582, "episode_idx": 145, "frame_idx": 122, "global_frame_idx": 26582, "task_index": 29}, {"db_idx": 26583, "episode_idx": 145, "frame_idx": 123, "global_frame_idx": 26583, "task_index": 29}, {"db_idx": 26584, "episode_idx": 145, "frame_idx": 124, "global_frame_idx": 26584, "task_index": 29}, {"db_idx": 26585, "episode_idx": 145, "frame_idx": 125, "global_frame_idx": 26585, "task_index": 29}, {"db_idx": 26586, "episode_idx": 145, "frame_idx": 126, "global_frame_idx": 26586, "task_index": 29}, {"db_idx": 26587, "episode_idx": 145, "frame_idx": 127, "global_frame_idx": 26587, "task_index": 29}, {"db_idx": 26588, "episode_idx": 145, "frame_idx": 128, "global_frame_idx": 26588, "task_index": 29}, {"db_idx": 26589, "episode_idx": 145, "frame_idx": 129, "global_frame_idx": 26589, "task_index": 29}, {"db_idx": 26590, "episode_idx": 145, "frame_idx": 130, "global_frame_idx": 26590, "task_index": 29}, {"db_idx": 26591, "episode_idx": 145, "frame_idx": 131, "global_frame_idx": 26591, "task_index": 29}, {"db_idx": 26592, "episode_idx": 145, "frame_idx": 132, "global_frame_idx": 26592, "task_index": 29}, {"db_idx": 26593, "episode_idx": 145, "frame_idx": 133, "global_frame_idx": 26593, "task_index": 29}, {"db_idx": 26594, "episode_idx": 145, "frame_idx": 134, "global_frame_idx": 26594, "task_index": 29}, {"db_idx": 26595, "episode_idx": 145, "frame_idx": 135, "global_frame_idx": 26595, "task_index": 29}, {"db_idx": 26596, "episode_idx": 145, "frame_idx": 136, "global_frame_idx": 26596, "task_index": 29}, {"db_idx": 26597, "episode_idx": 145, "frame_idx": 137, "global_frame_idx": 26597, "task_index": 29}, {"db_idx": 26598, "episode_idx": 145, 
"frame_idx": 138, "global_frame_idx": 26598, "task_index": 29}, {"db_idx": 26599, "episode_idx": 145, "frame_idx": 139, "global_frame_idx": 26599, "task_index": 29}, {"db_idx": 26600, "episode_idx": 145, "frame_idx": 140, "global_frame_idx": 26600, "task_index": 29}, {"db_idx": 26601, "episode_idx": 145, "frame_idx": 141, "global_frame_idx": 26601, "task_index": 29}, {"db_idx": 26602, "episode_idx": 145, "frame_idx": 142, "global_frame_idx": 26602, "task_index": 29}, {"db_idx": 26603, "episode_idx": 145, "frame_idx": 143, "global_frame_idx": 26603, "task_index": 29}, {"db_idx": 26604, "episode_idx": 145, "frame_idx": 144, "global_frame_idx": 26604, "task_index": 29}, {"db_idx": 26605, "episode_idx": 145, "frame_idx": 145, "global_frame_idx": 26605, "task_index": 29}, {"db_idx": 26606, "episode_idx": 145, "frame_idx": 146, "global_frame_idx": 26606, "task_index": 29}, {"db_idx": 26607, "episode_idx": 145, "frame_idx": 147, "global_frame_idx": 26607, "task_index": 29}, {"db_idx": 26608, "episode_idx": 145, "frame_idx": 148, "global_frame_idx": 26608, "task_index": 29}, {"db_idx": 26609, "episode_idx": 145, "frame_idx": 149, "global_frame_idx": 26609, "task_index": 29}, {"db_idx": 26610, "episode_idx": 145, "frame_idx": 150, "global_frame_idx": 26610, "task_index": 29}, {"db_idx": 26611, "episode_idx": 145, "frame_idx": 151, "global_frame_idx": 26611, "task_index": 29}, {"db_idx": 26612, "episode_idx": 145, "frame_idx": 152, "global_frame_idx": 26612, "task_index": 29}, {"db_idx": 26613, "episode_idx": 145, "frame_idx": 153, "global_frame_idx": 26613, "task_index": 29}, {"db_idx": 26614, "episode_idx": 145, "frame_idx": 154, "global_frame_idx": 26614, "task_index": 29}, {"db_idx": 26615, "episode_idx": 145, "frame_idx": 155, "global_frame_idx": 26615, "task_index": 29}, {"db_idx": 26616, "episode_idx": 145, "frame_idx": 156, "global_frame_idx": 26616, "task_index": 29}, {"db_idx": 26617, "episode_idx": 145, "frame_idx": 157, "global_frame_idx": 26617, "task_index": 
29}, {"db_idx": 26618, "episode_idx": 145, "frame_idx": 158, "global_frame_idx": 26618, "task_index": 29}, {"db_idx": 26619, "episode_idx": 145, "frame_idx": 159, "global_frame_idx": 26619, "task_index": 29}, {"db_idx": 26620, "episode_idx": 146, "frame_idx": 0, "global_frame_idx": 26620, "task_index": 29}, {"db_idx": 26621, "episode_idx": 146, "frame_idx": 1, "global_frame_idx": 26621, "task_index": 29}, {"db_idx": 26622, "episode_idx": 146, "frame_idx": 2, "global_frame_idx": 26622, "task_index": 29}, {"db_idx": 26623, "episode_idx": 146, "frame_idx": 3, "global_frame_idx": 26623, "task_index": 29}, {"db_idx": 26624, "episode_idx": 146, "frame_idx": 4, "global_frame_idx": 26624, "task_index": 29}, {"db_idx": 26625, "episode_idx": 146, "frame_idx": 5, "global_frame_idx": 26625, "task_index": 29}, {"db_idx": 26626, "episode_idx": 146, "frame_idx": 6, "global_frame_idx": 26626, "task_index": 29}, {"db_idx": 26627, "episode_idx": 146, "frame_idx": 7, "global_frame_idx": 26627, "task_index": 29}, {"db_idx": 26628, "episode_idx": 146, "frame_idx": 8, "global_frame_idx": 26628, "task_index": 29}, {"db_idx": 26629, "episode_idx": 146, "frame_idx": 9, "global_frame_idx": 26629, "task_index": 29}, {"db_idx": 26630, "episode_idx": 146, "frame_idx": 10, "global_frame_idx": 26630, "task_index": 29}, {"db_idx": 26631, "episode_idx": 146, "frame_idx": 11, "global_frame_idx": 26631, "task_index": 29}, {"db_idx": 26632, "episode_idx": 146, "frame_idx": 12, "global_frame_idx": 26632, "task_index": 29}, {"db_idx": 26633, "episode_idx": 146, "frame_idx": 13, "global_frame_idx": 26633, "task_index": 29}, {"db_idx": 26634, "episode_idx": 146, "frame_idx": 14, "global_frame_idx": 26634, "task_index": 29}, {"db_idx": 26635, "episode_idx": 146, "frame_idx": 15, "global_frame_idx": 26635, "task_index": 29}, {"db_idx": 26636, "episode_idx": 146, "frame_idx": 16, "global_frame_idx": 26636, "task_index": 29}, {"db_idx": 26637, "episode_idx": 146, "frame_idx": 17, "global_frame_idx": 26637, 
"task_index": 29}, {"db_idx": 26638, "episode_idx": 146, "frame_idx": 18, "global_frame_idx": 26638, "task_index": 29}, {"db_idx": 26639, "episode_idx": 146, "frame_idx": 19, "global_frame_idx": 26639, "task_index": 29}, {"db_idx": 26640, "episode_idx": 146, "frame_idx": 20, "global_frame_idx": 26640, "task_index": 29}, {"db_idx": 26641, "episode_idx": 146, "frame_idx": 21, "global_frame_idx": 26641, "task_index": 29}, {"db_idx": 26642, "episode_idx": 146, "frame_idx": 22, "global_frame_idx": 26642, "task_index": 29}, {"db_idx": 26643, "episode_idx": 146, "frame_idx": 23, "global_frame_idx": 26643, "task_index": 29}, {"db_idx": 26644, "episode_idx": 146, "frame_idx": 24, "global_frame_idx": 26644, "task_index": 29}, {"db_idx": 26645, "episode_idx": 146, "frame_idx": 25, "global_frame_idx": 26645, "task_index": 29}, {"db_idx": 26646, "episode_idx": 146, "frame_idx": 26, "global_frame_idx": 26646, "task_index": 29}, {"db_idx": 26647, "episode_idx": 146, "frame_idx": 27, "global_frame_idx": 26647, "task_index": 29}, {"db_idx": 26648, "episode_idx": 146, "frame_idx": 28, "global_frame_idx": 26648, "task_index": 29}, {"db_idx": 26649, "episode_idx": 146, "frame_idx": 29, "global_frame_idx": 26649, "task_index": 29}, {"db_idx": 26650, "episode_idx": 146, "frame_idx": 30, "global_frame_idx": 26650, "task_index": 29}, {"db_idx": 26651, "episode_idx": 146, "frame_idx": 31, "global_frame_idx": 26651, "task_index": 29}, {"db_idx": 26652, "episode_idx": 146, "frame_idx": 32, "global_frame_idx": 26652, "task_index": 29}, {"db_idx": 26653, "episode_idx": 146, "frame_idx": 33, "global_frame_idx": 26653, "task_index": 29}, {"db_idx": 26654, "episode_idx": 146, "frame_idx": 34, "global_frame_idx": 26654, "task_index": 29}, {"db_idx": 26655, "episode_idx": 146, "frame_idx": 35, "global_frame_idx": 26655, "task_index": 29}, {"db_idx": 26656, "episode_idx": 146, "frame_idx": 36, "global_frame_idx": 26656, "task_index": 29}, {"db_idx": 26657, "episode_idx": 146, "frame_idx": 37, 
"global_frame_idx": 26657, "task_index": 29}, {"db_idx": 26658, "episode_idx": 146, "frame_idx": 38, "global_frame_idx": 26658, "task_index": 29}, {"db_idx": 26659, "episode_idx": 146, "frame_idx": 39, "global_frame_idx": 26659, "task_index": 29}, {"db_idx": 26660, "episode_idx": 146, "frame_idx": 40, "global_frame_idx": 26660, "task_index": 29}, {"db_idx": 26661, "episode_idx": 146, "frame_idx": 41, "global_frame_idx": 26661, "task_index": 29}, {"db_idx": 26662, "episode_idx": 146, "frame_idx": 42, "global_frame_idx": 26662, "task_index": 29}, {"db_idx": 26663, "episode_idx": 146, "frame_idx": 43, "global_frame_idx": 26663, "task_index": 29}, {"db_idx": 26664, "episode_idx": 146, "frame_idx": 44, "global_frame_idx": 26664, "task_index": 29}, {"db_idx": 26665, "episode_idx": 146, "frame_idx": 45, "global_frame_idx": 26665, "task_index": 29}, {"db_idx": 26666, "episode_idx": 146, "frame_idx": 46, "global_frame_idx": 26666, "task_index": 29}, {"db_idx": 26667, "episode_idx": 146, "frame_idx": 47, "global_frame_idx": 26667, "task_index": 29}, {"db_idx": 26668, "episode_idx": 146, "frame_idx": 48, "global_frame_idx": 26668, "task_index": 29}, {"db_idx": 26669, "episode_idx": 146, "frame_idx": 49, "global_frame_idx": 26669, "task_index": 29}, {"db_idx": 26670, "episode_idx": 146, "frame_idx": 50, "global_frame_idx": 26670, "task_index": 29}, {"db_idx": 26671, "episode_idx": 146, "frame_idx": 51, "global_frame_idx": 26671, "task_index": 29}, {"db_idx": 26672, "episode_idx": 146, "frame_idx": 52, "global_frame_idx": 26672, "task_index": 29}, {"db_idx": 26673, "episode_idx": 146, "frame_idx": 53, "global_frame_idx": 26673, "task_index": 29}, {"db_idx": 26674, "episode_idx": 146, "frame_idx": 54, "global_frame_idx": 26674, "task_index": 29}, {"db_idx": 26675, "episode_idx": 146, "frame_idx": 55, "global_frame_idx": 26675, "task_index": 29}, {"db_idx": 26676, "episode_idx": 146, "frame_idx": 56, "global_frame_idx": 26676, "task_index": 29}, {"db_idx": 26677, "episode_idx": 
146, "frame_idx": 57, "global_frame_idx": 26677, "task_index": 29}, {"db_idx": 26678, "episode_idx": 146, "frame_idx": 58, "global_frame_idx": 26678, "task_index": 29}, {"db_idx": 26679, "episode_idx": 146, "frame_idx": 59, "global_frame_idx": 26679, "task_index": 29}, {"db_idx": 26680, "episode_idx": 146, "frame_idx": 60, "global_frame_idx": 26680, "task_index": 29}, {"db_idx": 26681, "episode_idx": 146, "frame_idx": 61, "global_frame_idx": 26681, "task_index": 29}, {"db_idx": 26682, "episode_idx": 146, "frame_idx": 62, "global_frame_idx": 26682, "task_index": 29}, {"db_idx": 26683, "episode_idx": 146, "frame_idx": 63, "global_frame_idx": 26683, "task_index": 29}, {"db_idx": 26684, "episode_idx": 146, "frame_idx": 64, "global_frame_idx": 26684, "task_index": 29}, {"db_idx": 26685, "episode_idx": 146, "frame_idx": 65, "global_frame_idx": 26685, "task_index": 29}, {"db_idx": 26686, "episode_idx": 146, "frame_idx": 66, "global_frame_idx": 26686, "task_index": 29}, {"db_idx": 26687, "episode_idx": 146, "frame_idx": 67, "global_frame_idx": 26687, "task_index": 29}, {"db_idx": 26688, "episode_idx": 146, "frame_idx": 68, "global_frame_idx": 26688, "task_index": 29}, {"db_idx": 26689, "episode_idx": 146, "frame_idx": 69, "global_frame_idx": 26689, "task_index": 29}, {"db_idx": 26690, "episode_idx": 146, "frame_idx": 70, "global_frame_idx": 26690, "task_index": 29}, {"db_idx": 26691, "episode_idx": 146, "frame_idx": 71, "global_frame_idx": 26691, "task_index": 29}, {"db_idx": 26692, "episode_idx": 146, "frame_idx": 72, "global_frame_idx": 26692, "task_index": 29}, {"db_idx": 26693, "episode_idx": 146, "frame_idx": 73, "global_frame_idx": 26693, "task_index": 29}, {"db_idx": 26694, "episode_idx": 146, "frame_idx": 74, "global_frame_idx": 26694, "task_index": 29}, {"db_idx": 26695, "episode_idx": 146, "frame_idx": 75, "global_frame_idx": 26695, "task_index": 29}, {"db_idx": 26696, "episode_idx": 146, "frame_idx": 76, "global_frame_idx": 26696, "task_index": 29}, {"db_idx": 
26697, "episode_idx": 146, "frame_idx": 77, "global_frame_idx": 26697, "task_index": 29}, {"db_idx": 26698, "episode_idx": 146, "frame_idx": 78, "global_frame_idx": 26698, "task_index": 29}, {"db_idx": 26699, "episode_idx": 146, "frame_idx": 79, "global_frame_idx": 26699, "task_index": 29}, {"db_idx": 26700, "episode_idx": 146, "frame_idx": 80, "global_frame_idx": 26700, "task_index": 29}, {"db_idx": 26701, "episode_idx": 146, "frame_idx": 81, "global_frame_idx": 26701, "task_index": 29}, {"db_idx": 26702, "episode_idx": 146, "frame_idx": 82, "global_frame_idx": 26702, "task_index": 29}, {"db_idx": 26703, "episode_idx": 146, "frame_idx": 83, "global_frame_idx": 26703, "task_index": 29}, {"db_idx": 26704, "episode_idx": 146, "frame_idx": 84, "global_frame_idx": 26704, "task_index": 29}, {"db_idx": 26705, "episode_idx": 146, "frame_idx": 85, "global_frame_idx": 26705, "task_index": 29}, {"db_idx": 26706, "episode_idx": 146, "frame_idx": 86, "global_frame_idx": 26706, "task_index": 29}, {"db_idx": 26707, "episode_idx": 146, "frame_idx": 87, "global_frame_idx": 26707, "task_index": 29}, {"db_idx": 26708, "episode_idx": 146, "frame_idx": 88, "global_frame_idx": 26708, "task_index": 29}, {"db_idx": 26709, "episode_idx": 146, "frame_idx": 89, "global_frame_idx": 26709, "task_index": 29}, {"db_idx": 26710, "episode_idx": 146, "frame_idx": 90, "global_frame_idx": 26710, "task_index": 29}, {"db_idx": 26711, "episode_idx": 146, "frame_idx": 91, "global_frame_idx": 26711, "task_index": 29}, {"db_idx": 26712, "episode_idx": 146, "frame_idx": 92, "global_frame_idx": 26712, "task_index": 29}, {"db_idx": 26713, "episode_idx": 146, "frame_idx": 93, "global_frame_idx": 26713, "task_index": 29}, {"db_idx": 26714, "episode_idx": 146, "frame_idx": 94, "global_frame_idx": 26714, "task_index": 29}, {"db_idx": 26715, "episode_idx": 146, "frame_idx": 95, "global_frame_idx": 26715, "task_index": 29}, {"db_idx": 26716, "episode_idx": 146, "frame_idx": 96, "global_frame_idx": 26716, 
"task_index": 29}, {"db_idx": 26717, "episode_idx": 146, "frame_idx": 97, "global_frame_idx": 26717, "task_index": 29}, {"db_idx": 26718, "episode_idx": 146, "frame_idx": 98, "global_frame_idx": 26718, "task_index": 29}, {"db_idx": 26719, "episode_idx": 146, "frame_idx": 99, "global_frame_idx": 26719, "task_index": 29}, {"db_idx": 26720, "episode_idx": 146, "frame_idx": 100, "global_frame_idx": 26720, "task_index": 29}, {"db_idx": 26721, "episode_idx": 146, "frame_idx": 101, "global_frame_idx": 26721, "task_index": 29}, {"db_idx": 26722, "episode_idx": 146, "frame_idx": 102, "global_frame_idx": 26722, "task_index": 29}, {"db_idx": 26723, "episode_idx": 146, "frame_idx": 103, "global_frame_idx": 26723, "task_index": 29}, {"db_idx": 26724, "episode_idx": 146, "frame_idx": 104, "global_frame_idx": 26724, "task_index": 29}, {"db_idx": 26725, "episode_idx": 146, "frame_idx": 105, "global_frame_idx": 26725, "task_index": 29}, {"db_idx": 26726, "episode_idx": 146, "frame_idx": 106, "global_frame_idx": 26726, "task_index": 29}, {"db_idx": 26727, "episode_idx": 146, "frame_idx": 107, "global_frame_idx": 26727, "task_index": 29}, {"db_idx": 26728, "episode_idx": 146, "frame_idx": 108, "global_frame_idx": 26728, "task_index": 29}, {"db_idx": 26729, "episode_idx": 146, "frame_idx": 109, "global_frame_idx": 26729, "task_index": 29}, {"db_idx": 26730, "episode_idx": 146, "frame_idx": 110, "global_frame_idx": 26730, "task_index": 29}, {"db_idx": 26731, "episode_idx": 146, "frame_idx": 111, "global_frame_idx": 26731, "task_index": 29}, {"db_idx": 26732, "episode_idx": 146, "frame_idx": 112, "global_frame_idx": 26732, "task_index": 29}, {"db_idx": 26733, "episode_idx": 146, "frame_idx": 113, "global_frame_idx": 26733, "task_index": 29}, {"db_idx": 26734, "episode_idx": 146, "frame_idx": 114, "global_frame_idx": 26734, "task_index": 29}, {"db_idx": 26735, "episode_idx": 146, "frame_idx": 115, "global_frame_idx": 26735, "task_index": 29}, {"db_idx": 26736, "episode_idx": 146, 
"frame_idx": 116, "global_frame_idx": 26736, "task_index": 29}, {"db_idx": 26737, "episode_idx": 146, "frame_idx": 117, "global_frame_idx": 26737, "task_index": 29}, {"db_idx": 26738, "episode_idx": 146, "frame_idx": 118, "global_frame_idx": 26738, "task_index": 29}, {"db_idx": 26739, "episode_idx": 146, "frame_idx": 119, "global_frame_idx": 26739, "task_index": 29}, {"db_idx": 26740, "episode_idx": 146, "frame_idx": 120, "global_frame_idx": 26740, "task_index": 29}, {"db_idx": 26741, "episode_idx": 146, "frame_idx": 121, "global_frame_idx": 26741, "task_index": 29}, {"db_idx": 26742, "episode_idx": 146, "frame_idx": 122, "global_frame_idx": 26742, "task_index": 29}, {"db_idx": 26743, "episode_idx": 146, "frame_idx": 123, "global_frame_idx": 26743, "task_index": 29}, {"db_idx": 26744, "episode_idx": 146, "frame_idx": 124, "global_frame_idx": 26744, "task_index": 29}, {"db_idx": 26745, "episode_idx": 146, "frame_idx": 125, "global_frame_idx": 26745, "task_index": 29}, {"db_idx": 26746, "episode_idx": 146, "frame_idx": 126, "global_frame_idx": 26746, "task_index": 29}, {"db_idx": 26747, "episode_idx": 146, "frame_idx": 127, "global_frame_idx": 26747, "task_index": 29}, {"db_idx": 26748, "episode_idx": 146, "frame_idx": 128, "global_frame_idx": 26748, "task_index": 29}, {"db_idx": 26749, "episode_idx": 146, "frame_idx": 129, "global_frame_idx": 26749, "task_index": 29}, {"db_idx": 26750, "episode_idx": 146, "frame_idx": 130, "global_frame_idx": 26750, "task_index": 29}, {"db_idx": 26751, "episode_idx": 146, "frame_idx": 131, "global_frame_idx": 26751, "task_index": 29}, {"db_idx": 26752, "episode_idx": 146, "frame_idx": 132, "global_frame_idx": 26752, "task_index": 29}, {"db_idx": 26753, "episode_idx": 146, "frame_idx": 133, "global_frame_idx": 26753, "task_index": 29}, {"db_idx": 26754, "episode_idx": 146, "frame_idx": 134, "global_frame_idx": 26754, "task_index": 29}, {"db_idx": 26755, "episode_idx": 146, "frame_idx": 135, "global_frame_idx": 26755, "task_index": 
29}, {"db_idx": 26756, "episode_idx": 146, "frame_idx": 136, "global_frame_idx": 26756, "task_index": 29}, {"db_idx": 26757, "episode_idx": 146, "frame_idx": 137, "global_frame_idx": 26757, "task_index": 29}, {"db_idx": 26758, "episode_idx": 146, "frame_idx": 138, "global_frame_idx": 26758, "task_index": 29}, {"db_idx": 26759, "episode_idx": 146, "frame_idx": 139, "global_frame_idx": 26759, "task_index": 29}, {"db_idx": 26760, "episode_idx": 146, "frame_idx": 140, "global_frame_idx": 26760, "task_index": 29}, {"db_idx": 26761, "episode_idx": 146, "frame_idx": 141, "global_frame_idx": 26761, "task_index": 29}, {"db_idx": 26762, "episode_idx": 146, "frame_idx": 142, "global_frame_idx": 26762, "task_index": 29}, {"db_idx": 26763, "episode_idx": 146, "frame_idx": 143, "global_frame_idx": 26763, "task_index": 29}, {"db_idx": 26764, "episode_idx": 146, "frame_idx": 144, "global_frame_idx": 26764, "task_index": 29}, {"db_idx": 26765, "episode_idx": 146, "frame_idx": 145, "global_frame_idx": 26765, "task_index": 29}, {"db_idx": 26766, "episode_idx": 146, "frame_idx": 146, "global_frame_idx": 26766, "task_index": 29}, {"db_idx": 26767, "episode_idx": 146, "frame_idx": 147, "global_frame_idx": 26767, "task_index": 29}, {"db_idx": 26768, "episode_idx": 146, "frame_idx": 148, "global_frame_idx": 26768, "task_index": 29}, {"db_idx": 26769, "episode_idx": 146, "frame_idx": 149, "global_frame_idx": 26769, "task_index": 29}, {"db_idx": 26770, "episode_idx": 146, "frame_idx": 150, "global_frame_idx": 26770, "task_index": 29}, {"db_idx": 26771, "episode_idx": 146, "frame_idx": 151, "global_frame_idx": 26771, "task_index": 29}, {"db_idx": 26772, "episode_idx": 146, "frame_idx": 152, "global_frame_idx": 26772, "task_index": 29}, {"db_idx": 26773, "episode_idx": 146, "frame_idx": 153, "global_frame_idx": 26773, "task_index": 29}, {"db_idx": 26774, "episode_idx": 146, "frame_idx": 154, "global_frame_idx": 26774, "task_index": 29}, {"db_idx": 26775, "episode_idx": 146, "frame_idx": 155, 
"global_frame_idx": 26775, "task_index": 29}, {"db_idx": 26776, "episode_idx": 146, "frame_idx": 156, "global_frame_idx": 26776, "task_index": 29}, {"db_idx": 26777, "episode_idx": 146, "frame_idx": 157, "global_frame_idx": 26777, "task_index": 29}, {"db_idx": 26778, "episode_idx": 146, "frame_idx": 158, "global_frame_idx": 26778, "task_index": 29}, {"db_idx": 26779, "episode_idx": 146, "frame_idx": 159, "global_frame_idx": 26779, "task_index": 29}, {"db_idx": 26780, "episode_idx": 146, "frame_idx": 160, "global_frame_idx": 26780, "task_index": 29}, {"db_idx": 26781, "episode_idx": 146, "frame_idx": 161, "global_frame_idx": 26781, "task_index": 29}, {"db_idx": 26782, "episode_idx": 146, "frame_idx": 162, "global_frame_idx": 26782, "task_index": 29}, {"db_idx": 26783, "episode_idx": 146, "frame_idx": 163, "global_frame_idx": 26783, "task_index": 29}, {"db_idx": 26784, "episode_idx": 146, "frame_idx": 164, "global_frame_idx": 26784, "task_index": 29}, {"db_idx": 26785, "episode_idx": 146, "frame_idx": 165, "global_frame_idx": 26785, "task_index": 29}, {"db_idx": 26786, "episode_idx": 146, "frame_idx": 166, "global_frame_idx": 26786, "task_index": 29}, {"db_idx": 26787, "episode_idx": 146, "frame_idx": 167, "global_frame_idx": 26787, "task_index": 29}, {"db_idx": 26788, "episode_idx": 146, "frame_idx": 168, "global_frame_idx": 26788, "task_index": 29}, {"db_idx": 26789, "episode_idx": 146, "frame_idx": 169, "global_frame_idx": 26789, "task_index": 29}, {"db_idx": 26790, "episode_idx": 146, "frame_idx": 170, "global_frame_idx": 26790, "task_index": 29}, {"db_idx": 26791, "episode_idx": 146, "frame_idx": 171, "global_frame_idx": 26791, "task_index": 29}, {"db_idx": 26792, "episode_idx": 146, "frame_idx": 172, "global_frame_idx": 26792, "task_index": 29}, {"db_idx": 26793, "episode_idx": 146, "frame_idx": 173, "global_frame_idx": 26793, "task_index": 29}, {"db_idx": 26794, "episode_idx": 146, "frame_idx": 174, "global_frame_idx": 26794, "task_index": 29}, {"db_idx": 
26795, "episode_idx": 146, "frame_idx": 175, "global_frame_idx": 26795, "task_index": 29}, {"db_idx": 26796, "episode_idx": 146, "frame_idx": 176, "global_frame_idx": 26796, "task_index": 29}, {"db_idx": 26797, "episode_idx": 146, "frame_idx": 177, "global_frame_idx": 26797, "task_index": 29}, {"db_idx": 26798, "episode_idx": 146, "frame_idx": 178, "global_frame_idx": 26798, "task_index": 29}, {"db_idx": 26799, "episode_idx": 146, "frame_idx": 179, "global_frame_idx": 26799, "task_index": 29}, {"db_idx": 26800, "episode_idx": 146, "frame_idx": 180, "global_frame_idx": 26800, "task_index": 29}, {"db_idx": 26801, "episode_idx": 146, "frame_idx": 181, "global_frame_idx": 26801, "task_index": 29}, {"db_idx": 26802, "episode_idx": 146, "frame_idx": 182, "global_frame_idx": 26802, "task_index": 29}, {"db_idx": 26803, "episode_idx": 146, "frame_idx": 183, "global_frame_idx": 26803, "task_index": 29}, {"db_idx": 26804, "episode_idx": 146, "frame_idx": 184, "global_frame_idx": 26804, "task_index": 29}, {"db_idx": 26805, "episode_idx": 146, "frame_idx": 185, "global_frame_idx": 26805, "task_index": 29}, {"db_idx": 26806, "episode_idx": 146, "frame_idx": 186, "global_frame_idx": 26806, "task_index": 29}, {"db_idx": 26807, "episode_idx": 146, "frame_idx": 187, "global_frame_idx": 26807, "task_index": 29}, {"db_idx": 26808, "episode_idx": 146, "frame_idx": 188, "global_frame_idx": 26808, "task_index": 29}, {"db_idx": 26809, "episode_idx": 146, "frame_idx": 189, "global_frame_idx": 26809, "task_index": 29}, {"db_idx": 26810, "episode_idx": 146, "frame_idx": 190, "global_frame_idx": 26810, "task_index": 29}, {"db_idx": 26811, "episode_idx": 146, "frame_idx": 191, "global_frame_idx": 26811, "task_index": 29}, {"db_idx": 26812, "episode_idx": 146, "frame_idx": 192, "global_frame_idx": 26812, "task_index": 29}, {"db_idx": 26813, "episode_idx": 146, "frame_idx": 193, "global_frame_idx": 26813, "task_index": 29}, {"db_idx": 26814, "episode_idx": 146, "frame_idx": 194, 
"global_frame_idx": 26814, "task_index": 29}, {"db_idx": 26815, "episode_idx": 146, "frame_idx": 195, "global_frame_idx": 26815, "task_index": 29}, {"db_idx": 26816, "episode_idx": 146, "frame_idx": 196, "global_frame_idx": 26816, "task_index": 29}, {"db_idx": 26817, "episode_idx": 146, "frame_idx": 197, "global_frame_idx": 26817, "task_index": 29}, {"db_idx": 26818, "episode_idx": 146, "frame_idx": 198, "global_frame_idx": 26818, "task_index": 29}, {"db_idx": 26819, "episode_idx": 146, "frame_idx": 199, "global_frame_idx": 26819, "task_index": 29}, {"db_idx": 26820, "episode_idx": 146, "frame_idx": 200, "global_frame_idx": 26820, "task_index": 29}, {"db_idx": 26821, "episode_idx": 146, "frame_idx": 201, "global_frame_idx": 26821, "task_index": 29}, {"db_idx": 26822, "episode_idx": 146, "frame_idx": 202, "global_frame_idx": 26822, "task_index": 29}, {"db_idx": 26823, "episode_idx": 146, "frame_idx": 203, "global_frame_idx": 26823, "task_index": 29}, {"db_idx": 26824, "episode_idx": 146, "frame_idx": 204, "global_frame_idx": 26824, "task_index": 29}, {"db_idx": 26825, "episode_idx": 146, "frame_idx": 205, "global_frame_idx": 26825, "task_index": 29}, {"db_idx": 26826, "episode_idx": 146, "frame_idx": 206, "global_frame_idx": 26826, "task_index": 29}, {"db_idx": 26827, "episode_idx": 146, "frame_idx": 207, "global_frame_idx": 26827, "task_index": 29}, {"db_idx": 26828, "episode_idx": 146, "frame_idx": 208, "global_frame_idx": 26828, "task_index": 29}, {"db_idx": 26829, "episode_idx": 146, "frame_idx": 209, "global_frame_idx": 26829, "task_index": 29}, {"db_idx": 26830, "episode_idx": 146, "frame_idx": 210, "global_frame_idx": 26830, "task_index": 29}, {"db_idx": 26831, "episode_idx": 146, "frame_idx": 211, "global_frame_idx": 26831, "task_index": 29}, {"db_idx": 26832, "episode_idx": 146, "frame_idx": 212, "global_frame_idx": 26832, "task_index": 29}, {"db_idx": 26833, "episode_idx": 146, "frame_idx": 213, "global_frame_idx": 26833, "task_index": 29}, {"db_idx": 
26834, "episode_idx": 146, "frame_idx": 214, "global_frame_idx": 26834, "task_index": 29}, {"db_idx": 26835, "episode_idx": 146, "frame_idx": 215, "global_frame_idx": 26835, "task_index": 29}, {"db_idx": 26836, "episode_idx": 147, "frame_idx": 0, "global_frame_idx": 26836, "task_index": 29}, {"db_idx": 26837, "episode_idx": 147, "frame_idx": 1, "global_frame_idx": 26837, "task_index": 29}, {"db_idx": 26838, "episode_idx": 147, "frame_idx": 2, "global_frame_idx": 26838, "task_index": 29}, {"db_idx": 26839, "episode_idx": 147, "frame_idx": 3, "global_frame_idx": 26839, "task_index": 29}, {"db_idx": 26840, "episode_idx": 147, "frame_idx": 4, "global_frame_idx": 26840, "task_index": 29}, {"db_idx": 26841, "episode_idx": 147, "frame_idx": 5, "global_frame_idx": 26841, "task_index": 29}, {"db_idx": 26842, "episode_idx": 147, "frame_idx": 6, "global_frame_idx": 26842, "task_index": 29}, {"db_idx": 26843, "episode_idx": 147, "frame_idx": 7, "global_frame_idx": 26843, "task_index": 29}, {"db_idx": 26844, "episode_idx": 147, "frame_idx": 8, "global_frame_idx": 26844, "task_index": 29}, {"db_idx": 26845, "episode_idx": 147, "frame_idx": 9, "global_frame_idx": 26845, "task_index": 29}, {"db_idx": 26846, "episode_idx": 147, "frame_idx": 10, "global_frame_idx": 26846, "task_index": 29}, {"db_idx": 26847, "episode_idx": 147, "frame_idx": 11, "global_frame_idx": 26847, "task_index": 29}, {"db_idx": 26848, "episode_idx": 147, "frame_idx": 12, "global_frame_idx": 26848, "task_index": 29}, {"db_idx": 26849, "episode_idx": 147, "frame_idx": 13, "global_frame_idx": 26849, "task_index": 29}, {"db_idx": 26850, "episode_idx": 147, "frame_idx": 14, "global_frame_idx": 26850, "task_index": 29}, {"db_idx": 26851, "episode_idx": 147, "frame_idx": 15, "global_frame_idx": 26851, "task_index": 29}, {"db_idx": 26852, "episode_idx": 147, "frame_idx": 16, "global_frame_idx": 26852, "task_index": 29}, {"db_idx": 26853, "episode_idx": 147, "frame_idx": 17, "global_frame_idx": 26853, "task_index": 
29}, {"db_idx": 26854, "episode_idx": 147, "frame_idx": 18, "global_frame_idx": 26854, "task_index": 29}, {"db_idx": 26855, "episode_idx": 147, "frame_idx": 19, "global_frame_idx": 26855, "task_index": 29}, {"db_idx": 26856, "episode_idx": 147, "frame_idx": 20, "global_frame_idx": 26856, "task_index": 29}, {"db_idx": 26857, "episode_idx": 147, "frame_idx": 21, "global_frame_idx": 26857, "task_index": 29}, {"db_idx": 26858, "episode_idx": 147, "frame_idx": 22, "global_frame_idx": 26858, "task_index": 29}, {"db_idx": 26859, "episode_idx": 147, "frame_idx": 23, "global_frame_idx": 26859, "task_index": 29}, {"db_idx": 26860, "episode_idx": 147, "frame_idx": 24, "global_frame_idx": 26860, "task_index": 29}, {"db_idx": 26861, "episode_idx": 147, "frame_idx": 25, "global_frame_idx": 26861, "task_index": 29}, {"db_idx": 26862, "episode_idx": 147, "frame_idx": 26, "global_frame_idx": 26862, "task_index": 29}, {"db_idx": 26863, "episode_idx": 147, "frame_idx": 27, "global_frame_idx": 26863, "task_index": 29}, {"db_idx": 26864, "episode_idx": 147, "frame_idx": 28, "global_frame_idx": 26864, "task_index": 29}, {"db_idx": 26865, "episode_idx": 147, "frame_idx": 29, "global_frame_idx": 26865, "task_index": 29}, {"db_idx": 26866, "episode_idx": 147, "frame_idx": 30, "global_frame_idx": 26866, "task_index": 29}, {"db_idx": 26867, "episode_idx": 147, "frame_idx": 31, "global_frame_idx": 26867, "task_index": 29}, {"db_idx": 26868, "episode_idx": 147, "frame_idx": 32, "global_frame_idx": 26868, "task_index": 29}, {"db_idx": 26869, "episode_idx": 147, "frame_idx": 33, "global_frame_idx": 26869, "task_index": 29}, {"db_idx": 26870, "episode_idx": 147, "frame_idx": 34, "global_frame_idx": 26870, "task_index": 29}, {"db_idx": 26871, "episode_idx": 147, "frame_idx": 35, "global_frame_idx": 26871, "task_index": 29}, {"db_idx": 26872, "episode_idx": 147, "frame_idx": 36, "global_frame_idx": 26872, "task_index": 29}, {"db_idx": 26873, "episode_idx": 147, "frame_idx": 37, "global_frame_idx": 
26873, "task_index": 29}, {"db_idx": 26874, "episode_idx": 147, "frame_idx": 38, "global_frame_idx": 26874, "task_index": 29}, {"db_idx": 26875, "episode_idx": 147, "frame_idx": 39, "global_frame_idx": 26875, "task_index": 29}, {"db_idx": 26876, "episode_idx": 147, "frame_idx": 40, "global_frame_idx": 26876, "task_index": 29}, {"db_idx": 26877, "episode_idx": 147, "frame_idx": 41, "global_frame_idx": 26877, "task_index": 29}, {"db_idx": 26878, "episode_idx": 147, "frame_idx": 42, "global_frame_idx": 26878, "task_index": 29}, {"db_idx": 26879, "episode_idx": 147, "frame_idx": 43, "global_frame_idx": 26879, "task_index": 29}, {"db_idx": 26880, "episode_idx": 147, "frame_idx": 44, "global_frame_idx": 26880, "task_index": 29}, {"db_idx": 26881, "episode_idx": 147, "frame_idx": 45, "global_frame_idx": 26881, "task_index": 29}, {"db_idx": 26882, "episode_idx": 147, "frame_idx": 46, "global_frame_idx": 26882, "task_index": 29}, {"db_idx": 26883, "episode_idx": 147, "frame_idx": 47, "global_frame_idx": 26883, "task_index": 29}, {"db_idx": 26884, "episode_idx": 147, "frame_idx": 48, "global_frame_idx": 26884, "task_index": 29}, {"db_idx": 26885, "episode_idx": 147, "frame_idx": 49, "global_frame_idx": 26885, "task_index": 29}, {"db_idx": 26886, "episode_idx": 147, "frame_idx": 50, "global_frame_idx": 26886, "task_index": 29}, {"db_idx": 26887, "episode_idx": 147, "frame_idx": 51, "global_frame_idx": 26887, "task_index": 29}, {"db_idx": 26888, "episode_idx": 147, "frame_idx": 52, "global_frame_idx": 26888, "task_index": 29}, {"db_idx": 26889, "episode_idx": 147, "frame_idx": 53, "global_frame_idx": 26889, "task_index": 29}, {"db_idx": 26890, "episode_idx": 147, "frame_idx": 54, "global_frame_idx": 26890, "task_index": 29}, {"db_idx": 26891, "episode_idx": 147, "frame_idx": 55, "global_frame_idx": 26891, "task_index": 29}, {"db_idx": 26892, "episode_idx": 147, "frame_idx": 56, "global_frame_idx": 26892, "task_index": 29}, {"db_idx": 26893, "episode_idx": 147, "frame_idx": 57, 
"global_frame_idx": 26893, "task_index": 29}, {"db_idx": 26894, "episode_idx": 147, "frame_idx": 58, "global_frame_idx": 26894, "task_index": 29}, {"db_idx": 26895, "episode_idx": 147, "frame_idx": 59, "global_frame_idx": 26895, "task_index": 29}, {"db_idx": 26896, "episode_idx": 147, "frame_idx": 60, "global_frame_idx": 26896, "task_index": 29}, {"db_idx": 26897, "episode_idx": 147, "frame_idx": 61, "global_frame_idx": 26897, "task_index": 29}, {"db_idx": 26898, "episode_idx": 147, "frame_idx": 62, "global_frame_idx": 26898, "task_index": 29}, {"db_idx": 26899, "episode_idx": 147, "frame_idx": 63, "global_frame_idx": 26899, "task_index": 29}, {"db_idx": 26900, "episode_idx": 147, "frame_idx": 64, "global_frame_idx": 26900, "task_index": 29}, {"db_idx": 26901, "episode_idx": 147, "frame_idx": 65, "global_frame_idx": 26901, "task_index": 29}, {"db_idx": 26902, "episode_idx": 147, "frame_idx": 66, "global_frame_idx": 26902, "task_index": 29}, {"db_idx": 26903, "episode_idx": 147, "frame_idx": 67, "global_frame_idx": 26903, "task_index": 29}, {"db_idx": 26904, "episode_idx": 147, "frame_idx": 68, "global_frame_idx": 26904, "task_index": 29}, {"db_idx": 26905, "episode_idx": 147, "frame_idx": 69, "global_frame_idx": 26905, "task_index": 29}, {"db_idx": 26906, "episode_idx": 147, "frame_idx": 70, "global_frame_idx": 26906, "task_index": 29}, {"db_idx": 26907, "episode_idx": 147, "frame_idx": 71, "global_frame_idx": 26907, "task_index": 29}, {"db_idx": 26908, "episode_idx": 147, "frame_idx": 72, "global_frame_idx": 26908, "task_index": 29}, {"db_idx": 26909, "episode_idx": 147, "frame_idx": 73, "global_frame_idx": 26909, "task_index": 29}, {"db_idx": 26910, "episode_idx": 147, "frame_idx": 74, "global_frame_idx": 26910, "task_index": 29}, {"db_idx": 26911, "episode_idx": 147, "frame_idx": 75, "global_frame_idx": 26911, "task_index": 29}, {"db_idx": 26912, "episode_idx": 147, "frame_idx": 76, "global_frame_idx": 26912, "task_index": 29}, {"db_idx": 26913, "episode_idx": 
147, "frame_idx": 77, "global_frame_idx": 26913, "task_index": 29}, {"db_idx": 26914, "episode_idx": 147, "frame_idx": 78, "global_frame_idx": 26914, "task_index": 29}, {"db_idx": 26915, "episode_idx": 147, "frame_idx": 79, "global_frame_idx": 26915, "task_index": 29}, {"db_idx": 26916, "episode_idx": 147, "frame_idx": 80, "global_frame_idx": 26916, "task_index": 29}, {"db_idx": 26917, "episode_idx": 147, "frame_idx": 81, "global_frame_idx": 26917, "task_index": 29}, {"db_idx": 26918, "episode_idx": 147, "frame_idx": 82, "global_frame_idx": 26918, "task_index": 29}, {"db_idx": 26919, "episode_idx": 147, "frame_idx": 83, "global_frame_idx": 26919, "task_index": 29}, {"db_idx": 26920, "episode_idx": 147, "frame_idx": 84, "global_frame_idx": 26920, "task_index": 29}, {"db_idx": 26921, "episode_idx": 147, "frame_idx": 85, "global_frame_idx": 26921, "task_index": 29}, {"db_idx": 26922, "episode_idx": 147, "frame_idx": 86, "global_frame_idx": 26922, "task_index": 29}, {"db_idx": 26923, "episode_idx": 147, "frame_idx": 87, "global_frame_idx": 26923, "task_index": 29}, {"db_idx": 26924, "episode_idx": 147, "frame_idx": 88, "global_frame_idx": 26924, "task_index": 29}, {"db_idx": 26925, "episode_idx": 147, "frame_idx": 89, "global_frame_idx": 26925, "task_index": 29}, {"db_idx": 26926, "episode_idx": 147, "frame_idx": 90, "global_frame_idx": 26926, "task_index": 29}, {"db_idx": 26927, "episode_idx": 147, "frame_idx": 91, "global_frame_idx": 26927, "task_index": 29}, {"db_idx": 26928, "episode_idx": 147, "frame_idx": 92, "global_frame_idx": 26928, "task_index": 29}, {"db_idx": 26929, "episode_idx": 147, "frame_idx": 93, "global_frame_idx": 26929, "task_index": 29}, {"db_idx": 26930, "episode_idx": 147, "frame_idx": 94, "global_frame_idx": 26930, "task_index": 29}, {"db_idx": 26931, "episode_idx": 147, "frame_idx": 95, "global_frame_idx": 26931, "task_index": 29}, {"db_idx": 26932, "episode_idx": 147, "frame_idx": 96, "global_frame_idx": 26932, "task_index": 29}, {"db_idx": 
26933, "episode_idx": 147, "frame_idx": 97, "global_frame_idx": 26933, "task_index": 29}, {"db_idx": 26934, "episode_idx": 147, "frame_idx": 98, "global_frame_idx": 26934, "task_index": 29}, {"db_idx": 26935, "episode_idx": 147, "frame_idx": 99, "global_frame_idx": 26935, "task_index": 29}, {"db_idx": 26936, "episode_idx": 147, "frame_idx": 100, "global_frame_idx": 26936, "task_index": 29}, {"db_idx": 26937, "episode_idx": 147, "frame_idx": 101, "global_frame_idx": 26937, "task_index": 29}, {"db_idx": 26938, "episode_idx": 147, "frame_idx": 102, "global_frame_idx": 26938, "task_index": 29}, {"db_idx": 26939, "episode_idx": 147, "frame_idx": 103, "global_frame_idx": 26939, "task_index": 29}, {"db_idx": 26940, "episode_idx": 147, "frame_idx": 104, "global_frame_idx": 26940, "task_index": 29}, {"db_idx": 26941, "episode_idx": 147, "frame_idx": 105, "global_frame_idx": 26941, "task_index": 29}, {"db_idx": 26942, "episode_idx": 147, "frame_idx": 106, "global_frame_idx": 26942, "task_index": 29}, {"db_idx": 26943, "episode_idx": 147, "frame_idx": 107, "global_frame_idx": 26943, "task_index": 29}, {"db_idx": 26944, "episode_idx": 147, "frame_idx": 108, "global_frame_idx": 26944, "task_index": 29}, {"db_idx": 26945, "episode_idx": 147, "frame_idx": 109, "global_frame_idx": 26945, "task_index": 29}, {"db_idx": 26946, "episode_idx": 147, "frame_idx": 110, "global_frame_idx": 26946, "task_index": 29}, {"db_idx": 26947, "episode_idx": 147, "frame_idx": 111, "global_frame_idx": 26947, "task_index": 29}, {"db_idx": 26948, "episode_idx": 147, "frame_idx": 112, "global_frame_idx": 26948, "task_index": 29}, {"db_idx": 26949, "episode_idx": 147, "frame_idx": 113, "global_frame_idx": 26949, "task_index": 29}, {"db_idx": 26950, "episode_idx": 147, "frame_idx": 114, "global_frame_idx": 26950, "task_index": 29}, {"db_idx": 26951, "episode_idx": 147, "frame_idx": 115, "global_frame_idx": 26951, "task_index": 29}, {"db_idx": 26952, "episode_idx": 147, "frame_idx": 116, "global_frame_idx": 
26952, "task_index": 29}, {"db_idx": 26953, "episode_idx": 147, "frame_idx": 117, "global_frame_idx": 26953, "task_index": 29}, {"db_idx": 26954, "episode_idx": 147, "frame_idx": 118, "global_frame_idx": 26954, "task_index": 29}, {"db_idx": 26955, "episode_idx": 147, "frame_idx": 119, "global_frame_idx": 26955, "task_index": 29}, {"db_idx": 26956, "episode_idx": 147, "frame_idx": 120, "global_frame_idx": 26956, "task_index": 29}, {"db_idx": 26957, "episode_idx": 147, "frame_idx": 121, "global_frame_idx": 26957, "task_index": 29}, {"db_idx": 26958, "episode_idx": 147, "frame_idx": 122, "global_frame_idx": 26958, "task_index": 29}, {"db_idx": 26959, "episode_idx": 147, "frame_idx": 123, "global_frame_idx": 26959, "task_index": 29}, {"db_idx": 26960, "episode_idx": 147, "frame_idx": 124, "global_frame_idx": 26960, "task_index": 29}, {"db_idx": 26961, "episode_idx": 147, "frame_idx": 125, "global_frame_idx": 26961, "task_index": 29}, {"db_idx": 26962, "episode_idx": 147, "frame_idx": 126, "global_frame_idx": 26962, "task_index": 29}, {"db_idx": 26963, "episode_idx": 147, "frame_idx": 127, "global_frame_idx": 26963, "task_index": 29}, {"db_idx": 26964, "episode_idx": 147, "frame_idx": 128, "global_frame_idx": 26964, "task_index": 29}, {"db_idx": 26965, "episode_idx": 147, "frame_idx": 129, "global_frame_idx": 26965, "task_index": 29}, {"db_idx": 26966, "episode_idx": 147, "frame_idx": 130, "global_frame_idx": 26966, "task_index": 29}, {"db_idx": 26967, "episode_idx": 147, "frame_idx": 131, "global_frame_idx": 26967, "task_index": 29}, {"db_idx": 26968, "episode_idx": 147, "frame_idx": 132, "global_frame_idx": 26968, "task_index": 29}, {"db_idx": 26969, "episode_idx": 147, "frame_idx": 133, "global_frame_idx": 26969, "task_index": 29}, {"db_idx": 26970, "episode_idx": 147, "frame_idx": 134, "global_frame_idx": 26970, "task_index": 29}, {"db_idx": 26971, "episode_idx": 147, "frame_idx": 135, "global_frame_idx": 26971, "task_index": 29}, {"db_idx": 26972, "episode_idx": 
147, "frame_idx": 136, "global_frame_idx": 26972, "task_index": 29}, {"db_idx": 26973, "episode_idx": 147, "frame_idx": 137, "global_frame_idx": 26973, "task_index": 29}, {"db_idx": 26974, "episode_idx": 147, "frame_idx": 138, "global_frame_idx": 26974, "task_index": 29}, {"db_idx": 26975, "episode_idx": 147, "frame_idx": 139, "global_frame_idx": 26975, "task_index": 29}, {"db_idx": 26976, "episode_idx": 147, "frame_idx": 140, "global_frame_idx": 26976, "task_index": 29}, {"db_idx": 26977, "episode_idx": 147, "frame_idx": 141, "global_frame_idx": 26977, "task_index": 29}, {"db_idx": 26978, "episode_idx": 147, "frame_idx": 142, "global_frame_idx": 26978, "task_index": 29}, {"db_idx": 26979, "episode_idx": 147, "frame_idx": 143, "global_frame_idx": 26979, "task_index": 29}, {"db_idx": 26980, "episode_idx": 147, "frame_idx": 144, "global_frame_idx": 26980, "task_index": 29}, {"db_idx": 26981, "episode_idx": 147, "frame_idx": 145, "global_frame_idx": 26981, "task_index": 29}, {"db_idx": 26982, "episode_idx": 147, "frame_idx": 146, "global_frame_idx": 26982, "task_index": 29}, {"db_idx": 26983, "episode_idx": 147, "frame_idx": 147, "global_frame_idx": 26983, "task_index": 29}, {"db_idx": 26984, "episode_idx": 147, "frame_idx": 148, "global_frame_idx": 26984, "task_index": 29}, {"db_idx": 26985, "episode_idx": 147, "frame_idx": 149, "global_frame_idx": 26985, "task_index": 29}, {"db_idx": 26986, "episode_idx": 147, "frame_idx": 150, "global_frame_idx": 26986, "task_index": 29}, {"db_idx": 26987, "episode_idx": 147, "frame_idx": 151, "global_frame_idx": 26987, "task_index": 29}, {"db_idx": 26988, "episode_idx": 147, "frame_idx": 152, "global_frame_idx": 26988, "task_index": 29}, {"db_idx": 26989, "episode_idx": 147, "frame_idx": 153, "global_frame_idx": 26989, "task_index": 29}, {"db_idx": 26990, "episode_idx": 147, "frame_idx": 154, "global_frame_idx": 26990, "task_index": 29}, {"db_idx": 26991, "episode_idx": 147, "frame_idx": 155, "global_frame_idx": 26991, 
"task_index": 29}, {"db_idx": 26992, "episode_idx": 147, "frame_idx": 156, "global_frame_idx": 26992, "task_index": 29}, {"db_idx": 26993, "episode_idx": 147, "frame_idx": 157, "global_frame_idx": 26993, "task_index": 29}, {"db_idx": 26994, "episode_idx": 147, "frame_idx": 158, "global_frame_idx": 26994, "task_index": 29}, {"db_idx": 26995, "episode_idx": 147, "frame_idx": 159, "global_frame_idx": 26995, "task_index": 29}, {"db_idx": 26996, "episode_idx": 147, "frame_idx": 160, "global_frame_idx": 26996, "task_index": 29}, {"db_idx": 26997, "episode_idx": 147, "frame_idx": 161, "global_frame_idx": 26997, "task_index": 29}, {"db_idx": 26998, "episode_idx": 147, "frame_idx": 162, "global_frame_idx": 26998, "task_index": 29}, {"db_idx": 26999, "episode_idx": 147, "frame_idx": 163, "global_frame_idx": 26999, "task_index": 29}, {"db_idx": 27000, "episode_idx": 147, "frame_idx": 164, "global_frame_idx": 27000, "task_index": 29}, {"db_idx": 27001, "episode_idx": 147, "frame_idx": 165, "global_frame_idx": 27001, "task_index": 29}, {"db_idx": 27002, "episode_idx": 147, "frame_idx": 166, "global_frame_idx": 27002, "task_index": 29}, {"db_idx": 27003, "episode_idx": 147, "frame_idx": 167, "global_frame_idx": 27003, "task_index": 29}, {"db_idx": 27004, "episode_idx": 147, "frame_idx": 168, "global_frame_idx": 27004, "task_index": 29}, {"db_idx": 27005, "episode_idx": 147, "frame_idx": 169, "global_frame_idx": 27005, "task_index": 29}, {"db_idx": 27006, "episode_idx": 147, "frame_idx": 170, "global_frame_idx": 27006, "task_index": 29}, {"db_idx": 27007, "episode_idx": 147, "frame_idx": 171, "global_frame_idx": 27007, "task_index": 29}, {"db_idx": 27008, "episode_idx": 147, "frame_idx": 172, "global_frame_idx": 27008, "task_index": 29}, {"db_idx": 27009, "episode_idx": 147, "frame_idx": 173, "global_frame_idx": 27009, "task_index": 29}, {"db_idx": 27010, "episode_idx": 147, "frame_idx": 174, "global_frame_idx": 27010, "task_index": 29}, {"db_idx": 27011, "episode_idx": 147, 
"frame_idx": 175, "global_frame_idx": 27011, "task_index": 29}, {"db_idx": 27012, "episode_idx": 147, "frame_idx": 176, "global_frame_idx": 27012, "task_index": 29}, {"db_idx": 27013, "episode_idx": 147, "frame_idx": 177, "global_frame_idx": 27013, "task_index": 29}, {"db_idx": 27014, "episode_idx": 147, "frame_idx": 178, "global_frame_idx": 27014, "task_index": 29}, {"db_idx": 27015, "episode_idx": 147, "frame_idx": 179, "global_frame_idx": 27015, "task_index": 29}, {"db_idx": 27016, "episode_idx": 147, "frame_idx": 180, "global_frame_idx": 27016, "task_index": 29}, {"db_idx": 27017, "episode_idx": 147, "frame_idx": 181, "global_frame_idx": 27017, "task_index": 29}, {"db_idx": 27018, "episode_idx": 147, "frame_idx": 182, "global_frame_idx": 27018, "task_index": 29}, {"db_idx": 27019, "episode_idx": 147, "frame_idx": 183, "global_frame_idx": 27019, "task_index": 29}, {"db_idx": 27020, "episode_idx": 147, "frame_idx": 184, "global_frame_idx": 27020, "task_index": 29}, {"db_idx": 27021, "episode_idx": 147, "frame_idx": 185, "global_frame_idx": 27021, "task_index": 29}, {"db_idx": 27022, "episode_idx": 147, "frame_idx": 186, "global_frame_idx": 27022, "task_index": 29}, {"db_idx": 27023, "episode_idx": 147, "frame_idx": 187, "global_frame_idx": 27023, "task_index": 29}, {"db_idx": 27024, "episode_idx": 147, "frame_idx": 188, "global_frame_idx": 27024, "task_index": 29}, {"db_idx": 27025, "episode_idx": 147, "frame_idx": 189, "global_frame_idx": 27025, "task_index": 29}, {"db_idx": 27026, "episode_idx": 147, "frame_idx": 190, "global_frame_idx": 27026, "task_index": 29}, {"db_idx": 27027, "episode_idx": 147, "frame_idx": 191, "global_frame_idx": 27027, "task_index": 29}, {"db_idx": 27028, "episode_idx": 147, "frame_idx": 192, "global_frame_idx": 27028, "task_index": 29}, {"db_idx": 27029, "episode_idx": 147, "frame_idx": 193, "global_frame_idx": 27029, "task_index": 29}, {"db_idx": 27030, "episode_idx": 147, "frame_idx": 194, "global_frame_idx": 27030, "task_index": 
29}, {"db_idx": 27031, "episode_idx": 147, "frame_idx": 195, "global_frame_idx": 27031, "task_index": 29}, {"db_idx": 27032, "episode_idx": 148, "frame_idx": 0, "global_frame_idx": 27032, "task_index": 29}, {"db_idx": 27033, "episode_idx": 148, "frame_idx": 1, "global_frame_idx": 27033, "task_index": 29}, {"db_idx": 27034, "episode_idx": 148, "frame_idx": 2, "global_frame_idx": 27034, "task_index": 29}, {"db_idx": 27035, "episode_idx": 148, "frame_idx": 3, "global_frame_idx": 27035, "task_index": 29}, {"db_idx": 27036, "episode_idx": 148, "frame_idx": 4, "global_frame_idx": 27036, "task_index": 29}, {"db_idx": 27037, "episode_idx": 148, "frame_idx": 5, "global_frame_idx": 27037, "task_index": 29}, {"db_idx": 27038, "episode_idx": 148, "frame_idx": 6, "global_frame_idx": 27038, "task_index": 29}, {"db_idx": 27039, "episode_idx": 148, "frame_idx": 7, "global_frame_idx": 27039, "task_index": 29}, {"db_idx": 27040, "episode_idx": 148, "frame_idx": 8, "global_frame_idx": 27040, "task_index": 29}, {"db_idx": 27041, "episode_idx": 148, "frame_idx": 9, "global_frame_idx": 27041, "task_index": 29}, {"db_idx": 27042, "episode_idx": 148, "frame_idx": 10, "global_frame_idx": 27042, "task_index": 29}, {"db_idx": 27043, "episode_idx": 148, "frame_idx": 11, "global_frame_idx": 27043, "task_index": 29}, {"db_idx": 27044, "episode_idx": 148, "frame_idx": 12, "global_frame_idx": 27044, "task_index": 29}, {"db_idx": 27045, "episode_idx": 148, "frame_idx": 13, "global_frame_idx": 27045, "task_index": 29}, {"db_idx": 27046, "episode_idx": 148, "frame_idx": 14, "global_frame_idx": 27046, "task_index": 29}, {"db_idx": 27047, "episode_idx": 148, "frame_idx": 15, "global_frame_idx": 27047, "task_index": 29}, {"db_idx": 27048, "episode_idx": 148, "frame_idx": 16, "global_frame_idx": 27048, "task_index": 29}, {"db_idx": 27049, "episode_idx": 148, "frame_idx": 17, "global_frame_idx": 27049, "task_index": 29}, {"db_idx": 27050, "episode_idx": 148, "frame_idx": 18, "global_frame_idx": 27050, 
"task_index": 29}, {"db_idx": 27051, "episode_idx": 148, "frame_idx": 19, "global_frame_idx": 27051, "task_index": 29}, {"db_idx": 27052, "episode_idx": 148, "frame_idx": 20, "global_frame_idx": 27052, "task_index": 29}, {"db_idx": 27053, "episode_idx": 148, "frame_idx": 21, "global_frame_idx": 27053, "task_index": 29}, {"db_idx": 27054, "episode_idx": 148, "frame_idx": 22, "global_frame_idx": 27054, "task_index": 29}, {"db_idx": 27055, "episode_idx": 148, "frame_idx": 23, "global_frame_idx": 27055, "task_index": 29}, {"db_idx": 27056, "episode_idx": 148, "frame_idx": 24, "global_frame_idx": 27056, "task_index": 29}, {"db_idx": 27057, "episode_idx": 148, "frame_idx": 25, "global_frame_idx": 27057, "task_index": 29}, {"db_idx": 27058, "episode_idx": 148, "frame_idx": 26, "global_frame_idx": 27058, "task_index": 29}, {"db_idx": 27059, "episode_idx": 148, "frame_idx": 27, "global_frame_idx": 27059, "task_index": 29}, {"db_idx": 27060, "episode_idx": 148, "frame_idx": 28, "global_frame_idx": 27060, "task_index": 29}, {"db_idx": 27061, "episode_idx": 148, "frame_idx": 29, "global_frame_idx": 27061, "task_index": 29}, {"db_idx": 27062, "episode_idx": 148, "frame_idx": 30, "global_frame_idx": 27062, "task_index": 29}, {"db_idx": 27063, "episode_idx": 148, "frame_idx": 31, "global_frame_idx": 27063, "task_index": 29}, {"db_idx": 27064, "episode_idx": 148, "frame_idx": 32, "global_frame_idx": 27064, "task_index": 29}, {"db_idx": 27065, "episode_idx": 148, "frame_idx": 33, "global_frame_idx": 27065, "task_index": 29}, {"db_idx": 27066, "episode_idx": 148, "frame_idx": 34, "global_frame_idx": 27066, "task_index": 29}, {"db_idx": 27067, "episode_idx": 148, "frame_idx": 35, "global_frame_idx": 27067, "task_index": 29}, {"db_idx": 27068, "episode_idx": 148, "frame_idx": 36, "global_frame_idx": 27068, "task_index": 29}, {"db_idx": 27069, "episode_idx": 148, "frame_idx": 37, "global_frame_idx": 27069, "task_index": 29}, {"db_idx": 27070, "episode_idx": 148, "frame_idx": 38, 
"global_frame_idx": 27070, "task_index": 29}, {"db_idx": 27071, "episode_idx": 148, "frame_idx": 39, "global_frame_idx": 27071, "task_index": 29}, {"db_idx": 27072, "episode_idx": 148, "frame_idx": 40, "global_frame_idx": 27072, "task_index": 29}, {"db_idx": 27073, "episode_idx": 148, "frame_idx": 41, "global_frame_idx": 27073, "task_index": 29}, {"db_idx": 27074, "episode_idx": 148, "frame_idx": 42, "global_frame_idx": 27074, "task_index": 29}, {"db_idx": 27075, "episode_idx": 148, "frame_idx": 43, "global_frame_idx": 27075, "task_index": 29}, {"db_idx": 27076, "episode_idx": 148, "frame_idx": 44, "global_frame_idx": 27076, "task_index": 29}, {"db_idx": 27077, "episode_idx": 148, "frame_idx": 45, "global_frame_idx": 27077, "task_index": 29}, {"db_idx": 27078, "episode_idx": 148, "frame_idx": 46, "global_frame_idx": 27078, "task_index": 29}, {"db_idx": 27079, "episode_idx": 148, "frame_idx": 47, "global_frame_idx": 27079, "task_index": 29}, {"db_idx": 27080, "episode_idx": 148, "frame_idx": 48, "global_frame_idx": 27080, "task_index": 29}, {"db_idx": 27081, "episode_idx": 148, "frame_idx": 49, "global_frame_idx": 27081, "task_index": 29}, {"db_idx": 27082, "episode_idx": 148, "frame_idx": 50, "global_frame_idx": 27082, "task_index": 29}, {"db_idx": 27083, "episode_idx": 148, "frame_idx": 51, "global_frame_idx": 27083, "task_index": 29}, {"db_idx": 27084, "episode_idx": 148, "frame_idx": 52, "global_frame_idx": 27084, "task_index": 29}, {"db_idx": 27085, "episode_idx": 148, "frame_idx": 53, "global_frame_idx": 27085, "task_index": 29}, {"db_idx": 27086, "episode_idx": 148, "frame_idx": 54, "global_frame_idx": 27086, "task_index": 29}, {"db_idx": 27087, "episode_idx": 148, "frame_idx": 55, "global_frame_idx": 27087, "task_index": 29}, {"db_idx": 27088, "episode_idx": 148, "frame_idx": 56, "global_frame_idx": 27088, "task_index": 29}, {"db_idx": 27089, "episode_idx": 148, "frame_idx": 57, "global_frame_idx": 27089, "task_index": 29}, {"db_idx": 27090, "episode_idx": 
148, "frame_idx": 58, "global_frame_idx": 27090, "task_index": 29}, {"db_idx": 27091, "episode_idx": 148, "frame_idx": 59, "global_frame_idx": 27091, "task_index": 29}, {"db_idx": 27092, "episode_idx": 148, "frame_idx": 60, "global_frame_idx": 27092, "task_index": 29}, {"db_idx": 27093, "episode_idx": 148, "frame_idx": 61, "global_frame_idx": 27093, "task_index": 29}, {"db_idx": 27094, "episode_idx": 148, "frame_idx": 62, "global_frame_idx": 27094, "task_index": 29}, {"db_idx": 27095, "episode_idx": 148, "frame_idx": 63, "global_frame_idx": 27095, "task_index": 29}, {"db_idx": 27096, "episode_idx": 148, "frame_idx": 64, "global_frame_idx": 27096, "task_index": 29}, {"db_idx": 27097, "episode_idx": 148, "frame_idx": 65, "global_frame_idx": 27097, "task_index": 29}, {"db_idx": 27098, "episode_idx": 148, "frame_idx": 66, "global_frame_idx": 27098, "task_index": 29}, {"db_idx": 27099, "episode_idx": 148, "frame_idx": 67, "global_frame_idx": 27099, "task_index": 29}, {"db_idx": 27100, "episode_idx": 148, "frame_idx": 68, "global_frame_idx": 27100, "task_index": 29}, {"db_idx": 27101, "episode_idx": 148, "frame_idx": 69, "global_frame_idx": 27101, "task_index": 29}, {"db_idx": 27102, "episode_idx": 148, "frame_idx": 70, "global_frame_idx": 27102, "task_index": 29}, {"db_idx": 27103, "episode_idx": 148, "frame_idx": 71, "global_frame_idx": 27103, "task_index": 29}, {"db_idx": 27104, "episode_idx": 148, "frame_idx": 72, "global_frame_idx": 27104, "task_index": 29}, {"db_idx": 27105, "episode_idx": 148, "frame_idx": 73, "global_frame_idx": 27105, "task_index": 29}, {"db_idx": 27106, "episode_idx": 148, "frame_idx": 74, "global_frame_idx": 27106, "task_index": 29}, {"db_idx": 27107, "episode_idx": 148, "frame_idx": 75, "global_frame_idx": 27107, "task_index": 29}, {"db_idx": 27108, "episode_idx": 148, "frame_idx": 76, "global_frame_idx": 27108, "task_index": 29}, {"db_idx": 27109, "episode_idx": 148, "frame_idx": 77, "global_frame_idx": 27109, "task_index": 29}, {"db_idx": 
27110, "episode_idx": 148, "frame_idx": 78, "global_frame_idx": 27110, "task_index": 29}, {"db_idx": 27111, "episode_idx": 148, "frame_idx": 79, "global_frame_idx": 27111, "task_index": 29}, {"db_idx": 27112, "episode_idx": 148, "frame_idx": 80, "global_frame_idx": 27112, "task_index": 29}, {"db_idx": 27113, "episode_idx": 148, "frame_idx": 81, "global_frame_idx": 27113, "task_index": 29}, {"db_idx": 27114, "episode_idx": 148, "frame_idx": 82, "global_frame_idx": 27114, "task_index": 29}, {"db_idx": 27115, "episode_idx": 148, "frame_idx": 83, "global_frame_idx": 27115, "task_index": 29}, {"db_idx": 27116, "episode_idx": 148, "frame_idx": 84, "global_frame_idx": 27116, "task_index": 29}, {"db_idx": 27117, "episode_idx": 148, "frame_idx": 85, "global_frame_idx": 27117, "task_index": 29}, {"db_idx": 27118, "episode_idx": 148, "frame_idx": 86, "global_frame_idx": 27118, "task_index": 29}, {"db_idx": 27119, "episode_idx": 148, "frame_idx": 87, "global_frame_idx": 27119, "task_index": 29}, {"db_idx": 27120, "episode_idx": 148, "frame_idx": 88, "global_frame_idx": 27120, "task_index": 29}, {"db_idx": 27121, "episode_idx": 148, "frame_idx": 89, "global_frame_idx": 27121, "task_index": 29}, {"db_idx": 27122, "episode_idx": 148, "frame_idx": 90, "global_frame_idx": 27122, "task_index": 29}, {"db_idx": 27123, "episode_idx": 148, "frame_idx": 91, "global_frame_idx": 27123, "task_index": 29}, {"db_idx": 27124, "episode_idx": 148, "frame_idx": 92, "global_frame_idx": 27124, "task_index": 29}, {"db_idx": 27125, "episode_idx": 148, "frame_idx": 93, "global_frame_idx": 27125, "task_index": 29}, {"db_idx": 27126, "episode_idx": 148, "frame_idx": 94, "global_frame_idx": 27126, "task_index": 29}, {"db_idx": 27127, "episode_idx": 148, "frame_idx": 95, "global_frame_idx": 27127, "task_index": 29}, {"db_idx": 27128, "episode_idx": 148, "frame_idx": 96, "global_frame_idx": 27128, "task_index": 29}, {"db_idx": 27129, "episode_idx": 148, "frame_idx": 97, "global_frame_idx": 27129, 
"task_index": 29}, {"db_idx": 27130, "episode_idx": 148, "frame_idx": 98, "global_frame_idx": 27130, "task_index": 29}, {"db_idx": 27131, "episode_idx": 148, "frame_idx": 99, "global_frame_idx": 27131, "task_index": 29}, {"db_idx": 27132, "episode_idx": 148, "frame_idx": 100, "global_frame_idx": 27132, "task_index": 29}, {"db_idx": 27133, "episode_idx": 148, "frame_idx": 101, "global_frame_idx": 27133, "task_index": 29}, {"db_idx": 27134, "episode_idx": 148, "frame_idx": 102, "global_frame_idx": 27134, "task_index": 29}, {"db_idx": 27135, "episode_idx": 148, "frame_idx": 103, "global_frame_idx": 27135, "task_index": 29}, {"db_idx": 27136, "episode_idx": 148, "frame_idx": 104, "global_frame_idx": 27136, "task_index": 29}, {"db_idx": 27137, "episode_idx": 148, "frame_idx": 105, "global_frame_idx": 27137, "task_index": 29}, {"db_idx": 27138, "episode_idx": 148, "frame_idx": 106, "global_frame_idx": 27138, "task_index": 29}, {"db_idx": 27139, "episode_idx": 148, "frame_idx": 107, "global_frame_idx": 27139, "task_index": 29}, {"db_idx": 27140, "episode_idx": 148, "frame_idx": 108, "global_frame_idx": 27140, "task_index": 29}, {"db_idx": 27141, "episode_idx": 148, "frame_idx": 109, "global_frame_idx": 27141, "task_index": 29}, {"db_idx": 27142, "episode_idx": 148, "frame_idx": 110, "global_frame_idx": 27142, "task_index": 29}, {"db_idx": 27143, "episode_idx": 148, "frame_idx": 111, "global_frame_idx": 27143, "task_index": 29}, {"db_idx": 27144, "episode_idx": 148, "frame_idx": 112, "global_frame_idx": 27144, "task_index": 29}, {"db_idx": 27145, "episode_idx": 148, "frame_idx": 113, "global_frame_idx": 27145, "task_index": 29}, {"db_idx": 27146, "episode_idx": 148, "frame_idx": 114, "global_frame_idx": 27146, "task_index": 29}, {"db_idx": 27147, "episode_idx": 148, "frame_idx": 115, "global_frame_idx": 27147, "task_index": 29}, {"db_idx": 27148, "episode_idx": 148, "frame_idx": 116, "global_frame_idx": 27148, "task_index": 29}, {"db_idx": 27149, "episode_idx": 148, 
"frame_idx": 117, "global_frame_idx": 27149, "task_index": 29}, {"db_idx": 27150, "episode_idx": 148, "frame_idx": 118, "global_frame_idx": 27150, "task_index": 29}, {"db_idx": 27151, "episode_idx": 148, "frame_idx": 119, "global_frame_idx": 27151, "task_index": 29}, {"db_idx": 27152, "episode_idx": 148, "frame_idx": 120, "global_frame_idx": 27152, "task_index": 29}, {"db_idx": 27153, "episode_idx": 148, "frame_idx": 121, "global_frame_idx": 27153, "task_index": 29}, {"db_idx": 27154, "episode_idx": 148, "frame_idx": 122, "global_frame_idx": 27154, "task_index": 29}, {"db_idx": 27155, "episode_idx": 148, "frame_idx": 123, "global_frame_idx": 27155, "task_index": 29}, {"db_idx": 27156, "episode_idx": 148, "frame_idx": 124, "global_frame_idx": 27156, "task_index": 29}, {"db_idx": 27157, "episode_idx": 148, "frame_idx": 125, "global_frame_idx": 27157, "task_index": 29}, {"db_idx": 27158, "episode_idx": 148, "frame_idx": 126, "global_frame_idx": 27158, "task_index": 29}, {"db_idx": 27159, "episode_idx": 148, "frame_idx": 127, "global_frame_idx": 27159, "task_index": 29}, {"db_idx": 27160, "episode_idx": 148, "frame_idx": 128, "global_frame_idx": 27160, "task_index": 29}, {"db_idx": 27161, "episode_idx": 148, "frame_idx": 129, "global_frame_idx": 27161, "task_index": 29}, {"db_idx": 27162, "episode_idx": 148, "frame_idx": 130, "global_frame_idx": 27162, "task_index": 29}, {"db_idx": 27163, "episode_idx": 148, "frame_idx": 131, "global_frame_idx": 27163, "task_index": 29}, {"db_idx": 27164, "episode_idx": 148, "frame_idx": 132, "global_frame_idx": 27164, "task_index": 29}, {"db_idx": 27165, "episode_idx": 148, "frame_idx": 133, "global_frame_idx": 27165, "task_index": 29}, {"db_idx": 27166, "episode_idx": 148, "frame_idx": 134, "global_frame_idx": 27166, "task_index": 29}, {"db_idx": 27167, "episode_idx": 148, "frame_idx": 135, "global_frame_idx": 27167, "task_index": 29}, {"db_idx": 27168, "episode_idx": 148, "frame_idx": 136, "global_frame_idx": 27168, "task_index": 
29}, {"db_idx": 27169, "episode_idx": 148, "frame_idx": 137, "global_frame_idx": 27169, "task_index": 29}, {"db_idx": 27170, "episode_idx": 148, "frame_idx": 138, "global_frame_idx": 27170, "task_index": 29}, {"db_idx": 27171, "episode_idx": 148, "frame_idx": 139, "global_frame_idx": 27171, "task_index": 29}, {"db_idx": 27172, "episode_idx": 148, "frame_idx": 140, "global_frame_idx": 27172, "task_index": 29}, {"db_idx": 27173, "episode_idx": 148, "frame_idx": 141, "global_frame_idx": 27173, "task_index": 29}, {"db_idx": 27174, "episode_idx": 148, "frame_idx": 142, "global_frame_idx": 27174, "task_index": 29}, {"db_idx": 27175, "episode_idx": 148, "frame_idx": 143, "global_frame_idx": 27175, "task_index": 29}, {"db_idx": 27176, "episode_idx": 148, "frame_idx": 144, "global_frame_idx": 27176, "task_index": 29}, {"db_idx": 27177, "episode_idx": 148, "frame_idx": 145, "global_frame_idx": 27177, "task_index": 29}, {"db_idx": 27178, "episode_idx": 148, "frame_idx": 146, "global_frame_idx": 27178, "task_index": 29}, {"db_idx": 27179, "episode_idx": 148, "frame_idx": 147, "global_frame_idx": 27179, "task_index": 29}, {"db_idx": 27180, "episode_idx": 148, "frame_idx": 148, "global_frame_idx": 27180, "task_index": 29}, {"db_idx": 27181, "episode_idx": 148, "frame_idx": 149, "global_frame_idx": 27181, "task_index": 29}, {"db_idx": 27182, "episode_idx": 148, "frame_idx": 150, "global_frame_idx": 27182, "task_index": 29}, {"db_idx": 27183, "episode_idx": 148, "frame_idx": 151, "global_frame_idx": 27183, "task_index": 29}, {"db_idx": 27184, "episode_idx": 148, "frame_idx": 152, "global_frame_idx": 27184, "task_index": 29}, {"db_idx": 27185, "episode_idx": 148, "frame_idx": 153, "global_frame_idx": 27185, "task_index": 29}, {"db_idx": 27186, "episode_idx": 148, "frame_idx": 154, "global_frame_idx": 27186, "task_index": 29}, {"db_idx": 27187, "episode_idx": 148, "frame_idx": 155, "global_frame_idx": 27187, "task_index": 29}, {"db_idx": 27188, "episode_idx": 148, "frame_idx": 156, 
"global_frame_idx": 27188, "task_index": 29}, {"db_idx": 27189, "episode_idx": 148, "frame_idx": 157, "global_frame_idx": 27189, "task_index": 29}, {"db_idx": 27190, "episode_idx": 148, "frame_idx": 158, "global_frame_idx": 27190, "task_index": 29}, {"db_idx": 27191, "episode_idx": 148, "frame_idx": 159, "global_frame_idx": 27191, "task_index": 29}, {"db_idx": 27192, "episode_idx": 148, "frame_idx": 160, "global_frame_idx": 27192, "task_index": 29}, {"db_idx": 27193, "episode_idx": 148, "frame_idx": 161, "global_frame_idx": 27193, "task_index": 29}, {"db_idx": 27194, "episode_idx": 149, "frame_idx": 0, "global_frame_idx": 27194, "task_index": 29}, {"db_idx": 27195, "episode_idx": 149, "frame_idx": 1, "global_frame_idx": 27195, "task_index": 29}, {"db_idx": 27196, "episode_idx": 149, "frame_idx": 2, "global_frame_idx": 27196, "task_index": 29}, {"db_idx": 27197, "episode_idx": 149, "frame_idx": 3, "global_frame_idx": 27197, "task_index": 29}, {"db_idx": 27198, "episode_idx": 149, "frame_idx": 4, "global_frame_idx": 27198, "task_index": 29}, {"db_idx": 27199, "episode_idx": 149, "frame_idx": 5, "global_frame_idx": 27199, "task_index": 29}, {"db_idx": 27200, "episode_idx": 149, "frame_idx": 6, "global_frame_idx": 27200, "task_index": 29}, {"db_idx": 27201, "episode_idx": 149, "frame_idx": 7, "global_frame_idx": 27201, "task_index": 29}, {"db_idx": 27202, "episode_idx": 149, "frame_idx": 8, "global_frame_idx": 27202, "task_index": 29}, {"db_idx": 27203, "episode_idx": 149, "frame_idx": 9, "global_frame_idx": 27203, "task_index": 29}, {"db_idx": 27204, "episode_idx": 149, "frame_idx": 10, "global_frame_idx": 27204, "task_index": 29}, {"db_idx": 27205, "episode_idx": 149, "frame_idx": 11, "global_frame_idx": 27205, "task_index": 29}, {"db_idx": 27206, "episode_idx": 149, "frame_idx": 12, "global_frame_idx": 27206, "task_index": 29}, {"db_idx": 27207, "episode_idx": 149, "frame_idx": 13, "global_frame_idx": 27207, "task_index": 29}, {"db_idx": 27208, "episode_idx": 149, 
"frame_idx": 14, "global_frame_idx": 27208, "task_index": 29}, {"db_idx": 27209, "episode_idx": 149, "frame_idx": 15, "global_frame_idx": 27209, "task_index": 29}, {"db_idx": 27210, "episode_idx": 149, "frame_idx": 16, "global_frame_idx": 27210, "task_index": 29}, {"db_idx": 27211, "episode_idx": 149, "frame_idx": 17, "global_frame_idx": 27211, "task_index": 29}, {"db_idx": 27212, "episode_idx": 149, "frame_idx": 18, "global_frame_idx": 27212, "task_index": 29}, {"db_idx": 27213, "episode_idx": 149, "frame_idx": 19, "global_frame_idx": 27213, "task_index": 29}, {"db_idx": 27214, "episode_idx": 149, "frame_idx": 20, "global_frame_idx": 27214, "task_index": 29}, {"db_idx": 27215, "episode_idx": 149, "frame_idx": 21, "global_frame_idx": 27215, "task_index": 29}, {"db_idx": 27216, "episode_idx": 149, "frame_idx": 22, "global_frame_idx": 27216, "task_index": 29}, {"db_idx": 27217, "episode_idx": 149, "frame_idx": 23, "global_frame_idx": 27217, "task_index": 29}, {"db_idx": 27218, "episode_idx": 149, "frame_idx": 24, "global_frame_idx": 27218, "task_index": 29}, {"db_idx": 27219, "episode_idx": 149, "frame_idx": 25, "global_frame_idx": 27219, "task_index": 29}, {"db_idx": 27220, "episode_idx": 149, "frame_idx": 26, "global_frame_idx": 27220, "task_index": 29}, {"db_idx": 27221, "episode_idx": 149, "frame_idx": 27, "global_frame_idx": 27221, "task_index": 29}, {"db_idx": 27222, "episode_idx": 149, "frame_idx": 28, "global_frame_idx": 27222, "task_index": 29}, {"db_idx": 27223, "episode_idx": 149, "frame_idx": 29, "global_frame_idx": 27223, "task_index": 29}, {"db_idx": 27224, "episode_idx": 149, "frame_idx": 30, "global_frame_idx": 27224, "task_index": 29}, {"db_idx": 27225, "episode_idx": 149, "frame_idx": 31, "global_frame_idx": 27225, "task_index": 29}, {"db_idx": 27226, "episode_idx": 149, "frame_idx": 32, "global_frame_idx": 27226, "task_index": 29}, {"db_idx": 27227, "episode_idx": 149, "frame_idx": 33, "global_frame_idx": 27227, "task_index": 29}, {"db_idx": 27228, 
"episode_idx": 149, "frame_idx": 34, "global_frame_idx": 27228, "task_index": 29}, {"db_idx": 27229, "episode_idx": 149, "frame_idx": 35, "global_frame_idx": 27229, "task_index": 29}, {"db_idx": 27230, "episode_idx": 149, "frame_idx": 36, "global_frame_idx": 27230, "task_index": 29}, {"db_idx": 27231, "episode_idx": 149, "frame_idx": 37, "global_frame_idx": 27231, "task_index": 29}, {"db_idx": 27232, "episode_idx": 149, "frame_idx": 38, "global_frame_idx": 27232, "task_index": 29}, {"db_idx": 27233, "episode_idx": 149, "frame_idx": 39, "global_frame_idx": 27233, "task_index": 29}, {"db_idx": 27234, "episode_idx": 149, "frame_idx": 40, "global_frame_idx": 27234, "task_index": 29}, {"db_idx": 27235, "episode_idx": 149, "frame_idx": 41, "global_frame_idx": 27235, "task_index": 29}, {"db_idx": 27236, "episode_idx": 149, "frame_idx": 42, "global_frame_idx": 27236, "task_index": 29}, {"db_idx": 27237, "episode_idx": 149, "frame_idx": 43, "global_frame_idx": 27237, "task_index": 29}, {"db_idx": 27238, "episode_idx": 149, "frame_idx": 44, "global_frame_idx": 27238, "task_index": 29}, {"db_idx": 27239, "episode_idx": 149, "frame_idx": 45, "global_frame_idx": 27239, "task_index": 29}, {"db_idx": 27240, "episode_idx": 149, "frame_idx": 46, "global_frame_idx": 27240, "task_index": 29}, {"db_idx": 27241, "episode_idx": 149, "frame_idx": 47, "global_frame_idx": 27241, "task_index": 29}, {"db_idx": 27242, "episode_idx": 149, "frame_idx": 48, "global_frame_idx": 27242, "task_index": 29}, {"db_idx": 27243, "episode_idx": 149, "frame_idx": 49, "global_frame_idx": 27243, "task_index": 29}, {"db_idx": 27244, "episode_idx": 149, "frame_idx": 50, "global_frame_idx": 27244, "task_index": 29}, {"db_idx": 27245, "episode_idx": 149, "frame_idx": 51, "global_frame_idx": 27245, "task_index": 29}, {"db_idx": 27246, "episode_idx": 149, "frame_idx": 52, "global_frame_idx": 27246, "task_index": 29}, {"db_idx": 27247, "episode_idx": 149, "frame_idx": 53, "global_frame_idx": 27247, "task_index": 
29}, {"db_idx": 27248, "episode_idx": 149, "frame_idx": 54, "global_frame_idx": 27248, "task_index": 29}, {"db_idx": 27249, "episode_idx": 149, "frame_idx": 55, "global_frame_idx": 27249, "task_index": 29}, {"db_idx": 27250, "episode_idx": 149, "frame_idx": 56, "global_frame_idx": 27250, "task_index": 29}, {"db_idx": 27251, "episode_idx": 149, "frame_idx": 57, "global_frame_idx": 27251, "task_index": 29}, {"db_idx": 27252, "episode_idx": 149, "frame_idx": 58, "global_frame_idx": 27252, "task_index": 29}, {"db_idx": 27253, "episode_idx": 149, "frame_idx": 59, "global_frame_idx": 27253, "task_index": 29}, {"db_idx": 27254, "episode_idx": 149, "frame_idx": 60, "global_frame_idx": 27254, "task_index": 29}, {"db_idx": 27255, "episode_idx": 149, "frame_idx": 61, "global_frame_idx": 27255, "task_index": 29}, {"db_idx": 27256, "episode_idx": 149, "frame_idx": 62, "global_frame_idx": 27256, "task_index": 29}, {"db_idx": 27257, "episode_idx": 149, "frame_idx": 63, "global_frame_idx": 27257, "task_index": 29}, {"db_idx": 27258, "episode_idx": 149, "frame_idx": 64, "global_frame_idx": 27258, "task_index": 29}, {"db_idx": 27259, "episode_idx": 149, "frame_idx": 65, "global_frame_idx": 27259, "task_index": 29}, {"db_idx": 27260, "episode_idx": 149, "frame_idx": 66, "global_frame_idx": 27260, "task_index": 29}, {"db_idx": 27261, "episode_idx": 149, "frame_idx": 67, "global_frame_idx": 27261, "task_index": 29}, {"db_idx": 27262, "episode_idx": 149, "frame_idx": 68, "global_frame_idx": 27262, "task_index": 29}, {"db_idx": 27263, "episode_idx": 149, "frame_idx": 69, "global_frame_idx": 27263, "task_index": 29}, {"db_idx": 27264, "episode_idx": 149, "frame_idx": 70, "global_frame_idx": 27264, "task_index": 29}, {"db_idx": 27265, "episode_idx": 149, "frame_idx": 71, "global_frame_idx": 27265, "task_index": 29}, {"db_idx": 27266, "episode_idx": 149, "frame_idx": 72, "global_frame_idx": 27266, "task_index": 29}, {"db_idx": 27267, "episode_idx": 149, "frame_idx": 73, "global_frame_idx": 
27267, "task_index": 29}, {"db_idx": 27268, "episode_idx": 149, "frame_idx": 74, "global_frame_idx": 27268, "task_index": 29}, {"db_idx": 27269, "episode_idx": 149, "frame_idx": 75, "global_frame_idx": 27269, "task_index": 29}, {"db_idx": 27270, "episode_idx": 149, "frame_idx": 76, "global_frame_idx": 27270, "task_index": 29}, {"db_idx": 27271, "episode_idx": 149, "frame_idx": 77, "global_frame_idx": 27271, "task_index": 29}, {"db_idx": 27272, "episode_idx": 149, "frame_idx": 78, "global_frame_idx": 27272, "task_index": 29}, {"db_idx": 27273, "episode_idx": 149, "frame_idx": 79, "global_frame_idx": 27273, "task_index": 29}, {"db_idx": 27274, "episode_idx": 149, "frame_idx": 80, "global_frame_idx": 27274, "task_index": 29}, {"db_idx": 27275, "episode_idx": 149, "frame_idx": 81, "global_frame_idx": 27275, "task_index": 29}, {"db_idx": 27276, "episode_idx": 149, "frame_idx": 82, "global_frame_idx": 27276, "task_index": 29}, {"db_idx": 27277, "episode_idx": 149, "frame_idx": 83, "global_frame_idx": 27277, "task_index": 29}, {"db_idx": 27278, "episode_idx": 149, "frame_idx": 84, "global_frame_idx": 27278, "task_index": 29}, {"db_idx": 27279, "episode_idx": 149, "frame_idx": 85, "global_frame_idx": 27279, "task_index": 29}, {"db_idx": 27280, "episode_idx": 149, "frame_idx": 86, "global_frame_idx": 27280, "task_index": 29}, {"db_idx": 27281, "episode_idx": 149, "frame_idx": 87, "global_frame_idx": 27281, "task_index": 29}, {"db_idx": 27282, "episode_idx": 149, "frame_idx": 88, "global_frame_idx": 27282, "task_index": 29}, {"db_idx": 27283, "episode_idx": 149, "frame_idx": 89, "global_frame_idx": 27283, "task_index": 29}, {"db_idx": 27284, "episode_idx": 149, "frame_idx": 90, "global_frame_idx": 27284, "task_index": 29}, {"db_idx": 27285, "episode_idx": 149, "frame_idx": 91, "global_frame_idx": 27285, "task_index": 29}, {"db_idx": 27286, "episode_idx": 149, "frame_idx": 92, "global_frame_idx": 27286, "task_index": 29}, {"db_idx": 27287, "episode_idx": 149, "frame_idx": 93, 
"global_frame_idx": 27287, "task_index": 29}, {"db_idx": 27288, "episode_idx": 149, "frame_idx": 94, "global_frame_idx": 27288, "task_index": 29}, {"db_idx": 27289, "episode_idx": 149, "frame_idx": 95, "global_frame_idx": 27289, "task_index": 29}, {"db_idx": 27290, "episode_idx": 149, "frame_idx": 96, "global_frame_idx": 27290, "task_index": 29}, {"db_idx": 27291, "episode_idx": 149, "frame_idx": 97, "global_frame_idx": 27291, "task_index": 29}, {"db_idx": 27292, "episode_idx": 149, "frame_idx": 98, "global_frame_idx": 27292, "task_index": 29}, {"db_idx": 27293, "episode_idx": 149, "frame_idx": 99, "global_frame_idx": 27293, "task_index": 29}, {"db_idx": 27294, "episode_idx": 149, "frame_idx": 100, "global_frame_idx": 27294, "task_index": 29}, {"db_idx": 27295, "episode_idx": 149, "frame_idx": 101, "global_frame_idx": 27295, "task_index": 29}, {"db_idx": 27296, "episode_idx": 149, "frame_idx": 102, "global_frame_idx": 27296, "task_index": 29}, {"db_idx": 27297, "episode_idx": 149, "frame_idx": 103, "global_frame_idx": 27297, "task_index": 29}, {"db_idx": 27298, "episode_idx": 149, "frame_idx": 104, "global_frame_idx": 27298, "task_index": 29}, {"db_idx": 27299, "episode_idx": 149, "frame_idx": 105, "global_frame_idx": 27299, "task_index": 29}, {"db_idx": 27300, "episode_idx": 149, "frame_idx": 106, "global_frame_idx": 27300, "task_index": 29}, {"db_idx": 27301, "episode_idx": 149, "frame_idx": 107, "global_frame_idx": 27301, "task_index": 29}, {"db_idx": 27302, "episode_idx": 149, "frame_idx": 108, "global_frame_idx": 27302, "task_index": 29}, {"db_idx": 27303, "episode_idx": 149, "frame_idx": 109, "global_frame_idx": 27303, "task_index": 29}, {"db_idx": 27304, "episode_idx": 149, "frame_idx": 110, "global_frame_idx": 27304, "task_index": 29}, {"db_idx": 27305, "episode_idx": 149, "frame_idx": 111, "global_frame_idx": 27305, "task_index": 29}, {"db_idx": 27306, "episode_idx": 149, "frame_idx": 112, "global_frame_idx": 27306, "task_index": 29}, {"db_idx": 27307, 
"episode_idx": 149, "frame_idx": 113, "global_frame_idx": 27307, "task_index": 29}, {"db_idx": 27308, "episode_idx": 149, "frame_idx": 114, "global_frame_idx": 27308, "task_index": 29}, {"db_idx": 27309, "episode_idx": 149, "frame_idx": 115, "global_frame_idx": 27309, "task_index": 29}, {"db_idx": 27310, "episode_idx": 149, "frame_idx": 116, "global_frame_idx": 27310, "task_index": 29}, {"db_idx": 27311, "episode_idx": 149, "frame_idx": 117, "global_frame_idx": 27311, "task_index": 29}, {"db_idx": 27312, "episode_idx": 149, "frame_idx": 118, "global_frame_idx": 27312, "task_index": 29}, {"db_idx": 27313, "episode_idx": 149, "frame_idx": 119, "global_frame_idx": 27313, "task_index": 29}, {"db_idx": 27314, "episode_idx": 149, "frame_idx": 120, "global_frame_idx": 27314, "task_index": 29}, {"db_idx": 27315, "episode_idx": 149, "frame_idx": 121, "global_frame_idx": 27315, "task_index": 29}, {"db_idx": 27316, "episode_idx": 149, "frame_idx": 122, "global_frame_idx": 27316, "task_index": 29}, {"db_idx": 27317, "episode_idx": 149, "frame_idx": 123, "global_frame_idx": 27317, "task_index": 29}, {"db_idx": 27318, "episode_idx": 149, "frame_idx": 124, "global_frame_idx": 27318, "task_index": 29}, {"db_idx": 27319, "episode_idx": 149, "frame_idx": 125, "global_frame_idx": 27319, "task_index": 29}, {"db_idx": 27320, "episode_idx": 149, "frame_idx": 126, "global_frame_idx": 27320, "task_index": 29}, {"db_idx": 27321, "episode_idx": 149, "frame_idx": 127, "global_frame_idx": 27321, "task_index": 29}, {"db_idx": 27322, "episode_idx": 149, "frame_idx": 128, "global_frame_idx": 27322, "task_index": 29}, {"db_idx": 27323, "episode_idx": 149, "frame_idx": 129, "global_frame_idx": 27323, "task_index": 29}, {"db_idx": 27324, "episode_idx": 149, "frame_idx": 130, "global_frame_idx": 27324, "task_index": 29}, {"db_idx": 27325, "episode_idx": 149, "frame_idx": 131, "global_frame_idx": 27325, "task_index": 29}, {"db_idx": 27326, "episode_idx": 149, "frame_idx": 132, "global_frame_idx": 
27326, "task_index": 29}, {"db_idx": 27327, "episode_idx": 149, "frame_idx": 133, "global_frame_idx": 27327, "task_index": 29}, {"db_idx": 27328, "episode_idx": 149, "frame_idx": 134, "global_frame_idx": 27328, "task_index": 29}, {"db_idx": 27329, "episode_idx": 149, "frame_idx": 135, "global_frame_idx": 27329, "task_index": 29}, {"db_idx": 27330, "episode_idx": 149, "frame_idx": 136, "global_frame_idx": 27330, "task_index": 29}, {"db_idx": 27331, "episode_idx": 149, "frame_idx": 137, "global_frame_idx": 27331, "task_index": 29}, {"db_idx": 27332, "episode_idx": 149, "frame_idx": 138, "global_frame_idx": 27332, "task_index": 29}, {"db_idx": 27333, "episode_idx": 149, "frame_idx": 139, "global_frame_idx": 27333, "task_index": 29}, {"db_idx": 27334, "episode_idx": 149, "frame_idx": 140, "global_frame_idx": 27334, "task_index": 29}, {"db_idx": 27335, "episode_idx": 149, "frame_idx": 141, "global_frame_idx": 27335, "task_index": 29}, {"db_idx": 27336, "episode_idx": 149, "frame_idx": 142, "global_frame_idx": 27336, "task_index": 29}, {"db_idx": 27337, "episode_idx": 149, "frame_idx": 143, "global_frame_idx": 27337, "task_index": 29}, {"db_idx": 27338, "episode_idx": 149, "frame_idx": 144, "global_frame_idx": 27338, "task_index": 29}, {"db_idx": 27339, "episode_idx": 149, "frame_idx": 145, "global_frame_idx": 27339, "task_index": 29}, {"db_idx": 27340, "episode_idx": 149, "frame_idx": 146, "global_frame_idx": 27340, "task_index": 29}, {"db_idx": 27341, "episode_idx": 149, "frame_idx": 147, "global_frame_idx": 27341, "task_index": 29}, {"db_idx": 27342, "episode_idx": 149, "frame_idx": 148, "global_frame_idx": 27342, "task_index": 29}, {"db_idx": 27343, "episode_idx": 149, "frame_idx": 149, "global_frame_idx": 27343, "task_index": 29}, {"db_idx": 27344, "episode_idx": 149, "frame_idx": 150, "global_frame_idx": 27344, "task_index": 29}, {"db_idx": 27345, "episode_idx": 149, "frame_idx": 151, "global_frame_idx": 27345, "task_index": 29}, {"db_idx": 27346, "episode_idx": 
149, "frame_idx": 152, "global_frame_idx": 27346, "task_index": 29}, {"db_idx": 27347, "episode_idx": 149, "frame_idx": 153, "global_frame_idx": 27347, "task_index": 29}, {"db_idx": 27348, "episode_idx": 149, "frame_idx": 154, "global_frame_idx": 27348, "task_index": 29}, {"db_idx": 27349, "episode_idx": 149, "frame_idx": 155, "global_frame_idx": 27349, "task_index": 29}, {"db_idx": 27350, "episode_idx": 149, "frame_idx": 156, "global_frame_idx": 27350, "task_index": 29}, {"db_idx": 27351, "episode_idx": 149, "frame_idx": 157, "global_frame_idx": 27351, "task_index": 29}, {"db_idx": 27352, "episode_idx": 149, "frame_idx": 158, "global_frame_idx": 27352, "task_index": 29}, {"db_idx": 27353, "episode_idx": 149, "frame_idx": 159, "global_frame_idx": 27353, "task_index": 29}, {"db_idx": 27354, "episode_idx": 149, "frame_idx": 160, "global_frame_idx": 27354, "task_index": 29}, {"db_idx": 27355, "episode_idx": 149, "frame_idx": 161, "global_frame_idx": 27355, "task_index": 29}, {"db_idx": 27356, "episode_idx": 149, "frame_idx": 162, "global_frame_idx": 27356, "task_index": 29}, {"db_idx": 27357, "episode_idx": 149, "frame_idx": 163, "global_frame_idx": 27357, "task_index": 29}, {"db_idx": 27358, "episode_idx": 149, "frame_idx": 164, "global_frame_idx": 27358, "task_index": 29}, {"db_idx": 27359, "episode_idx": 149, "frame_idx": 165, "global_frame_idx": 27359, "task_index": 29}, {"db_idx": 27360, "episode_idx": 149, "frame_idx": 166, "global_frame_idx": 27360, "task_index": 29}, {"db_idx": 27361, "episode_idx": 149, "frame_idx": 167, "global_frame_idx": 27361, "task_index": 29}, {"db_idx": 27362, "episode_idx": 149, "frame_idx": 168, "global_frame_idx": 27362, "task_index": 29}, {"db_idx": 27363, "episode_idx": 149, "frame_idx": 169, "global_frame_idx": 27363, "task_index": 29}, {"db_idx": 27364, "episode_idx": 149, "frame_idx": 170, "global_frame_idx": 27364, "task_index": 29}, {"db_idx": 27365, "episode_idx": 149, "frame_idx": 171, "global_frame_idx": 27365, 
"task_index": 29}, {"db_idx": 27366, "episode_idx": 149, "frame_idx": 172, "global_frame_idx": 27366, "task_index": 29}, {"db_idx": 27367, "episode_idx": 149, "frame_idx": 173, "global_frame_idx": 27367, "task_index": 29}, {"db_idx": 27368, "episode_idx": 149, "frame_idx": 174, "global_frame_idx": 27368, "task_index": 29}, {"db_idx": 27369, "episode_idx": 149, "frame_idx": 175, "global_frame_idx": 27369, "task_index": 29}, {"db_idx": 27370, "episode_idx": 149, "frame_idx": 176, "global_frame_idx": 27370, "task_index": 29}, {"db_idx": 27371, "episode_idx": 149, "frame_idx": 177, "global_frame_idx": 27371, "task_index": 29}, {"db_idx": 27372, "episode_idx": 149, "frame_idx": 178, "global_frame_idx": 27372, "task_index": 29}, {"db_idx": 27373, "episode_idx": 150, "frame_idx": 0, "global_frame_idx": 27373, "task_index": 30}, {"db_idx": 27374, "episode_idx": 150, "frame_idx": 1, "global_frame_idx": 27374, "task_index": 30}, {"db_idx": 27375, "episode_idx": 150, "frame_idx": 2, "global_frame_idx": 27375, "task_index": 30}, {"db_idx": 27376, "episode_idx": 150, "frame_idx": 3, "global_frame_idx": 27376, "task_index": 30}, {"db_idx": 27377, "episode_idx": 150, "frame_idx": 4, "global_frame_idx": 27377, "task_index": 30}, {"db_idx": 27378, "episode_idx": 150, "frame_idx": 5, "global_frame_idx": 27378, "task_index": 30}, {"db_idx": 27379, "episode_idx": 150, "frame_idx": 6, "global_frame_idx": 27379, "task_index": 30}, {"db_idx": 27380, "episode_idx": 150, "frame_idx": 7, "global_frame_idx": 27380, "task_index": 30}, {"db_idx": 27381, "episode_idx": 150, "frame_idx": 8, "global_frame_idx": 27381, "task_index": 30}, {"db_idx": 27382, "episode_idx": 150, "frame_idx": 9, "global_frame_idx": 27382, "task_index": 30}, {"db_idx": 27383, "episode_idx": 150, "frame_idx": 10, "global_frame_idx": 27383, "task_index": 30}, {"db_idx": 27384, "episode_idx": 150, "frame_idx": 11, "global_frame_idx": 27384, "task_index": 30}, {"db_idx": 27385, "episode_idx": 150, "frame_idx": 12, 
"global_frame_idx": 27385, "task_index": 30}, {"db_idx": 27386, "episode_idx": 150, "frame_idx": 13, "global_frame_idx": 27386, "task_index": 30}, {"db_idx": 27387, "episode_idx": 150, "frame_idx": 14, "global_frame_idx": 27387, "task_index": 30}, {"db_idx": 27388, "episode_idx": 150, "frame_idx": 15, "global_frame_idx": 27388, "task_index": 30}, {"db_idx": 27389, "episode_idx": 150, "frame_idx": 16, "global_frame_idx": 27389, "task_index": 30}, {"db_idx": 27390, "episode_idx": 150, "frame_idx": 17, "global_frame_idx": 27390, "task_index": 30}, {"db_idx": 27391, "episode_idx": 150, "frame_idx": 18, "global_frame_idx": 27391, "task_index": 30}, {"db_idx": 27392, "episode_idx": 150, "frame_idx": 19, "global_frame_idx": 27392, "task_index": 30}, {"db_idx": 27393, "episode_idx": 150, "frame_idx": 20, "global_frame_idx": 27393, "task_index": 30}, {"db_idx": 27394, "episode_idx": 150, "frame_idx": 21, "global_frame_idx": 27394, "task_index": 30}, {"db_idx": 27395, "episode_idx": 150, "frame_idx": 22, "global_frame_idx": 27395, "task_index": 30}, {"db_idx": 27396, "episode_idx": 150, "frame_idx": 23, "global_frame_idx": 27396, "task_index": 30}, {"db_idx": 27397, "episode_idx": 150, "frame_idx": 24, "global_frame_idx": 27397, "task_index": 30}, {"db_idx": 27398, "episode_idx": 150, "frame_idx": 25, "global_frame_idx": 27398, "task_index": 30}, {"db_idx": 27399, "episode_idx": 150, "frame_idx": 26, "global_frame_idx": 27399, "task_index": 30}, {"db_idx": 27400, "episode_idx": 150, "frame_idx": 27, "global_frame_idx": 27400, "task_index": 30}, {"db_idx": 27401, "episode_idx": 150, "frame_idx": 28, "global_frame_idx": 27401, "task_index": 30}, {"db_idx": 27402, "episode_idx": 150, "frame_idx": 29, "global_frame_idx": 27402, "task_index": 30}, {"db_idx": 27403, "episode_idx": 150, "frame_idx": 30, "global_frame_idx": 27403, "task_index": 30}, {"db_idx": 27404, "episode_idx": 150, "frame_idx": 31, "global_frame_idx": 27404, "task_index": 30}, {"db_idx": 27405, "episode_idx": 
150, "frame_idx": 32, "global_frame_idx": 27405, "task_index": 30}, {"db_idx": 27406, "episode_idx": 150, "frame_idx": 33, "global_frame_idx": 27406, "task_index": 30}, {"db_idx": 27407, "episode_idx": 150, "frame_idx": 34, "global_frame_idx": 27407, "task_index": 30}, {"db_idx": 27408, "episode_idx": 150, "frame_idx": 35, "global_frame_idx": 27408, "task_index": 30}, {"db_idx": 27409, "episode_idx": 150, "frame_idx": 36, "global_frame_idx": 27409, "task_index": 30}, {"db_idx": 27410, "episode_idx": 150, "frame_idx": 37, "global_frame_idx": 27410, "task_index": 30}, {"db_idx": 27411, "episode_idx": 150, "frame_idx": 38, "global_frame_idx": 27411, "task_index": 30}, {"db_idx": 27412, "episode_idx": 150, "frame_idx": 39, "global_frame_idx": 27412, "task_index": 30}, {"db_idx": 27413, "episode_idx": 150, "frame_idx": 40, "global_frame_idx": 27413, "task_index": 30}, {"db_idx": 27414, "episode_idx": 150, "frame_idx": 41, "global_frame_idx": 27414, "task_index": 30}, {"db_idx": 27415, "episode_idx": 150, "frame_idx": 42, "global_frame_idx": 27415, "task_index": 30}, {"db_idx": 27416, "episode_idx": 150, "frame_idx": 43, "global_frame_idx": 27416, "task_index": 30}, {"db_idx": 27417, "episode_idx": 150, "frame_idx": 44, "global_frame_idx": 27417, "task_index": 30}, {"db_idx": 27418, "episode_idx": 150, "frame_idx": 45, "global_frame_idx": 27418, "task_index": 30}, {"db_idx": 27419, "episode_idx": 150, "frame_idx": 46, "global_frame_idx": 27419, "task_index": 30}, {"db_idx": 27420, "episode_idx": 150, "frame_idx": 47, "global_frame_idx": 27420, "task_index": 30}, {"db_idx": 27421, "episode_idx": 150, "frame_idx": 48, "global_frame_idx": 27421, "task_index": 30}, {"db_idx": 27422, "episode_idx": 150, "frame_idx": 49, "global_frame_idx": 27422, "task_index": 30}, {"db_idx": 27423, "episode_idx": 150, "frame_idx": 50, "global_frame_idx": 27423, "task_index": 30}, {"db_idx": 27424, "episode_idx": 150, "frame_idx": 51, "global_frame_idx": 27424, "task_index": 30}, {"db_idx": 
27425, "episode_idx": 150, "frame_idx": 52, "global_frame_idx": 27425, "task_index": 30}, {"db_idx": 27426, "episode_idx": 150, "frame_idx": 53, "global_frame_idx": 27426, "task_index": 30}, {"db_idx": 27427, "episode_idx": 150, "frame_idx": 54, "global_frame_idx": 27427, "task_index": 30}, {"db_idx": 27428, "episode_idx": 150, "frame_idx": 55, "global_frame_idx": 27428, "task_index": 30}, {"db_idx": 27429, "episode_idx": 150, "frame_idx": 56, "global_frame_idx": 27429, "task_index": 30}, {"db_idx": 27430, "episode_idx": 150, "frame_idx": 57, "global_frame_idx": 27430, "task_index": 30}, {"db_idx": 27431, "episode_idx": 150, "frame_idx": 58, "global_frame_idx": 27431, "task_index": 30}, {"db_idx": 27432, "episode_idx": 150, "frame_idx": 59, "global_frame_idx": 27432, "task_index": 30}, {"db_idx": 27433, "episode_idx": 150, "frame_idx": 60, "global_frame_idx": 27433, "task_index": 30}, {"db_idx": 27434, "episode_idx": 150, "frame_idx": 61, "global_frame_idx": 27434, "task_index": 30}, {"db_idx": 27435, "episode_idx": 150, "frame_idx": 62, "global_frame_idx": 27435, "task_index": 30}, {"db_idx": 27436, "episode_idx": 150, "frame_idx": 63, "global_frame_idx": 27436, "task_index": 30}, {"db_idx": 27437, "episode_idx": 150, "frame_idx": 64, "global_frame_idx": 27437, "task_index": 30}, {"db_idx": 27438, "episode_idx": 150, "frame_idx": 65, "global_frame_idx": 27438, "task_index": 30}, {"db_idx": 27439, "episode_idx": 150, "frame_idx": 66, "global_frame_idx": 27439, "task_index": 30}, {"db_idx": 27440, "episode_idx": 150, "frame_idx": 67, "global_frame_idx": 27440, "task_index": 30}, {"db_idx": 27441, "episode_idx": 150, "frame_idx": 68, "global_frame_idx": 27441, "task_index": 30}, {"db_idx": 27442, "episode_idx": 150, "frame_idx": 69, "global_frame_idx": 27442, "task_index": 30}, {"db_idx": 27443, "episode_idx": 150, "frame_idx": 70, "global_frame_idx": 27443, "task_index": 30}, {"db_idx": 27444, "episode_idx": 150, "frame_idx": 71, "global_frame_idx": 27444, 
"task_index": 30}, {"db_idx": 27445, "episode_idx": 150, "frame_idx": 72, "global_frame_idx": 27445, "task_index": 30}, {"db_idx": 27446, "episode_idx": 150, "frame_idx": 73, "global_frame_idx": 27446, "task_index": 30}, {"db_idx": 27447, "episode_idx": 150, "frame_idx": 74, "global_frame_idx": 27447, "task_index": 30}, {"db_idx": 27448, "episode_idx": 150, "frame_idx": 75, "global_frame_idx": 27448, "task_index": 30}, {"db_idx": 27449, "episode_idx": 150, "frame_idx": 76, "global_frame_idx": 27449, "task_index": 30}, {"db_idx": 27450, "episode_idx": 150, "frame_idx": 77, "global_frame_idx": 27450, "task_index": 30}, {"db_idx": 27451, "episode_idx": 150, "frame_idx": 78, "global_frame_idx": 27451, "task_index": 30}, {"db_idx": 27452, "episode_idx": 150, "frame_idx": 79, "global_frame_idx": 27452, "task_index": 30}, {"db_idx": 27453, "episode_idx": 150, "frame_idx": 80, "global_frame_idx": 27453, "task_index": 30}, {"db_idx": 27454, "episode_idx": 150, "frame_idx": 81, "global_frame_idx": 27454, "task_index": 30}, {"db_idx": 27455, "episode_idx": 150, "frame_idx": 82, "global_frame_idx": 27455, "task_index": 30}, {"db_idx": 27456, "episode_idx": 150, "frame_idx": 83, "global_frame_idx": 27456, "task_index": 30}, {"db_idx": 27457, "episode_idx": 150, "frame_idx": 84, "global_frame_idx": 27457, "task_index": 30}, {"db_idx": 27458, "episode_idx": 150, "frame_idx": 85, "global_frame_idx": 27458, "task_index": 30}, {"db_idx": 27459, "episode_idx": 150, "frame_idx": 86, "global_frame_idx": 27459, "task_index": 30}, {"db_idx": 27460, "episode_idx": 150, "frame_idx": 87, "global_frame_idx": 27460, "task_index": 30}, {"db_idx": 27461, "episode_idx": 150, "frame_idx": 88, "global_frame_idx": 27461, "task_index": 30}, {"db_idx": 27462, "episode_idx": 150, "frame_idx": 89, "global_frame_idx": 27462, "task_index": 30}, {"db_idx": 27463, "episode_idx": 150, "frame_idx": 90, "global_frame_idx": 27463, "task_index": 30}, {"db_idx": 27464, "episode_idx": 150, "frame_idx": 91, 
"global_frame_idx": 27464, "task_index": 30}, {"db_idx": 27465, "episode_idx": 150, "frame_idx": 92, "global_frame_idx": 27465, "task_index": 30}, {"db_idx": 27466, "episode_idx": 150, "frame_idx": 93, "global_frame_idx": 27466, "task_index": 30}, {"db_idx": 27467, "episode_idx": 150, "frame_idx": 94, "global_frame_idx": 27467, "task_index": 30}, {"db_idx": 27468, "episode_idx": 150, "frame_idx": 95, "global_frame_idx": 27468, "task_index": 30}, {"db_idx": 27469, "episode_idx": 150, "frame_idx": 96, "global_frame_idx": 27469, "task_index": 30}, {"db_idx": 27470, "episode_idx": 150, "frame_idx": 97, "global_frame_idx": 27470, "task_index": 30}, {"db_idx": 27471, "episode_idx": 150, "frame_idx": 98, "global_frame_idx": 27471, "task_index": 30}, {"db_idx": 27472, "episode_idx": 150, "frame_idx": 99, "global_frame_idx": 27472, "task_index": 30}, {"db_idx": 27473, "episode_idx": 150, "frame_idx": 100, "global_frame_idx": 27473, "task_index": 30}, {"db_idx": 27474, "episode_idx": 150, "frame_idx": 101, "global_frame_idx": 27474, "task_index": 30}, {"db_idx": 27475, "episode_idx": 150, "frame_idx": 102, "global_frame_idx": 27475, "task_index": 30}, {"db_idx": 27476, "episode_idx": 150, "frame_idx": 103, "global_frame_idx": 27476, "task_index": 30}, {"db_idx": 27477, "episode_idx": 150, "frame_idx": 104, "global_frame_idx": 27477, "task_index": 30}, {"db_idx": 27478, "episode_idx": 150, "frame_idx": 105, "global_frame_idx": 27478, "task_index": 30}, {"db_idx": 27479, "episode_idx": 150, "frame_idx": 106, "global_frame_idx": 27479, "task_index": 30}, {"db_idx": 27480, "episode_idx": 150, "frame_idx": 107, "global_frame_idx": 27480, "task_index": 30}, {"db_idx": 27481, "episode_idx": 150, "frame_idx": 108, "global_frame_idx": 27481, "task_index": 30}, {"db_idx": 27482, "episode_idx": 150, "frame_idx": 109, "global_frame_idx": 27482, "task_index": 30}, {"db_idx": 27483, "episode_idx": 150, "frame_idx": 110, "global_frame_idx": 27483, "task_index": 30}, {"db_idx": 27484, 
"episode_idx": 150, "frame_idx": 111, "global_frame_idx": 27484, "task_index": 30}, {"db_idx": 27485, "episode_idx": 150, "frame_idx": 112, "global_frame_idx": 27485, "task_index": 30}, {"db_idx": 27486, "episode_idx": 150, "frame_idx": 113, "global_frame_idx": 27486, "task_index": 30}, {"db_idx": 27487, "episode_idx": 150, "frame_idx": 114, "global_frame_idx": 27487, "task_index": 30}, {"db_idx": 27488, "episode_idx": 150, "frame_idx": 115, "global_frame_idx": 27488, "task_index": 30}, {"db_idx": 27489, "episode_idx": 150, "frame_idx": 116, "global_frame_idx": 27489, "task_index": 30}, {"db_idx": 27490, "episode_idx": 150, "frame_idx": 117, "global_frame_idx": 27490, "task_index": 30}, {"db_idx": 27491, "episode_idx": 150, "frame_idx": 118, "global_frame_idx": 27491, "task_index": 30}, {"db_idx": 27492, "episode_idx": 150, "frame_idx": 119, "global_frame_idx": 27492, "task_index": 30}, {"db_idx": 27493, "episode_idx": 150, "frame_idx": 120, "global_frame_idx": 27493, "task_index": 30}, {"db_idx": 27494, "episode_idx": 150, "frame_idx": 121, "global_frame_idx": 27494, "task_index": 30}, {"db_idx": 27495, "episode_idx": 151, "frame_idx": 0, "global_frame_idx": 27495, "task_index": 30}, {"db_idx": 27496, "episode_idx": 151, "frame_idx": 1, "global_frame_idx": 27496, "task_index": 30}, {"db_idx": 27497, "episode_idx": 151, "frame_idx": 2, "global_frame_idx": 27497, "task_index": 30}, {"db_idx": 27498, "episode_idx": 151, "frame_idx": 3, "global_frame_idx": 27498, "task_index": 30}, {"db_idx": 27499, "episode_idx": 151, "frame_idx": 4, "global_frame_idx": 27499, "task_index": 30}, {"db_idx": 27500, "episode_idx": 151, "frame_idx": 5, "global_frame_idx": 27500, "task_index": 30}, {"db_idx": 27501, "episode_idx": 151, "frame_idx": 6, "global_frame_idx": 27501, "task_index": 30}, {"db_idx": 27502, "episode_idx": 151, "frame_idx": 7, "global_frame_idx": 27502, "task_index": 30}, {"db_idx": 27503, "episode_idx": 151, "frame_idx": 8, "global_frame_idx": 27503, "task_index": 
30}, {"db_idx": 27504, "episode_idx": 151, "frame_idx": 9, "global_frame_idx": 27504, "task_index": 30}, {"db_idx": 27505, "episode_idx": 151, "frame_idx": 10, "global_frame_idx": 27505, "task_index": 30}, {"db_idx": 27506, "episode_idx": 151, "frame_idx": 11, "global_frame_idx": 27506, "task_index": 30}, {"db_idx": 27507, "episode_idx": 151, "frame_idx": 12, "global_frame_idx": 27507, "task_index": 30}, {"db_idx": 27508, "episode_idx": 151, "frame_idx": 13, "global_frame_idx": 27508, "task_index": 30}, {"db_idx": 27509, "episode_idx": 151, "frame_idx": 14, "global_frame_idx": 27509, "task_index": 30}, {"db_idx": 27510, "episode_idx": 151, "frame_idx": 15, "global_frame_idx": 27510, "task_index": 30}, {"db_idx": 27511, "episode_idx": 151, "frame_idx": 16, "global_frame_idx": 27511, "task_index": 30}, {"db_idx": 27512, "episode_idx": 151, "frame_idx": 17, "global_frame_idx": 27512, "task_index": 30}, {"db_idx": 27513, "episode_idx": 151, "frame_idx": 18, "global_frame_idx": 27513, "task_index": 30}, {"db_idx": 27514, "episode_idx": 151, "frame_idx": 19, "global_frame_idx": 27514, "task_index": 30}, {"db_idx": 27515, "episode_idx": 151, "frame_idx": 20, "global_frame_idx": 27515, "task_index": 30}, {"db_idx": 27516, "episode_idx": 151, "frame_idx": 21, "global_frame_idx": 27516, "task_index": 30}, {"db_idx": 27517, "episode_idx": 151, "frame_idx": 22, "global_frame_idx": 27517, "task_index": 30}, {"db_idx": 27518, "episode_idx": 151, "frame_idx": 23, "global_frame_idx": 27518, "task_index": 30}, {"db_idx": 27519, "episode_idx": 151, "frame_idx": 24, "global_frame_idx": 27519, "task_index": 30}, {"db_idx": 27520, "episode_idx": 151, "frame_idx": 25, "global_frame_idx": 27520, "task_index": 30}, {"db_idx": 27521, "episode_idx": 151, "frame_idx": 26, "global_frame_idx": 27521, "task_index": 30}, {"db_idx": 27522, "episode_idx": 151, "frame_idx": 27, "global_frame_idx": 27522, "task_index": 30}, {"db_idx": 27523, "episode_idx": 151, "frame_idx": 28, "global_frame_idx": 
27523, "task_index": 30}, {"db_idx": 27524, "episode_idx": 151, "frame_idx": 29, "global_frame_idx": 27524, "task_index": 30}, {"db_idx": 27525, "episode_idx": 151, "frame_idx": 30, "global_frame_idx": 27525, "task_index": 30}, {"db_idx": 27526, "episode_idx": 151, "frame_idx": 31, "global_frame_idx": 27526, "task_index": 30}, {"db_idx": 27527, "episode_idx": 151, "frame_idx": 32, "global_frame_idx": 27527, "task_index": 30}, {"db_idx": 27528, "episode_idx": 151, "frame_idx": 33, "global_frame_idx": 27528, "task_index": 30}, {"db_idx": 27529, "episode_idx": 151, "frame_idx": 34, "global_frame_idx": 27529, "task_index": 30}, {"db_idx": 27530, "episode_idx": 151, "frame_idx": 35, "global_frame_idx": 27530, "task_index": 30}, {"db_idx": 27531, "episode_idx": 151, "frame_idx": 36, "global_frame_idx": 27531, "task_index": 30}, {"db_idx": 27532, "episode_idx": 151, "frame_idx": 37, "global_frame_idx": 27532, "task_index": 30}, {"db_idx": 27533, "episode_idx": 151, "frame_idx": 38, "global_frame_idx": 27533, "task_index": 30}, {"db_idx": 27534, "episode_idx": 151, "frame_idx": 39, "global_frame_idx": 27534, "task_index": 30}, {"db_idx": 27535, "episode_idx": 151, "frame_idx": 40, "global_frame_idx": 27535, "task_index": 30}, {"db_idx": 27536, "episode_idx": 151, "frame_idx": 41, "global_frame_idx": 27536, "task_index": 30}, {"db_idx": 27537, "episode_idx": 151, "frame_idx": 42, "global_frame_idx": 27537, "task_index": 30}, {"db_idx": 27538, "episode_idx": 151, "frame_idx": 43, "global_frame_idx": 27538, "task_index": 30}, {"db_idx": 27539, "episode_idx": 151, "frame_idx": 44, "global_frame_idx": 27539, "task_index": 30}, {"db_idx": 27540, "episode_idx": 151, "frame_idx": 45, "global_frame_idx": 27540, "task_index": 30}, {"db_idx": 27541, "episode_idx": 151, "frame_idx": 46, "global_frame_idx": 27541, "task_index": 30}, {"db_idx": 27542, "episode_idx": 151, "frame_idx": 47, "global_frame_idx": 27542, "task_index": 30}, {"db_idx": 27543, "episode_idx": 151, "frame_idx": 48, 
"global_frame_idx": 27543, "task_index": 30}, {"db_idx": 27544, "episode_idx": 151, "frame_idx": 49, "global_frame_idx": 27544, "task_index": 30}, {"db_idx": 27545, "episode_idx": 151, "frame_idx": 50, "global_frame_idx": 27545, "task_index": 30}, {"db_idx": 27546, "episode_idx": 151, "frame_idx": 51, "global_frame_idx": 27546, "task_index": 30}, {"db_idx": 27547, "episode_idx": 151, "frame_idx": 52, "global_frame_idx": 27547, "task_index": 30}, {"db_idx": 27548, "episode_idx": 151, "frame_idx": 53, "global_frame_idx": 27548, "task_index": 30}, {"db_idx": 27549, "episode_idx": 151, "frame_idx": 54, "global_frame_idx": 27549, "task_index": 30}, {"db_idx": 27550, "episode_idx": 151, "frame_idx": 55, "global_frame_idx": 27550, "task_index": 30}, {"db_idx": 27551, "episode_idx": 151, "frame_idx": 56, "global_frame_idx": 27551, "task_index": 30}, {"db_idx": 27552, "episode_idx": 151, "frame_idx": 57, "global_frame_idx": 27552, "task_index": 30}, {"db_idx": 27553, "episode_idx": 151, "frame_idx": 58, "global_frame_idx": 27553, "task_index": 30}, {"db_idx": 27554, "episode_idx": 151, "frame_idx": 59, "global_frame_idx": 27554, "task_index": 30}, {"db_idx": 27555, "episode_idx": 151, "frame_idx": 60, "global_frame_idx": 27555, "task_index": 30}, {"db_idx": 27556, "episode_idx": 151, "frame_idx": 61, "global_frame_idx": 27556, "task_index": 30}, {"db_idx": 27557, "episode_idx": 151, "frame_idx": 62, "global_frame_idx": 27557, "task_index": 30}, {"db_idx": 27558, "episode_idx": 151, "frame_idx": 63, "global_frame_idx": 27558, "task_index": 30}, {"db_idx": 27559, "episode_idx": 151, "frame_idx": 64, "global_frame_idx": 27559, "task_index": 30}, {"db_idx": 27560, "episode_idx": 151, "frame_idx": 65, "global_frame_idx": 27560, "task_index": 30}, {"db_idx": 27561, "episode_idx": 151, "frame_idx": 66, "global_frame_idx": 27561, "task_index": 30}, {"db_idx": 27562, "episode_idx": 151, "frame_idx": 67, "global_frame_idx": 27562, "task_index": 30}, {"db_idx": 27563, "episode_idx": 
151, "frame_idx": 68, "global_frame_idx": 27563, "task_index": 30}, {"db_idx": 27564, "episode_idx": 151, "frame_idx": 69, "global_frame_idx": 27564, "task_index": 30}, {"db_idx": 27565, "episode_idx": 151, "frame_idx": 70, "global_frame_idx": 27565, "task_index": 30}, {"db_idx": 27566, "episode_idx": 151, "frame_idx": 71, "global_frame_idx": 27566, "task_index": 30}, {"db_idx": 27567, "episode_idx": 151, "frame_idx": 72, "global_frame_idx": 27567, "task_index": 30}, {"db_idx": 27568, "episode_idx": 151, "frame_idx": 73, "global_frame_idx": 27568, "task_index": 30}, {"db_idx": 27569, "episode_idx": 151, "frame_idx": 74, "global_frame_idx": 27569, "task_index": 30}, {"db_idx": 27570, "episode_idx": 151, "frame_idx": 75, "global_frame_idx": 27570, "task_index": 30}, {"db_idx": 27571, "episode_idx": 151, "frame_idx": 76, "global_frame_idx": 27571, "task_index": 30}, {"db_idx": 27572, "episode_idx": 151, "frame_idx": 77, "global_frame_idx": 27572, "task_index": 30}, {"db_idx": 27573, "episode_idx": 151, "frame_idx": 78, "global_frame_idx": 27573, "task_index": 30}, {"db_idx": 27574, "episode_idx": 151, "frame_idx": 79, "global_frame_idx": 27574, "task_index": 30}, {"db_idx": 27575, "episode_idx": 151, "frame_idx": 80, "global_frame_idx": 27575, "task_index": 30}, {"db_idx": 27576, "episode_idx": 151, "frame_idx": 81, "global_frame_idx": 27576, "task_index": 30}, {"db_idx": 27577, "episode_idx": 151, "frame_idx": 82, "global_frame_idx": 27577, "task_index": 30}, {"db_idx": 27578, "episode_idx": 151, "frame_idx": 83, "global_frame_idx": 27578, "task_index": 30}, {"db_idx": 27579, "episode_idx": 151, "frame_idx": 84, "global_frame_idx": 27579, "task_index": 30}, {"db_idx": 27580, "episode_idx": 151, "frame_idx": 85, "global_frame_idx": 27580, "task_index": 30}, {"db_idx": 27581, "episode_idx": 151, "frame_idx": 86, "global_frame_idx": 27581, "task_index": 30}, {"db_idx": 27582, "episode_idx": 151, "frame_idx": 87, "global_frame_idx": 27582, "task_index": 30}, {"db_idx": 
27583, "episode_idx": 151, "frame_idx": 88, "global_frame_idx": 27583, "task_index": 30}, {"db_idx": 27584, "episode_idx": 151, "frame_idx": 89, "global_frame_idx": 27584, "task_index": 30}, {"db_idx": 27585, "episode_idx": 151, "frame_idx": 90, "global_frame_idx": 27585, "task_index": 30}, {"db_idx": 27586, "episode_idx": 151, "frame_idx": 91, "global_frame_idx": 27586, "task_index": 30}, {"db_idx": 27587, "episode_idx": 151, "frame_idx": 92, "global_frame_idx": 27587, "task_index": 30}, {"db_idx": 27588, "episode_idx": 151, "frame_idx": 93, "global_frame_idx": 27588, "task_index": 30}, {"db_idx": 27589, "episode_idx": 151, "frame_idx": 94, "global_frame_idx": 27589, "task_index": 30}, {"db_idx": 27590, "episode_idx": 151, "frame_idx": 95, "global_frame_idx": 27590, "task_index": 30}, {"db_idx": 27591, "episode_idx": 151, "frame_idx": 96, "global_frame_idx": 27591, "task_index": 30}, {"db_idx": 27592, "episode_idx": 151, "frame_idx": 97, "global_frame_idx": 27592, "task_index": 30}, {"db_idx": 27593, "episode_idx": 151, "frame_idx": 98, "global_frame_idx": 27593, "task_index": 30}, {"db_idx": 27594, "episode_idx": 151, "frame_idx": 99, "global_frame_idx": 27594, "task_index": 30}, {"db_idx": 27595, "episode_idx": 151, "frame_idx": 100, "global_frame_idx": 27595, "task_index": 30}, {"db_idx": 27596, "episode_idx": 151, "frame_idx": 101, "global_frame_idx": 27596, "task_index": 30}, {"db_idx": 27597, "episode_idx": 151, "frame_idx": 102, "global_frame_idx": 27597, "task_index": 30}, {"db_idx": 27598, "episode_idx": 151, "frame_idx": 103, "global_frame_idx": 27598, "task_index": 30}, {"db_idx": 27599, "episode_idx": 151, "frame_idx": 104, "global_frame_idx": 27599, "task_index": 30}, {"db_idx": 27600, "episode_idx": 151, "frame_idx": 105, "global_frame_idx": 27600, "task_index": 30}, {"db_idx": 27601, "episode_idx": 152, "frame_idx": 0, "global_frame_idx": 27601, "task_index": 30}, {"db_idx": 27602, "episode_idx": 152, "frame_idx": 1, "global_frame_idx": 27602, 
"task_index": 30}, {"db_idx": 27603, "episode_idx": 152, "frame_idx": 2, "global_frame_idx": 27603, "task_index": 30}, {"db_idx": 27604, "episode_idx": 152, "frame_idx": 3, "global_frame_idx": 27604, "task_index": 30}, {"db_idx": 27605, "episode_idx": 152, "frame_idx": 4, "global_frame_idx": 27605, "task_index": 30}, {"db_idx": 27606, "episode_idx": 152, "frame_idx": 5, "global_frame_idx": 27606, "task_index": 30}, {"db_idx": 27607, "episode_idx": 152, "frame_idx": 6, "global_frame_idx": 27607, "task_index": 30}, {"db_idx": 27608, "episode_idx": 152, "frame_idx": 7, "global_frame_idx": 27608, "task_index": 30}, {"db_idx": 27609, "episode_idx": 152, "frame_idx": 8, "global_frame_idx": 27609, "task_index": 30}, {"db_idx": 27610, "episode_idx": 152, "frame_idx": 9, "global_frame_idx": 27610, "task_index": 30}, {"db_idx": 27611, "episode_idx": 152, "frame_idx": 10, "global_frame_idx": 27611, "task_index": 30}, {"db_idx": 27612, "episode_idx": 152, "frame_idx": 11, "global_frame_idx": 27612, "task_index": 30}, {"db_idx": 27613, "episode_idx": 152, "frame_idx": 12, "global_frame_idx": 27613, "task_index": 30}, {"db_idx": 27614, "episode_idx": 152, "frame_idx": 13, "global_frame_idx": 27614, "task_index": 30}, {"db_idx": 27615, "episode_idx": 152, "frame_idx": 14, "global_frame_idx": 27615, "task_index": 30}, {"db_idx": 27616, "episode_idx": 152, "frame_idx": 15, "global_frame_idx": 27616, "task_index": 30}, {"db_idx": 27617, "episode_idx": 152, "frame_idx": 16, "global_frame_idx": 27617, "task_index": 30}, {"db_idx": 27618, "episode_idx": 152, "frame_idx": 17, "global_frame_idx": 27618, "task_index": 30}, {"db_idx": 27619, "episode_idx": 152, "frame_idx": 18, "global_frame_idx": 27619, "task_index": 30}, {"db_idx": 27620, "episode_idx": 152, "frame_idx": 19, "global_frame_idx": 27620, "task_index": 30}, {"db_idx": 27621, "episode_idx": 152, "frame_idx": 20, "global_frame_idx": 27621, "task_index": 30}, {"db_idx": 27622, "episode_idx": 152, "frame_idx": 21, 
"global_frame_idx": 27622, "task_index": 30}, {"db_idx": 27623, "episode_idx": 152, "frame_idx": 22, "global_frame_idx": 27623, "task_index": 30}, {"db_idx": 27624, "episode_idx": 152, "frame_idx": 23, "global_frame_idx": 27624, "task_index": 30}, {"db_idx": 27625, "episode_idx": 152, "frame_idx": 24, "global_frame_idx": 27625, "task_index": 30}, {"db_idx": 27626, "episode_idx": 152, "frame_idx": 25, "global_frame_idx": 27626, "task_index": 30}, {"db_idx": 27627, "episode_idx": 152, "frame_idx": 26, "global_frame_idx": 27627, "task_index": 30}, {"db_idx": 27628, "episode_idx": 152, "frame_idx": 27, "global_frame_idx": 27628, "task_index": 30}, {"db_idx": 27629, "episode_idx": 152, "frame_idx": 28, "global_frame_idx": 27629, "task_index": 30}, {"db_idx": 27630, "episode_idx": 152, "frame_idx": 29, "global_frame_idx": 27630, "task_index": 30}, {"db_idx": 27631, "episode_idx": 152, "frame_idx": 30, "global_frame_idx": 27631, "task_index": 30}, {"db_idx": 27632, "episode_idx": 152, "frame_idx": 31, "global_frame_idx": 27632, "task_index": 30}, {"db_idx": 27633, "episode_idx": 152, "frame_idx": 32, "global_frame_idx": 27633, "task_index": 30}, {"db_idx": 27634, "episode_idx": 152, "frame_idx": 33, "global_frame_idx": 27634, "task_index": 30}, {"db_idx": 27635, "episode_idx": 152, "frame_idx": 34, "global_frame_idx": 27635, "task_index": 30}, {"db_idx": 27636, "episode_idx": 152, "frame_idx": 35, "global_frame_idx": 27636, "task_index": 30}, {"db_idx": 27637, "episode_idx": 152, "frame_idx": 36, "global_frame_idx": 27637, "task_index": 30}, {"db_idx": 27638, "episode_idx": 152, "frame_idx": 37, "global_frame_idx": 27638, "task_index": 30}, {"db_idx": 27639, "episode_idx": 152, "frame_idx": 38, "global_frame_idx": 27639, "task_index": 30}, {"db_idx": 27640, "episode_idx": 152, "frame_idx": 39, "global_frame_idx": 27640, "task_index": 30}, {"db_idx": 27641, "episode_idx": 152, "frame_idx": 40, "global_frame_idx": 27641, "task_index": 30}, {"db_idx": 27642, "episode_idx": 
152, "frame_idx": 41, "global_frame_idx": 27642, "task_index": 30}, {"db_idx": 27643, "episode_idx": 152, "frame_idx": 42, "global_frame_idx": 27643, "task_index": 30}, {"db_idx": 27644, "episode_idx": 152, "frame_idx": 43, "global_frame_idx": 27644, "task_index": 30}, {"db_idx": 27645, "episode_idx": 152, "frame_idx": 44, "global_frame_idx": 27645, "task_index": 30}, {"db_idx": 27646, "episode_idx": 152, "frame_idx": 45, "global_frame_idx": 27646, "task_index": 30}, {"db_idx": 27647, "episode_idx": 152, "frame_idx": 46, "global_frame_idx": 27647, "task_index": 30}, {"db_idx": 27648, "episode_idx": 152, "frame_idx": 47, "global_frame_idx": 27648, "task_index": 30}, {"db_idx": 27649, "episode_idx": 152, "frame_idx": 48, "global_frame_idx": 27649, "task_index": 30}, {"db_idx": 27650, "episode_idx": 152, "frame_idx": 49, "global_frame_idx": 27650, "task_index": 30}, {"db_idx": 27651, "episode_idx": 152, "frame_idx": 50, "global_frame_idx": 27651, "task_index": 30}, {"db_idx": 27652, "episode_idx": 152, "frame_idx": 51, "global_frame_idx": 27652, "task_index": 30}, {"db_idx": 27653, "episode_idx": 152, "frame_idx": 52, "global_frame_idx": 27653, "task_index": 30}, {"db_idx": 27654, "episode_idx": 152, "frame_idx": 53, "global_frame_idx": 27654, "task_index": 30}, {"db_idx": 27655, "episode_idx": 152, "frame_idx": 54, "global_frame_idx": 27655, "task_index": 30}, {"db_idx": 27656, "episode_idx": 152, "frame_idx": 55, "global_frame_idx": 27656, "task_index": 30}, {"db_idx": 27657, "episode_idx": 152, "frame_idx": 56, "global_frame_idx": 27657, "task_index": 30}, {"db_idx": 27658, "episode_idx": 152, "frame_idx": 57, "global_frame_idx": 27658, "task_index": 30}, {"db_idx": 27659, "episode_idx": 152, "frame_idx": 58, "global_frame_idx": 27659, "task_index": 30}, {"db_idx": 27660, "episode_idx": 152, "frame_idx": 59, "global_frame_idx": 27660, "task_index": 30}, {"db_idx": 27661, "episode_idx": 152, "frame_idx": 60, "global_frame_idx": 27661, "task_index": 30}, {"db_idx": 
27662, "episode_idx": 152, "frame_idx": 61, "global_frame_idx": 27662, "task_index": 30}, {"db_idx": 27663, "episode_idx": 152, "frame_idx": 62, "global_frame_idx": 27663, "task_index": 30}, {"db_idx": 27664, "episode_idx": 152, "frame_idx": 63, "global_frame_idx": 27664, "task_index": 30}, {"db_idx": 27665, "episode_idx": 152, "frame_idx": 64, "global_frame_idx": 27665, "task_index": 30}, {"db_idx": 27666, "episode_idx": 152, "frame_idx": 65, "global_frame_idx": 27666, "task_index": 30}, {"db_idx": 27667, "episode_idx": 152, "frame_idx": 66, "global_frame_idx": 27667, "task_index": 30}, {"db_idx": 27668, "episode_idx": 152, "frame_idx": 67, "global_frame_idx": 27668, "task_index": 30}, {"db_idx": 27669, "episode_idx": 152, "frame_idx": 68, "global_frame_idx": 27669, "task_index": 30}, {"db_idx": 27670, "episode_idx": 152, "frame_idx": 69, "global_frame_idx": 27670, "task_index": 30}, {"db_idx": 27671, "episode_idx": 152, "frame_idx": 70, "global_frame_idx": 27671, "task_index": 30}, {"db_idx": 27672, "episode_idx": 152, "frame_idx": 71, "global_frame_idx": 27672, "task_index": 30}, {"db_idx": 27673, "episode_idx": 152, "frame_idx": 72, "global_frame_idx": 27673, "task_index": 30}, {"db_idx": 27674, "episode_idx": 152, "frame_idx": 73, "global_frame_idx": 27674, "task_index": 30}, {"db_idx": 27675, "episode_idx": 152, "frame_idx": 74, "global_frame_idx": 27675, "task_index": 30}, {"db_idx": 27676, "episode_idx": 152, "frame_idx": 75, "global_frame_idx": 27676, "task_index": 30}, {"db_idx": 27677, "episode_idx": 152, "frame_idx": 76, "global_frame_idx": 27677, "task_index": 30}, {"db_idx": 27678, "episode_idx": 152, "frame_idx": 77, "global_frame_idx": 27678, "task_index": 30}, {"db_idx": 27679, "episode_idx": 152, "frame_idx": 78, "global_frame_idx": 27679, "task_index": 30}, {"db_idx": 27680, "episode_idx": 152, "frame_idx": 79, "global_frame_idx": 27680, "task_index": 30}, {"db_idx": 27681, "episode_idx": 152, "frame_idx": 80, "global_frame_idx": 27681, 
"task_index": 30}, {"db_idx": 27682, "episode_idx": 152, "frame_idx": 81, "global_frame_idx": 27682, "task_index": 30}, {"db_idx": 27683, "episode_idx": 152, "frame_idx": 82, "global_frame_idx": 27683, "task_index": 30}, {"db_idx": 27684, "episode_idx": 152, "frame_idx": 83, "global_frame_idx": 27684, "task_index": 30}, {"db_idx": 27685, "episode_idx": 152, "frame_idx": 84, "global_frame_idx": 27685, "task_index": 30}, {"db_idx": 27686, "episode_idx": 152, "frame_idx": 85, "global_frame_idx": 27686, "task_index": 30}, {"db_idx": 27687, "episode_idx": 152, "frame_idx": 86, "global_frame_idx": 27687, "task_index": 30}, {"db_idx": 27688, "episode_idx": 152, "frame_idx": 87, "global_frame_idx": 27688, "task_index": 30}, {"db_idx": 27689, "episode_idx": 152, "frame_idx": 88, "global_frame_idx": 27689, "task_index": 30}, {"db_idx": 27690, "episode_idx": 152, "frame_idx": 89, "global_frame_idx": 27690, "task_index": 30}, {"db_idx": 27691, "episode_idx": 152, "frame_idx": 90, "global_frame_idx": 27691, "task_index": 30}, {"db_idx": 27692, "episode_idx": 152, "frame_idx": 91, "global_frame_idx": 27692, "task_index": 30}, {"db_idx": 27693, "episode_idx": 152, "frame_idx": 92, "global_frame_idx": 27693, "task_index": 30}, {"db_idx": 27694, "episode_idx": 152, "frame_idx": 93, "global_frame_idx": 27694, "task_index": 30}, {"db_idx": 27695, "episode_idx": 152, "frame_idx": 94, "global_frame_idx": 27695, "task_index": 30}, {"db_idx": 27696, "episode_idx": 152, "frame_idx": 95, "global_frame_idx": 27696, "task_index": 30}, {"db_idx": 27697, "episode_idx": 152, "frame_idx": 96, "global_frame_idx": 27697, "task_index": 30}, {"db_idx": 27698, "episode_idx": 152, "frame_idx": 97, "global_frame_idx": 27698, "task_index": 30}, {"db_idx": 27699, "episode_idx": 152, "frame_idx": 98, "global_frame_idx": 27699, "task_index": 30}, {"db_idx": 27700, "episode_idx": 152, "frame_idx": 99, "global_frame_idx": 27700, "task_index": 30}, {"db_idx": 27701, "episode_idx": 152, "frame_idx": 100, 
"global_frame_idx": 27701, "task_index": 30}, {"db_idx": 27702, "episode_idx": 152, "frame_idx": 101, "global_frame_idx": 27702, "task_index": 30}, {"db_idx": 27703, "episode_idx": 152, "frame_idx": 102, "global_frame_idx": 27703, "task_index": 30}, {"db_idx": 27704, "episode_idx": 152, "frame_idx": 103, "global_frame_idx": 27704, "task_index": 30}, {"db_idx": 27705, "episode_idx": 152, "frame_idx": 104, "global_frame_idx": 27705, "task_index": 30}, {"db_idx": 27706, "episode_idx": 152, "frame_idx": 105, "global_frame_idx": 27706, "task_index": 30}, {"db_idx": 27707, "episode_idx": 152, "frame_idx": 106, "global_frame_idx": 27707, "task_index": 30}, {"db_idx": 27708, "episode_idx": 152, "frame_idx": 107, "global_frame_idx": 27708, "task_index": 30}, {"db_idx": 27709, "episode_idx": 152, "frame_idx": 108, "global_frame_idx": 27709, "task_index": 30}, {"db_idx": 27710, "episode_idx": 152, "frame_idx": 109, "global_frame_idx": 27710, "task_index": 30}, {"db_idx": 27711, "episode_idx": 152, "frame_idx": 110, "global_frame_idx": 27711, "task_index": 30}, {"db_idx": 27712, "episode_idx": 152, "frame_idx": 111, "global_frame_idx": 27712, "task_index": 30}, {"db_idx": 27713, "episode_idx": 152, "frame_idx": 112, "global_frame_idx": 27713, "task_index": 30}, {"db_idx": 27714, "episode_idx": 152, "frame_idx": 113, "global_frame_idx": 27714, "task_index": 30}, {"db_idx": 27715, "episode_idx": 152, "frame_idx": 114, "global_frame_idx": 27715, "task_index": 30}, {"db_idx": 27716, "episode_idx": 152, "frame_idx": 115, "global_frame_idx": 27716, "task_index": 30}, {"db_idx": 27717, "episode_idx": 152, "frame_idx": 116, "global_frame_idx": 27717, "task_index": 30}, {"db_idx": 27718, "episode_idx": 152, "frame_idx": 117, "global_frame_idx": 27718, "task_index": 30}, {"db_idx": 27719, "episode_idx": 152, "frame_idx": 118, "global_frame_idx": 27719, "task_index": 30}, {"db_idx": 27720, "episode_idx": 152, "frame_idx": 119, "global_frame_idx": 27720, "task_index": 30}, {"db_idx": 
27721, "episode_idx": 152, "frame_idx": 120, "global_frame_idx": 27721, "task_index": 30}, {"db_idx": 27722, "episode_idx": 152, "frame_idx": 121, "global_frame_idx": 27722, "task_index": 30}, {"db_idx": 27723, "episode_idx": 152, "frame_idx": 122, "global_frame_idx": 27723, "task_index": 30}, {"db_idx": 27724, "episode_idx": 152, "frame_idx": 123, "global_frame_idx": 27724, "task_index": 30}, {"db_idx": 27725, "episode_idx": 152, "frame_idx": 124, "global_frame_idx": 27725, "task_index": 30}, {"db_idx": 27726, "episode_idx": 152, "frame_idx": 125, "global_frame_idx": 27726, "task_index": 30}, {"db_idx": 27727, "episode_idx": 152, "frame_idx": 126, "global_frame_idx": 27727, "task_index": 30}, {"db_idx": 27728, "episode_idx": 152, "frame_idx": 127, "global_frame_idx": 27728, "task_index": 30}, {"db_idx": 27729, "episode_idx": 152, "frame_idx": 128, "global_frame_idx": 27729, "task_index": 30}, {"db_idx": 27730, "episode_idx": 153, "frame_idx": 0, "global_frame_idx": 27730, "task_index": 30}, {"db_idx": 27731, "episode_idx": 153, "frame_idx": 1, "global_frame_idx": 27731, "task_index": 30}, {"db_idx": 27732, "episode_idx": 153, "frame_idx": 2, "global_frame_idx": 27732, "task_index": 30}, {"db_idx": 27733, "episode_idx": 153, "frame_idx": 3, "global_frame_idx": 27733, "task_index": 30}, {"db_idx": 27734, "episode_idx": 153, "frame_idx": 4, "global_frame_idx": 27734, "task_index": 30}, {"db_idx": 27735, "episode_idx": 153, "frame_idx": 5, "global_frame_idx": 27735, "task_index": 30}, {"db_idx": 27736, "episode_idx": 153, "frame_idx": 6, "global_frame_idx": 27736, "task_index": 30}, {"db_idx": 27737, "episode_idx": 153, "frame_idx": 7, "global_frame_idx": 27737, "task_index": 30}, {"db_idx": 27738, "episode_idx": 153, "frame_idx": 8, "global_frame_idx": 27738, "task_index": 30}, {"db_idx": 27739, "episode_idx": 153, "frame_idx": 9, "global_frame_idx": 27739, "task_index": 30}, {"db_idx": 27740, "episode_idx": 153, "frame_idx": 10, "global_frame_idx": 27740, 
"task_index": 30}, {"db_idx": 27741, "episode_idx": 153, "frame_idx": 11, "global_frame_idx": 27741, "task_index": 30}, {"db_idx": 27742, "episode_idx": 153, "frame_idx": 12, "global_frame_idx": 27742, "task_index": 30}, {"db_idx": 27743, "episode_idx": 153, "frame_idx": 13, "global_frame_idx": 27743, "task_index": 30}, {"db_idx": 27744, "episode_idx": 153, "frame_idx": 14, "global_frame_idx": 27744, "task_index": 30}, {"db_idx": 27745, "episode_idx": 153, "frame_idx": 15, "global_frame_idx": 27745, "task_index": 30}, {"db_idx": 27746, "episode_idx": 153, "frame_idx": 16, "global_frame_idx": 27746, "task_index": 30}, {"db_idx": 27747, "episode_idx": 153, "frame_idx": 17, "global_frame_idx": 27747, "task_index": 30}, {"db_idx": 27748, "episode_idx": 153, "frame_idx": 18, "global_frame_idx": 27748, "task_index": 30}, {"db_idx": 27749, "episode_idx": 153, "frame_idx": 19, "global_frame_idx": 27749, "task_index": 30}, {"db_idx": 27750, "episode_idx": 153, "frame_idx": 20, "global_frame_idx": 27750, "task_index": 30}, {"db_idx": 27751, "episode_idx": 153, "frame_idx": 21, "global_frame_idx": 27751, "task_index": 30}, {"db_idx": 27752, "episode_idx": 153, "frame_idx": 22, "global_frame_idx": 27752, "task_index": 30}, {"db_idx": 27753, "episode_idx": 153, "frame_idx": 23, "global_frame_idx": 27753, "task_index": 30}, {"db_idx": 27754, "episode_idx": 153, "frame_idx": 24, "global_frame_idx": 27754, "task_index": 30}, {"db_idx": 27755, "episode_idx": 153, "frame_idx": 25, "global_frame_idx": 27755, "task_index": 30}, {"db_idx": 27756, "episode_idx": 153, "frame_idx": 26, "global_frame_idx": 27756, "task_index": 30}, {"db_idx": 27757, "episode_idx": 153, "frame_idx": 27, "global_frame_idx": 27757, "task_index": 30}, {"db_idx": 27758, "episode_idx": 153, "frame_idx": 28, "global_frame_idx": 27758, "task_index": 30}, {"db_idx": 27759, "episode_idx": 153, "frame_idx": 29, "global_frame_idx": 27759, "task_index": 30}, {"db_idx": 27760, "episode_idx": 153, "frame_idx": 30, 
"global_frame_idx": 27760, "task_index": 30}, {"db_idx": 27761, "episode_idx": 153, "frame_idx": 31, "global_frame_idx": 27761, "task_index": 30}, {"db_idx": 27762, "episode_idx": 153, "frame_idx": 32, "global_frame_idx": 27762, "task_index": 30}, {"db_idx": 27763, "episode_idx": 153, "frame_idx": 33, "global_frame_idx": 27763, "task_index": 30}, {"db_idx": 27764, "episode_idx": 153, "frame_idx": 34, "global_frame_idx": 27764, "task_index": 30}, {"db_idx": 27765, "episode_idx": 153, "frame_idx": 35, "global_frame_idx": 27765, "task_index": 30}, {"db_idx": 27766, "episode_idx": 153, "frame_idx": 36, "global_frame_idx": 27766, "task_index": 30}, {"db_idx": 27767, "episode_idx": 153, "frame_idx": 37, "global_frame_idx": 27767, "task_index": 30}, {"db_idx": 27768, "episode_idx": 153, "frame_idx": 38, "global_frame_idx": 27768, "task_index": 30}, {"db_idx": 27769, "episode_idx": 153, "frame_idx": 39, "global_frame_idx": 27769, "task_index": 30}, {"db_idx": 27770, "episode_idx": 153, "frame_idx": 40, "global_frame_idx": 27770, "task_index": 30}, {"db_idx": 27771, "episode_idx": 153, "frame_idx": 41, "global_frame_idx": 27771, "task_index": 30}, {"db_idx": 27772, "episode_idx": 153, "frame_idx": 42, "global_frame_idx": 27772, "task_index": 30}, {"db_idx": 27773, "episode_idx": 153, "frame_idx": 43, "global_frame_idx": 27773, "task_index": 30}, {"db_idx": 27774, "episode_idx": 153, "frame_idx": 44, "global_frame_idx": 27774, "task_index": 30}, {"db_idx": 27775, "episode_idx": 153, "frame_idx": 45, "global_frame_idx": 27775, "task_index": 30}, {"db_idx": 27776, "episode_idx": 153, "frame_idx": 46, "global_frame_idx": 27776, "task_index": 30}, {"db_idx": 27777, "episode_idx": 153, "frame_idx": 47, "global_frame_idx": 27777, "task_index": 30}, {"db_idx": 27778, "episode_idx": 153, "frame_idx": 48, "global_frame_idx": 27778, "task_index": 30}, {"db_idx": 27779, "episode_idx": 153, "frame_idx": 49, "global_frame_idx": 27779, "task_index": 30}, {"db_idx": 27780, "episode_idx": 
153, "frame_idx": 50, "global_frame_idx": 27780, "task_index": 30}, {"db_idx": 27781, "episode_idx": 153, "frame_idx": 51, "global_frame_idx": 27781, "task_index": 30}, {"db_idx": 27782, "episode_idx": 153, "frame_idx": 52, "global_frame_idx": 27782, "task_index": 30}, {"db_idx": 27783, "episode_idx": 153, "frame_idx": 53, "global_frame_idx": 27783, "task_index": 30}, {"db_idx": 27784, "episode_idx": 153, "frame_idx": 54, "global_frame_idx": 27784, "task_index": 30}, {"db_idx": 27785, "episode_idx": 153, "frame_idx": 55, "global_frame_idx": 27785, "task_index": 30}, {"db_idx": 27786, "episode_idx": 153, "frame_idx": 56, "global_frame_idx": 27786, "task_index": 30}, {"db_idx": 27787, "episode_idx": 153, "frame_idx": 57, "global_frame_idx": 27787, "task_index": 30}, {"db_idx": 27788, "episode_idx": 153, "frame_idx": 58, "global_frame_idx": 27788, "task_index": 30}, {"db_idx": 27789, "episode_idx": 153, "frame_idx": 59, "global_frame_idx": 27789, "task_index": 30}, {"db_idx": 27790, "episode_idx": 153, "frame_idx": 60, "global_frame_idx": 27790, "task_index": 30}, {"db_idx": 27791, "episode_idx": 153, "frame_idx": 61, "global_frame_idx": 27791, "task_index": 30}, {"db_idx": 27792, "episode_idx": 153, "frame_idx": 62, "global_frame_idx": 27792, "task_index": 30}, {"db_idx": 27793, "episode_idx": 153, "frame_idx": 63, "global_frame_idx": 27793, "task_index": 30}, {"db_idx": 27794, "episode_idx": 153, "frame_idx": 64, "global_frame_idx": 27794, "task_index": 30}, {"db_idx": 27795, "episode_idx": 153, "frame_idx": 65, "global_frame_idx": 27795, "task_index": 30}, {"db_idx": 27796, "episode_idx": 153, "frame_idx": 66, "global_frame_idx": 27796, "task_index": 30}, {"db_idx": 27797, "episode_idx": 153, "frame_idx": 67, "global_frame_idx": 27797, "task_index": 30}, {"db_idx": 27798, "episode_idx": 153, "frame_idx": 68, "global_frame_idx": 27798, "task_index": 30}, {"db_idx": 27799, "episode_idx": 153, "frame_idx": 69, "global_frame_idx": 27799, "task_index": 30}, {"db_idx": 
27800, "episode_idx": 153, "frame_idx": 70, "global_frame_idx": 27800, "task_index": 30}, {"db_idx": 27801, "episode_idx": 153, "frame_idx": 71, "global_frame_idx": 27801, "task_index": 30}, {"db_idx": 27802, "episode_idx": 153, "frame_idx": 72, "global_frame_idx": 27802, "task_index": 30}, {"db_idx": 27803, "episode_idx": 153, "frame_idx": 73, "global_frame_idx": 27803, "task_index": 30}, {"db_idx": 27804, "episode_idx": 153, "frame_idx": 74, "global_frame_idx": 27804, "task_index": 30}, {"db_idx": 27805, "episode_idx": 153, "frame_idx": 75, "global_frame_idx": 27805, "task_index": 30}, {"db_idx": 27806, "episode_idx": 153, "frame_idx": 76, "global_frame_idx": 27806, "task_index": 30}, {"db_idx": 27807, "episode_idx": 153, "frame_idx": 77, "global_frame_idx": 27807, "task_index": 30}, {"db_idx": 27808, "episode_idx": 153, "frame_idx": 78, "global_frame_idx": 27808, "task_index": 30}, {"db_idx": 27809, "episode_idx": 153, "frame_idx": 79, "global_frame_idx": 27809, "task_index": 30}, {"db_idx": 27810, "episode_idx": 153, "frame_idx": 80, "global_frame_idx": 27810, "task_index": 30}, {"db_idx": 27811, "episode_idx": 153, "frame_idx": 81, "global_frame_idx": 27811, "task_index": 30}, {"db_idx": 27812, "episode_idx": 153, "frame_idx": 82, "global_frame_idx": 27812, "task_index": 30}, {"db_idx": 27813, "episode_idx": 153, "frame_idx": 83, "global_frame_idx": 27813, "task_index": 30}, {"db_idx": 27814, "episode_idx": 153, "frame_idx": 84, "global_frame_idx": 27814, "task_index": 30}, {"db_idx": 27815, "episode_idx": 153, "frame_idx": 85, "global_frame_idx": 27815, "task_index": 30}, {"db_idx": 27816, "episode_idx": 153, "frame_idx": 86, "global_frame_idx": 27816, "task_index": 30}, {"db_idx": 27817, "episode_idx": 153, "frame_idx": 87, "global_frame_idx": 27817, "task_index": 30}, {"db_idx": 27818, "episode_idx": 153, "frame_idx": 88, "global_frame_idx": 27818, "task_index": 30}, {"db_idx": 27819, "episode_idx": 153, "frame_idx": 89, "global_frame_idx": 27819, 
"task_index": 30}, {"db_idx": 27820, "episode_idx": 153, "frame_idx": 90, "global_frame_idx": 27820, "task_index": 30}, {"db_idx": 27821, "episode_idx": 153, "frame_idx": 91, "global_frame_idx": 27821, "task_index": 30}, {"db_idx": 27822, "episode_idx": 153, "frame_idx": 92, "global_frame_idx": 27822, "task_index": 30}, {"db_idx": 27823, "episode_idx": 153, "frame_idx": 93, "global_frame_idx": 27823, "task_index": 30}, {"db_idx": 27824, "episode_idx": 153, "frame_idx": 94, "global_frame_idx": 27824, "task_index": 30}, {"db_idx": 27825, "episode_idx": 153, "frame_idx": 95, "global_frame_idx": 27825, "task_index": 30}, {"db_idx": 27826, "episode_idx": 153, "frame_idx": 96, "global_frame_idx": 27826, "task_index": 30}, {"db_idx": 27827, "episode_idx": 153, "frame_idx": 97, "global_frame_idx": 27827, "task_index": 30}, {"db_idx": 27828, "episode_idx": 153, "frame_idx": 98, "global_frame_idx": 27828, "task_index": 30}, {"db_idx": 27829, "episode_idx": 153, "frame_idx": 99, "global_frame_idx": 27829, "task_index": 30}, {"db_idx": 27830, "episode_idx": 153, "frame_idx": 100, "global_frame_idx": 27830, "task_index": 30}, {"db_idx": 27831, "episode_idx": 153, "frame_idx": 101, "global_frame_idx": 27831, "task_index": 30}, {"db_idx": 27832, "episode_idx": 153, "frame_idx": 102, "global_frame_idx": 27832, "task_index": 30}, {"db_idx": 27833, "episode_idx": 153, "frame_idx": 103, "global_frame_idx": 27833, "task_index": 30}, {"db_idx": 27834, "episode_idx": 153, "frame_idx": 104, "global_frame_idx": 27834, "task_index": 30}, {"db_idx": 27835, "episode_idx": 153, "frame_idx": 105, "global_frame_idx": 27835, "task_index": 30}, {"db_idx": 27836, "episode_idx": 153, "frame_idx": 106, "global_frame_idx": 27836, "task_index": 30}, {"db_idx": 27837, "episode_idx": 153, "frame_idx": 107, "global_frame_idx": 27837, "task_index": 30}, {"db_idx": 27838, "episode_idx": 153, "frame_idx": 108, "global_frame_idx": 27838, "task_index": 30}, {"db_idx": 27839, "episode_idx": 153, "frame_idx": 
109, "global_frame_idx": 27839, "task_index": 30}, {"db_idx": 27840, "episode_idx": 153, "frame_idx": 110, "global_frame_idx": 27840, "task_index": 30}, {"db_idx": 27841, "episode_idx": 153, "frame_idx": 111, "global_frame_idx": 27841, "task_index": 30}, {"db_idx": 27842, "episode_idx": 153, "frame_idx": 112, "global_frame_idx": 27842, "task_index": 30}, {"db_idx": 27843, "episode_idx": 153, "frame_idx": 113, "global_frame_idx": 27843, "task_index": 30}, {"db_idx": 27844, "episode_idx": 153, "frame_idx": 114, "global_frame_idx": 27844, "task_index": 30}, {"db_idx": 27845, "episode_idx": 153, "frame_idx": 115, "global_frame_idx": 27845, "task_index": 30}, {"db_idx": 27846, "episode_idx": 153, "frame_idx": 116, "global_frame_idx": 27846, "task_index": 30}, {"db_idx": 27847, "episode_idx": 153, "frame_idx": 117, "global_frame_idx": 27847, "task_index": 30}, {"db_idx": 27848, "episode_idx": 153, "frame_idx": 118, "global_frame_idx": 27848, "task_index": 30}, {"db_idx": 27849, "episode_idx": 153, "frame_idx": 119, "global_frame_idx": 27849, "task_index": 30}, {"db_idx": 27850, "episode_idx": 153, "frame_idx": 120, "global_frame_idx": 27850, "task_index": 30}, {"db_idx": 27851, "episode_idx": 153, "frame_idx": 121, "global_frame_idx": 27851, "task_index": 30}, {"db_idx": 27852, "episode_idx": 153, "frame_idx": 122, "global_frame_idx": 27852, "task_index": 30}, {"db_idx": 27853, "episode_idx": 153, "frame_idx": 123, "global_frame_idx": 27853, "task_index": 30}, {"db_idx": 27854, "episode_idx": 153, "frame_idx": 124, "global_frame_idx": 27854, "task_index": 30}, {"db_idx": 27855, "episode_idx": 153, "frame_idx": 125, "global_frame_idx": 27855, "task_index": 30}, {"db_idx": 27856, "episode_idx": 153, "frame_idx": 126, "global_frame_idx": 27856, "task_index": 30}, {"db_idx": 27857, "episode_idx": 153, "frame_idx": 127, "global_frame_idx": 27857, "task_index": 30}, {"db_idx": 27858, "episode_idx": 153, "frame_idx": 128, "global_frame_idx": 27858, "task_index": 30}, {"db_idx": 
27859, "episode_idx": 153, "frame_idx": 129, "global_frame_idx": 27859, "task_index": 30}, {"db_idx": 27860, "episode_idx": 153, "frame_idx": 130, "global_frame_idx": 27860, "task_index": 30}, {"db_idx": 27861, "episode_idx": 153, "frame_idx": 131, "global_frame_idx": 27861, "task_index": 30}, {"db_idx": 27862, "episode_idx": 153, "frame_idx": 132, "global_frame_idx": 27862, "task_index": 30}, {"db_idx": 27863, "episode_idx": 153, "frame_idx": 133, "global_frame_idx": 27863, "task_index": 30}, {"db_idx": 27864, "episode_idx": 153, "frame_idx": 134, "global_frame_idx": 27864, "task_index": 30}, {"db_idx": 27865, "episode_idx": 153, "frame_idx": 135, "global_frame_idx": 27865, "task_index": 30}, {"db_idx": 27866, "episode_idx": 153, "frame_idx": 136, "global_frame_idx": 27866, "task_index": 30}, {"db_idx": 27867, "episode_idx": 153, "frame_idx": 137, "global_frame_idx": 27867, "task_index": 30}, {"db_idx": 27868, "episode_idx": 153, "frame_idx": 138, "global_frame_idx": 27868, "task_index": 30}, {"db_idx": 27869, "episode_idx": 153, "frame_idx": 139, "global_frame_idx": 27869, "task_index": 30}, {"db_idx": 27870, "episode_idx": 153, "frame_idx": 140, "global_frame_idx": 27870, "task_index": 30}, {"db_idx": 27871, "episode_idx": 153, "frame_idx": 141, "global_frame_idx": 27871, "task_index": 30}, {"db_idx": 27872, "episode_idx": 153, "frame_idx": 142, "global_frame_idx": 27872, "task_index": 30}, {"db_idx": 27873, "episode_idx": 153, "frame_idx": 143, "global_frame_idx": 27873, "task_index": 30}, {"db_idx": 27874, "episode_idx": 153, "frame_idx": 144, "global_frame_idx": 27874, "task_index": 30}, {"db_idx": 27875, "episode_idx": 153, "frame_idx": 145, "global_frame_idx": 27875, "task_index": 30}, {"db_idx": 27876, "episode_idx": 154, "frame_idx": 0, "global_frame_idx": 27876, "task_index": 30}, {"db_idx": 27877, "episode_idx": 154, "frame_idx": 1, "global_frame_idx": 27877, "task_index": 30}, {"db_idx": 27878, "episode_idx": 154, "frame_idx": 2, "global_frame_idx": 
27878, "task_index": 30}, {"db_idx": 27879, "episode_idx": 154, "frame_idx": 3, "global_frame_idx": 27879, "task_index": 30}, {"db_idx": 27880, "episode_idx": 154, "frame_idx": 4, "global_frame_idx": 27880, "task_index": 30}, {"db_idx": 27881, "episode_idx": 154, "frame_idx": 5, "global_frame_idx": 27881, "task_index": 30}, {"db_idx": 27882, "episode_idx": 154, "frame_idx": 6, "global_frame_idx": 27882, "task_index": 30}, {"db_idx": 27883, "episode_idx": 154, "frame_idx": 7, "global_frame_idx": 27883, "task_index": 30}, {"db_idx": 27884, "episode_idx": 154, "frame_idx": 8, "global_frame_idx": 27884, "task_index": 30}, {"db_idx": 27885, "episode_idx": 154, "frame_idx": 9, "global_frame_idx": 27885, "task_index": 30}, {"db_idx": 27886, "episode_idx": 154, "frame_idx": 10, "global_frame_idx": 27886, "task_index": 30}, {"db_idx": 27887, "episode_idx": 154, "frame_idx": 11, "global_frame_idx": 27887, "task_index": 30}, {"db_idx": 27888, "episode_idx": 154, "frame_idx": 12, "global_frame_idx": 27888, "task_index": 30}, {"db_idx": 27889, "episode_idx": 154, "frame_idx": 13, "global_frame_idx": 27889, "task_index": 30}, {"db_idx": 27890, "episode_idx": 154, "frame_idx": 14, "global_frame_idx": 27890, "task_index": 30}, {"db_idx": 27891, "episode_idx": 154, "frame_idx": 15, "global_frame_idx": 27891, "task_index": 30}, {"db_idx": 27892, "episode_idx": 154, "frame_idx": 16, "global_frame_idx": 27892, "task_index": 30}, {"db_idx": 27893, "episode_idx": 154, "frame_idx": 17, "global_frame_idx": 27893, "task_index": 30}, {"db_idx": 27894, "episode_idx": 154, "frame_idx": 18, "global_frame_idx": 27894, "task_index": 30}, {"db_idx": 27895, "episode_idx": 154, "frame_idx": 19, "global_frame_idx": 27895, "task_index": 30}, {"db_idx": 27896, "episode_idx": 154, "frame_idx": 20, "global_frame_idx": 27896, "task_index": 30}, {"db_idx": 27897, "episode_idx": 154, "frame_idx": 21, "global_frame_idx": 27897, "task_index": 30}, {"db_idx": 27898, "episode_idx": 154, "frame_idx": 22, 
"global_frame_idx": 27898, "task_index": 30}, {"db_idx": 27899, "episode_idx": 154, "frame_idx": 23, "global_frame_idx": 27899, "task_index": 30}, {"db_idx": 27900, "episode_idx": 154, "frame_idx": 24, "global_frame_idx": 27900, "task_index": 30}, {"db_idx": 27901, "episode_idx": 154, "frame_idx": 25, "global_frame_idx": 27901, "task_index": 30}, {"db_idx": 27902, "episode_idx": 154, "frame_idx": 26, "global_frame_idx": 27902, "task_index": 30}, {"db_idx": 27903, "episode_idx": 154, "frame_idx": 27, "global_frame_idx": 27903, "task_index": 30}, {"db_idx": 27904, "episode_idx": 154, "frame_idx": 28, "global_frame_idx": 27904, "task_index": 30}, {"db_idx": 27905, "episode_idx": 154, "frame_idx": 29, "global_frame_idx": 27905, "task_index": 30}, {"db_idx": 27906, "episode_idx": 154, "frame_idx": 30, "global_frame_idx": 27906, "task_index": 30}, {"db_idx": 27907, "episode_idx": 154, "frame_idx": 31, "global_frame_idx": 27907, "task_index": 30}, {"db_idx": 27908, "episode_idx": 154, "frame_idx": 32, "global_frame_idx": 27908, "task_index": 30}, {"db_idx": 27909, "episode_idx": 154, "frame_idx": 33, "global_frame_idx": 27909, "task_index": 30}, {"db_idx": 27910, "episode_idx": 154, "frame_idx": 34, "global_frame_idx": 27910, "task_index": 30}, {"db_idx": 27911, "episode_idx": 154, "frame_idx": 35, "global_frame_idx": 27911, "task_index": 30}, {"db_idx": 27912, "episode_idx": 154, "frame_idx": 36, "global_frame_idx": 27912, "task_index": 30}, {"db_idx": 27913, "episode_idx": 154, "frame_idx": 37, "global_frame_idx": 27913, "task_index": 30}, {"db_idx": 27914, "episode_idx": 154, "frame_idx": 38, "global_frame_idx": 27914, "task_index": 30}, {"db_idx": 27915, "episode_idx": 154, "frame_idx": 39, "global_frame_idx": 27915, "task_index": 30}, {"db_idx": 27916, "episode_idx": 154, "frame_idx": 40, "global_frame_idx": 27916, "task_index": 30}, {"db_idx": 27917, "episode_idx": 154, "frame_idx": 41, "global_frame_idx": 27917, "task_index": 30}, {"db_idx": 27918, "episode_idx": 
154, "frame_idx": 42, "global_frame_idx": 27918, "task_index": 30}, {"db_idx": 27919, "episode_idx": 154, "frame_idx": 43, "global_frame_idx": 27919, "task_index": 30}, {"db_idx": 27920, "episode_idx": 154, "frame_idx": 44, "global_frame_idx": 27920, "task_index": 30}, {"db_idx": 27921, "episode_idx": 154, "frame_idx": 45, "global_frame_idx": 27921, "task_index": 30}, {"db_idx": 27922, "episode_idx": 154, "frame_idx": 46, "global_frame_idx": 27922, "task_index": 30}, {"db_idx": 27923, "episode_idx": 154, "frame_idx": 47, "global_frame_idx": 27923, "task_index": 30}, {"db_idx": 27924, "episode_idx": 154, "frame_idx": 48, "global_frame_idx": 27924, "task_index": 30}, {"db_idx": 27925, "episode_idx": 154, "frame_idx": 49, "global_frame_idx": 27925, "task_index": 30}, {"db_idx": 27926, "episode_idx": 154, "frame_idx": 50, "global_frame_idx": 27926, "task_index": 30}, {"db_idx": 27927, "episode_idx": 154, "frame_idx": 51, "global_frame_idx": 27927, "task_index": 30}, {"db_idx": 27928, "episode_idx": 154, "frame_idx": 52, "global_frame_idx": 27928, "task_index": 30}, {"db_idx": 27929, "episode_idx": 154, "frame_idx": 53, "global_frame_idx": 27929, "task_index": 30}, {"db_idx": 27930, "episode_idx": 154, "frame_idx": 54, "global_frame_idx": 27930, "task_index": 30}, {"db_idx": 27931, "episode_idx": 154, "frame_idx": 55, "global_frame_idx": 27931, "task_index": 30}, {"db_idx": 27932, "episode_idx": 154, "frame_idx": 56, "global_frame_idx": 27932, "task_index": 30}, {"db_idx": 27933, "episode_idx": 154, "frame_idx": 57, "global_frame_idx": 27933, "task_index": 30}, {"db_idx": 27934, "episode_idx": 154, "frame_idx": 58, "global_frame_idx": 27934, "task_index": 30}, {"db_idx": 27935, "episode_idx": 154, "frame_idx": 59, "global_frame_idx": 27935, "task_index": 30}, {"db_idx": 27936, "episode_idx": 154, "frame_idx": 60, "global_frame_idx": 27936, "task_index": 30}, {"db_idx": 27937, "episode_idx": 154, "frame_idx": 61, "global_frame_idx": 27937, "task_index": 30}, {"db_idx": 
27938, "episode_idx": 154, "frame_idx": 62, "global_frame_idx": 27938, "task_index": 30}, {"db_idx": 27939, "episode_idx": 154, "frame_idx": 63, "global_frame_idx": 27939, "task_index": 30}, {"db_idx": 27940, "episode_idx": 154, "frame_idx": 64, "global_frame_idx": 27940, "task_index": 30}, {"db_idx": 27941, "episode_idx": 154, "frame_idx": 65, "global_frame_idx": 27941, "task_index": 30}, {"db_idx": 27942, "episode_idx": 154, "frame_idx": 66, "global_frame_idx": 27942, "task_index": 30}, {"db_idx": 27943, "episode_idx": 154, "frame_idx": 67, "global_frame_idx": 27943, "task_index": 30}, {"db_idx": 27944, "episode_idx": 154, "frame_idx": 68, "global_frame_idx": 27944, "task_index": 30}, {"db_idx": 27945, "episode_idx": 154, "frame_idx": 69, "global_frame_idx": 27945, "task_index": 30}, {"db_idx": 27946, "episode_idx": 154, "frame_idx": 70, "global_frame_idx": 27946, "task_index": 30}, {"db_idx": 27947, "episode_idx": 154, "frame_idx": 71, "global_frame_idx": 27947, "task_index": 30}, {"db_idx": 27948, "episode_idx": 154, "frame_idx": 72, "global_frame_idx": 27948, "task_index": 30}, {"db_idx": 27949, "episode_idx": 154, "frame_idx": 73, "global_frame_idx": 27949, "task_index": 30}, {"db_idx": 27950, "episode_idx": 154, "frame_idx": 74, "global_frame_idx": 27950, "task_index": 30}, {"db_idx": 27951, "episode_idx": 154, "frame_idx": 75, "global_frame_idx": 27951, "task_index": 30}, {"db_idx": 27952, "episode_idx": 154, "frame_idx": 76, "global_frame_idx": 27952, "task_index": 30}, {"db_idx": 27953, "episode_idx": 154, "frame_idx": 77, "global_frame_idx": 27953, "task_index": 30}, {"db_idx": 27954, "episode_idx": 154, "frame_idx": 78, "global_frame_idx": 27954, "task_index": 30}, {"db_idx": 27955, "episode_idx": 154, "frame_idx": 79, "global_frame_idx": 27955, "task_index": 30}, {"db_idx": 27956, "episode_idx": 154, "frame_idx": 80, "global_frame_idx": 27956, "task_index": 30}, {"db_idx": 27957, "episode_idx": 154, "frame_idx": 81, "global_frame_idx": 27957, 
"task_index": 30}, {"db_idx": 27958, "episode_idx": 154, "frame_idx": 82, "global_frame_idx": 27958, "task_index": 30}, {"db_idx": 27959, "episode_idx": 154, "frame_idx": 83, "global_frame_idx": 27959, "task_index": 30}, {"db_idx": 27960, "episode_idx": 154, "frame_idx": 84, "global_frame_idx": 27960, "task_index": 30}, {"db_idx": 27961, "episode_idx": 154, "frame_idx": 85, "global_frame_idx": 27961, "task_index": 30}, {"db_idx": 27962, "episode_idx": 154, "frame_idx": 86, "global_frame_idx": 27962, "task_index": 30}, {"db_idx": 27963, "episode_idx": 154, "frame_idx": 87, "global_frame_idx": 27963, "task_index": 30}, {"db_idx": 27964, "episode_idx": 154, "frame_idx": 88, "global_frame_idx": 27964, "task_index": 30}, {"db_idx": 27965, "episode_idx": 154, "frame_idx": 89, "global_frame_idx": 27965, "task_index": 30}, {"db_idx": 27966, "episode_idx": 154, "frame_idx": 90, "global_frame_idx": 27966, "task_index": 30}, {"db_idx": 27967, "episode_idx": 154, "frame_idx": 91, "global_frame_idx": 27967, "task_index": 30}, {"db_idx": 27968, "episode_idx": 154, "frame_idx": 92, "global_frame_idx": 27968, "task_index": 30}, {"db_idx": 27969, "episode_idx": 154, "frame_idx": 93, "global_frame_idx": 27969, "task_index": 30}, {"db_idx": 27970, "episode_idx": 154, "frame_idx": 94, "global_frame_idx": 27970, "task_index": 30}, {"db_idx": 27971, "episode_idx": 154, "frame_idx": 95, "global_frame_idx": 27971, "task_index": 30}, {"db_idx": 27972, "episode_idx": 154, "frame_idx": 96, "global_frame_idx": 27972, "task_index": 30}, {"db_idx": 27973, "episode_idx": 154, "frame_idx": 97, "global_frame_idx": 27973, "task_index": 30}, {"db_idx": 27974, "episode_idx": 154, "frame_idx": 98, "global_frame_idx": 27974, "task_index": 30}, {"db_idx": 27975, "episode_idx": 154, "frame_idx": 99, "global_frame_idx": 27975, "task_index": 30}, {"db_idx": 27976, "episode_idx": 154, "frame_idx": 100, "global_frame_idx": 27976, "task_index": 30}, {"db_idx": 27977, "episode_idx": 154, "frame_idx": 101, 
"global_frame_idx": 27977, "task_index": 30}, {"db_idx": 27978, "episode_idx": 154, "frame_idx": 102, "global_frame_idx": 27978, "task_index": 30}, {"db_idx": 27979, "episode_idx": 154, "frame_idx": 103, "global_frame_idx": 27979, "task_index": 30}, {"db_idx": 27980, "episode_idx": 154, "frame_idx": 104, "global_frame_idx": 27980, "task_index": 30}, {"db_idx": 27981, "episode_idx": 154, "frame_idx": 105, "global_frame_idx": 27981, "task_index": 30}, {"db_idx": 27982, "episode_idx": 154, "frame_idx": 106, "global_frame_idx": 27982, "task_index": 30}, {"db_idx": 27983, "episode_idx": 154, "frame_idx": 107, "global_frame_idx": 27983, "task_index": 30}, {"db_idx": 27984, "episode_idx": 154, "frame_idx": 108, "global_frame_idx": 27984, "task_index": 30}, {"db_idx": 27985, "episode_idx": 154, "frame_idx": 109, "global_frame_idx": 27985, "task_index": 30}, {"db_idx": 27986, "episode_idx": 154, "frame_idx": 110, "global_frame_idx": 27986, "task_index": 30}, {"db_idx": 27987, "episode_idx": 154, "frame_idx": 111, "global_frame_idx": 27987, "task_index": 30}, {"db_idx": 27988, "episode_idx": 154, "frame_idx": 112, "global_frame_idx": 27988, "task_index": 30}, {"db_idx": 27989, "episode_idx": 155, "frame_idx": 0, "global_frame_idx": 27989, "task_index": 31}, {"db_idx": 27990, "episode_idx": 155, "frame_idx": 1, "global_frame_idx": 27990, "task_index": 31}, {"db_idx": 27991, "episode_idx": 155, "frame_idx": 2, "global_frame_idx": 27991, "task_index": 31}, {"db_idx": 27992, "episode_idx": 155, "frame_idx": 3, "global_frame_idx": 27992, "task_index": 31}, {"db_idx": 27993, "episode_idx": 155, "frame_idx": 4, "global_frame_idx": 27993, "task_index": 31}, {"db_idx": 27994, "episode_idx": 155, "frame_idx": 5, "global_frame_idx": 27994, "task_index": 31}, {"db_idx": 27995, "episode_idx": 155, "frame_idx": 6, "global_frame_idx": 27995, "task_index": 31}, {"db_idx": 27996, "episode_idx": 155, "frame_idx": 7, "global_frame_idx": 27996, "task_index": 31}, {"db_idx": 27997, 
"episode_idx": 155, "frame_idx": 8, "global_frame_idx": 27997, "task_index": 31}, {"db_idx": 27998, "episode_idx": 155, "frame_idx": 9, "global_frame_idx": 27998, "task_index": 31}, {"db_idx": 27999, "episode_idx": 155, "frame_idx": 10, "global_frame_idx": 27999, "task_index": 31}, {"db_idx": 28000, "episode_idx": 155, "frame_idx": 11, "global_frame_idx": 28000, "task_index": 31}, {"db_idx": 28001, "episode_idx": 155, "frame_idx": 12, "global_frame_idx": 28001, "task_index": 31}, {"db_idx": 28002, "episode_idx": 155, "frame_idx": 13, "global_frame_idx": 28002, "task_index": 31}, {"db_idx": 28003, "episode_idx": 155, "frame_idx": 14, "global_frame_idx": 28003, "task_index": 31}, {"db_idx": 28004, "episode_idx": 155, "frame_idx": 15, "global_frame_idx": 28004, "task_index": 31}, {"db_idx": 28005, "episode_idx": 155, "frame_idx": 16, "global_frame_idx": 28005, "task_index": 31}, {"db_idx": 28006, "episode_idx": 155, "frame_idx": 17, "global_frame_idx": 28006, "task_index": 31}, {"db_idx": 28007, "episode_idx": 155, "frame_idx": 18, "global_frame_idx": 28007, "task_index": 31}, {"db_idx": 28008, "episode_idx": 155, "frame_idx": 19, "global_frame_idx": 28008, "task_index": 31}, {"db_idx": 28009, "episode_idx": 155, "frame_idx": 20, "global_frame_idx": 28009, "task_index": 31}, {"db_idx": 28010, "episode_idx": 155, "frame_idx": 21, "global_frame_idx": 28010, "task_index": 31}, {"db_idx": 28011, "episode_idx": 155, "frame_idx": 22, "global_frame_idx": 28011, "task_index": 31}, {"db_idx": 28012, "episode_idx": 155, "frame_idx": 23, "global_frame_idx": 28012, "task_index": 31}, {"db_idx": 28013, "episode_idx": 155, "frame_idx": 24, "global_frame_idx": 28013, "task_index": 31}, {"db_idx": 28014, "episode_idx": 155, "frame_idx": 25, "global_frame_idx": 28014, "task_index": 31}, {"db_idx": 28015, "episode_idx": 155, "frame_idx": 26, "global_frame_idx": 28015, "task_index": 31}, {"db_idx": 28016, "episode_idx": 155, "frame_idx": 27, "global_frame_idx": 28016, "task_index": 31}, 
{"db_idx": 28017, "episode_idx": 155, "frame_idx": 28, "global_frame_idx": 28017, "task_index": 31}, {"db_idx": 28018, "episode_idx": 155, "frame_idx": 29, "global_frame_idx": 28018, "task_index": 31}, {"db_idx": 28019, "episode_idx": 155, "frame_idx": 30, "global_frame_idx": 28019, "task_index": 31}, {"db_idx": 28020, "episode_idx": 155, "frame_idx": 31, "global_frame_idx": 28020, "task_index": 31}, {"db_idx": 28021, "episode_idx": 155, "frame_idx": 32, "global_frame_idx": 28021, "task_index": 31}, {"db_idx": 28022, "episode_idx": 155, "frame_idx": 33, "global_frame_idx": 28022, "task_index": 31}, {"db_idx": 28023, "episode_idx": 155, "frame_idx": 34, "global_frame_idx": 28023, "task_index": 31}, {"db_idx": 28024, "episode_idx": 155, "frame_idx": 35, "global_frame_idx": 28024, "task_index": 31}, {"db_idx": 28025, "episode_idx": 155, "frame_idx": 36, "global_frame_idx": 28025, "task_index": 31}, {"db_idx": 28026, "episode_idx": 155, "frame_idx": 37, "global_frame_idx": 28026, "task_index": 31}, {"db_idx": 28027, "episode_idx": 155, "frame_idx": 38, "global_frame_idx": 28027, "task_index": 31}, {"db_idx": 28028, "episode_idx": 155, "frame_idx": 39, "global_frame_idx": 28028, "task_index": 31}, {"db_idx": 28029, "episode_idx": 155, "frame_idx": 40, "global_frame_idx": 28029, "task_index": 31}, {"db_idx": 28030, "episode_idx": 155, "frame_idx": 41, "global_frame_idx": 28030, "task_index": 31}, {"db_idx": 28031, "episode_idx": 155, "frame_idx": 42, "global_frame_idx": 28031, "task_index": 31}, {"db_idx": 28032, "episode_idx": 155, "frame_idx": 43, "global_frame_idx": 28032, "task_index": 31}, {"db_idx": 28033, "episode_idx": 155, "frame_idx": 44, "global_frame_idx": 28033, "task_index": 31}, {"db_idx": 28034, "episode_idx": 155, "frame_idx": 45, "global_frame_idx": 28034, "task_index": 31}, {"db_idx": 28035, "episode_idx": 155, "frame_idx": 46, "global_frame_idx": 28035, "task_index": 31}, {"db_idx": 28036, "episode_idx": 155, "frame_idx": 47, "global_frame_idx": 
28036, "task_index": 31}, {"db_idx": 28037, "episode_idx": 155, "frame_idx": 48, "global_frame_idx": 28037, "task_index": 31}, {"db_idx": 28038, "episode_idx": 155, "frame_idx": 49, "global_frame_idx": 28038, "task_index": 31}, {"db_idx": 28039, "episode_idx": 155, "frame_idx": 50, "global_frame_idx": 28039, "task_index": 31}, {"db_idx": 28040, "episode_idx": 155, "frame_idx": 51, "global_frame_idx": 28040, "task_index": 31}, {"db_idx": 28041, "episode_idx": 155, "frame_idx": 52, "global_frame_idx": 28041, "task_index": 31}, {"db_idx": 28042, "episode_idx": 155, "frame_idx": 53, "global_frame_idx": 28042, "task_index": 31}, {"db_idx": 28043, "episode_idx": 155, "frame_idx": 54, "global_frame_idx": 28043, "task_index": 31}, {"db_idx": 28044, "episode_idx": 155, "frame_idx": 55, "global_frame_idx": 28044, "task_index": 31}, {"db_idx": 28045, "episode_idx": 155, "frame_idx": 56, "global_frame_idx": 28045, "task_index": 31}, {"db_idx": 28046, "episode_idx": 155, "frame_idx": 57, "global_frame_idx": 28046, "task_index": 31}, {"db_idx": 28047, "episode_idx": 155, "frame_idx": 58, "global_frame_idx": 28047, "task_index": 31}, {"db_idx": 28048, "episode_idx": 155, "frame_idx": 59, "global_frame_idx": 28048, "task_index": 31}, {"db_idx": 28049, "episode_idx": 155, "frame_idx": 60, "global_frame_idx": 28049, "task_index": 31}, {"db_idx": 28050, "episode_idx": 155, "frame_idx": 61, "global_frame_idx": 28050, "task_index": 31}, {"db_idx": 28051, "episode_idx": 155, "frame_idx": 62, "global_frame_idx": 28051, "task_index": 31}, {"db_idx": 28052, "episode_idx": 155, "frame_idx": 63, "global_frame_idx": 28052, "task_index": 31}, {"db_idx": 28053, "episode_idx": 155, "frame_idx": 64, "global_frame_idx": 28053, "task_index": 31}, {"db_idx": 28054, "episode_idx": 155, "frame_idx": 65, "global_frame_idx": 28054, "task_index": 31}, {"db_idx": 28055, "episode_idx": 155, "frame_idx": 66, "global_frame_idx": 28055, "task_index": 31}, {"db_idx": 28056, "episode_idx": 155, "frame_idx": 67, 
"global_frame_idx": 28056, "task_index": 31}, {"db_idx": 28057, "episode_idx": 155, "frame_idx": 68, "global_frame_idx": 28057, "task_index": 31}, {"db_idx": 28058, "episode_idx": 155, "frame_idx": 69, "global_frame_idx": 28058, "task_index": 31}, {"db_idx": 28059, "episode_idx": 155, "frame_idx": 70, "global_frame_idx": 28059, "task_index": 31}, {"db_idx": 28060, "episode_idx": 155, "frame_idx": 71, "global_frame_idx": 28060, "task_index": 31}, {"db_idx": 28061, "episode_idx": 155, "frame_idx": 72, "global_frame_idx": 28061, "task_index": 31}, {"db_idx": 28062, "episode_idx": 155, "frame_idx": 73, "global_frame_idx": 28062, "task_index": 31}, {"db_idx": 28063, "episode_idx": 155, "frame_idx": 74, "global_frame_idx": 28063, "task_index": 31}, {"db_idx": 28064, "episode_idx": 155, "frame_idx": 75, "global_frame_idx": 28064, "task_index": 31}, {"db_idx": 28065, "episode_idx": 155, "frame_idx": 76, "global_frame_idx": 28065, "task_index": 31}, {"db_idx": 28066, "episode_idx": 155, "frame_idx": 77, "global_frame_idx": 28066, "task_index": 31}, {"db_idx": 28067, "episode_idx": 155, "frame_idx": 78, "global_frame_idx": 28067, "task_index": 31}, {"db_idx": 28068, "episode_idx": 155, "frame_idx": 79, "global_frame_idx": 28068, "task_index": 31}, {"db_idx": 28069, "episode_idx": 155, "frame_idx": 80, "global_frame_idx": 28069, "task_index": 31}, {"db_idx": 28070, "episode_idx": 155, "frame_idx": 81, "global_frame_idx": 28070, "task_index": 31}, {"db_idx": 28071, "episode_idx": 155, "frame_idx": 82, "global_frame_idx": 28071, "task_index": 31}, {"db_idx": 28072, "episode_idx": 155, "frame_idx": 83, "global_frame_idx": 28072, "task_index": 31}, {"db_idx": 28073, "episode_idx": 155, "frame_idx": 84, "global_frame_idx": 28073, "task_index": 31}, {"db_idx": 28074, "episode_idx": 155, "frame_idx": 85, "global_frame_idx": 28074, "task_index": 31}, {"db_idx": 28075, "episode_idx": 155, "frame_idx": 86, "global_frame_idx": 28075, "task_index": 31}, {"db_idx": 28076, "episode_idx": 
155, "frame_idx": 87, "global_frame_idx": 28076, "task_index": 31}, {"db_idx": 28077, "episode_idx": 155, "frame_idx": 88, "global_frame_idx": 28077, "task_index": 31}, {"db_idx": 28078, "episode_idx": 155, "frame_idx": 89, "global_frame_idx": 28078, "task_index": 31}, {"db_idx": 28079, "episode_idx": 155, "frame_idx": 90, "global_frame_idx": 28079, "task_index": 31}, {"db_idx": 28080, "episode_idx": 155, "frame_idx": 91, "global_frame_idx": 28080, "task_index": 31}, {"db_idx": 28081, "episode_idx": 155, "frame_idx": 92, "global_frame_idx": 28081, "task_index": 31}, {"db_idx": 28082, "episode_idx": 155, "frame_idx": 93, "global_frame_idx": 28082, "task_index": 31}, {"db_idx": 28083, "episode_idx": 155, "frame_idx": 94, "global_frame_idx": 28083, "task_index": 31}, {"db_idx": 28084, "episode_idx": 155, "frame_idx": 95, "global_frame_idx": 28084, "task_index": 31}, {"db_idx": 28085, "episode_idx": 155, "frame_idx": 96, "global_frame_idx": 28085, "task_index": 31}, {"db_idx": 28086, "episode_idx": 155, "frame_idx": 97, "global_frame_idx": 28086, "task_index": 31}, {"db_idx": 28087, "episode_idx": 155, "frame_idx": 98, "global_frame_idx": 28087, "task_index": 31}, {"db_idx": 28088, "episode_idx": 155, "frame_idx": 99, "global_frame_idx": 28088, "task_index": 31}, {"db_idx": 28089, "episode_idx": 155, "frame_idx": 100, "global_frame_idx": 28089, "task_index": 31}, {"db_idx": 28090, "episode_idx": 155, "frame_idx": 101, "global_frame_idx": 28090, "task_index": 31}, {"db_idx": 28091, "episode_idx": 155, "frame_idx": 102, "global_frame_idx": 28091, "task_index": 31}, {"db_idx": 28092, "episode_idx": 155, "frame_idx": 103, "global_frame_idx": 28092, "task_index": 31}, {"db_idx": 28093, "episode_idx": 155, "frame_idx": 104, "global_frame_idx": 28093, "task_index": 31}, {"db_idx": 28094, "episode_idx": 155, "frame_idx": 105, "global_frame_idx": 28094, "task_index": 31}, {"db_idx": 28095, "episode_idx": 155, "frame_idx": 106, "global_frame_idx": 28095, "task_index": 31}, 
{"db_idx": 28096, "episode_idx": 155, "frame_idx": 107, "global_frame_idx": 28096, "task_index": 31}, {"db_idx": 28097, "episode_idx": 155, "frame_idx": 108, "global_frame_idx": 28097, "task_index": 31}, {"db_idx": 28098, "episode_idx": 155, "frame_idx": 109, "global_frame_idx": 28098, "task_index": 31}, {"db_idx": 28099, "episode_idx": 156, "frame_idx": 0, "global_frame_idx": 28099, "task_index": 31}, {"db_idx": 28100, "episode_idx": 156, "frame_idx": 1, "global_frame_idx": 28100, "task_index": 31}, {"db_idx": 28101, "episode_idx": 156, "frame_idx": 2, "global_frame_idx": 28101, "task_index": 31}, {"db_idx": 28102, "episode_idx": 156, "frame_idx": 3, "global_frame_idx": 28102, "task_index": 31}, {"db_idx": 28103, "episode_idx": 156, "frame_idx": 4, "global_frame_idx": 28103, "task_index": 31}, {"db_idx": 28104, "episode_idx": 156, "frame_idx": 5, "global_frame_idx": 28104, "task_index": 31}, {"db_idx": 28105, "episode_idx": 156, "frame_idx": 6, "global_frame_idx": 28105, "task_index": 31}, {"db_idx": 28106, "episode_idx": 156, "frame_idx": 7, "global_frame_idx": 28106, "task_index": 31}, {"db_idx": 28107, "episode_idx": 156, "frame_idx": 8, "global_frame_idx": 28107, "task_index": 31}, {"db_idx": 28108, "episode_idx": 156, "frame_idx": 9, "global_frame_idx": 28108, "task_index": 31}, {"db_idx": 28109, "episode_idx": 156, "frame_idx": 10, "global_frame_idx": 28109, "task_index": 31}, {"db_idx": 28110, "episode_idx": 156, "frame_idx": 11, "global_frame_idx": 28110, "task_index": 31}, {"db_idx": 28111, "episode_idx": 156, "frame_idx": 12, "global_frame_idx": 28111, "task_index": 31}, {"db_idx": 28112, "episode_idx": 156, "frame_idx": 13, "global_frame_idx": 28112, "task_index": 31}, {"db_idx": 28113, "episode_idx": 156, "frame_idx": 14, "global_frame_idx": 28113, "task_index": 31}, {"db_idx": 28114, "episode_idx": 156, "frame_idx": 15, "global_frame_idx": 28114, "task_index": 31}, {"db_idx": 28115, "episode_idx": 156, "frame_idx": 16, "global_frame_idx": 28115, 
"task_index": 31}, {"db_idx": 28116, "episode_idx": 156, "frame_idx": 17, "global_frame_idx": 28116, "task_index": 31}, {"db_idx": 28117, "episode_idx": 156, "frame_idx": 18, "global_frame_idx": 28117, "task_index": 31}, {"db_idx": 28118, "episode_idx": 156, "frame_idx": 19, "global_frame_idx": 28118, "task_index": 31}, {"db_idx": 28119, "episode_idx": 156, "frame_idx": 20, "global_frame_idx": 28119, "task_index": 31}, {"db_idx": 28120, "episode_idx": 156, "frame_idx": 21, "global_frame_idx": 28120, "task_index": 31}, {"db_idx": 28121, "episode_idx": 156, "frame_idx": 22, "global_frame_idx": 28121, "task_index": 31}, {"db_idx": 28122, "episode_idx": 156, "frame_idx": 23, "global_frame_idx": 28122, "task_index": 31}, {"db_idx": 28123, "episode_idx": 156, "frame_idx": 24, "global_frame_idx": 28123, "task_index": 31}, {"db_idx": 28124, "episode_idx": 156, "frame_idx": 25, "global_frame_idx": 28124, "task_index": 31}, {"db_idx": 28125, "episode_idx": 156, "frame_idx": 26, "global_frame_idx": 28125, "task_index": 31}, {"db_idx": 28126, "episode_idx": 156, "frame_idx": 27, "global_frame_idx": 28126, "task_index": 31}, {"db_idx": 28127, "episode_idx": 156, "frame_idx": 28, "global_frame_idx": 28127, "task_index": 31}, {"db_idx": 28128, "episode_idx": 156, "frame_idx": 29, "global_frame_idx": 28128, "task_index": 31}, {"db_idx": 28129, "episode_idx": 156, "frame_idx": 30, "global_frame_idx": 28129, "task_index": 31}, {"db_idx": 28130, "episode_idx": 156, "frame_idx": 31, "global_frame_idx": 28130, "task_index": 31}, {"db_idx": 28131, "episode_idx": 156, "frame_idx": 32, "global_frame_idx": 28131, "task_index": 31}, {"db_idx": 28132, "episode_idx": 156, "frame_idx": 33, "global_frame_idx": 28132, "task_index": 31}, {"db_idx": 28133, "episode_idx": 156, "frame_idx": 34, "global_frame_idx": 28133, "task_index": 31}, {"db_idx": 28134, "episode_idx": 156, "frame_idx": 35, "global_frame_idx": 28134, "task_index": 31}, {"db_idx": 28135, "episode_idx": 156, "frame_idx": 36, 
"global_frame_idx": 28135, "task_index": 31}, {"db_idx": 28136, "episode_idx": 156, "frame_idx": 37, "global_frame_idx": 28136, "task_index": 31}, {"db_idx": 28137, "episode_idx": 156, "frame_idx": 38, "global_frame_idx": 28137, "task_index": 31}, {"db_idx": 28138, "episode_idx": 156, "frame_idx": 39, "global_frame_idx": 28138, "task_index": 31}, {"db_idx": 28139, "episode_idx": 156, "frame_idx": 40, "global_frame_idx": 28139, "task_index": 31}, {"db_idx": 28140, "episode_idx": 156, "frame_idx": 41, "global_frame_idx": 28140, "task_index": 31}, {"db_idx": 28141, "episode_idx": 156, "frame_idx": 42, "global_frame_idx": 28141, "task_index": 31}, {"db_idx": 28142, "episode_idx": 156, "frame_idx": 43, "global_frame_idx": 28142, "task_index": 31}, {"db_idx": 28143, "episode_idx": 156, "frame_idx": 44, "global_frame_idx": 28143, "task_index": 31}, {"db_idx": 28144, "episode_idx": 156, "frame_idx": 45, "global_frame_idx": 28144, "task_index": 31}, {"db_idx": 28145, "episode_idx": 156, "frame_idx": 46, "global_frame_idx": 28145, "task_index": 31}, {"db_idx": 28146, "episode_idx": 156, "frame_idx": 47, "global_frame_idx": 28146, "task_index": 31}, {"db_idx": 28147, "episode_idx": 156, "frame_idx": 48, "global_frame_idx": 28147, "task_index": 31}, {"db_idx": 28148, "episode_idx": 156, "frame_idx": 49, "global_frame_idx": 28148, "task_index": 31}, {"db_idx": 28149, "episode_idx": 156, "frame_idx": 50, "global_frame_idx": 28149, "task_index": 31}, {"db_idx": 28150, "episode_idx": 156, "frame_idx": 51, "global_frame_idx": 28150, "task_index": 31}, {"db_idx": 28151, "episode_idx": 156, "frame_idx": 52, "global_frame_idx": 28151, "task_index": 31}, {"db_idx": 28152, "episode_idx": 156, "frame_idx": 53, "global_frame_idx": 28152, "task_index": 31}, {"db_idx": 28153, "episode_idx": 156, "frame_idx": 54, "global_frame_idx": 28153, "task_index": 31}, {"db_idx": 28154, "episode_idx": 156, "frame_idx": 55, "global_frame_idx": 28154, "task_index": 31}, {"db_idx": 28155, "episode_idx": 
156, "frame_idx": 56, "global_frame_idx": 28155, "task_index": 31}, {"db_idx": 28156, "episode_idx": 156, "frame_idx": 57, "global_frame_idx": 28156, "task_index": 31}, {"db_idx": 28157, "episode_idx": 156, "frame_idx": 58, "global_frame_idx": 28157, "task_index": 31}, {"db_idx": 28158, "episode_idx": 156, "frame_idx": 59, "global_frame_idx": 28158, "task_index": 31}, {"db_idx": 28159, "episode_idx": 156, "frame_idx": 60, "global_frame_idx": 28159, "task_index": 31}, {"db_idx": 28160, "episode_idx": 156, "frame_idx": 61, "global_frame_idx": 28160, "task_index": 31}, {"db_idx": 28161, "episode_idx": 156, "frame_idx": 62, "global_frame_idx": 28161, "task_index": 31}, {"db_idx": 28162, "episode_idx": 156, "frame_idx": 63, "global_frame_idx": 28162, "task_index": 31}, {"db_idx": 28163, "episode_idx": 156, "frame_idx": 64, "global_frame_idx": 28163, "task_index": 31}, {"db_idx": 28164, "episode_idx": 156, "frame_idx": 65, "global_frame_idx": 28164, "task_index": 31}, {"db_idx": 28165, "episode_idx": 156, "frame_idx": 66, "global_frame_idx": 28165, "task_index": 31}, {"db_idx": 28166, "episode_idx": 156, "frame_idx": 67, "global_frame_idx": 28166, "task_index": 31}, {"db_idx": 28167, "episode_idx": 156, "frame_idx": 68, "global_frame_idx": 28167, "task_index": 31}, {"db_idx": 28168, "episode_idx": 156, "frame_idx": 69, "global_frame_idx": 28168, "task_index": 31}, {"db_idx": 28169, "episode_idx": 156, "frame_idx": 70, "global_frame_idx": 28169, "task_index": 31}, {"db_idx": 28170, "episode_idx": 156, "frame_idx": 71, "global_frame_idx": 28170, "task_index": 31}, {"db_idx": 28171, "episode_idx": 156, "frame_idx": 72, "global_frame_idx": 28171, "task_index": 31}, {"db_idx": 28172, "episode_idx": 156, "frame_idx": 73, "global_frame_idx": 28172, "task_index": 31}, {"db_idx": 28173, "episode_idx": 156, "frame_idx": 74, "global_frame_idx": 28173, "task_index": 31}, {"db_idx": 28174, "episode_idx": 156, "frame_idx": 75, "global_frame_idx": 28174, "task_index": 31}, {"db_idx": 
28175, "episode_idx": 156, "frame_idx": 76, "global_frame_idx": 28175, "task_index": 31}, {"db_idx": 28176, "episode_idx": 156, "frame_idx": 77, "global_frame_idx": 28176, "task_index": 31}, {"db_idx": 28177, "episode_idx": 156, "frame_idx": 78, "global_frame_idx": 28177, "task_index": 31}, {"db_idx": 28178, "episode_idx": 156, "frame_idx": 79, "global_frame_idx": 28178, "task_index": 31}, {"db_idx": 28179, "episode_idx": 156, "frame_idx": 80, "global_frame_idx": 28179, "task_index": 31}, {"db_idx": 28180, "episode_idx": 156, "frame_idx": 81, "global_frame_idx": 28180, "task_index": 31}, {"db_idx": 28181, "episode_idx": 156, "frame_idx": 82, "global_frame_idx": 28181, "task_index": 31}, {"db_idx": 28182, "episode_idx": 156, "frame_idx": 83, "global_frame_idx": 28182, "task_index": 31}, {"db_idx": 28183, "episode_idx": 156, "frame_idx": 84, "global_frame_idx": 28183, "task_index": 31}, {"db_idx": 28184, "episode_idx": 156, "frame_idx": 85, "global_frame_idx": 28184, "task_index": 31}, {"db_idx": 28185, "episode_idx": 156, "frame_idx": 86, "global_frame_idx": 28185, "task_index": 31}, {"db_idx": 28186, "episode_idx": 156, "frame_idx": 87, "global_frame_idx": 28186, "task_index": 31}, {"db_idx": 28187, "episode_idx": 156, "frame_idx": 88, "global_frame_idx": 28187, "task_index": 31}, {"db_idx": 28188, "episode_idx": 156, "frame_idx": 89, "global_frame_idx": 28188, "task_index": 31}, {"db_idx": 28189, "episode_idx": 156, "frame_idx": 90, "global_frame_idx": 28189, "task_index": 31}, {"db_idx": 28190, "episode_idx": 156, "frame_idx": 91, "global_frame_idx": 28190, "task_index": 31}, {"db_idx": 28191, "episode_idx": 156, "frame_idx": 92, "global_frame_idx": 28191, "task_index": 31}, {"db_idx": 28192, "episode_idx": 156, "frame_idx": 93, "global_frame_idx": 28192, "task_index": 31}, {"db_idx": 28193, "episode_idx": 156, "frame_idx": 94, "global_frame_idx": 28193, "task_index": 31}, {"db_idx": 28194, "episode_idx": 156, "frame_idx": 95, "global_frame_idx": 28194, 
"task_index": 31}, {"db_idx": 28195, "episode_idx": 156, "frame_idx": 96, "global_frame_idx": 28195, "task_index": 31}, {"db_idx": 28196, "episode_idx": 156, "frame_idx": 97, "global_frame_idx": 28196, "task_index": 31}, {"db_idx": 28197, "episode_idx": 156, "frame_idx": 98, "global_frame_idx": 28197, "task_index": 31}, {"db_idx": 28198, "episode_idx": 156, "frame_idx": 99, "global_frame_idx": 28198, "task_index": 31}, {"db_idx": 28199, "episode_idx": 156, "frame_idx": 100, "global_frame_idx": 28199, "task_index": 31}, {"db_idx": 28200, "episode_idx": 156, "frame_idx": 101, "global_frame_idx": 28200, "task_index": 31}, {"db_idx": 28201, "episode_idx": 156, "frame_idx": 102, "global_frame_idx": 28201, "task_index": 31}, {"db_idx": 28202, "episode_idx": 156, "frame_idx": 103, "global_frame_idx": 28202, "task_index": 31}, {"db_idx": 28203, "episode_idx": 156, "frame_idx": 104, "global_frame_idx": 28203, "task_index": 31}, {"db_idx": 28204, "episode_idx": 157, "frame_idx": 0, "global_frame_idx": 28204, "task_index": 31}, {"db_idx": 28205, "episode_idx": 157, "frame_idx": 1, "global_frame_idx": 28205, "task_index": 31}, {"db_idx": 28206, "episode_idx": 157, "frame_idx": 2, "global_frame_idx": 28206, "task_index": 31}, {"db_idx": 28207, "episode_idx": 157, "frame_idx": 3, "global_frame_idx": 28207, "task_index": 31}, {"db_idx": 28208, "episode_idx": 157, "frame_idx": 4, "global_frame_idx": 28208, "task_index": 31}, {"db_idx": 28209, "episode_idx": 157, "frame_idx": 5, "global_frame_idx": 28209, "task_index": 31}, {"db_idx": 28210, "episode_idx": 157, "frame_idx": 6, "global_frame_idx": 28210, "task_index": 31}, {"db_idx": 28211, "episode_idx": 157, "frame_idx": 7, "global_frame_idx": 28211, "task_index": 31}, {"db_idx": 28212, "episode_idx": 157, "frame_idx": 8, "global_frame_idx": 28212, "task_index": 31}, {"db_idx": 28213, "episode_idx": 157, "frame_idx": 9, "global_frame_idx": 28213, "task_index": 31}, {"db_idx": 28214, "episode_idx": 157, "frame_idx": 10, 
"global_frame_idx": 28214, "task_index": 31}, {"db_idx": 28215, "episode_idx": 157, "frame_idx": 11, "global_frame_idx": 28215, "task_index": 31}, {"db_idx": 28216, "episode_idx": 157, "frame_idx": 12, "global_frame_idx": 28216, "task_index": 31}, {"db_idx": 28217, "episode_idx": 157, "frame_idx": 13, "global_frame_idx": 28217, "task_index": 31}, {"db_idx": 28218, "episode_idx": 157, "frame_idx": 14, "global_frame_idx": 28218, "task_index": 31}, {"db_idx": 28219, "episode_idx": 157, "frame_idx": 15, "global_frame_idx": 28219, "task_index": 31}, {"db_idx": 28220, "episode_idx": 157, "frame_idx": 16, "global_frame_idx": 28220, "task_index": 31}, {"db_idx": 28221, "episode_idx": 157, "frame_idx": 17, "global_frame_idx": 28221, "task_index": 31}, {"db_idx": 28222, "episode_idx": 157, "frame_idx": 18, "global_frame_idx": 28222, "task_index": 31}, {"db_idx": 28223, "episode_idx": 157, "frame_idx": 19, "global_frame_idx": 28223, "task_index": 31}, {"db_idx": 28224, "episode_idx": 157, "frame_idx": 20, "global_frame_idx": 28224, "task_index": 31}, {"db_idx": 28225, "episode_idx": 157, "frame_idx": 21, "global_frame_idx": 28225, "task_index": 31}, {"db_idx": 28226, "episode_idx": 157, "frame_idx": 22, "global_frame_idx": 28226, "task_index": 31}, {"db_idx": 28227, "episode_idx": 157, "frame_idx": 23, "global_frame_idx": 28227, "task_index": 31}, {"db_idx": 28228, "episode_idx": 157, "frame_idx": 24, "global_frame_idx": 28228, "task_index": 31}, {"db_idx": 28229, "episode_idx": 157, "frame_idx": 25, "global_frame_idx": 28229, "task_index": 31}, {"db_idx": 28230, "episode_idx": 157, "frame_idx": 26, "global_frame_idx": 28230, "task_index": 31}, {"db_idx": 28231, "episode_idx": 157, "frame_idx": 27, "global_frame_idx": 28231, "task_index": 31}, {"db_idx": 28232, "episode_idx": 157, "frame_idx": 28, "global_frame_idx": 28232, "task_index": 31}, {"db_idx": 28233, "episode_idx": 157, "frame_idx": 29, "global_frame_idx": 28233, "task_index": 31}, {"db_idx": 28234, "episode_idx": 
157, "frame_idx": 30, "global_frame_idx": 28234, "task_index": 31}, {"db_idx": 28235, "episode_idx": 157, "frame_idx": 31, "global_frame_idx": 28235, "task_index": 31}, {"db_idx": 28236, "episode_idx": 157, "frame_idx": 32, "global_frame_idx": 28236, "task_index": 31}, {"db_idx": 28237, "episode_idx": 157, "frame_idx": 33, "global_frame_idx": 28237, "task_index": 31}, {"db_idx": 28238, "episode_idx": 157, "frame_idx": 34, "global_frame_idx": 28238, "task_index": 31}, {"db_idx": 28239, "episode_idx": 157, "frame_idx": 35, "global_frame_idx": 28239, "task_index": 31}, {"db_idx": 28240, "episode_idx": 157, "frame_idx": 36, "global_frame_idx": 28240, "task_index": 31}, {"db_idx": 28241, "episode_idx": 157, "frame_idx": 37, "global_frame_idx": 28241, "task_index": 31}, {"db_idx": 28242, "episode_idx": 157, "frame_idx": 38, "global_frame_idx": 28242, "task_index": 31}, {"db_idx": 28243, "episode_idx": 157, "frame_idx": 39, "global_frame_idx": 28243, "task_index": 31}, {"db_idx": 28244, "episode_idx": 157, "frame_idx": 40, "global_frame_idx": 28244, "task_index": 31}, {"db_idx": 28245, "episode_idx": 157, "frame_idx": 41, "global_frame_idx": 28245, "task_index": 31}, {"db_idx": 28246, "episode_idx": 157, "frame_idx": 42, "global_frame_idx": 28246, "task_index": 31}, {"db_idx": 28247, "episode_idx": 157, "frame_idx": 43, "global_frame_idx": 28247, "task_index": 31}, {"db_idx": 28248, "episode_idx": 157, "frame_idx": 44, "global_frame_idx": 28248, "task_index": 31}, {"db_idx": 28249, "episode_idx": 157, "frame_idx": 45, "global_frame_idx": 28249, "task_index": 31}, {"db_idx": 28250, "episode_idx": 157, "frame_idx": 46, "global_frame_idx": 28250, "task_index": 31}, {"db_idx": 28251, "episode_idx": 157, "frame_idx": 47, "global_frame_idx": 28251, "task_index": 31}, {"db_idx": 28252, "episode_idx": 157, "frame_idx": 48, "global_frame_idx": 28252, "task_index": 31}, {"db_idx": 28253, "episode_idx": 157, "frame_idx": 49, "global_frame_idx": 28253, "task_index": 31}, {"db_idx": 
28254, "episode_idx": 157, "frame_idx": 50, "global_frame_idx": 28254, "task_index": 31}, {"db_idx": 28255, "episode_idx": 157, "frame_idx": 51, "global_frame_idx": 28255, "task_index": 31}, {"db_idx": 28256, "episode_idx": 157, "frame_idx": 52, "global_frame_idx": 28256, "task_index": 31}, {"db_idx": 28257, "episode_idx": 157, "frame_idx": 53, "global_frame_idx": 28257, "task_index": 31}, {"db_idx": 28258, "episode_idx": 157, "frame_idx": 54, "global_frame_idx": 28258, "task_index": 31}, {"db_idx": 28259, "episode_idx": 157, "frame_idx": 55, "global_frame_idx": 28259, "task_index": 31}, {"db_idx": 28260, "episode_idx": 157, "frame_idx": 56, "global_frame_idx": 28260, "task_index": 31}, {"db_idx": 28261, "episode_idx": 157, "frame_idx": 57, "global_frame_idx": 28261, "task_index": 31}, {"db_idx": 28262, "episode_idx": 157, "frame_idx": 58, "global_frame_idx": 28262, "task_index": 31}, {"db_idx": 28263, "episode_idx": 157, "frame_idx": 59, "global_frame_idx": 28263, "task_index": 31}, {"db_idx": 28264, "episode_idx": 157, "frame_idx": 60, "global_frame_idx": 28264, "task_index": 31}, {"db_idx": 28265, "episode_idx": 157, "frame_idx": 61, "global_frame_idx": 28265, "task_index": 31}, {"db_idx": 28266, "episode_idx": 157, "frame_idx": 62, "global_frame_idx": 28266, "task_index": 31}, {"db_idx": 28267, "episode_idx": 157, "frame_idx": 63, "global_frame_idx": 28267, "task_index": 31}, {"db_idx": 28268, "episode_idx": 157, "frame_idx": 64, "global_frame_idx": 28268, "task_index": 31}, {"db_idx": 28269, "episode_idx": 157, "frame_idx": 65, "global_frame_idx": 28269, "task_index": 31}, {"db_idx": 28270, "episode_idx": 157, "frame_idx": 66, "global_frame_idx": 28270, "task_index": 31}, {"db_idx": 28271, "episode_idx": 157, "frame_idx": 67, "global_frame_idx": 28271, "task_index": 31}, {"db_idx": 28272, "episode_idx": 157, "frame_idx": 68, "global_frame_idx": 28272, "task_index": 31}, {"db_idx": 28273, "episode_idx": 157, "frame_idx": 69, "global_frame_idx": 28273, 
"task_index": 31}, {"db_idx": 28274, "episode_idx": 157, "frame_idx": 70, "global_frame_idx": 28274, "task_index": 31}, {"db_idx": 28275, "episode_idx": 157, "frame_idx": 71, "global_frame_idx": 28275, "task_index": 31}, {"db_idx": 28276, "episode_idx": 157, "frame_idx": 72, "global_frame_idx": 28276, "task_index": 31}, {"db_idx": 28277, "episode_idx": 157, "frame_idx": 73, "global_frame_idx": 28277, "task_index": 31}, {"db_idx": 28278, "episode_idx": 157, "frame_idx": 74, "global_frame_idx": 28278, "task_index": 31}, {"db_idx": 28279, "episode_idx": 157, "frame_idx": 75, "global_frame_idx": 28279, "task_index": 31}, {"db_idx": 28280, "episode_idx": 157, "frame_idx": 76, "global_frame_idx": 28280, "task_index": 31}, {"db_idx": 28281, "episode_idx": 157, "frame_idx": 77, "global_frame_idx": 28281, "task_index": 31}, {"db_idx": 28282, "episode_idx": 157, "frame_idx": 78, "global_frame_idx": 28282, "task_index": 31}, {"db_idx": 28283, "episode_idx": 157, "frame_idx": 79, "global_frame_idx": 28283, "task_index": 31}, {"db_idx": 28284, "episode_idx": 157, "frame_idx": 80, "global_frame_idx": 28284, "task_index": 31}, {"db_idx": 28285, "episode_idx": 157, "frame_idx": 81, "global_frame_idx": 28285, "task_index": 31}, {"db_idx": 28286, "episode_idx": 157, "frame_idx": 82, "global_frame_idx": 28286, "task_index": 31}, {"db_idx": 28287, "episode_idx": 157, "frame_idx": 83, "global_frame_idx": 28287, "task_index": 31}, {"db_idx": 28288, "episode_idx": 157, "frame_idx": 84, "global_frame_idx": 28288, "task_index": 31}, {"db_idx": 28289, "episode_idx": 157, "frame_idx": 85, "global_frame_idx": 28289, "task_index": 31}, {"db_idx": 28290, "episode_idx": 157, "frame_idx": 86, "global_frame_idx": 28290, "task_index": 31}, {"db_idx": 28291, "episode_idx": 157, "frame_idx": 87, "global_frame_idx": 28291, "task_index": 31}, {"db_idx": 28292, "episode_idx": 157, "frame_idx": 88, "global_frame_idx": 28292, "task_index": 31}, {"db_idx": 28293, "episode_idx": 157, "frame_idx": 89, 
"global_frame_idx": 28293, "task_index": 31}, {"db_idx": 28294, "episode_idx": 157, "frame_idx": 90, "global_frame_idx": 28294, "task_index": 31}, {"db_idx": 28295, "episode_idx": 157, "frame_idx": 91, "global_frame_idx": 28295, "task_index": 31}, {"db_idx": 28296, "episode_idx": 157, "frame_idx": 92, "global_frame_idx": 28296, "task_index": 31}, {"db_idx": 28297, "episode_idx": 157, "frame_idx": 93, "global_frame_idx": 28297, "task_index": 31}, {"db_idx": 28298, "episode_idx": 157, "frame_idx": 94, "global_frame_idx": 28298, "task_index": 31}, {"db_idx": 28299, "episode_idx": 157, "frame_idx": 95, "global_frame_idx": 28299, "task_index": 31}, {"db_idx": 28300, "episode_idx": 157, "frame_idx": 96, "global_frame_idx": 28300, "task_index": 31}, {"db_idx": 28301, "episode_idx": 157, "frame_idx": 97, "global_frame_idx": 28301, "task_index": 31}, {"db_idx": 28302, "episode_idx": 157, "frame_idx": 98, "global_frame_idx": 28302, "task_index": 31}, {"db_idx": 28303, "episode_idx": 157, "frame_idx": 99, "global_frame_idx": 28303, "task_index": 31}, {"db_idx": 28304, "episode_idx": 157, "frame_idx": 100, "global_frame_idx": 28304, "task_index": 31}, {"db_idx": 28305, "episode_idx": 157, "frame_idx": 101, "global_frame_idx": 28305, "task_index": 31}, {"db_idx": 28306, "episode_idx": 157, "frame_idx": 102, "global_frame_idx": 28306, "task_index": 31}, {"db_idx": 28307, "episode_idx": 157, "frame_idx": 103, "global_frame_idx": 28307, "task_index": 31}, {"db_idx": 28308, "episode_idx": 157, "frame_idx": 104, "global_frame_idx": 28308, "task_index": 31}, {"db_idx": 28309, "episode_idx": 157, "frame_idx": 105, "global_frame_idx": 28309, "task_index": 31}, {"db_idx": 28310, "episode_idx": 157, "frame_idx": 106, "global_frame_idx": 28310, "task_index": 31}, {"db_idx": 28311, "episode_idx": 157, "frame_idx": 107, "global_frame_idx": 28311, "task_index": 31}, {"db_idx": 28312, "episode_idx": 157, "frame_idx": 108, "global_frame_idx": 28312, "task_index": 31}, {"db_idx": 28313, 
"episode_idx": 157, "frame_idx": 109, "global_frame_idx": 28313, "task_index": 31}, {"db_idx": 28314, "episode_idx": 157, "frame_idx": 110, "global_frame_idx": 28314, "task_index": 31}, {"db_idx": 28315, "episode_idx": 157, "frame_idx": 111, "global_frame_idx": 28315, "task_index": 31}, {"db_idx": 28316, "episode_idx": 157, "frame_idx": 112, "global_frame_idx": 28316, "task_index": 31}, {"db_idx": 28317, "episode_idx": 157, "frame_idx": 113, "global_frame_idx": 28317, "task_index": 31}, {"db_idx": 28318, "episode_idx": 157, "frame_idx": 114, "global_frame_idx": 28318, "task_index": 31}, {"db_idx": 28319, "episode_idx": 157, "frame_idx": 115, "global_frame_idx": 28319, "task_index": 31}, {"db_idx": 28320, "episode_idx": 157, "frame_idx": 116, "global_frame_idx": 28320, "task_index": 31}, {"db_idx": 28321, "episode_idx": 157, "frame_idx": 117, "global_frame_idx": 28321, "task_index": 31}, {"db_idx": 28322, "episode_idx": 157, "frame_idx": 118, "global_frame_idx": 28322, "task_index": 31}, {"db_idx": 28323, "episode_idx": 157, "frame_idx": 119, "global_frame_idx": 28323, "task_index": 31}, {"db_idx": 28324, "episode_idx": 157, "frame_idx": 120, "global_frame_idx": 28324, "task_index": 31}, {"db_idx": 28325, "episode_idx": 157, "frame_idx": 121, "global_frame_idx": 28325, "task_index": 31}, {"db_idx": 28326, "episode_idx": 157, "frame_idx": 122, "global_frame_idx": 28326, "task_index": 31}, {"db_idx": 28327, "episode_idx": 157, "frame_idx": 123, "global_frame_idx": 28327, "task_index": 31}, {"db_idx": 28328, "episode_idx": 157, "frame_idx": 124, "global_frame_idx": 28328, "task_index": 31}, {"db_idx": 28329, "episode_idx": 157, "frame_idx": 125, "global_frame_idx": 28329, "task_index": 31}, {"db_idx": 28330, "episode_idx": 157, "frame_idx": 126, "global_frame_idx": 28330, "task_index": 31}, {"db_idx": 28331, "episode_idx": 157, "frame_idx": 127, "global_frame_idx": 28331, "task_index": 31}, {"db_idx": 28332, "episode_idx": 157, "frame_idx": 128, "global_frame_idx": 
28332, "task_index": 31}, {"db_idx": 28333, "episode_idx": 157, "frame_idx": 129, "global_frame_idx": 28333, "task_index": 31}, {"db_idx": 28334, "episode_idx": 157, "frame_idx": 130, "global_frame_idx": 28334, "task_index": 31}, {"db_idx": 28335, "episode_idx": 157, "frame_idx": 131, "global_frame_idx": 28335, "task_index": 31}, {"db_idx": 28336, "episode_idx": 157, "frame_idx": 132, "global_frame_idx": 28336, "task_index": 31}, {"db_idx": 28337, "episode_idx": 157, "frame_idx": 133, "global_frame_idx": 28337, "task_index": 31}, {"db_idx": 28338, "episode_idx": 157, "frame_idx": 134, "global_frame_idx": 28338, "task_index": 31}, {"db_idx": 28339, "episode_idx": 157, "frame_idx": 135, "global_frame_idx": 28339, "task_index": 31}, {"db_idx": 28340, "episode_idx": 157, "frame_idx": 136, "global_frame_idx": 28340, "task_index": 31}, {"db_idx": 28341, "episode_idx": 157, "frame_idx": 137, "global_frame_idx": 28341, "task_index": 31}, {"db_idx": 28342, "episode_idx": 158, "frame_idx": 0, "global_frame_idx": 28342, "task_index": 31}, {"db_idx": 28343, "episode_idx": 158, "frame_idx": 1, "global_frame_idx": 28343, "task_index": 31}, {"db_idx": 28344, "episode_idx": 158, "frame_idx": 2, "global_frame_idx": 28344, "task_index": 31}, {"db_idx": 28345, "episode_idx": 158, "frame_idx": 3, "global_frame_idx": 28345, "task_index": 31}, {"db_idx": 28346, "episode_idx": 158, "frame_idx": 4, "global_frame_idx": 28346, "task_index": 31}, {"db_idx": 28347, "episode_idx": 158, "frame_idx": 5, "global_frame_idx": 28347, "task_index": 31}, {"db_idx": 28348, "episode_idx": 158, "frame_idx": 6, "global_frame_idx": 28348, "task_index": 31}, {"db_idx": 28349, "episode_idx": 158, "frame_idx": 7, "global_frame_idx": 28349, "task_index": 31}, {"db_idx": 28350, "episode_idx": 158, "frame_idx": 8, "global_frame_idx": 28350, "task_index": 31}, {"db_idx": 28351, "episode_idx": 158, "frame_idx": 9, "global_frame_idx": 28351, "task_index": 31}, {"db_idx": 28352, "episode_idx": 158, "frame_idx": 10, 
"global_frame_idx": 28352, "task_index": 31}, {"db_idx": 28353, "episode_idx": 158, "frame_idx": 11, "global_frame_idx": 28353, "task_index": 31}, {"db_idx": 28354, "episode_idx": 158, "frame_idx": 12, "global_frame_idx": 28354, "task_index": 31}, {"db_idx": 28355, "episode_idx": 158, "frame_idx": 13, "global_frame_idx": 28355, "task_index": 31}, {"db_idx": 28356, "episode_idx": 158, "frame_idx": 14, "global_frame_idx": 28356, "task_index": 31}, {"db_idx": 28357, "episode_idx": 158, "frame_idx": 15, "global_frame_idx": 28357, "task_index": 31}, {"db_idx": 28358, "episode_idx": 158, "frame_idx": 16, "global_frame_idx": 28358, "task_index": 31}, {"db_idx": 28359, "episode_idx": 158, "frame_idx": 17, "global_frame_idx": 28359, "task_index": 31}, {"db_idx": 28360, "episode_idx": 158, "frame_idx": 18, "global_frame_idx": 28360, "task_index": 31}, {"db_idx": 28361, "episode_idx": 158, "frame_idx": 19, "global_frame_idx": 28361, "task_index": 31}, {"db_idx": 28362, "episode_idx": 158, "frame_idx": 20, "global_frame_idx": 28362, "task_index": 31}, {"db_idx": 28363, "episode_idx": 158, "frame_idx": 21, "global_frame_idx": 28363, "task_index": 31}, {"db_idx": 28364, "episode_idx": 158, "frame_idx": 22, "global_frame_idx": 28364, "task_index": 31}, {"db_idx": 28365, "episode_idx": 158, "frame_idx": 23, "global_frame_idx": 28365, "task_index": 31}, {"db_idx": 28366, "episode_idx": 158, "frame_idx": 24, "global_frame_idx": 28366, "task_index": 31}, {"db_idx": 28367, "episode_idx": 158, "frame_idx": 25, "global_frame_idx": 28367, "task_index": 31}, {"db_idx": 28368, "episode_idx": 158, "frame_idx": 26, "global_frame_idx": 28368, "task_index": 31}, {"db_idx": 28369, "episode_idx": 158, "frame_idx": 27, "global_frame_idx": 28369, "task_index": 31}, {"db_idx": 28370, "episode_idx": 158, "frame_idx": 28, "global_frame_idx": 28370, "task_index": 31}, {"db_idx": 28371, "episode_idx": 158, "frame_idx": 29, "global_frame_idx": 28371, "task_index": 31}, {"db_idx": 28372, "episode_idx": 
158, "frame_idx": 30, "global_frame_idx": 28372, "task_index": 31}, {"db_idx": 28373, "episode_idx": 158, "frame_idx": 31, "global_frame_idx": 28373, "task_index": 31}, {"db_idx": 28374, "episode_idx": 158, "frame_idx": 32, "global_frame_idx": 28374, "task_index": 31}, {"db_idx": 28375, "episode_idx": 158, "frame_idx": 33, "global_frame_idx": 28375, "task_index": 31}, {"db_idx": 28376, "episode_idx": 158, "frame_idx": 34, "global_frame_idx": 28376, "task_index": 31}, {"db_idx": 28377, "episode_idx": 158, "frame_idx": 35, "global_frame_idx": 28377, "task_index": 31}, {"db_idx": 28378, "episode_idx": 158, "frame_idx": 36, "global_frame_idx": 28378, "task_index": 31}, {"db_idx": 28379, "episode_idx": 158, "frame_idx": 37, "global_frame_idx": 28379, "task_index": 31}, {"db_idx": 28380, "episode_idx": 158, "frame_idx": 38, "global_frame_idx": 28380, "task_index": 31}, {"db_idx": 28381, "episode_idx": 158, "frame_idx": 39, "global_frame_idx": 28381, "task_index": 31}, {"db_idx": 28382, "episode_idx": 158, "frame_idx": 40, "global_frame_idx": 28382, "task_index": 31}, {"db_idx": 28383, "episode_idx": 158, "frame_idx": 41, "global_frame_idx": 28383, "task_index": 31}, {"db_idx": 28384, "episode_idx": 158, "frame_idx": 42, "global_frame_idx": 28384, "task_index": 31}, {"db_idx": 28385, "episode_idx": 158, "frame_idx": 43, "global_frame_idx": 28385, "task_index": 31}, {"db_idx": 28386, "episode_idx": 158, "frame_idx": 44, "global_frame_idx": 28386, "task_index": 31}, {"db_idx": 28387, "episode_idx": 158, "frame_idx": 45, "global_frame_idx": 28387, "task_index": 31}, {"db_idx": 28388, "episode_idx": 158, "frame_idx": 46, "global_frame_idx": 28388, "task_index": 31}, {"db_idx": 28389, "episode_idx": 158, "frame_idx": 47, "global_frame_idx": 28389, "task_index": 31}, {"db_idx": 28390, "episode_idx": 158, "frame_idx": 48, "global_frame_idx": 28390, "task_index": 31}, {"db_idx": 28391, "episode_idx": 158, "frame_idx": 49, "global_frame_idx": 28391, "task_index": 31}, {"db_idx": 
28392, "episode_idx": 158, "frame_idx": 50, "global_frame_idx": 28392, "task_index": 31}, {"db_idx": 28393, "episode_idx": 158, "frame_idx": 51, "global_frame_idx": 28393, "task_index": 31}, {"db_idx": 28394, "episode_idx": 158, "frame_idx": 52, "global_frame_idx": 28394, "task_index": 31}, {"db_idx": 28395, "episode_idx": 158, "frame_idx": 53, "global_frame_idx": 28395, "task_index": 31}, {"db_idx": 28396, "episode_idx": 158, "frame_idx": 54, "global_frame_idx": 28396, "task_index": 31}, {"db_idx": 28397, "episode_idx": 158, "frame_idx": 55, "global_frame_idx": 28397, "task_index": 31}, {"db_idx": 28398, "episode_idx": 158, "frame_idx": 56, "global_frame_idx": 28398, "task_index": 31}, {"db_idx": 28399, "episode_idx": 158, "frame_idx": 57, "global_frame_idx": 28399, "task_index": 31}, {"db_idx": 28400, "episode_idx": 158, "frame_idx": 58, "global_frame_idx": 28400, "task_index": 31}, {"db_idx": 28401, "episode_idx": 158, "frame_idx": 59, "global_frame_idx": 28401, "task_index": 31}, {"db_idx": 28402, "episode_idx": 158, "frame_idx": 60, "global_frame_idx": 28402, "task_index": 31}, {"db_idx": 28403, "episode_idx": 158, "frame_idx": 61, "global_frame_idx": 28403, "task_index": 31}, {"db_idx": 28404, "episode_idx": 158, "frame_idx": 62, "global_frame_idx": 28404, "task_index": 31}, {"db_idx": 28405, "episode_idx": 158, "frame_idx": 63, "global_frame_idx": 28405, "task_index": 31}, {"db_idx": 28406, "episode_idx": 158, "frame_idx": 64, "global_frame_idx": 28406, "task_index": 31}, {"db_idx": 28407, "episode_idx": 158, "frame_idx": 65, "global_frame_idx": 28407, "task_index": 31}, {"db_idx": 28408, "episode_idx": 158, "frame_idx": 66, "global_frame_idx": 28408, "task_index": 31}, {"db_idx": 28409, "episode_idx": 158, "frame_idx": 67, "global_frame_idx": 28409, "task_index": 31}, {"db_idx": 28410, "episode_idx": 158, "frame_idx": 68, "global_frame_idx": 28410, "task_index": 31}, {"db_idx": 28411, "episode_idx": 158, "frame_idx": 69, "global_frame_idx": 28411, 
"task_index": 31}, {"db_idx": 28412, "episode_idx": 158, "frame_idx": 70, "global_frame_idx": 28412, "task_index": 31}, {"db_idx": 28413, "episode_idx": 158, "frame_idx": 71, "global_frame_idx": 28413, "task_index": 31}, {"db_idx": 28414, "episode_idx": 158, "frame_idx": 72, "global_frame_idx": 28414, "task_index": 31}, {"db_idx": 28415, "episode_idx": 158, "frame_idx": 73, "global_frame_idx": 28415, "task_index": 31}, {"db_idx": 28416, "episode_idx": 158, "frame_idx": 74, "global_frame_idx": 28416, "task_index": 31}, {"db_idx": 28417, "episode_idx": 158, "frame_idx": 75, "global_frame_idx": 28417, "task_index": 31}, {"db_idx": 28418, "episode_idx": 158, "frame_idx": 76, "global_frame_idx": 28418, "task_index": 31}, {"db_idx": 28419, "episode_idx": 158, "frame_idx": 77, "global_frame_idx": 28419, "task_index": 31}, {"db_idx": 28420, "episode_idx": 158, "frame_idx": 78, "global_frame_idx": 28420, "task_index": 31}, {"db_idx": 28421, "episode_idx": 158, "frame_idx": 79, "global_frame_idx": 28421, "task_index": 31}, {"db_idx": 28422, "episode_idx": 158, "frame_idx": 80, "global_frame_idx": 28422, "task_index": 31}, {"db_idx": 28423, "episode_idx": 158, "frame_idx": 81, "global_frame_idx": 28423, "task_index": 31}, {"db_idx": 28424, "episode_idx": 158, "frame_idx": 82, "global_frame_idx": 28424, "task_index": 31}, {"db_idx": 28425, "episode_idx": 158, "frame_idx": 83, "global_frame_idx": 28425, "task_index": 31}, {"db_idx": 28426, "episode_idx": 158, "frame_idx": 84, "global_frame_idx": 28426, "task_index": 31}, {"db_idx": 28427, "episode_idx": 158, "frame_idx": 85, "global_frame_idx": 28427, "task_index": 31}, {"db_idx": 28428, "episode_idx": 158, "frame_idx": 86, "global_frame_idx": 28428, "task_index": 31}, {"db_idx": 28429, "episode_idx": 158, "frame_idx": 87, "global_frame_idx": 28429, "task_index": 31}, {"db_idx": 28430, "episode_idx": 158, "frame_idx": 88, "global_frame_idx": 28430, "task_index": 31}, {"db_idx": 28431, "episode_idx": 158, "frame_idx": 89, 
"global_frame_idx": 28431, "task_index": 31}, {"db_idx": 28432, "episode_idx": 158, "frame_idx": 90, "global_frame_idx": 28432, "task_index": 31}, {"db_idx": 28433, "episode_idx": 158, "frame_idx": 91, "global_frame_idx": 28433, "task_index": 31}, {"db_idx": 28434, "episode_idx": 158, "frame_idx": 92, "global_frame_idx": 28434, "task_index": 31}, {"db_idx": 28435, "episode_idx": 158, "frame_idx": 93, "global_frame_idx": 28435, "task_index": 31}, {"db_idx": 28436, "episode_idx": 158, "frame_idx": 94, "global_frame_idx": 28436, "task_index": 31}, {"db_idx": 28437, "episode_idx": 158, "frame_idx": 95, "global_frame_idx": 28437, "task_index": 31}, {"db_idx": 28438, "episode_idx": 158, "frame_idx": 96, "global_frame_idx": 28438, "task_index": 31}, {"db_idx": 28439, "episode_idx": 158, "frame_idx": 97, "global_frame_idx": 28439, "task_index": 31}, {"db_idx": 28440, "episode_idx": 158, "frame_idx": 98, "global_frame_idx": 28440, "task_index": 31}, {"db_idx": 28441, "episode_idx": 158, "frame_idx": 99, "global_frame_idx": 28441, "task_index": 31}, {"db_idx": 28442, "episode_idx": 158, "frame_idx": 100, "global_frame_idx": 28442, "task_index": 31}, {"db_idx": 28443, "episode_idx": 158, "frame_idx": 101, "global_frame_idx": 28443, "task_index": 31}, {"db_idx": 28444, "episode_idx": 158, "frame_idx": 102, "global_frame_idx": 28444, "task_index": 31}, {"db_idx": 28445, "episode_idx": 158, "frame_idx": 103, "global_frame_idx": 28445, "task_index": 31}, {"db_idx": 28446, "episode_idx": 158, "frame_idx": 104, "global_frame_idx": 28446, "task_index": 31}, {"db_idx": 28447, "episode_idx": 158, "frame_idx": 105, "global_frame_idx": 28447, "task_index": 31}, {"db_idx": 28448, "episode_idx": 158, "frame_idx": 106, "global_frame_idx": 28448, "task_index": 31}, {"db_idx": 28449, "episode_idx": 158, "frame_idx": 107, "global_frame_idx": 28449, "task_index": 31}, {"db_idx": 28450, "episode_idx": 158, "frame_idx": 108, "global_frame_idx": 28450, "task_index": 31}, {"db_idx": 28451, 
"episode_idx": 158, "frame_idx": 109, "global_frame_idx": 28451, "task_index": 31}, {"db_idx": 28452, "episode_idx": 158, "frame_idx": 110, "global_frame_idx": 28452, "task_index": 31}, {"db_idx": 28453, "episode_idx": 158, "frame_idx": 111, "global_frame_idx": 28453, "task_index": 31}, {"db_idx": 28454, "episode_idx": 158, "frame_idx": 112, "global_frame_idx": 28454, "task_index": 31}, {"db_idx": 28455, "episode_idx": 158, "frame_idx": 113, "global_frame_idx": 28455, "task_index": 31}, {"db_idx": 28456, "episode_idx": 158, "frame_idx": 114, "global_frame_idx": 28456, "task_index": 31}, {"db_idx": 28457, "episode_idx": 158, "frame_idx": 115, "global_frame_idx": 28457, "task_index": 31}, {"db_idx": 28458, "episode_idx": 158, "frame_idx": 116, "global_frame_idx": 28458, "task_index": 31}, {"db_idx": 28459, "episode_idx": 158, "frame_idx": 117, "global_frame_idx": 28459, "task_index": 31}, {"db_idx": 28460, "episode_idx": 158, "frame_idx": 118, "global_frame_idx": 28460, "task_index": 31}, {"db_idx": 28461, "episode_idx": 158, "frame_idx": 119, "global_frame_idx": 28461, "task_index": 31}, {"db_idx": 28462, "episode_idx": 158, "frame_idx": 120, "global_frame_idx": 28462, "task_index": 31}, {"db_idx": 28463, "episode_idx": 158, "frame_idx": 121, "global_frame_idx": 28463, "task_index": 31}, {"db_idx": 28464, "episode_idx": 159, "frame_idx": 0, "global_frame_idx": 28464, "task_index": 31}, {"db_idx": 28465, "episode_idx": 159, "frame_idx": 1, "global_frame_idx": 28465, "task_index": 31}, {"db_idx": 28466, "episode_idx": 159, "frame_idx": 2, "global_frame_idx": 28466, "task_index": 31}, {"db_idx": 28467, "episode_idx": 159, "frame_idx": 3, "global_frame_idx": 28467, "task_index": 31}, {"db_idx": 28468, "episode_idx": 159, "frame_idx": 4, "global_frame_idx": 28468, "task_index": 31}, {"db_idx": 28469, "episode_idx": 159, "frame_idx": 5, "global_frame_idx": 28469, "task_index": 31}, {"db_idx": 28470, "episode_idx": 159, "frame_idx": 6, "global_frame_idx": 28470, 
"task_index": 31}, {"db_idx": 28471, "episode_idx": 159, "frame_idx": 7, "global_frame_idx": 28471, "task_index": 31}, {"db_idx": 28472, "episode_idx": 159, "frame_idx": 8, "global_frame_idx": 28472, "task_index": 31}, {"db_idx": 28473, "episode_idx": 159, "frame_idx": 9, "global_frame_idx": 28473, "task_index": 31}, {"db_idx": 28474, "episode_idx": 159, "frame_idx": 10, "global_frame_idx": 28474, "task_index": 31}, {"db_idx": 28475, "episode_idx": 159, "frame_idx": 11, "global_frame_idx": 28475, "task_index": 31}, {"db_idx": 28476, "episode_idx": 159, "frame_idx": 12, "global_frame_idx": 28476, "task_index": 31}, {"db_idx": 28477, "episode_idx": 159, "frame_idx": 13, "global_frame_idx": 28477, "task_index": 31}, {"db_idx": 28478, "episode_idx": 159, "frame_idx": 14, "global_frame_idx": 28478, "task_index": 31}, {"db_idx": 28479, "episode_idx": 159, "frame_idx": 15, "global_frame_idx": 28479, "task_index": 31}, {"db_idx": 28480, "episode_idx": 159, "frame_idx": 16, "global_frame_idx": 28480, "task_index": 31}, {"db_idx": 28481, "episode_idx": 159, "frame_idx": 17, "global_frame_idx": 28481, "task_index": 31}, {"db_idx": 28482, "episode_idx": 159, "frame_idx": 18, "global_frame_idx": 28482, "task_index": 31}, {"db_idx": 28483, "episode_idx": 159, "frame_idx": 19, "global_frame_idx": 28483, "task_index": 31}, {"db_idx": 28484, "episode_idx": 159, "frame_idx": 20, "global_frame_idx": 28484, "task_index": 31}, {"db_idx": 28485, "episode_idx": 159, "frame_idx": 21, "global_frame_idx": 28485, "task_index": 31}, {"db_idx": 28486, "episode_idx": 159, "frame_idx": 22, "global_frame_idx": 28486, "task_index": 31}, {"db_idx": 28487, "episode_idx": 159, "frame_idx": 23, "global_frame_idx": 28487, "task_index": 31}, {"db_idx": 28488, "episode_idx": 159, "frame_idx": 24, "global_frame_idx": 28488, "task_index": 31}, {"db_idx": 28489, "episode_idx": 159, "frame_idx": 25, "global_frame_idx": 28489, "task_index": 31}, {"db_idx": 28490, "episode_idx": 159, "frame_idx": 26, 
"global_frame_idx": 28490, "task_index": 31}, {"db_idx": 28491, "episode_idx": 159, "frame_idx": 27, "global_frame_idx": 28491, "task_index": 31}, {"db_idx": 28492, "episode_idx": 159, "frame_idx": 28, "global_frame_idx": 28492, "task_index": 31}, {"db_idx": 28493, "episode_idx": 159, "frame_idx": 29, "global_frame_idx": 28493, "task_index": 31}, {"db_idx": 28494, "episode_idx": 159, "frame_idx": 30, "global_frame_idx": 28494, "task_index": 31}, {"db_idx": 28495, "episode_idx": 159, "frame_idx": 31, "global_frame_idx": 28495, "task_index": 31}, {"db_idx": 28496, "episode_idx": 159, "frame_idx": 32, "global_frame_idx": 28496, "task_index": 31}, {"db_idx": 28497, "episode_idx": 159, "frame_idx": 33, "global_frame_idx": 28497, "task_index": 31}, {"db_idx": 28498, "episode_idx": 159, "frame_idx": 34, "global_frame_idx": 28498, "task_index": 31}, {"db_idx": 28499, "episode_idx": 159, "frame_idx": 35, "global_frame_idx": 28499, "task_index": 31}, {"db_idx": 28500, "episode_idx": 159, "frame_idx": 36, "global_frame_idx": 28500, "task_index": 31}, {"db_idx": 28501, "episode_idx": 159, "frame_idx": 37, "global_frame_idx": 28501, "task_index": 31}, {"db_idx": 28502, "episode_idx": 159, "frame_idx": 38, "global_frame_idx": 28502, "task_index": 31}, {"db_idx": 28503, "episode_idx": 159, "frame_idx": 39, "global_frame_idx": 28503, "task_index": 31}, {"db_idx": 28504, "episode_idx": 159, "frame_idx": 40, "global_frame_idx": 28504, "task_index": 31}, {"db_idx": 28505, "episode_idx": 159, "frame_idx": 41, "global_frame_idx": 28505, "task_index": 31}, {"db_idx": 28506, "episode_idx": 159, "frame_idx": 42, "global_frame_idx": 28506, "task_index": 31}, {"db_idx": 28507, "episode_idx": 159, "frame_idx": 43, "global_frame_idx": 28507, "task_index": 31}, {"db_idx": 28508, "episode_idx": 159, "frame_idx": 44, "global_frame_idx": 28508, "task_index": 31}, {"db_idx": 28509, "episode_idx": 159, "frame_idx": 45, "global_frame_idx": 28509, "task_index": 31}, {"db_idx": 28510, "episode_idx": 
159, "frame_idx": 46, "global_frame_idx": 28510, "task_index": 31}, {"db_idx": 28511, "episode_idx": 159, "frame_idx": 47, "global_frame_idx": 28511, "task_index": 31}, {"db_idx": 28512, "episode_idx": 159, "frame_idx": 48, "global_frame_idx": 28512, "task_index": 31}, {"db_idx": 28513, "episode_idx": 159, "frame_idx": 49, "global_frame_idx": 28513, "task_index": 31}, {"db_idx": 28514, "episode_idx": 159, "frame_idx": 50, "global_frame_idx": 28514, "task_index": 31}, {"db_idx": 28515, "episode_idx": 159, "frame_idx": 51, "global_frame_idx": 28515, "task_index": 31}, {"db_idx": 28516, "episode_idx": 159, "frame_idx": 52, "global_frame_idx": 28516, "task_index": 31}, {"db_idx": 28517, "episode_idx": 159, "frame_idx": 53, "global_frame_idx": 28517, "task_index": 31}, {"db_idx": 28518, "episode_idx": 159, "frame_idx": 54, "global_frame_idx": 28518, "task_index": 31}, {"db_idx": 28519, "episode_idx": 159, "frame_idx": 55, "global_frame_idx": 28519, "task_index": 31}, {"db_idx": 28520, "episode_idx": 159, "frame_idx": 56, "global_frame_idx": 28520, "task_index": 31}, {"db_idx": 28521, "episode_idx": 159, "frame_idx": 57, "global_frame_idx": 28521, "task_index": 31}, {"db_idx": 28522, "episode_idx": 159, "frame_idx": 58, "global_frame_idx": 28522, "task_index": 31}, {"db_idx": 28523, "episode_idx": 159, "frame_idx": 59, "global_frame_idx": 28523, "task_index": 31}, {"db_idx": 28524, "episode_idx": 159, "frame_idx": 60, "global_frame_idx": 28524, "task_index": 31}, {"db_idx": 28525, "episode_idx": 159, "frame_idx": 61, "global_frame_idx": 28525, "task_index": 31}, {"db_idx": 28526, "episode_idx": 159, "frame_idx": 62, "global_frame_idx": 28526, "task_index": 31}, {"db_idx": 28527, "episode_idx": 159, "frame_idx": 63, "global_frame_idx": 28527, "task_index": 31}, {"db_idx": 28528, "episode_idx": 159, "frame_idx": 64, "global_frame_idx": 28528, "task_index": 31}, {"db_idx": 28529, "episode_idx": 159, "frame_idx": 65, "global_frame_idx": 28529, "task_index": 31}, {"db_idx": 
28530, "episode_idx": 159, "frame_idx": 66, "global_frame_idx": 28530, "task_index": 31}, {"db_idx": 28531, "episode_idx": 159, "frame_idx": 67, "global_frame_idx": 28531, "task_index": 31}, {"db_idx": 28532, "episode_idx": 159, "frame_idx": 68, "global_frame_idx": 28532, "task_index": 31}, {"db_idx": 28533, "episode_idx": 159, "frame_idx": 69, "global_frame_idx": 28533, "task_index": 31}, {"db_idx": 28534, "episode_idx": 159, "frame_idx": 70, "global_frame_idx": 28534, "task_index": 31}, {"db_idx": 28535, "episode_idx": 159, "frame_idx": 71, "global_frame_idx": 28535, "task_index": 31}, {"db_idx": 28536, "episode_idx": 159, "frame_idx": 72, "global_frame_idx": 28536, "task_index": 31}, {"db_idx": 28537, "episode_idx": 159, "frame_idx": 73, "global_frame_idx": 28537, "task_index": 31}, {"db_idx": 28538, "episode_idx": 159, "frame_idx": 74, "global_frame_idx": 28538, "task_index": 31}, {"db_idx": 28539, "episode_idx": 159, "frame_idx": 75, "global_frame_idx": 28539, "task_index": 31}, {"db_idx": 28540, "episode_idx": 159, "frame_idx": 76, "global_frame_idx": 28540, "task_index": 31}, {"db_idx": 28541, "episode_idx": 159, "frame_idx": 77, "global_frame_idx": 28541, "task_index": 31}, {"db_idx": 28542, "episode_idx": 159, "frame_idx": 78, "global_frame_idx": 28542, "task_index": 31}, {"db_idx": 28543, "episode_idx": 159, "frame_idx": 79, "global_frame_idx": 28543, "task_index": 31}, {"db_idx": 28544, "episode_idx": 159, "frame_idx": 80, "global_frame_idx": 28544, "task_index": 31}, {"db_idx": 28545, "episode_idx": 159, "frame_idx": 81, "global_frame_idx": 28545, "task_index": 31}, {"db_idx": 28546, "episode_idx": 159, "frame_idx": 82, "global_frame_idx": 28546, "task_index": 31}, {"db_idx": 28547, "episode_idx": 159, "frame_idx": 83, "global_frame_idx": 28547, "task_index": 31}, {"db_idx": 28548, "episode_idx": 159, "frame_idx": 84, "global_frame_idx": 28548, "task_index": 31}, {"db_idx": 28549, "episode_idx": 159, "frame_idx": 85, "global_frame_idx": 28549, 
"task_index": 31}, {"db_idx": 28550, "episode_idx": 159, "frame_idx": 86, "global_frame_idx": 28550, "task_index": 31}, {"db_idx": 28551, "episode_idx": 159, "frame_idx": 87, "global_frame_idx": 28551, "task_index": 31}, {"db_idx": 28552, "episode_idx": 159, "frame_idx": 88, "global_frame_idx": 28552, "task_index": 31}, {"db_idx": 28553, "episode_idx": 159, "frame_idx": 89, "global_frame_idx": 28553, "task_index": 31}, {"db_idx": 28554, "episode_idx": 159, "frame_idx": 90, "global_frame_idx": 28554, "task_index": 31}, {"db_idx": 28555, "episode_idx": 159, "frame_idx": 91, "global_frame_idx": 28555, "task_index": 31}, {"db_idx": 28556, "episode_idx": 159, "frame_idx": 92, "global_frame_idx": 28556, "task_index": 31}, {"db_idx": 28557, "episode_idx": 159, "frame_idx": 93, "global_frame_idx": 28557, "task_index": 31}, {"db_idx": 28558, "episode_idx": 159, "frame_idx": 94, "global_frame_idx": 28558, "task_index": 31}, {"db_idx": 28559, "episode_idx": 159, "frame_idx": 95, "global_frame_idx": 28559, "task_index": 31}, {"db_idx": 28560, "episode_idx": 159, "frame_idx": 96, "global_frame_idx": 28560, "task_index": 31}, {"db_idx": 28561, "episode_idx": 159, "frame_idx": 97, "global_frame_idx": 28561, "task_index": 31}, {"db_idx": 28562, "episode_idx": 159, "frame_idx": 98, "global_frame_idx": 28562, "task_index": 31}, {"db_idx": 28563, "episode_idx": 159, "frame_idx": 99, "global_frame_idx": 28563, "task_index": 31}, {"db_idx": 28564, "episode_idx": 159, "frame_idx": 100, "global_frame_idx": 28564, "task_index": 31}, {"db_idx": 28565, "episode_idx": 159, "frame_idx": 101, "global_frame_idx": 28565, "task_index": 31}, {"db_idx": 28566, "episode_idx": 159, "frame_idx": 102, "global_frame_idx": 28566, "task_index": 31}, {"db_idx": 28567, "episode_idx": 159, "frame_idx": 103, "global_frame_idx": 28567, "task_index": 31}, {"db_idx": 28568, "episode_idx": 159, "frame_idx": 104, "global_frame_idx": 28568, "task_index": 31}, {"db_idx": 28569, "episode_idx": 159, "frame_idx": 105, 
"global_frame_idx": 28569, "task_index": 31}, {"db_idx": 28570, "episode_idx": 159, "frame_idx": 106, "global_frame_idx": 28570, "task_index": 31}, {"db_idx": 28571, "episode_idx": 159, "frame_idx": 107, "global_frame_idx": 28571, "task_index": 31}, {"db_idx": 28572, "episode_idx": 159, "frame_idx": 108, "global_frame_idx": 28572, "task_index": 31}, {"db_idx": 28573, "episode_idx": 159, "frame_idx": 109, "global_frame_idx": 28573, "task_index": 31}, {"db_idx": 28574, "episode_idx": 159, "frame_idx": 110, "global_frame_idx": 28574, "task_index": 31}, {"db_idx": 28575, "episode_idx": 159, "frame_idx": 111, "global_frame_idx": 28575, "task_index": 31}, {"db_idx": 28576, "episode_idx": 159, "frame_idx": 112, "global_frame_idx": 28576, "task_index": 31}, {"db_idx": 28577, "episode_idx": 159, "frame_idx": 113, "global_frame_idx": 28577, "task_index": 31}, {"db_idx": 28578, "episode_idx": 159, "frame_idx": 114, "global_frame_idx": 28578, "task_index": 31}, {"db_idx": 28579, "episode_idx": 159, "frame_idx": 115, "global_frame_idx": 28579, "task_index": 31}, {"db_idx": 28580, "episode_idx": 159, "frame_idx": 116, "global_frame_idx": 28580, "task_index": 31}, {"db_idx": 28581, "episode_idx": 159, "frame_idx": 117, "global_frame_idx": 28581, "task_index": 31}, {"db_idx": 28582, "episode_idx": 159, "frame_idx": 118, "global_frame_idx": 28582, "task_index": 31}, {"db_idx": 28583, "episode_idx": 159, "frame_idx": 119, "global_frame_idx": 28583, "task_index": 31}, {"db_idx": 28584, "episode_idx": 159, "frame_idx": 120, "global_frame_idx": 28584, "task_index": 31}, {"db_idx": 28585, "episode_idx": 160, "frame_idx": 0, "global_frame_idx": 28585, "task_index": 32}, {"db_idx": 28586, "episode_idx": 160, "frame_idx": 1, "global_frame_idx": 28586, "task_index": 32}, {"db_idx": 28587, "episode_idx": 160, "frame_idx": 2, "global_frame_idx": 28587, "task_index": 32}, {"db_idx": 28588, "episode_idx": 160, "frame_idx": 3, "global_frame_idx": 28588, "task_index": 32}, {"db_idx": 28589, 
"episode_idx": 160, "frame_idx": 4, "global_frame_idx": 28589, "task_index": 32}, {"db_idx": 28590, "episode_idx": 160, "frame_idx": 5, "global_frame_idx": 28590, "task_index": 32}, {"db_idx": 28591, "episode_idx": 160, "frame_idx": 6, "global_frame_idx": 28591, "task_index": 32}, {"db_idx": 28592, "episode_idx": 160, "frame_idx": 7, "global_frame_idx": 28592, "task_index": 32}, {"db_idx": 28593, "episode_idx": 160, "frame_idx": 8, "global_frame_idx": 28593, "task_index": 32}, {"db_idx": 28594, "episode_idx": 160, "frame_idx": 9, "global_frame_idx": 28594, "task_index": 32}, {"db_idx": 28595, "episode_idx": 160, "frame_idx": 10, "global_frame_idx": 28595, "task_index": 32}, {"db_idx": 28596, "episode_idx": 160, "frame_idx": 11, "global_frame_idx": 28596, "task_index": 32}, {"db_idx": 28597, "episode_idx": 160, "frame_idx": 12, "global_frame_idx": 28597, "task_index": 32}, {"db_idx": 28598, "episode_idx": 160, "frame_idx": 13, "global_frame_idx": 28598, "task_index": 32}, {"db_idx": 28599, "episode_idx": 160, "frame_idx": 14, "global_frame_idx": 28599, "task_index": 32}, {"db_idx": 28600, "episode_idx": 160, "frame_idx": 15, "global_frame_idx": 28600, "task_index": 32}, {"db_idx": 28601, "episode_idx": 160, "frame_idx": 16, "global_frame_idx": 28601, "task_index": 32}, {"db_idx": 28602, "episode_idx": 160, "frame_idx": 17, "global_frame_idx": 28602, "task_index": 32}, {"db_idx": 28603, "episode_idx": 160, "frame_idx": 18, "global_frame_idx": 28603, "task_index": 32}, {"db_idx": 28604, "episode_idx": 160, "frame_idx": 19, "global_frame_idx": 28604, "task_index": 32}, {"db_idx": 28605, "episode_idx": 160, "frame_idx": 20, "global_frame_idx": 28605, "task_index": 32}, {"db_idx": 28606, "episode_idx": 160, "frame_idx": 21, "global_frame_idx": 28606, "task_index": 32}, {"db_idx": 28607, "episode_idx": 160, "frame_idx": 22, "global_frame_idx": 28607, "task_index": 32}, {"db_idx": 28608, "episode_idx": 160, "frame_idx": 23, "global_frame_idx": 28608, "task_index": 32}, 
{"db_idx": 28609, "episode_idx": 160, "frame_idx": 24, "global_frame_idx": 28609, "task_index": 32}, {"db_idx": 28610, "episode_idx": 160, "frame_idx": 25, "global_frame_idx": 28610, "task_index": 32}, {"db_idx": 28611, "episode_idx": 160, "frame_idx": 26, "global_frame_idx": 28611, "task_index": 32}, {"db_idx": 28612, "episode_idx": 160, "frame_idx": 27, "global_frame_idx": 28612, "task_index": 32}, {"db_idx": 28613, "episode_idx": 160, "frame_idx": 28, "global_frame_idx": 28613, "task_index": 32}, {"db_idx": 28614, "episode_idx": 160, "frame_idx": 29, "global_frame_idx": 28614, "task_index": 32}, {"db_idx": 28615, "episode_idx": 160, "frame_idx": 30, "global_frame_idx": 28615, "task_index": 32}, {"db_idx": 28616, "episode_idx": 160, "frame_idx": 31, "global_frame_idx": 28616, "task_index": 32}, {"db_idx": 28617, "episode_idx": 160, "frame_idx": 32, "global_frame_idx": 28617, "task_index": 32}, {"db_idx": 28618, "episode_idx": 160, "frame_idx": 33, "global_frame_idx": 28618, "task_index": 32}, {"db_idx": 28619, "episode_idx": 160, "frame_idx": 34, "global_frame_idx": 28619, "task_index": 32}, {"db_idx": 28620, "episode_idx": 160, "frame_idx": 35, "global_frame_idx": 28620, "task_index": 32}, {"db_idx": 28621, "episode_idx": 160, "frame_idx": 36, "global_frame_idx": 28621, "task_index": 32}, {"db_idx": 28622, "episode_idx": 160, "frame_idx": 37, "global_frame_idx": 28622, "task_index": 32}, {"db_idx": 28623, "episode_idx": 160, "frame_idx": 38, "global_frame_idx": 28623, "task_index": 32}, {"db_idx": 28624, "episode_idx": 160, "frame_idx": 39, "global_frame_idx": 28624, "task_index": 32}, {"db_idx": 28625, "episode_idx": 160, "frame_idx": 40, "global_frame_idx": 28625, "task_index": 32}, {"db_idx": 28626, "episode_idx": 160, "frame_idx": 41, "global_frame_idx": 28626, "task_index": 32}, {"db_idx": 28627, "episode_idx": 160, "frame_idx": 42, "global_frame_idx": 28627, "task_index": 32}, {"db_idx": 28628, "episode_idx": 160, "frame_idx": 43, "global_frame_idx": 
28628, "task_index": 32}, {"db_idx": 28629, "episode_idx": 160, "frame_idx": 44, "global_frame_idx": 28629, "task_index": 32}, {"db_idx": 28630, "episode_idx": 160, "frame_idx": 45, "global_frame_idx": 28630, "task_index": 32}, {"db_idx": 28631, "episode_idx": 160, "frame_idx": 46, "global_frame_idx": 28631, "task_index": 32}, {"db_idx": 28632, "episode_idx": 160, "frame_idx": 47, "global_frame_idx": 28632, "task_index": 32}, {"db_idx": 28633, "episode_idx": 160, "frame_idx": 48, "global_frame_idx": 28633, "task_index": 32}, {"db_idx": 28634, "episode_idx": 160, "frame_idx": 49, "global_frame_idx": 28634, "task_index": 32}, {"db_idx": 28635, "episode_idx": 160, "frame_idx": 50, "global_frame_idx": 28635, "task_index": 32}, {"db_idx": 28636, "episode_idx": 160, "frame_idx": 51, "global_frame_idx": 28636, "task_index": 32}, {"db_idx": 28637, "episode_idx": 160, "frame_idx": 52, "global_frame_idx": 28637, "task_index": 32}, {"db_idx": 28638, "episode_idx": 160, "frame_idx": 53, "global_frame_idx": 28638, "task_index": 32}, {"db_idx": 28639, "episode_idx": 160, "frame_idx": 54, "global_frame_idx": 28639, "task_index": 32}, {"db_idx": 28640, "episode_idx": 160, "frame_idx": 55, "global_frame_idx": 28640, "task_index": 32}, {"db_idx": 28641, "episode_idx": 160, "frame_idx": 56, "global_frame_idx": 28641, "task_index": 32}, {"db_idx": 28642, "episode_idx": 160, "frame_idx": 57, "global_frame_idx": 28642, "task_index": 32}, {"db_idx": 28643, "episode_idx": 160, "frame_idx": 58, "global_frame_idx": 28643, "task_index": 32}, {"db_idx": 28644, "episode_idx": 160, "frame_idx": 59, "global_frame_idx": 28644, "task_index": 32}, {"db_idx": 28645, "episode_idx": 160, "frame_idx": 60, "global_frame_idx": 28645, "task_index": 32}, {"db_idx": 28646, "episode_idx": 160, "frame_idx": 61, "global_frame_idx": 28646, "task_index": 32}, {"db_idx": 28647, "episode_idx": 160, "frame_idx": 62, "global_frame_idx": 28647, "task_index": 32}, {"db_idx": 28648, "episode_idx": 160, "frame_idx": 63, 
"global_frame_idx": 28648, "task_index": 32}, {"db_idx": 28649, "episode_idx": 160, "frame_idx": 64, "global_frame_idx": 28649, "task_index": 32}, {"db_idx": 28650, "episode_idx": 160, "frame_idx": 65, "global_frame_idx": 28650, "task_index": 32}, {"db_idx": 28651, "episode_idx": 160, "frame_idx": 66, "global_frame_idx": 28651, "task_index": 32}, {"db_idx": 28652, "episode_idx": 160, "frame_idx": 67, "global_frame_idx": 28652, "task_index": 32}, {"db_idx": 28653, "episode_idx": 160, "frame_idx": 68, "global_frame_idx": 28653, "task_index": 32}, {"db_idx": 28654, "episode_idx": 160, "frame_idx": 69, "global_frame_idx": 28654, "task_index": 32}, {"db_idx": 28655, "episode_idx": 160, "frame_idx": 70, "global_frame_idx": 28655, "task_index": 32}, {"db_idx": 28656, "episode_idx": 160, "frame_idx": 71, "global_frame_idx": 28656, "task_index": 32}, {"db_idx": 28657, "episode_idx": 160, "frame_idx": 72, "global_frame_idx": 28657, "task_index": 32}, {"db_idx": 28658, "episode_idx": 160, "frame_idx": 73, "global_frame_idx": 28658, "task_index": 32}, {"db_idx": 28659, "episode_idx": 160, "frame_idx": 74, "global_frame_idx": 28659, "task_index": 32}, {"db_idx": 28660, "episode_idx": 160, "frame_idx": 75, "global_frame_idx": 28660, "task_index": 32}, {"db_idx": 28661, "episode_idx": 160, "frame_idx": 76, "global_frame_idx": 28661, "task_index": 32}, {"db_idx": 28662, "episode_idx": 160, "frame_idx": 77, "global_frame_idx": 28662, "task_index": 32}, {"db_idx": 28663, "episode_idx": 160, "frame_idx": 78, "global_frame_idx": 28663, "task_index": 32}, {"db_idx": 28664, "episode_idx": 160, "frame_idx": 79, "global_frame_idx": 28664, "task_index": 32}, {"db_idx": 28665, "episode_idx": 160, "frame_idx": 80, "global_frame_idx": 28665, "task_index": 32}, {"db_idx": 28666, "episode_idx": 160, "frame_idx": 81, "global_frame_idx": 28666, "task_index": 32}, {"db_idx": 28667, "episode_idx": 160, "frame_idx": 82, "global_frame_idx": 28667, "task_index": 32}, {"db_idx": 28668, "episode_idx": 
160, "frame_idx": 83, "global_frame_idx": 28668, "task_index": 32}, {"db_idx": 28669, "episode_idx": 160, "frame_idx": 84, "global_frame_idx": 28669, "task_index": 32}, {"db_idx": 28670, "episode_idx": 160, "frame_idx": 85, "global_frame_idx": 28670, "task_index": 32}, {"db_idx": 28671, "episode_idx": 160, "frame_idx": 86, "global_frame_idx": 28671, "task_index": 32}, {"db_idx": 28672, "episode_idx": 160, "frame_idx": 87, "global_frame_idx": 28672, "task_index": 32}, {"db_idx": 28673, "episode_idx": 160, "frame_idx": 88, "global_frame_idx": 28673, "task_index": 32}, {"db_idx": 28674, "episode_idx": 161, "frame_idx": 0, "global_frame_idx": 28674, "task_index": 32}, {"db_idx": 28675, "episode_idx": 161, "frame_idx": 1, "global_frame_idx": 28675, "task_index": 32}, {"db_idx": 28676, "episode_idx": 161, "frame_idx": 2, "global_frame_idx": 28676, "task_index": 32}, {"db_idx": 28677, "episode_idx": 161, "frame_idx": 3, "global_frame_idx": 28677, "task_index": 32}, {"db_idx": 28678, "episode_idx": 161, "frame_idx": 4, "global_frame_idx": 28678, "task_index": 32}, {"db_idx": 28679, "episode_idx": 161, "frame_idx": 5, "global_frame_idx": 28679, "task_index": 32}, {"db_idx": 28680, "episode_idx": 161, "frame_idx": 6, "global_frame_idx": 28680, "task_index": 32}, {"db_idx": 28681, "episode_idx": 161, "frame_idx": 7, "global_frame_idx": 28681, "task_index": 32}, {"db_idx": 28682, "episode_idx": 161, "frame_idx": 8, "global_frame_idx": 28682, "task_index": 32}, {"db_idx": 28683, "episode_idx": 161, "frame_idx": 9, "global_frame_idx": 28683, "task_index": 32}, {"db_idx": 28684, "episode_idx": 161, "frame_idx": 10, "global_frame_idx": 28684, "task_index": 32}, {"db_idx": 28685, "episode_idx": 161, "frame_idx": 11, "global_frame_idx": 28685, "task_index": 32}, {"db_idx": 28686, "episode_idx": 161, "frame_idx": 12, "global_frame_idx": 28686, "task_index": 32}, {"db_idx": 28687, "episode_idx": 161, "frame_idx": 13, "global_frame_idx": 28687, "task_index": 32}, {"db_idx": 28688, 
"episode_idx": 161, "frame_idx": 14, "global_frame_idx": 28688, "task_index": 32}, {"db_idx": 28689, "episode_idx": 161, "frame_idx": 15, "global_frame_idx": 28689, "task_index": 32}, {"db_idx": 28690, "episode_idx": 161, "frame_idx": 16, "global_frame_idx": 28690, "task_index": 32}, {"db_idx": 28691, "episode_idx": 161, "frame_idx": 17, "global_frame_idx": 28691, "task_index": 32}, {"db_idx": 28692, "episode_idx": 161, "frame_idx": 18, "global_frame_idx": 28692, "task_index": 32}, {"db_idx": 28693, "episode_idx": 161, "frame_idx": 19, "global_frame_idx": 28693, "task_index": 32}, {"db_idx": 28694, "episode_idx": 161, "frame_idx": 20, "global_frame_idx": 28694, "task_index": 32}, {"db_idx": 28695, "episode_idx": 161, "frame_idx": 21, "global_frame_idx": 28695, "task_index": 32}, {"db_idx": 28696, "episode_idx": 161, "frame_idx": 22, "global_frame_idx": 28696, "task_index": 32}, {"db_idx": 28697, "episode_idx": 161, "frame_idx": 23, "global_frame_idx": 28697, "task_index": 32}, {"db_idx": 28698, "episode_idx": 161, "frame_idx": 24, "global_frame_idx": 28698, "task_index": 32}, {"db_idx": 28699, "episode_idx": 161, "frame_idx": 25, "global_frame_idx": 28699, "task_index": 32}, {"db_idx": 28700, "episode_idx": 161, "frame_idx": 26, "global_frame_idx": 28700, "task_index": 32}, {"db_idx": 28701, "episode_idx": 161, "frame_idx": 27, "global_frame_idx": 28701, "task_index": 32}, {"db_idx": 28702, "episode_idx": 161, "frame_idx": 28, "global_frame_idx": 28702, "task_index": 32}, {"db_idx": 28703, "episode_idx": 161, "frame_idx": 29, "global_frame_idx": 28703, "task_index": 32}, {"db_idx": 28704, "episode_idx": 161, "frame_idx": 30, "global_frame_idx": 28704, "task_index": 32}, {"db_idx": 28705, "episode_idx": 161, "frame_idx": 31, "global_frame_idx": 28705, "task_index": 32}, {"db_idx": 28706, "episode_idx": 161, "frame_idx": 32, "global_frame_idx": 28706, "task_index": 32}, {"db_idx": 28707, "episode_idx": 161, "frame_idx": 33, "global_frame_idx": 28707, "task_index": 
32}, {"db_idx": 28708, "episode_idx": 161, "frame_idx": 34, "global_frame_idx": 28708, "task_index": 32}, {"db_idx": 28709, "episode_idx": 161, "frame_idx": 35, "global_frame_idx": 28709, "task_index": 32}, {"db_idx": 28710, "episode_idx": 161, "frame_idx": 36, "global_frame_idx": 28710, "task_index": 32}, {"db_idx": 28711, "episode_idx": 161, "frame_idx": 37, "global_frame_idx": 28711, "task_index": 32}, {"db_idx": 28712, "episode_idx": 161, "frame_idx": 38, "global_frame_idx": 28712, "task_index": 32}, {"db_idx": 28713, "episode_idx": 161, "frame_idx": 39, "global_frame_idx": 28713, "task_index": 32}, {"db_idx": 28714, "episode_idx": 161, "frame_idx": 40, "global_frame_idx": 28714, "task_index": 32}, {"db_idx": 28715, "episode_idx": 161, "frame_idx": 41, "global_frame_idx": 28715, "task_index": 32}, {"db_idx": 28716, "episode_idx": 161, "frame_idx": 42, "global_frame_idx": 28716, "task_index": 32}, {"db_idx": 28717, "episode_idx": 161, "frame_idx": 43, "global_frame_idx": 28717, "task_index": 32}, {"db_idx": 28718, "episode_idx": 161, "frame_idx": 44, "global_frame_idx": 28718, "task_index": 32}, {"db_idx": 28719, "episode_idx": 161, "frame_idx": 45, "global_frame_idx": 28719, "task_index": 32}, {"db_idx": 28720, "episode_idx": 161, "frame_idx": 46, "global_frame_idx": 28720, "task_index": 32}, {"db_idx": 28721, "episode_idx": 161, "frame_idx": 47, "global_frame_idx": 28721, "task_index": 32}, {"db_idx": 28722, "episode_idx": 161, "frame_idx": 48, "global_frame_idx": 28722, "task_index": 32}, {"db_idx": 28723, "episode_idx": 161, "frame_idx": 49, "global_frame_idx": 28723, "task_index": 32}, {"db_idx": 28724, "episode_idx": 161, "frame_idx": 50, "global_frame_idx": 28724, "task_index": 32}, {"db_idx": 28725, "episode_idx": 161, "frame_idx": 51, "global_frame_idx": 28725, "task_index": 32}, {"db_idx": 28726, "episode_idx": 161, "frame_idx": 52, "global_frame_idx": 28726, "task_index": 32}, {"db_idx": 28727, "episode_idx": 161, "frame_idx": 53, "global_frame_idx": 
28727, "task_index": 32}, {"db_idx": 28728, "episode_idx": 161, "frame_idx": 54, "global_frame_idx": 28728, "task_index": 32}, {"db_idx": 28729, "episode_idx": 161, "frame_idx": 55, "global_frame_idx": 28729, "task_index": 32}, {"db_idx": 28730, "episode_idx": 161, "frame_idx": 56, "global_frame_idx": 28730, "task_index": 32}, {"db_idx": 28731, "episode_idx": 161, "frame_idx": 57, "global_frame_idx": 28731, "task_index": 32}, {"db_idx": 28732, "episode_idx": 161, "frame_idx": 58, "global_frame_idx": 28732, "task_index": 32}, {"db_idx": 28733, "episode_idx": 161, "frame_idx": 59, "global_frame_idx": 28733, "task_index": 32}, {"db_idx": 28734, "episode_idx": 161, "frame_idx": 60, "global_frame_idx": 28734, "task_index": 32}, {"db_idx": 28735, "episode_idx": 161, "frame_idx": 61, "global_frame_idx": 28735, "task_index": 32}, {"db_idx": 28736, "episode_idx": 161, "frame_idx": 62, "global_frame_idx": 28736, "task_index": 32}, {"db_idx": 28737, "episode_idx": 161, "frame_idx": 63, "global_frame_idx": 28737, "task_index": 32}, {"db_idx": 28738, "episode_idx": 161, "frame_idx": 64, "global_frame_idx": 28738, "task_index": 32}, {"db_idx": 28739, "episode_idx": 161, "frame_idx": 65, "global_frame_idx": 28739, "task_index": 32}, {"db_idx": 28740, "episode_idx": 161, "frame_idx": 66, "global_frame_idx": 28740, "task_index": 32}, {"db_idx": 28741, "episode_idx": 161, "frame_idx": 67, "global_frame_idx": 28741, "task_index": 32}, {"db_idx": 28742, "episode_idx": 161, "frame_idx": 68, "global_frame_idx": 28742, "task_index": 32}, {"db_idx": 28743, "episode_idx": 161, "frame_idx": 69, "global_frame_idx": 28743, "task_index": 32}, {"db_idx": 28744, "episode_idx": 161, "frame_idx": 70, "global_frame_idx": 28744, "task_index": 32}, {"db_idx": 28745, "episode_idx": 161, "frame_idx": 71, "global_frame_idx": 28745, "task_index": 32}, {"db_idx": 28746, "episode_idx": 161, "frame_idx": 72, "global_frame_idx": 28746, "task_index": 32}, {"db_idx": 28747, "episode_idx": 161, "frame_idx": 73, 
"global_frame_idx": 28747, "task_index": 32}, {"db_idx": 28748, "episode_idx": 161, "frame_idx": 74, "global_frame_idx": 28748, "task_index": 32}, {"db_idx": 28749, "episode_idx": 161, "frame_idx": 75, "global_frame_idx": 28749, "task_index": 32}, {"db_idx": 28750, "episode_idx": 161, "frame_idx": 76, "global_frame_idx": 28750, "task_index": 32}, {"db_idx": 28751, "episode_idx": 161, "frame_idx": 77, "global_frame_idx": 28751, "task_index": 32}, {"db_idx": 28752, "episode_idx": 161, "frame_idx": 78, "global_frame_idx": 28752, "task_index": 32}, {"db_idx": 28753, "episode_idx": 161, "frame_idx": 79, "global_frame_idx": 28753, "task_index": 32}, {"db_idx": 28754, "episode_idx": 161, "frame_idx": 80, "global_frame_idx": 28754, "task_index": 32}, {"db_idx": 28755, "episode_idx": 161, "frame_idx": 81, "global_frame_idx": 28755, "task_index": 32}, {"db_idx": 28756, "episode_idx": 161, "frame_idx": 82, "global_frame_idx": 28756, "task_index": 32}, {"db_idx": 28757, "episode_idx": 161, "frame_idx": 83, "global_frame_idx": 28757, "task_index": 32}, {"db_idx": 28758, "episode_idx": 161, "frame_idx": 84, "global_frame_idx": 28758, "task_index": 32}, {"db_idx": 28759, "episode_idx": 161, "frame_idx": 85, "global_frame_idx": 28759, "task_index": 32}, {"db_idx": 28760, "episode_idx": 161, "frame_idx": 86, "global_frame_idx": 28760, "task_index": 32}, {"db_idx": 28761, "episode_idx": 161, "frame_idx": 87, "global_frame_idx": 28761, "task_index": 32}, {"db_idx": 28762, "episode_idx": 161, "frame_idx": 88, "global_frame_idx": 28762, "task_index": 32}, {"db_idx": 28763, "episode_idx": 161, "frame_idx": 89, "global_frame_idx": 28763, "task_index": 32}, {"db_idx": 28764, "episode_idx": 161, "frame_idx": 90, "global_frame_idx": 28764, "task_index": 32}, {"db_idx": 28765, "episode_idx": 161, "frame_idx": 91, "global_frame_idx": 28765, "task_index": 32}, {"db_idx": 28766, "episode_idx": 161, "frame_idx": 92, "global_frame_idx": 28766, "task_index": 32}, {"db_idx": 28767, "episode_idx": 
162, "frame_idx": 0, "global_frame_idx": 28767, "task_index": 32}, {"db_idx": 28768, "episode_idx": 162, "frame_idx": 1, "global_frame_idx": 28768, "task_index": 32}, {"db_idx": 28769, "episode_idx": 162, "frame_idx": 2, "global_frame_idx": 28769, "task_index": 32}, {"db_idx": 28770, "episode_idx": 162, "frame_idx": 3, "global_frame_idx": 28770, "task_index": 32}, {"db_idx": 28771, "episode_idx": 162, "frame_idx": 4, "global_frame_idx": 28771, "task_index": 32}, {"db_idx": 28772, "episode_idx": 162, "frame_idx": 5, "global_frame_idx": 28772, "task_index": 32}, {"db_idx": 28773, "episode_idx": 162, "frame_idx": 6, "global_frame_idx": 28773, "task_index": 32}, {"db_idx": 28774, "episode_idx": 162, "frame_idx": 7, "global_frame_idx": 28774, "task_index": 32}, {"db_idx": 28775, "episode_idx": 162, "frame_idx": 8, "global_frame_idx": 28775, "task_index": 32}, {"db_idx": 28776, "episode_idx": 162, "frame_idx": 9, "global_frame_idx": 28776, "task_index": 32}, {"db_idx": 28777, "episode_idx": 162, "frame_idx": 10, "global_frame_idx": 28777, "task_index": 32}, {"db_idx": 28778, "episode_idx": 162, "frame_idx": 11, "global_frame_idx": 28778, "task_index": 32}, {"db_idx": 28779, "episode_idx": 162, "frame_idx": 12, "global_frame_idx": 28779, "task_index": 32}, {"db_idx": 28780, "episode_idx": 162, "frame_idx": 13, "global_frame_idx": 28780, "task_index": 32}, {"db_idx": 28781, "episode_idx": 162, "frame_idx": 14, "global_frame_idx": 28781, "task_index": 32}, {"db_idx": 28782, "episode_idx": 162, "frame_idx": 15, "global_frame_idx": 28782, "task_index": 32}, {"db_idx": 28783, "episode_idx": 162, "frame_idx": 16, "global_frame_idx": 28783, "task_index": 32}, {"db_idx": 28784, "episode_idx": 162, "frame_idx": 17, "global_frame_idx": 28784, "task_index": 32}, {"db_idx": 28785, "episode_idx": 162, "frame_idx": 18, "global_frame_idx": 28785, "task_index": 32}, {"db_idx": 28786, "episode_idx": 162, "frame_idx": 19, "global_frame_idx": 28786, "task_index": 32}, {"db_idx": 28787, 
"episode_idx": 162, "frame_idx": 20, "global_frame_idx": 28787, "task_index": 32}, {"db_idx": 28788, "episode_idx": 162, "frame_idx": 21, "global_frame_idx": 28788, "task_index": 32}, {"db_idx": 28789, "episode_idx": 162, "frame_idx": 22, "global_frame_idx": 28789, "task_index": 32}, {"db_idx": 28790, "episode_idx": 162, "frame_idx": 23, "global_frame_idx": 28790, "task_index": 32}, {"db_idx": 28791, "episode_idx": 162, "frame_idx": 24, "global_frame_idx": 28791, "task_index": 32}, {"db_idx": 28792, "episode_idx": 162, "frame_idx": 25, "global_frame_idx": 28792, "task_index": 32}, {"db_idx": 28793, "episode_idx": 162, "frame_idx": 26, "global_frame_idx": 28793, "task_index": 32}, {"db_idx": 28794, "episode_idx": 162, "frame_idx": 27, "global_frame_idx": 28794, "task_index": 32}, {"db_idx": 28795, "episode_idx": 162, "frame_idx": 28, "global_frame_idx": 28795, "task_index": 32}, {"db_idx": 28796, "episode_idx": 162, "frame_idx": 29, "global_frame_idx": 28796, "task_index": 32}, {"db_idx": 28797, "episode_idx": 162, "frame_idx": 30, "global_frame_idx": 28797, "task_index": 32}, {"db_idx": 28798, "episode_idx": 162, "frame_idx": 31, "global_frame_idx": 28798, "task_index": 32}, {"db_idx": 28799, "episode_idx": 162, "frame_idx": 32, "global_frame_idx": 28799, "task_index": 32}, {"db_idx": 28800, "episode_idx": 162, "frame_idx": 33, "global_frame_idx": 28800, "task_index": 32}, {"db_idx": 28801, "episode_idx": 162, "frame_idx": 34, "global_frame_idx": 28801, "task_index": 32}, {"db_idx": 28802, "episode_idx": 162, "frame_idx": 35, "global_frame_idx": 28802, "task_index": 32}, {"db_idx": 28803, "episode_idx": 162, "frame_idx": 36, "global_frame_idx": 28803, "task_index": 32}, {"db_idx": 28804, "episode_idx": 162, "frame_idx": 37, "global_frame_idx": 28804, "task_index": 32}, {"db_idx": 28805, "episode_idx": 162, "frame_idx": 38, "global_frame_idx": 28805, "task_index": 32}, {"db_idx": 28806, "episode_idx": 162, "frame_idx": 39, "global_frame_idx": 28806, "task_index": 
32}, {"db_idx": 28807, "episode_idx": 162, "frame_idx": 40, "global_frame_idx": 28807, "task_index": 32}, {"db_idx": 28808, "episode_idx": 162, "frame_idx": 41, "global_frame_idx": 28808, "task_index": 32}, {"db_idx": 28809, "episode_idx": 162, "frame_idx": 42, "global_frame_idx": 28809, "task_index": 32}, {"db_idx": 28810, "episode_idx": 162, "frame_idx": 43, "global_frame_idx": 28810, "task_index": 32}, {"db_idx": 28811, "episode_idx": 162, "frame_idx": 44, "global_frame_idx": 28811, "task_index": 32}, {"db_idx": 28812, "episode_idx": 162, "frame_idx": 45, "global_frame_idx": 28812, "task_index": 32}, {"db_idx": 28813, "episode_idx": 162, "frame_idx": 46, "global_frame_idx": 28813, "task_index": 32}, {"db_idx": 28814, "episode_idx": 162, "frame_idx": 47, "global_frame_idx": 28814, "task_index": 32}, {"db_idx": 28815, "episode_idx": 162, "frame_idx": 48, "global_frame_idx": 28815, "task_index": 32}, {"db_idx": 28816, "episode_idx": 162, "frame_idx": 49, "global_frame_idx": 28816, "task_index": 32}, {"db_idx": 28817, "episode_idx": 162, "frame_idx": 50, "global_frame_idx": 28817, "task_index": 32}, {"db_idx": 28818, "episode_idx": 162, "frame_idx": 51, "global_frame_idx": 28818, "task_index": 32}, {"db_idx": 28819, "episode_idx": 162, "frame_idx": 52, "global_frame_idx": 28819, "task_index": 32}, {"db_idx": 28820, "episode_idx": 162, "frame_idx": 53, "global_frame_idx": 28820, "task_index": 32}, {"db_idx": 28821, "episode_idx": 162, "frame_idx": 54, "global_frame_idx": 28821, "task_index": 32}, {"db_idx": 28822, "episode_idx": 162, "frame_idx": 55, "global_frame_idx": 28822, "task_index": 32}, {"db_idx": 28823, "episode_idx": 162, "frame_idx": 56, "global_frame_idx": 28823, "task_index": 32}, {"db_idx": 28824, "episode_idx": 162, "frame_idx": 57, "global_frame_idx": 28824, "task_index": 32}, {"db_idx": 28825, "episode_idx": 162, "frame_idx": 58, "global_frame_idx": 28825, "task_index": 32}, {"db_idx": 28826, "episode_idx": 162, "frame_idx": 59, "global_frame_idx": 
28826, "task_index": 32}, {"db_idx": 28827, "episode_idx": 162, "frame_idx": 60, "global_frame_idx": 28827, "task_index": 32}, {"db_idx": 28828, "episode_idx": 162, "frame_idx": 61, "global_frame_idx": 28828, "task_index": 32}, {"db_idx": 28829, "episode_idx": 162, "frame_idx": 62, "global_frame_idx": 28829, "task_index": 32}, {"db_idx": 28830, "episode_idx": 162, "frame_idx": 63, "global_frame_idx": 28830, "task_index": 32}, {"db_idx": 28831, "episode_idx": 162, "frame_idx": 64, "global_frame_idx": 28831, "task_index": 32}, {"db_idx": 28832, "episode_idx": 162, "frame_idx": 65, "global_frame_idx": 28832, "task_index": 32}, {"db_idx": 28833, "episode_idx": 162, "frame_idx": 66, "global_frame_idx": 28833, "task_index": 32}, {"db_idx": 28834, "episode_idx": 162, "frame_idx": 67, "global_frame_idx": 28834, "task_index": 32}, {"db_idx": 28835, "episode_idx": 162, "frame_idx": 68, "global_frame_idx": 28835, "task_index": 32}, {"db_idx": 28836, "episode_idx": 162, "frame_idx": 69, "global_frame_idx": 28836, "task_index": 32}, {"db_idx": 28837, "episode_idx": 162, "frame_idx": 70, "global_frame_idx": 28837, "task_index": 32}, {"db_idx": 28838, "episode_idx": 162, "frame_idx": 71, "global_frame_idx": 28838, "task_index": 32}, {"db_idx": 28839, "episode_idx": 162, "frame_idx": 72, "global_frame_idx": 28839, "task_index": 32}, {"db_idx": 28840, "episode_idx": 162, "frame_idx": 73, "global_frame_idx": 28840, "task_index": 32}, {"db_idx": 28841, "episode_idx": 162, "frame_idx": 74, "global_frame_idx": 28841, "task_index": 32}, {"db_idx": 28842, "episode_idx": 162, "frame_idx": 75, "global_frame_idx": 28842, "task_index": 32}, {"db_idx": 28843, "episode_idx": 162, "frame_idx": 76, "global_frame_idx": 28843, "task_index": 32}, {"db_idx": 28844, "episode_idx": 162, "frame_idx": 77, "global_frame_idx": 28844, "task_index": 32}, {"db_idx": 28845, "episode_idx": 162, "frame_idx": 78, "global_frame_idx": 28845, "task_index": 32}, {"db_idx": 28846, "episode_idx": 162, "frame_idx": 79, 
"global_frame_idx": 28846, "task_index": 32}, {"db_idx": 28847, "episode_idx": 162, "frame_idx": 80, "global_frame_idx": 28847, "task_index": 32}, {"db_idx": 28848, "episode_idx": 162, "frame_idx": 81, "global_frame_idx": 28848, "task_index": 32}, {"db_idx": 28849, "episode_idx": 162, "frame_idx": 82, "global_frame_idx": 28849, "task_index": 32}, {"db_idx": 28850, "episode_idx": 162, "frame_idx": 83, "global_frame_idx": 28850, "task_index": 32}, {"db_idx": 28851, "episode_idx": 162, "frame_idx": 84, "global_frame_idx": 28851, "task_index": 32}, {"db_idx": 28852, "episode_idx": 162, "frame_idx": 85, "global_frame_idx": 28852, "task_index": 32}, {"db_idx": 28853, "episode_idx": 162, "frame_idx": 86, "global_frame_idx": 28853, "task_index": 32}, {"db_idx": 28854, "episode_idx": 162, "frame_idx": 87, "global_frame_idx": 28854, "task_index": 32}, {"db_idx": 28855, "episode_idx": 162, "frame_idx": 88, "global_frame_idx": 28855, "task_index": 32}, {"db_idx": 28856, "episode_idx": 162, "frame_idx": 89, "global_frame_idx": 28856, "task_index": 32}, {"db_idx": 28857, "episode_idx": 162, "frame_idx": 90, "global_frame_idx": 28857, "task_index": 32}, {"db_idx": 28858, "episode_idx": 162, "frame_idx": 91, "global_frame_idx": 28858, "task_index": 32}, {"db_idx": 28859, "episode_idx": 162, "frame_idx": 92, "global_frame_idx": 28859, "task_index": 32}, {"db_idx": 28860, "episode_idx": 162, "frame_idx": 93, "global_frame_idx": 28860, "task_index": 32}, {"db_idx": 28861, "episode_idx": 162, "frame_idx": 94, "global_frame_idx": 28861, "task_index": 32}, {"db_idx": 28862, "episode_idx": 162, "frame_idx": 95, "global_frame_idx": 28862, "task_index": 32}, {"db_idx": 28863, "episode_idx": 162, "frame_idx": 96, "global_frame_idx": 28863, "task_index": 32}, {"db_idx": 28864, "episode_idx": 162, "frame_idx": 97, "global_frame_idx": 28864, "task_index": 32}, {"db_idx": 28865, "episode_idx": 162, "frame_idx": 98, "global_frame_idx": 28865, "task_index": 32}, {"db_idx": 28866, "episode_idx": 
162, "frame_idx": 99, "global_frame_idx": 28866, "task_index": 32}, {"db_idx": 28867, "episode_idx": 162, "frame_idx": 100, "global_frame_idx": 28867, "task_index": 32}, {"db_idx": 28868, "episode_idx": 162, "frame_idx": 101, "global_frame_idx": 28868, "task_index": 32}, {"db_idx": 28869, "episode_idx": 162, "frame_idx": 102, "global_frame_idx": 28869, "task_index": 32}, {"db_idx": 28870, "episode_idx": 162, "frame_idx": 103, "global_frame_idx": 28870, "task_index": 32}, {"db_idx": 28871, "episode_idx": 162, "frame_idx": 104, "global_frame_idx": 28871, "task_index": 32}, {"db_idx": 28872, "episode_idx": 162, "frame_idx": 105, "global_frame_idx": 28872, "task_index": 32}, {"db_idx": 28873, "episode_idx": 162, "frame_idx": 106, "global_frame_idx": 28873, "task_index": 32}, {"db_idx": 28874, "episode_idx": 162, "frame_idx": 107, "global_frame_idx": 28874, "task_index": 32}, {"db_idx": 28875, "episode_idx": 162, "frame_idx": 108, "global_frame_idx": 28875, "task_index": 32}, {"db_idx": 28876, "episode_idx": 162, "frame_idx": 109, "global_frame_idx": 28876, "task_index": 32}, {"db_idx": 28877, "episode_idx": 162, "frame_idx": 110, "global_frame_idx": 28877, "task_index": 32}, {"db_idx": 28878, "episode_idx": 162, "frame_idx": 111, "global_frame_idx": 28878, "task_index": 32}, {"db_idx": 28879, "episode_idx": 162, "frame_idx": 112, "global_frame_idx": 28879, "task_index": 32}, {"db_idx": 28880, "episode_idx": 162, "frame_idx": 113, "global_frame_idx": 28880, "task_index": 32}, {"db_idx": 28881, "episode_idx": 162, "frame_idx": 114, "global_frame_idx": 28881, "task_index": 32}, {"db_idx": 28882, "episode_idx": 162, "frame_idx": 115, "global_frame_idx": 28882, "task_index": 32}, {"db_idx": 28883, "episode_idx": 162, "frame_idx": 116, "global_frame_idx": 28883, "task_index": 32}, {"db_idx": 28884, "episode_idx": 162, "frame_idx": 117, "global_frame_idx": 28884, "task_index": 32}, {"db_idx": 28885, "episode_idx": 162, "frame_idx": 118, "global_frame_idx": 28885, 
"task_index": 32}, {"db_idx": 28886, "episode_idx": 162, "frame_idx": 119, "global_frame_idx": 28886, "task_index": 32}, {"db_idx": 28887, "episode_idx": 162, "frame_idx": 120, "global_frame_idx": 28887, "task_index": 32}, {"db_idx": 28888, "episode_idx": 162, "frame_idx": 121, "global_frame_idx": 28888, "task_index": 32}, {"db_idx": 28889, "episode_idx": 162, "frame_idx": 122, "global_frame_idx": 28889, "task_index": 32}, {"db_idx": 28890, "episode_idx": 162, "frame_idx": 123, "global_frame_idx": 28890, "task_index": 32}, {"db_idx": 28891, "episode_idx": 162, "frame_idx": 124, "global_frame_idx": 28891, "task_index": 32}, {"db_idx": 28892, "episode_idx": 162, "frame_idx": 125, "global_frame_idx": 28892, "task_index": 32}, {"db_idx": 28893, "episode_idx": 162, "frame_idx": 126, "global_frame_idx": 28893, "task_index": 32}, {"db_idx": 28894, "episode_idx": 162, "frame_idx": 127, "global_frame_idx": 28894, "task_index": 32}, {"db_idx": 28895, "episode_idx": 162, "frame_idx": 128, "global_frame_idx": 28895, "task_index": 32}, {"db_idx": 28896, "episode_idx": 162, "frame_idx": 129, "global_frame_idx": 28896, "task_index": 32}, {"db_idx": 28897, "episode_idx": 162, "frame_idx": 130, "global_frame_idx": 28897, "task_index": 32}, {"db_idx": 28898, "episode_idx": 162, "frame_idx": 131, "global_frame_idx": 28898, "task_index": 32}, {"db_idx": 28899, "episode_idx": 162, "frame_idx": 132, "global_frame_idx": 28899, "task_index": 32}, {"db_idx": 28900, "episode_idx": 162, "frame_idx": 133, "global_frame_idx": 28900, "task_index": 32}, {"db_idx": 28901, "episode_idx": 162, "frame_idx": 134, "global_frame_idx": 28901, "task_index": 32}, {"db_idx": 28902, "episode_idx": 162, "frame_idx": 135, "global_frame_idx": 28902, "task_index": 32}, {"db_idx": 28903, "episode_idx": 162, "frame_idx": 136, "global_frame_idx": 28903, "task_index": 32}, {"db_idx": 28904, "episode_idx": 162, "frame_idx": 137, "global_frame_idx": 28904, "task_index": 32}, {"db_idx": 28905, "episode_idx": 162, 
"frame_idx": 138, "global_frame_idx": 28905, "task_index": 32}, {"db_idx": 28906, "episode_idx": 162, "frame_idx": 139, "global_frame_idx": 28906, "task_index": 32}, {"db_idx": 28907, "episode_idx": 162, "frame_idx": 140, "global_frame_idx": 28907, "task_index": 32}, {"db_idx": 28908, "episode_idx": 162, "frame_idx": 141, "global_frame_idx": 28908, "task_index": 32}, {"db_idx": 28909, "episode_idx": 162, "frame_idx": 142, "global_frame_idx": 28909, "task_index": 32}, {"db_idx": 28910, "episode_idx": 162, "frame_idx": 143, "global_frame_idx": 28910, "task_index": 32}, {"db_idx": 28911, "episode_idx": 162, "frame_idx": 144, "global_frame_idx": 28911, "task_index": 32}, {"db_idx": 28912, "episode_idx": 162, "frame_idx": 145, "global_frame_idx": 28912, "task_index": 32}, {"db_idx": 28913, "episode_idx": 162, "frame_idx": 146, "global_frame_idx": 28913, "task_index": 32}, {"db_idx": 28914, "episode_idx": 162, "frame_idx": 147, "global_frame_idx": 28914, "task_index": 32}, {"db_idx": 28915, "episode_idx": 162, "frame_idx": 148, "global_frame_idx": 28915, "task_index": 32}, {"db_idx": 28916, "episode_idx": 162, "frame_idx": 149, "global_frame_idx": 28916, "task_index": 32}, {"db_idx": 28917, "episode_idx": 162, "frame_idx": 150, "global_frame_idx": 28917, "task_index": 32}, {"db_idx": 28918, "episode_idx": 162, "frame_idx": 151, "global_frame_idx": 28918, "task_index": 32}, {"db_idx": 28919, "episode_idx": 162, "frame_idx": 152, "global_frame_idx": 28919, "task_index": 32}, {"db_idx": 28920, "episode_idx": 162, "frame_idx": 153, "global_frame_idx": 28920, "task_index": 32}, {"db_idx": 28921, "episode_idx": 162, "frame_idx": 154, "global_frame_idx": 28921, "task_index": 32}, {"db_idx": 28922, "episode_idx": 162, "frame_idx": 155, "global_frame_idx": 28922, "task_index": 32}, {"db_idx": 28923, "episode_idx": 162, "frame_idx": 156, "global_frame_idx": 28923, "task_index": 32}, {"db_idx": 28924, "episode_idx": 162, "frame_idx": 157, "global_frame_idx": 28924, "task_index": 
32}, {"db_idx": 28925, "episode_idx": 162, "frame_idx": 158, "global_frame_idx": 28925, "task_index": 32}, {"db_idx": 28926, "episode_idx": 162, "frame_idx": 159, "global_frame_idx": 28926, "task_index": 32}, {"db_idx": 28927, "episode_idx": 162, "frame_idx": 160, "global_frame_idx": 28927, "task_index": 32}, {"db_idx": 28928, "episode_idx": 162, "frame_idx": 161, "global_frame_idx": 28928, "task_index": 32}, {"db_idx": 28929, "episode_idx": 162, "frame_idx": 162, "global_frame_idx": 28929, "task_index": 32}, {"db_idx": 28930, "episode_idx": 162, "frame_idx": 163, "global_frame_idx": 28930, "task_index": 32}, {"db_idx": 28931, "episode_idx": 162, "frame_idx": 164, "global_frame_idx": 28931, "task_index": 32}, {"db_idx": 28932, "episode_idx": 162, "frame_idx": 165, "global_frame_idx": 28932, "task_index": 32}, {"db_idx": 28933, "episode_idx": 162, "frame_idx": 166, "global_frame_idx": 28933, "task_index": 32}, {"db_idx": 28934, "episode_idx": 162, "frame_idx": 167, "global_frame_idx": 28934, "task_index": 32}, {"db_idx": 28935, "episode_idx": 163, "frame_idx": 0, "global_frame_idx": 28935, "task_index": 32}, {"db_idx": 28936, "episode_idx": 163, "frame_idx": 1, "global_frame_idx": 28936, "task_index": 32}, {"db_idx": 28937, "episode_idx": 163, "frame_idx": 2, "global_frame_idx": 28937, "task_index": 32}, {"db_idx": 28938, "episode_idx": 163, "frame_idx": 3, "global_frame_idx": 28938, "task_index": 32}, {"db_idx": 28939, "episode_idx": 163, "frame_idx": 4, "global_frame_idx": 28939, "task_index": 32}, {"db_idx": 28940, "episode_idx": 163, "frame_idx": 5, "global_frame_idx": 28940, "task_index": 32}, {"db_idx": 28941, "episode_idx": 163, "frame_idx": 6, "global_frame_idx": 28941, "task_index": 32}, {"db_idx": 28942, "episode_idx": 163, "frame_idx": 7, "global_frame_idx": 28942, "task_index": 32}, {"db_idx": 28943, "episode_idx": 163, "frame_idx": 8, "global_frame_idx": 28943, "task_index": 32}, {"db_idx": 28944, "episode_idx": 163, "frame_idx": 9, "global_frame_idx": 
28944, "task_index": 32}, {"db_idx": 28945, "episode_idx": 163, "frame_idx": 10, "global_frame_idx": 28945, "task_index": 32}, {"db_idx": 28946, "episode_idx": 163, "frame_idx": 11, "global_frame_idx": 28946, "task_index": 32}, {"db_idx": 28947, "episode_idx": 163, "frame_idx": 12, "global_frame_idx": 28947, "task_index": 32}, {"db_idx": 28948, "episode_idx": 163, "frame_idx": 13, "global_frame_idx": 28948, "task_index": 32}, {"db_idx": 28949, "episode_idx": 163, "frame_idx": 14, "global_frame_idx": 28949, "task_index": 32}, {"db_idx": 28950, "episode_idx": 163, "frame_idx": 15, "global_frame_idx": 28950, "task_index": 32}, {"db_idx": 28951, "episode_idx": 163, "frame_idx": 16, "global_frame_idx": 28951, "task_index": 32}, {"db_idx": 28952, "episode_idx": 163, "frame_idx": 17, "global_frame_idx": 28952, "task_index": 32}, {"db_idx": 28953, "episode_idx": 163, "frame_idx": 18, "global_frame_idx": 28953, "task_index": 32}, {"db_idx": 28954, "episode_idx": 163, "frame_idx": 19, "global_frame_idx": 28954, "task_index": 32}, {"db_idx": 28955, "episode_idx": 163, "frame_idx": 20, "global_frame_idx": 28955, "task_index": 32}, {"db_idx": 28956, "episode_idx": 163, "frame_idx": 21, "global_frame_idx": 28956, "task_index": 32}, {"db_idx": 28957, "episode_idx": 163, "frame_idx": 22, "global_frame_idx": 28957, "task_index": 32}, {"db_idx": 28958, "episode_idx": 163, "frame_idx": 23, "global_frame_idx": 28958, "task_index": 32}, {"db_idx": 28959, "episode_idx": 163, "frame_idx": 24, "global_frame_idx": 28959, "task_index": 32}, {"db_idx": 28960, "episode_idx": 163, "frame_idx": 25, "global_frame_idx": 28960, "task_index": 32}, {"db_idx": 28961, "episode_idx": 163, "frame_idx": 26, "global_frame_idx": 28961, "task_index": 32}, {"db_idx": 28962, "episode_idx": 163, "frame_idx": 27, "global_frame_idx": 28962, "task_index": 32}, {"db_idx": 28963, "episode_idx": 163, "frame_idx": 28, "global_frame_idx": 28963, "task_index": 32}, {"db_idx": 28964, "episode_idx": 163, "frame_idx": 29, 
"global_frame_idx": 28964, "task_index": 32}, {"db_idx": 28965, "episode_idx": 163, "frame_idx": 30, "global_frame_idx": 28965, "task_index": 32}, {"db_idx": 28966, "episode_idx": 163, "frame_idx": 31, "global_frame_idx": 28966, "task_index": 32}, {"db_idx": 28967, "episode_idx": 163, "frame_idx": 32, "global_frame_idx": 28967, "task_index": 32}, {"db_idx": 28968, "episode_idx": 163, "frame_idx": 33, "global_frame_idx": 28968, "task_index": 32}, {"db_idx": 28969, "episode_idx": 163, "frame_idx": 34, "global_frame_idx": 28969, "task_index": 32}, {"db_idx": 28970, "episode_idx": 163, "frame_idx": 35, "global_frame_idx": 28970, "task_index": 32}, {"db_idx": 28971, "episode_idx": 163, "frame_idx": 36, "global_frame_idx": 28971, "task_index": 32}, {"db_idx": 28972, "episode_idx": 163, "frame_idx": 37, "global_frame_idx": 28972, "task_index": 32}, {"db_idx": 28973, "episode_idx": 163, "frame_idx": 38, "global_frame_idx": 28973, "task_index": 32}, {"db_idx": 28974, "episode_idx": 163, "frame_idx": 39, "global_frame_idx": 28974, "task_index": 32}, {"db_idx": 28975, "episode_idx": 163, "frame_idx": 40, "global_frame_idx": 28975, "task_index": 32}, {"db_idx": 28976, "episode_idx": 163, "frame_idx": 41, "global_frame_idx": 28976, "task_index": 32}, {"db_idx": 28977, "episode_idx": 163, "frame_idx": 42, "global_frame_idx": 28977, "task_index": 32}, {"db_idx": 28978, "episode_idx": 163, "frame_idx": 43, "global_frame_idx": 28978, "task_index": 32}, {"db_idx": 28979, "episode_idx": 163, "frame_idx": 44, "global_frame_idx": 28979, "task_index": 32}, {"db_idx": 28980, "episode_idx": 163, "frame_idx": 45, "global_frame_idx": 28980, "task_index": 32}, {"db_idx": 28981, "episode_idx": 163, "frame_idx": 46, "global_frame_idx": 28981, "task_index": 32}, {"db_idx": 28982, "episode_idx": 163, "frame_idx": 47, "global_frame_idx": 28982, "task_index": 32}, {"db_idx": 28983, "episode_idx": 163, "frame_idx": 48, "global_frame_idx": 28983, "task_index": 32}, {"db_idx": 28984, "episode_idx": 
163, "frame_idx": 49, "global_frame_idx": 28984, "task_index": 32}, {"db_idx": 28985, "episode_idx": 163, "frame_idx": 50, "global_frame_idx": 28985, "task_index": 32}, {"db_idx": 28986, "episode_idx": 163, "frame_idx": 51, "global_frame_idx": 28986, "task_index": 32}, {"db_idx": 28987, "episode_idx": 163, "frame_idx": 52, "global_frame_idx": 28987, "task_index": 32}, {"db_idx": 28988, "episode_idx": 163, "frame_idx": 53, "global_frame_idx": 28988, "task_index": 32}, {"db_idx": 28989, "episode_idx": 163, "frame_idx": 54, "global_frame_idx": 28989, "task_index": 32}, {"db_idx": 28990, "episode_idx": 163, "frame_idx": 55, "global_frame_idx": 28990, "task_index": 32}, {"db_idx": 28991, "episode_idx": 163, "frame_idx": 56, "global_frame_idx": 28991, "task_index": 32}, {"db_idx": 28992, "episode_idx": 163, "frame_idx": 57, "global_frame_idx": 28992, "task_index": 32}, {"db_idx": 28993, "episode_idx": 163, "frame_idx": 58, "global_frame_idx": 28993, "task_index": 32}, {"db_idx": 28994, "episode_idx": 163, "frame_idx": 59, "global_frame_idx": 28994, "task_index": 32}, {"db_idx": 28995, "episode_idx": 163, "frame_idx": 60, "global_frame_idx": 28995, "task_index": 32}, {"db_idx": 28996, "episode_idx": 163, "frame_idx": 61, "global_frame_idx": 28996, "task_index": 32}, {"db_idx": 28997, "episode_idx": 163, "frame_idx": 62, "global_frame_idx": 28997, "task_index": 32}, {"db_idx": 28998, "episode_idx": 163, "frame_idx": 63, "global_frame_idx": 28998, "task_index": 32}, {"db_idx": 28999, "episode_idx": 163, "frame_idx": 64, "global_frame_idx": 28999, "task_index": 32}, {"db_idx": 29000, "episode_idx": 163, "frame_idx": 65, "global_frame_idx": 29000, "task_index": 32}, {"db_idx": 29001, "episode_idx": 163, "frame_idx": 66, "global_frame_idx": 29001, "task_index": 32}, {"db_idx": 29002, "episode_idx": 163, "frame_idx": 67, "global_frame_idx": 29002, "task_index": 32}, {"db_idx": 29003, "episode_idx": 163, "frame_idx": 68, "global_frame_idx": 29003, "task_index": 32}, {"db_idx": 
29004, "episode_idx": 163, "frame_idx": 69, "global_frame_idx": 29004, "task_index": 32}, {"db_idx": 29005, "episode_idx": 163, "frame_idx": 70, "global_frame_idx": 29005, "task_index": 32}, {"db_idx": 29006, "episode_idx": 163, "frame_idx": 71, "global_frame_idx": 29006, "task_index": 32}, {"db_idx": 29007, "episode_idx": 163, "frame_idx": 72, "global_frame_idx": 29007, "task_index": 32}, {"db_idx": 29008, "episode_idx": 163, "frame_idx": 73, "global_frame_idx": 29008, "task_index": 32}, {"db_idx": 29009, "episode_idx": 163, "frame_idx": 74, "global_frame_idx": 29009, "task_index": 32}, {"db_idx": 29010, "episode_idx": 163, "frame_idx": 75, "global_frame_idx": 29010, "task_index": 32}, {"db_idx": 29011, "episode_idx": 163, "frame_idx": 76, "global_frame_idx": 29011, "task_index": 32}, {"db_idx": 29012, "episode_idx": 163, "frame_idx": 77, "global_frame_idx": 29012, "task_index": 32}, {"db_idx": 29013, "episode_idx": 163, "frame_idx": 78, "global_frame_idx": 29013, "task_index": 32}, {"db_idx": 29014, "episode_idx": 163, "frame_idx": 79, "global_frame_idx": 29014, "task_index": 32}, {"db_idx": 29015, "episode_idx": 163, "frame_idx": 80, "global_frame_idx": 29015, "task_index": 32}, {"db_idx": 29016, "episode_idx": 163, "frame_idx": 81, "global_frame_idx": 29016, "task_index": 32}, {"db_idx": 29017, "episode_idx": 163, "frame_idx": 82, "global_frame_idx": 29017, "task_index": 32}, {"db_idx": 29018, "episode_idx": 163, "frame_idx": 83, "global_frame_idx": 29018, "task_index": 32}, {"db_idx": 29019, "episode_idx": 164, "frame_idx": 0, "global_frame_idx": 29019, "task_index": 32}, {"db_idx": 29020, "episode_idx": 164, "frame_idx": 1, "global_frame_idx": 29020, "task_index": 32}, {"db_idx": 29021, "episode_idx": 164, "frame_idx": 2, "global_frame_idx": 29021, "task_index": 32}, {"db_idx": 29022, "episode_idx": 164, "frame_idx": 3, "global_frame_idx": 29022, "task_index": 32}, {"db_idx": 29023, "episode_idx": 164, "frame_idx": 4, "global_frame_idx": 29023, "task_index": 
32}, {"db_idx": 29024, "episode_idx": 164, "frame_idx": 5, "global_frame_idx": 29024, "task_index": 32}, {"db_idx": 29025, "episode_idx": 164, "frame_idx": 6, "global_frame_idx": 29025, "task_index": 32}, {"db_idx": 29026, "episode_idx": 164, "frame_idx": 7, "global_frame_idx": 29026, "task_index": 32}, {"db_idx": 29027, "episode_idx": 164, "frame_idx": 8, "global_frame_idx": 29027, "task_index": 32}, {"db_idx": 29028, "episode_idx": 164, "frame_idx": 9, "global_frame_idx": 29028, "task_index": 32}, {"db_idx": 29029, "episode_idx": 164, "frame_idx": 10, "global_frame_idx": 29029, "task_index": 32}, {"db_idx": 29030, "episode_idx": 164, "frame_idx": 11, "global_frame_idx": 29030, "task_index": 32}, {"db_idx": 29031, "episode_idx": 164, "frame_idx": 12, "global_frame_idx": 29031, "task_index": 32}, {"db_idx": 29032, "episode_idx": 164, "frame_idx": 13, "global_frame_idx": 29032, "task_index": 32}, {"db_idx": 29033, "episode_idx": 164, "frame_idx": 14, "global_frame_idx": 29033, "task_index": 32}, {"db_idx": 29034, "episode_idx": 164, "frame_idx": 15, "global_frame_idx": 29034, "task_index": 32}, {"db_idx": 29035, "episode_idx": 164, "frame_idx": 16, "global_frame_idx": 29035, "task_index": 32}, {"db_idx": 29036, "episode_idx": 164, "frame_idx": 17, "global_frame_idx": 29036, "task_index": 32}, {"db_idx": 29037, "episode_idx": 164, "frame_idx": 18, "global_frame_idx": 29037, "task_index": 32}, {"db_idx": 29038, "episode_idx": 164, "frame_idx": 19, "global_frame_idx": 29038, "task_index": 32}, {"db_idx": 29039, "episode_idx": 164, "frame_idx": 20, "global_frame_idx": 29039, "task_index": 32}, {"db_idx": 29040, "episode_idx": 164, "frame_idx": 21, "global_frame_idx": 29040, "task_index": 32}, {"db_idx": 29041, "episode_idx": 164, "frame_idx": 22, "global_frame_idx": 29041, "task_index": 32}, {"db_idx": 29042, "episode_idx": 164, "frame_idx": 23, "global_frame_idx": 29042, "task_index": 32}, {"db_idx": 29043, "episode_idx": 164, "frame_idx": 24, "global_frame_idx": 
29043, "task_index": 32}, {"db_idx": 29044, "episode_idx": 164, "frame_idx": 25, "global_frame_idx": 29044, "task_index": 32}, {"db_idx": 29045, "episode_idx": 164, "frame_idx": 26, "global_frame_idx": 29045, "task_index": 32}, {"db_idx": 29046, "episode_idx": 164, "frame_idx": 27, "global_frame_idx": 29046, "task_index": 32}, {"db_idx": 29047, "episode_idx": 164, "frame_idx": 28, "global_frame_idx": 29047, "task_index": 32}, {"db_idx": 29048, "episode_idx": 164, "frame_idx": 29, "global_frame_idx": 29048, "task_index": 32}, {"db_idx": 29049, "episode_idx": 164, "frame_idx": 30, "global_frame_idx": 29049, "task_index": 32}, {"db_idx": 29050, "episode_idx": 164, "frame_idx": 31, "global_frame_idx": 29050, "task_index": 32}, {"db_idx": 29051, "episode_idx": 164, "frame_idx": 32, "global_frame_idx": 29051, "task_index": 32}, {"db_idx": 29052, "episode_idx": 164, "frame_idx": 33, "global_frame_idx": 29052, "task_index": 32}, {"db_idx": 29053, "episode_idx": 164, "frame_idx": 34, "global_frame_idx": 29053, "task_index": 32}, {"db_idx": 29054, "episode_idx": 164, "frame_idx": 35, "global_frame_idx": 29054, "task_index": 32}, {"db_idx": 29055, "episode_idx": 164, "frame_idx": 36, "global_frame_idx": 29055, "task_index": 32}, {"db_idx": 29056, "episode_idx": 164, "frame_idx": 37, "global_frame_idx": 29056, "task_index": 32}, {"db_idx": 29057, "episode_idx": 164, "frame_idx": 38, "global_frame_idx": 29057, "task_index": 32}, {"db_idx": 29058, "episode_idx": 164, "frame_idx": 39, "global_frame_idx": 29058, "task_index": 32}, {"db_idx": 29059, "episode_idx": 164, "frame_idx": 40, "global_frame_idx": 29059, "task_index": 32}, {"db_idx": 29060, "episode_idx": 164, "frame_idx": 41, "global_frame_idx": 29060, "task_index": 32}, {"db_idx": 29061, "episode_idx": 164, "frame_idx": 42, "global_frame_idx": 29061, "task_index": 32}, {"db_idx": 29062, "episode_idx": 164, "frame_idx": 43, "global_frame_idx": 29062, "task_index": 32}, {"db_idx": 29063, "episode_idx": 164, "frame_idx": 44, 
"global_frame_idx": 29063, "task_index": 32}, {"db_idx": 29064, "episode_idx": 164, "frame_idx": 45, "global_frame_idx": 29064, "task_index": 32}, {"db_idx": 29065, "episode_idx": 164, "frame_idx": 46, "global_frame_idx": 29065, "task_index": 32}, {"db_idx": 29066, "episode_idx": 164, "frame_idx": 47, "global_frame_idx": 29066, "task_index": 32}, {"db_idx": 29067, "episode_idx": 164, "frame_idx": 48, "global_frame_idx": 29067, "task_index": 32}, {"db_idx": 29068, "episode_idx": 164, "frame_idx": 49, "global_frame_idx": 29068, "task_index": 32}, {"db_idx": 29069, "episode_idx": 164, "frame_idx": 50, "global_frame_idx": 29069, "task_index": 32}, {"db_idx": 29070, "episode_idx": 164, "frame_idx": 51, "global_frame_idx": 29070, "task_index": 32}, {"db_idx": 29071, "episode_idx": 164, "frame_idx": 52, "global_frame_idx": 29071, "task_index": 32}, {"db_idx": 29072, "episode_idx": 164, "frame_idx": 53, "global_frame_idx": 29072, "task_index": 32}, {"db_idx": 29073, "episode_idx": 164, "frame_idx": 54, "global_frame_idx": 29073, "task_index": 32}, {"db_idx": 29074, "episode_idx": 164, "frame_idx": 55, "global_frame_idx": 29074, "task_index": 32}, {"db_idx": 29075, "episode_idx": 164, "frame_idx": 56, "global_frame_idx": 29075, "task_index": 32}, {"db_idx": 29076, "episode_idx": 164, "frame_idx": 57, "global_frame_idx": 29076, "task_index": 32}, {"db_idx": 29077, "episode_idx": 164, "frame_idx": 58, "global_frame_idx": 29077, "task_index": 32}, {"db_idx": 29078, "episode_idx": 164, "frame_idx": 59, "global_frame_idx": 29078, "task_index": 32}, {"db_idx": 29079, "episode_idx": 164, "frame_idx": 60, "global_frame_idx": 29079, "task_index": 32}, {"db_idx": 29080, "episode_idx": 164, "frame_idx": 61, "global_frame_idx": 29080, "task_index": 32}, {"db_idx": 29081, "episode_idx": 164, "frame_idx": 62, "global_frame_idx": 29081, "task_index": 32}, {"db_idx": 29082, "episode_idx": 164, "frame_idx": 63, "global_frame_idx": 29082, "task_index": 32}, {"db_idx": 29083, "episode_idx": 
164, "frame_idx": 64, "global_frame_idx": 29083, "task_index": 32}, {"db_idx": 29084, "episode_idx": 164, "frame_idx": 65, "global_frame_idx": 29084, "task_index": 32}, {"db_idx": 29085, "episode_idx": 164, "frame_idx": 66, "global_frame_idx": 29085, "task_index": 32}, {"db_idx": 29086, "episode_idx": 164, "frame_idx": 67, "global_frame_idx": 29086, "task_index": 32}, {"db_idx": 29087, "episode_idx": 164, "frame_idx": 68, "global_frame_idx": 29087, "task_index": 32}, {"db_idx": 29088, "episode_idx": 164, "frame_idx": 69, "global_frame_idx": 29088, "task_index": 32}, {"db_idx": 29089, "episode_idx": 164, "frame_idx": 70, "global_frame_idx": 29089, "task_index": 32}, {"db_idx": 29090, "episode_idx": 164, "frame_idx": 71, "global_frame_idx": 29090, "task_index": 32}, {"db_idx": 29091, "episode_idx": 164, "frame_idx": 72, "global_frame_idx": 29091, "task_index": 32}, {"db_idx": 29092, "episode_idx": 164, "frame_idx": 73, "global_frame_idx": 29092, "task_index": 32}, {"db_idx": 29093, "episode_idx": 164, "frame_idx": 74, "global_frame_idx": 29093, "task_index": 32}, {"db_idx": 29094, "episode_idx": 164, "frame_idx": 75, "global_frame_idx": 29094, "task_index": 32}, {"db_idx": 29095, "episode_idx": 164, "frame_idx": 76, "global_frame_idx": 29095, "task_index": 32}, {"db_idx": 29096, "episode_idx": 164, "frame_idx": 77, "global_frame_idx": 29096, "task_index": 32}, {"db_idx": 29097, "episode_idx": 164, "frame_idx": 78, "global_frame_idx": 29097, "task_index": 32}, {"db_idx": 29098, "episode_idx": 164, "frame_idx": 79, "global_frame_idx": 29098, "task_index": 32}, {"db_idx": 29099, "episode_idx": 164, "frame_idx": 80, "global_frame_idx": 29099, "task_index": 32}, {"db_idx": 29100, "episode_idx": 164, "frame_idx": 81, "global_frame_idx": 29100, "task_index": 32}, {"db_idx": 29101, "episode_idx": 164, "frame_idx": 82, "global_frame_idx": 29101, "task_index": 32}, {"db_idx": 29102, "episode_idx": 164, "frame_idx": 83, "global_frame_idx": 29102, "task_index": 32}, {"db_idx": 
29103, "episode_idx": 164, "frame_idx": 84, "global_frame_idx": 29103, "task_index": 32}, {"db_idx": 29104, "episode_idx": 164, "frame_idx": 85, "global_frame_idx": 29104, "task_index": 32}, {"db_idx": 29105, "episode_idx": 164, "frame_idx": 86, "global_frame_idx": 29105, "task_index": 32}, {"db_idx": 29106, "episode_idx": 164, "frame_idx": 87, "global_frame_idx": 29106, "task_index": 32}, {"db_idx": 29107, "episode_idx": 164, "frame_idx": 88, "global_frame_idx": 29107, "task_index": 32}, {"db_idx": 29108, "episode_idx": 164, "frame_idx": 89, "global_frame_idx": 29108, "task_index": 32}, {"db_idx": 29109, "episode_idx": 164, "frame_idx": 90, "global_frame_idx": 29109, "task_index": 32}, {"db_idx": 29110, "episode_idx": 164, "frame_idx": 91, "global_frame_idx": 29110, "task_index": 32}, {"db_idx": 29111, "episode_idx": 164, "frame_idx": 92, "global_frame_idx": 29111, "task_index": 32}, {"db_idx": 29112, "episode_idx": 165, "frame_idx": 0, "global_frame_idx": 29112, "task_index": 33}, {"db_idx": 29113, "episode_idx": 165, "frame_idx": 1, "global_frame_idx": 29113, "task_index": 33}, {"db_idx": 29114, "episode_idx": 165, "frame_idx": 2, "global_frame_idx": 29114, "task_index": 33}, {"db_idx": 29115, "episode_idx": 165, "frame_idx": 3, "global_frame_idx": 29115, "task_index": 33}, {"db_idx": 29116, "episode_idx": 165, "frame_idx": 4, "global_frame_idx": 29116, "task_index": 33}, {"db_idx": 29117, "episode_idx": 165, "frame_idx": 5, "global_frame_idx": 29117, "task_index": 33}, {"db_idx": 29118, "episode_idx": 165, "frame_idx": 6, "global_frame_idx": 29118, "task_index": 33}, {"db_idx": 29119, "episode_idx": 165, "frame_idx": 7, "global_frame_idx": 29119, "task_index": 33}, {"db_idx": 29120, "episode_idx": 165, "frame_idx": 8, "global_frame_idx": 29120, "task_index": 33}, {"db_idx": 29121, "episode_idx": 165, "frame_idx": 9, "global_frame_idx": 29121, "task_index": 33}, {"db_idx": 29122, "episode_idx": 165, "frame_idx": 10, "global_frame_idx": 29122, "task_index": 33}, 
{"db_idx": 29123, "episode_idx": 165, "frame_idx": 11, "global_frame_idx": 29123, "task_index": 33}, {"db_idx": 29124, "episode_idx": 165, "frame_idx": 12, "global_frame_idx": 29124, "task_index": 33}, {"db_idx": 29125, "episode_idx": 165, "frame_idx": 13, "global_frame_idx": 29125, "task_index": 33}, {"db_idx": 29126, "episode_idx": 165, "frame_idx": 14, "global_frame_idx": 29126, "task_index": 33}, {"db_idx": 29127, "episode_idx": 165, "frame_idx": 15, "global_frame_idx": 29127, "task_index": 33}, {"db_idx": 29128, "episode_idx": 165, "frame_idx": 16, "global_frame_idx": 29128, "task_index": 33}, {"db_idx": 29129, "episode_idx": 165, "frame_idx": 17, "global_frame_idx": 29129, "task_index": 33}, {"db_idx": 29130, "episode_idx": 165, "frame_idx": 18, "global_frame_idx": 29130, "task_index": 33}, {"db_idx": 29131, "episode_idx": 165, "frame_idx": 19, "global_frame_idx": 29131, "task_index": 33}, {"db_idx": 29132, "episode_idx": 165, "frame_idx": 20, "global_frame_idx": 29132, "task_index": 33}, {"db_idx": 29133, "episode_idx": 165, "frame_idx": 21, "global_frame_idx": 29133, "task_index": 33}, {"db_idx": 29134, "episode_idx": 165, "frame_idx": 22, "global_frame_idx": 29134, "task_index": 33}, {"db_idx": 29135, "episode_idx": 165, "frame_idx": 23, "global_frame_idx": 29135, "task_index": 33}, {"db_idx": 29136, "episode_idx": 165, "frame_idx": 24, "global_frame_idx": 29136, "task_index": 33}, {"db_idx": 29137, "episode_idx": 165, "frame_idx": 25, "global_frame_idx": 29137, "task_index": 33}, {"db_idx": 29138, "episode_idx": 165, "frame_idx": 26, "global_frame_idx": 29138, "task_index": 33}, {"db_idx": 29139, "episode_idx": 165, "frame_idx": 27, "global_frame_idx": 29139, "task_index": 33}, {"db_idx": 29140, "episode_idx": 165, "frame_idx": 28, "global_frame_idx": 29140, "task_index": 33}, {"db_idx": 29141, "episode_idx": 165, "frame_idx": 29, "global_frame_idx": 29141, "task_index": 33}, {"db_idx": 29142, "episode_idx": 165, "frame_idx": 30, "global_frame_idx": 
29142, "task_index": 33}, {"db_idx": 29143, "episode_idx": 165, "frame_idx": 31, "global_frame_idx": 29143, "task_index": 33}, {"db_idx": 29144, "episode_idx": 165, "frame_idx": 32, "global_frame_idx": 29144, "task_index": 33}, {"db_idx": 29145, "episode_idx": 165, "frame_idx": 33, "global_frame_idx": 29145, "task_index": 33}, {"db_idx": 29146, "episode_idx": 165, "frame_idx": 34, "global_frame_idx": 29146, "task_index": 33}, {"db_idx": 29147, "episode_idx": 165, "frame_idx": 35, "global_frame_idx": 29147, "task_index": 33}, {"db_idx": 29148, "episode_idx": 165, "frame_idx": 36, "global_frame_idx": 29148, "task_index": 33}, {"db_idx": 29149, "episode_idx": 165, "frame_idx": 37, "global_frame_idx": 29149, "task_index": 33}, {"db_idx": 29150, "episode_idx": 165, "frame_idx": 38, "global_frame_idx": 29150, "task_index": 33}, {"db_idx": 29151, "episode_idx": 165, "frame_idx": 39, "global_frame_idx": 29151, "task_index": 33}, {"db_idx": 29152, "episode_idx": 165, "frame_idx": 40, "global_frame_idx": 29152, "task_index": 33}, {"db_idx": 29153, "episode_idx": 165, "frame_idx": 41, "global_frame_idx": 29153, "task_index": 33}, {"db_idx": 29154, "episode_idx": 165, "frame_idx": 42, "global_frame_idx": 29154, "task_index": 33}, {"db_idx": 29155, "episode_idx": 165, "frame_idx": 43, "global_frame_idx": 29155, "task_index": 33}, {"db_idx": 29156, "episode_idx": 165, "frame_idx": 44, "global_frame_idx": 29156, "task_index": 33}, {"db_idx": 29157, "episode_idx": 165, "frame_idx": 45, "global_frame_idx": 29157, "task_index": 33}, {"db_idx": 29158, "episode_idx": 165, "frame_idx": 46, "global_frame_idx": 29158, "task_index": 33}, {"db_idx": 29159, "episode_idx": 165, "frame_idx": 47, "global_frame_idx": 29159, "task_index": 33}, {"db_idx": 29160, "episode_idx": 165, "frame_idx": 48, "global_frame_idx": 29160, "task_index": 33}, {"db_idx": 29161, "episode_idx": 165, "frame_idx": 49, "global_frame_idx": 29161, "task_index": 33}, {"db_idx": 29162, "episode_idx": 165, "frame_idx": 50, 
"global_frame_idx": 29162, "task_index": 33}, {"db_idx": 29163, "episode_idx": 165, "frame_idx": 51, "global_frame_idx": 29163, "task_index": 33}, {"db_idx": 29164, "episode_idx": 165, "frame_idx": 52, "global_frame_idx": 29164, "task_index": 33}, {"db_idx": 29165, "episode_idx": 165, "frame_idx": 53, "global_frame_idx": 29165, "task_index": 33}, {"db_idx": 29166, "episode_idx": 165, "frame_idx": 54, "global_frame_idx": 29166, "task_index": 33}, {"db_idx": 29167, "episode_idx": 165, "frame_idx": 55, "global_frame_idx": 29167, "task_index": 33}, {"db_idx": 29168, "episode_idx": 165, "frame_idx": 56, "global_frame_idx": 29168, "task_index": 33}, {"db_idx": 29169, "episode_idx": 165, "frame_idx": 57, "global_frame_idx": 29169, "task_index": 33}, {"db_idx": 29170, "episode_idx": 165, "frame_idx": 58, "global_frame_idx": 29170, "task_index": 33}, {"db_idx": 29171, "episode_idx": 165, "frame_idx": 59, "global_frame_idx": 29171, "task_index": 33}, {"db_idx": 29172, "episode_idx": 165, "frame_idx": 60, "global_frame_idx": 29172, "task_index": 33}, {"db_idx": 29173, "episode_idx": 165, "frame_idx": 61, "global_frame_idx": 29173, "task_index": 33}, {"db_idx": 29174, "episode_idx": 165, "frame_idx": 62, "global_frame_idx": 29174, "task_index": 33}, {"db_idx": 29175, "episode_idx": 165, "frame_idx": 63, "global_frame_idx": 29175, "task_index": 33}, {"db_idx": 29176, "episode_idx": 165, "frame_idx": 64, "global_frame_idx": 29176, "task_index": 33}, {"db_idx": 29177, "episode_idx": 165, "frame_idx": 65, "global_frame_idx": 29177, "task_index": 33}, {"db_idx": 29178, "episode_idx": 165, "frame_idx": 66, "global_frame_idx": 29178, "task_index": 33}, {"db_idx": 29179, "episode_idx": 165, "frame_idx": 67, "global_frame_idx": 29179, "task_index": 33}, {"db_idx": 29180, "episode_idx": 165, "frame_idx": 68, "global_frame_idx": 29180, "task_index": 33}, {"db_idx": 29181, "episode_idx": 165, "frame_idx": 69, "global_frame_idx": 29181, "task_index": 33}, {"db_idx": 29182, "episode_idx": 
165, "frame_idx": 70, "global_frame_idx": 29182, "task_index": 33}, {"db_idx": 29183, "episode_idx": 165, "frame_idx": 71, "global_frame_idx": 29183, "task_index": 33}, {"db_idx": 29184, "episode_idx": 165, "frame_idx": 72, "global_frame_idx": 29184, "task_index": 33}, {"db_idx": 29185, "episode_idx": 165, "frame_idx": 73, "global_frame_idx": 29185, "task_index": 33}, {"db_idx": 29186, "episode_idx": 165, "frame_idx": 74, "global_frame_idx": 29186, "task_index": 33}, {"db_idx": 29187, "episode_idx": 165, "frame_idx": 75, "global_frame_idx": 29187, "task_index": 33}, {"db_idx": 29188, "episode_idx": 165, "frame_idx": 76, "global_frame_idx": 29188, "task_index": 33}, {"db_idx": 29189, "episode_idx": 165, "frame_idx": 77, "global_frame_idx": 29189, "task_index": 33}, {"db_idx": 29190, "episode_idx": 165, "frame_idx": 78, "global_frame_idx": 29190, "task_index": 33}, {"db_idx": 29191, "episode_idx": 165, "frame_idx": 79, "global_frame_idx": 29191, "task_index": 33}, {"db_idx": 29192, "episode_idx": 165, "frame_idx": 80, "global_frame_idx": 29192, "task_index": 33}, {"db_idx": 29193, "episode_idx": 165, "frame_idx": 81, "global_frame_idx": 29193, "task_index": 33}, {"db_idx": 29194, "episode_idx": 165, "frame_idx": 82, "global_frame_idx": 29194, "task_index": 33}, {"db_idx": 29195, "episode_idx": 165, "frame_idx": 83, "global_frame_idx": 29195, "task_index": 33}, {"db_idx": 29196, "episode_idx": 165, "frame_idx": 84, "global_frame_idx": 29196, "task_index": 33}, {"db_idx": 29197, "episode_idx": 165, "frame_idx": 85, "global_frame_idx": 29197, "task_index": 33}, {"db_idx": 29198, "episode_idx": 165, "frame_idx": 86, "global_frame_idx": 29198, "task_index": 33}, {"db_idx": 29199, "episode_idx": 165, "frame_idx": 87, "global_frame_idx": 29199, "task_index": 33}, {"db_idx": 29200, "episode_idx": 165, "frame_idx": 88, "global_frame_idx": 29200, "task_index": 33}, {"db_idx": 29201, "episode_idx": 165, "frame_idx": 89, "global_frame_idx": 29201, "task_index": 33}, {"db_idx": 
29202, "episode_idx": 165, "frame_idx": 90, "global_frame_idx": 29202, "task_index": 33}, {"db_idx": 29203, "episode_idx": 165, "frame_idx": 91, "global_frame_idx": 29203, "task_index": 33}, {"db_idx": 29204, "episode_idx": 165, "frame_idx": 92, "global_frame_idx": 29204, "task_index": 33}, {"db_idx": 29205, "episode_idx": 165, "frame_idx": 93, "global_frame_idx": 29205, "task_index": 33}, {"db_idx": 29206, "episode_idx": 165, "frame_idx": 94, "global_frame_idx": 29206, "task_index": 33}, {"db_idx": 29207, "episode_idx": 165, "frame_idx": 95, "global_frame_idx": 29207, "task_index": 33}, {"db_idx": 29208, "episode_idx": 165, "frame_idx": 96, "global_frame_idx": 29208, "task_index": 33}, {"db_idx": 29209, "episode_idx": 165, "frame_idx": 97, "global_frame_idx": 29209, "task_index": 33}, {"db_idx": 29210, "episode_idx": 165, "frame_idx": 98, "global_frame_idx": 29210, "task_index": 33}, {"db_idx": 29211, "episode_idx": 165, "frame_idx": 99, "global_frame_idx": 29211, "task_index": 33}, {"db_idx": 29212, "episode_idx": 165, "frame_idx": 100, "global_frame_idx": 29212, "task_index": 33}, {"db_idx": 29213, "episode_idx": 165, "frame_idx": 101, "global_frame_idx": 29213, "task_index": 33}, {"db_idx": 29214, "episode_idx": 165, "frame_idx": 102, "global_frame_idx": 29214, "task_index": 33}, {"db_idx": 29215, "episode_idx": 165, "frame_idx": 103, "global_frame_idx": 29215, "task_index": 33}, {"db_idx": 29216, "episode_idx": 165, "frame_idx": 104, "global_frame_idx": 29216, "task_index": 33}, {"db_idx": 29217, "episode_idx": 165, "frame_idx": 105, "global_frame_idx": 29217, "task_index": 33}, {"db_idx": 29218, "episode_idx": 165, "frame_idx": 106, "global_frame_idx": 29218, "task_index": 33}, {"db_idx": 29219, "episode_idx": 165, "frame_idx": 107, "global_frame_idx": 29219, "task_index": 33}, {"db_idx": 29220, "episode_idx": 165, "frame_idx": 108, "global_frame_idx": 29220, "task_index": 33}, {"db_idx": 29221, "episode_idx": 165, "frame_idx": 109, "global_frame_idx": 29221, 
"task_index": 33}, {"db_idx": 29222, "episode_idx": 165, "frame_idx": 110, "global_frame_idx": 29222, "task_index": 33}, {"db_idx": 29223, "episode_idx": 165, "frame_idx": 111, "global_frame_idx": 29223, "task_index": 33}, {"db_idx": 29224, "episode_idx": 165, "frame_idx": 112, "global_frame_idx": 29224, "task_index": 33}, {"db_idx": 29225, "episode_idx": 165, "frame_idx": 113, "global_frame_idx": 29225, "task_index": 33}, {"db_idx": 29226, "episode_idx": 165, "frame_idx": 114, "global_frame_idx": 29226, "task_index": 33}, {"db_idx": 29227, "episode_idx": 165, "frame_idx": 115, "global_frame_idx": 29227, "task_index": 33}, {"db_idx": 29228, "episode_idx": 165, "frame_idx": 116, "global_frame_idx": 29228, "task_index": 33}, {"db_idx": 29229, "episode_idx": 165, "frame_idx": 117, "global_frame_idx": 29229, "task_index": 33}, {"db_idx": 29230, "episode_idx": 165, "frame_idx": 118, "global_frame_idx": 29230, "task_index": 33}, {"db_idx": 29231, "episode_idx": 165, "frame_idx": 119, "global_frame_idx": 29231, "task_index": 33}, {"db_idx": 29232, "episode_idx": 165, "frame_idx": 120, "global_frame_idx": 29232, "task_index": 33}, {"db_idx": 29233, "episode_idx": 165, "frame_idx": 121, "global_frame_idx": 29233, "task_index": 33}, {"db_idx": 29234, "episode_idx": 165, "frame_idx": 122, "global_frame_idx": 29234, "task_index": 33}, {"db_idx": 29235, "episode_idx": 165, "frame_idx": 123, "global_frame_idx": 29235, "task_index": 33}, {"db_idx": 29236, "episode_idx": 165, "frame_idx": 124, "global_frame_idx": 29236, "task_index": 33}, {"db_idx": 29237, "episode_idx": 165, "frame_idx": 125, "global_frame_idx": 29237, "task_index": 33}, {"db_idx": 29238, "episode_idx": 165, "frame_idx": 126, "global_frame_idx": 29238, "task_index": 33}, {"db_idx": 29239, "episode_idx": 165, "frame_idx": 127, "global_frame_idx": 29239, "task_index": 33}, {"db_idx": 29240, "episode_idx": 165, "frame_idx": 128, "global_frame_idx": 29240, "task_index": 33}, {"db_idx": 29241, "episode_idx": 165, 
"frame_idx": 129, "global_frame_idx": 29241, "task_index": 33}, {"db_idx": 29242, "episode_idx": 165, "frame_idx": 130, "global_frame_idx": 29242, "task_index": 33}, {"db_idx": 29243, "episode_idx": 165, "frame_idx": 131, "global_frame_idx": 29243, "task_index": 33}, {"db_idx": 29244, "episode_idx": 165, "frame_idx": 132, "global_frame_idx": 29244, "task_index": 33}, {"db_idx": 29245, "episode_idx": 165, "frame_idx": 133, "global_frame_idx": 29245, "task_index": 33}, {"db_idx": 29246, "episode_idx": 165, "frame_idx": 134, "global_frame_idx": 29246, "task_index": 33}, {"db_idx": 29247, "episode_idx": 165, "frame_idx": 135, "global_frame_idx": 29247, "task_index": 33}, {"db_idx": 29248, "episode_idx": 165, "frame_idx": 136, "global_frame_idx": 29248, "task_index": 33}, {"db_idx": 29249, "episode_idx": 165, "frame_idx": 137, "global_frame_idx": 29249, "task_index": 33}, {"db_idx": 29250, "episode_idx": 165, "frame_idx": 138, "global_frame_idx": 29250, "task_index": 33}, {"db_idx": 29251, "episode_idx": 165, "frame_idx": 139, "global_frame_idx": 29251, "task_index": 33}, {"db_idx": 29252, "episode_idx": 166, "frame_idx": 0, "global_frame_idx": 29252, "task_index": 33}, {"db_idx": 29253, "episode_idx": 166, "frame_idx": 1, "global_frame_idx": 29253, "task_index": 33}, {"db_idx": 29254, "episode_idx": 166, "frame_idx": 2, "global_frame_idx": 29254, "task_index": 33}, {"db_idx": 29255, "episode_idx": 166, "frame_idx": 3, "global_frame_idx": 29255, "task_index": 33}, {"db_idx": 29256, "episode_idx": 166, "frame_idx": 4, "global_frame_idx": 29256, "task_index": 33}, {"db_idx": 29257, "episode_idx": 166, "frame_idx": 5, "global_frame_idx": 29257, "task_index": 33}, {"db_idx": 29258, "episode_idx": 166, "frame_idx": 6, "global_frame_idx": 29258, "task_index": 33}, {"db_idx": 29259, "episode_idx": 166, "frame_idx": 7, "global_frame_idx": 29259, "task_index": 33}, {"db_idx": 29260, "episode_idx": 166, "frame_idx": 8, "global_frame_idx": 29260, "task_index": 33}, {"db_idx": 
29261, "episode_idx": 166, "frame_idx": 9, "global_frame_idx": 29261, "task_index": 33}, {"db_idx": 29262, "episode_idx": 166, "frame_idx": 10, "global_frame_idx": 29262, "task_index": 33}, {"db_idx": 29263, "episode_idx": 166, "frame_idx": 11, "global_frame_idx": 29263, "task_index": 33}, {"db_idx": 29264, "episode_idx": 166, "frame_idx": 12, "global_frame_idx": 29264, "task_index": 33}, {"db_idx": 29265, "episode_idx": 166, "frame_idx": 13, "global_frame_idx": 29265, "task_index": 33}, {"db_idx": 29266, "episode_idx": 166, "frame_idx": 14, "global_frame_idx": 29266, "task_index": 33}, {"db_idx": 29267, "episode_idx": 166, "frame_idx": 15, "global_frame_idx": 29267, "task_index": 33}, {"db_idx": 29268, "episode_idx": 166, "frame_idx": 16, "global_frame_idx": 29268, "task_index": 33}, {"db_idx": 29269, "episode_idx": 166, "frame_idx": 17, "global_frame_idx": 29269, "task_index": 33}, {"db_idx": 29270, "episode_idx": 166, "frame_idx": 18, "global_frame_idx": 29270, "task_index": 33}, {"db_idx": 29271, "episode_idx": 166, "frame_idx": 19, "global_frame_idx": 29271, "task_index": 33}, {"db_idx": 29272, "episode_idx": 166, "frame_idx": 20, "global_frame_idx": 29272, "task_index": 33}, {"db_idx": 29273, "episode_idx": 166, "frame_idx": 21, "global_frame_idx": 29273, "task_index": 33}, {"db_idx": 29274, "episode_idx": 166, "frame_idx": 22, "global_frame_idx": 29274, "task_index": 33}, {"db_idx": 29275, "episode_idx": 166, "frame_idx": 23, "global_frame_idx": 29275, "task_index": 33}, {"db_idx": 29276, "episode_idx": 166, "frame_idx": 24, "global_frame_idx": 29276, "task_index": 33}, {"db_idx": 29277, "episode_idx": 166, "frame_idx": 25, "global_frame_idx": 29277, "task_index": 33}, {"db_idx": 29278, "episode_idx": 166, "frame_idx": 26, "global_frame_idx": 29278, "task_index": 33}, {"db_idx": 29279, "episode_idx": 166, "frame_idx": 27, "global_frame_idx": 29279, "task_index": 33}, {"db_idx": 29280, "episode_idx": 166, "frame_idx": 28, "global_frame_idx": 29280, 
"task_index": 33}, {"db_idx": 29281, "episode_idx": 166, "frame_idx": 29, "global_frame_idx": 29281, "task_index": 33}, {"db_idx": 29282, "episode_idx": 166, "frame_idx": 30, "global_frame_idx": 29282, "task_index": 33}, {"db_idx": 29283, "episode_idx": 166, "frame_idx": 31, "global_frame_idx": 29283, "task_index": 33}, {"db_idx": 29284, "episode_idx": 166, "frame_idx": 32, "global_frame_idx": 29284, "task_index": 33}, {"db_idx": 29285, "episode_idx": 166, "frame_idx": 33, "global_frame_idx": 29285, "task_index": 33}, {"db_idx": 29286, "episode_idx": 166, "frame_idx": 34, "global_frame_idx": 29286, "task_index": 33}, {"db_idx": 29287, "episode_idx": 166, "frame_idx": 35, "global_frame_idx": 29287, "task_index": 33}, {"db_idx": 29288, "episode_idx": 166, "frame_idx": 36, "global_frame_idx": 29288, "task_index": 33}, {"db_idx": 29289, "episode_idx": 166, "frame_idx": 37, "global_frame_idx": 29289, "task_index": 33}, {"db_idx": 29290, "episode_idx": 166, "frame_idx": 38, "global_frame_idx": 29290, "task_index": 33}, {"db_idx": 29291, "episode_idx": 166, "frame_idx": 39, "global_frame_idx": 29291, "task_index": 33}, {"db_idx": 29292, "episode_idx": 166, "frame_idx": 40, "global_frame_idx": 29292, "task_index": 33}, {"db_idx": 29293, "episode_idx": 166, "frame_idx": 41, "global_frame_idx": 29293, "task_index": 33}, {"db_idx": 29294, "episode_idx": 166, "frame_idx": 42, "global_frame_idx": 29294, "task_index": 33}, {"db_idx": 29295, "episode_idx": 166, "frame_idx": 43, "global_frame_idx": 29295, "task_index": 33}, {"db_idx": 29296, "episode_idx": 166, "frame_idx": 44, "global_frame_idx": 29296, "task_index": 33}, {"db_idx": 29297, "episode_idx": 166, "frame_idx": 45, "global_frame_idx": 29297, "task_index": 33}, {"db_idx": 29298, "episode_idx": 166, "frame_idx": 46, "global_frame_idx": 29298, "task_index": 33}, {"db_idx": 29299, "episode_idx": 166, "frame_idx": 47, "global_frame_idx": 29299, "task_index": 33}, {"db_idx": 29300, "episode_idx": 166, "frame_idx": 48, 
"global_frame_idx": 29300, "task_index": 33}, {"db_idx": 29301, "episode_idx": 166, "frame_idx": 49, "global_frame_idx": 29301, "task_index": 33}, {"db_idx": 29302, "episode_idx": 166, "frame_idx": 50, "global_frame_idx": 29302, "task_index": 33}, {"db_idx": 29303, "episode_idx": 166, "frame_idx": 51, "global_frame_idx": 29303, "task_index": 33}, {"db_idx": 29304, "episode_idx": 166, "frame_idx": 52, "global_frame_idx": 29304, "task_index": 33}, {"db_idx": 29305, "episode_idx": 166, "frame_idx": 53, "global_frame_idx": 29305, "task_index": 33}, {"db_idx": 29306, "episode_idx": 166, "frame_idx": 54, "global_frame_idx": 29306, "task_index": 33}, {"db_idx": 29307, "episode_idx": 166, "frame_idx": 55, "global_frame_idx": 29307, "task_index": 33}, {"db_idx": 29308, "episode_idx": 166, "frame_idx": 56, "global_frame_idx": 29308, "task_index": 33}, {"db_idx": 29309, "episode_idx": 166, "frame_idx": 57, "global_frame_idx": 29309, "task_index": 33}, {"db_idx": 29310, "episode_idx": 166, "frame_idx": 58, "global_frame_idx": 29310, "task_index": 33}, {"db_idx": 29311, "episode_idx": 166, "frame_idx": 59, "global_frame_idx": 29311, "task_index": 33}, {"db_idx": 29312, "episode_idx": 166, "frame_idx": 60, "global_frame_idx": 29312, "task_index": 33}, {"db_idx": 29313, "episode_idx": 166, "frame_idx": 61, "global_frame_idx": 29313, "task_index": 33}, {"db_idx": 29314, "episode_idx": 166, "frame_idx": 62, "global_frame_idx": 29314, "task_index": 33}, {"db_idx": 29315, "episode_idx": 166, "frame_idx": 63, "global_frame_idx": 29315, "task_index": 33}, {"db_idx": 29316, "episode_idx": 166, "frame_idx": 64, "global_frame_idx": 29316, "task_index": 33}, {"db_idx": 29317, "episode_idx": 166, "frame_idx": 65, "global_frame_idx": 29317, "task_index": 33}, {"db_idx": 29318, "episode_idx": 166, "frame_idx": 66, "global_frame_idx": 29318, "task_index": 33}, {"db_idx": 29319, "episode_idx": 166, "frame_idx": 67, "global_frame_idx": 29319, "task_index": 33}, {"db_idx": 29320, "episode_idx": 
166, "frame_idx": 68, "global_frame_idx": 29320, "task_index": 33}, {"db_idx": 29321, "episode_idx": 166, "frame_idx": 69, "global_frame_idx": 29321, "task_index": 33}, {"db_idx": 29322, "episode_idx": 166, "frame_idx": 70, "global_frame_idx": 29322, "task_index": 33}, {"db_idx": 29323, "episode_idx": 166, "frame_idx": 71, "global_frame_idx": 29323, "task_index": 33}, {"db_idx": 29324, "episode_idx": 166, "frame_idx": 72, "global_frame_idx": 29324, "task_index": 33}, {"db_idx": 29325, "episode_idx": 166, "frame_idx": 73, "global_frame_idx": 29325, "task_index": 33}, {"db_idx": 29326, "episode_idx": 166, "frame_idx": 74, "global_frame_idx": 29326, "task_index": 33}, {"db_idx": 29327, "episode_idx": 166, "frame_idx": 75, "global_frame_idx": 29327, "task_index": 33}, {"db_idx": 29328, "episode_idx": 166, "frame_idx": 76, "global_frame_idx": 29328, "task_index": 33}, {"db_idx": 29329, "episode_idx": 166, "frame_idx": 77, "global_frame_idx": 29329, "task_index": 33}, {"db_idx": 29330, "episode_idx": 166, "frame_idx": 78, "global_frame_idx": 29330, "task_index": 33}, {"db_idx": 29331, "episode_idx": 166, "frame_idx": 79, "global_frame_idx": 29331, "task_index": 33}, {"db_idx": 29332, "episode_idx": 166, "frame_idx": 80, "global_frame_idx": 29332, "task_index": 33}, {"db_idx": 29333, "episode_idx": 166, "frame_idx": 81, "global_frame_idx": 29333, "task_index": 33}, {"db_idx": 29334, "episode_idx": 166, "frame_idx": 82, "global_frame_idx": 29334, "task_index": 33}, {"db_idx": 29335, "episode_idx": 166, "frame_idx": 83, "global_frame_idx": 29335, "task_index": 33}, {"db_idx": 29336, "episode_idx": 166, "frame_idx": 84, "global_frame_idx": 29336, "task_index": 33}, {"db_idx": 29337, "episode_idx": 166, "frame_idx": 85, "global_frame_idx": 29337, "task_index": 33}, {"db_idx": 29338, "episode_idx": 166, "frame_idx": 86, "global_frame_idx": 29338, "task_index": 33}, {"db_idx": 29339, "episode_idx": 166, "frame_idx": 87, "global_frame_idx": 29339, "task_index": 33}, {"db_idx": 
29340, "episode_idx": 166, "frame_idx": 88, "global_frame_idx": 29340, "task_index": 33}, {"db_idx": 29341, "episode_idx": 166, "frame_idx": 89, "global_frame_idx": 29341, "task_index": 33}, {"db_idx": 29342, "episode_idx": 166, "frame_idx": 90, "global_frame_idx": 29342, "task_index": 33}, {"db_idx": 29343, "episode_idx": 166, "frame_idx": 91, "global_frame_idx": 29343, "task_index": 33}, {"db_idx": 29344, "episode_idx": 166, "frame_idx": 92, "global_frame_idx": 29344, "task_index": 33}, {"db_idx": 29345, "episode_idx": 166, "frame_idx": 93, "global_frame_idx": 29345, "task_index": 33}, {"db_idx": 29346, "episode_idx": 166, "frame_idx": 94, "global_frame_idx": 29346, "task_index": 33}, {"db_idx": 29347, "episode_idx": 166, "frame_idx": 95, "global_frame_idx": 29347, "task_index": 33}, {"db_idx": 29348, "episode_idx": 166, "frame_idx": 96, "global_frame_idx": 29348, "task_index": 33}, {"db_idx": 29349, "episode_idx": 166, "frame_idx": 97, "global_frame_idx": 29349, "task_index": 33}, {"db_idx": 29350, "episode_idx": 166, "frame_idx": 98, "global_frame_idx": 29350, "task_index": 33}, {"db_idx": 29351, "episode_idx": 166, "frame_idx": 99, "global_frame_idx": 29351, "task_index": 33}, {"db_idx": 29352, "episode_idx": 166, "frame_idx": 100, "global_frame_idx": 29352, "task_index": 33}, {"db_idx": 29353, "episode_idx": 166, "frame_idx": 101, "global_frame_idx": 29353, "task_index": 33}, {"db_idx": 29354, "episode_idx": 166, "frame_idx": 102, "global_frame_idx": 29354, "task_index": 33}, {"db_idx": 29355, "episode_idx": 166, "frame_idx": 103, "global_frame_idx": 29355, "task_index": 33}, {"db_idx": 29356, "episode_idx": 166, "frame_idx": 104, "global_frame_idx": 29356, "task_index": 33}, {"db_idx": 29357, "episode_idx": 166, "frame_idx": 105, "global_frame_idx": 29357, "task_index": 33}, {"db_idx": 29358, "episode_idx": 166, "frame_idx": 106, "global_frame_idx": 29358, "task_index": 33}, {"db_idx": 29359, "episode_idx": 166, "frame_idx": 107, "global_frame_idx": 29359, 
"task_index": 33}, {"db_idx": 29360, "episode_idx": 166, "frame_idx": 108, "global_frame_idx": 29360, "task_index": 33}, {"db_idx": 29361, "episode_idx": 166, "frame_idx": 109, "global_frame_idx": 29361, "task_index": 33}, {"db_idx": 29362, "episode_idx": 166, "frame_idx": 110, "global_frame_idx": 29362, "task_index": 33}, {"db_idx": 29363, "episode_idx": 166, "frame_idx": 111, "global_frame_idx": 29363, "task_index": 33}, {"db_idx": 29364, "episode_idx": 166, "frame_idx": 112, "global_frame_idx": 29364, "task_index": 33}, {"db_idx": 29365, "episode_idx": 166, "frame_idx": 113, "global_frame_idx": 29365, "task_index": 33}, {"db_idx": 29366, "episode_idx": 166, "frame_idx": 114, "global_frame_idx": 29366, "task_index": 33}, {"db_idx": 29367, "episode_idx": 166, "frame_idx": 115, "global_frame_idx": 29367, "task_index": 33}, {"db_idx": 29368, "episode_idx": 166, "frame_idx": 116, "global_frame_idx": 29368, "task_index": 33}, {"db_idx": 29369, "episode_idx": 166, "frame_idx": 117, "global_frame_idx": 29369, "task_index": 33}, {"db_idx": 29370, "episode_idx": 166, "frame_idx": 118, "global_frame_idx": 29370, "task_index": 33}, {"db_idx": 29371, "episode_idx": 166, "frame_idx": 119, "global_frame_idx": 29371, "task_index": 33}, {"db_idx": 29372, "episode_idx": 166, "frame_idx": 120, "global_frame_idx": 29372, "task_index": 33}, {"db_idx": 29373, "episode_idx": 166, "frame_idx": 121, "global_frame_idx": 29373, "task_index": 33}, {"db_idx": 29374, "episode_idx": 166, "frame_idx": 122, "global_frame_idx": 29374, "task_index": 33}, {"db_idx": 29375, "episode_idx": 166, "frame_idx": 123, "global_frame_idx": 29375, "task_index": 33}, {"db_idx": 29376, "episode_idx": 166, "frame_idx": 124, "global_frame_idx": 29376, "task_index": 33}, {"db_idx": 29377, "episode_idx": 166, "frame_idx": 125, "global_frame_idx": 29377, "task_index": 33}, {"db_idx": 29378, "episode_idx": 166, "frame_idx": 126, "global_frame_idx": 29378, "task_index": 33}, {"db_idx": 29379, "episode_idx": 166, 
"frame_idx": 127, "global_frame_idx": 29379, "task_index": 33}, {"db_idx": 29380, "episode_idx": 167, "frame_idx": 0, "global_frame_idx": 29380, "task_index": 33}, {"db_idx": 29381, "episode_idx": 167, "frame_idx": 1, "global_frame_idx": 29381, "task_index": 33}, {"db_idx": 29382, "episode_idx": 167, "frame_idx": 2, "global_frame_idx": 29382, "task_index": 33}, {"db_idx": 29383, "episode_idx": 167, "frame_idx": 3, "global_frame_idx": 29383, "task_index": 33}, {"db_idx": 29384, "episode_idx": 167, "frame_idx": 4, "global_frame_idx": 29384, "task_index": 33}, {"db_idx": 29385, "episode_idx": 167, "frame_idx": 5, "global_frame_idx": 29385, "task_index": 33}, {"db_idx": 29386, "episode_idx": 167, "frame_idx": 6, "global_frame_idx": 29386, "task_index": 33}, {"db_idx": 29387, "episode_idx": 167, "frame_idx": 7, "global_frame_idx": 29387, "task_index": 33}, {"db_idx": 29388, "episode_idx": 167, "frame_idx": 8, "global_frame_idx": 29388, "task_index": 33}, {"db_idx": 29389, "episode_idx": 167, "frame_idx": 9, "global_frame_idx": 29389, "task_index": 33}, {"db_idx": 29390, "episode_idx": 167, "frame_idx": 10, "global_frame_idx": 29390, "task_index": 33}, {"db_idx": 29391, "episode_idx": 167, "frame_idx": 11, "global_frame_idx": 29391, "task_index": 33}, {"db_idx": 29392, "episode_idx": 167, "frame_idx": 12, "global_frame_idx": 29392, "task_index": 33}, {"db_idx": 29393, "episode_idx": 167, "frame_idx": 13, "global_frame_idx": 29393, "task_index": 33}, {"db_idx": 29394, "episode_idx": 167, "frame_idx": 14, "global_frame_idx": 29394, "task_index": 33}, {"db_idx": 29395, "episode_idx": 167, "frame_idx": 15, "global_frame_idx": 29395, "task_index": 33}, {"db_idx": 29396, "episode_idx": 167, "frame_idx": 16, "global_frame_idx": 29396, "task_index": 33}, {"db_idx": 29397, "episode_idx": 167, "frame_idx": 17, "global_frame_idx": 29397, "task_index": 33}, {"db_idx": 29398, "episode_idx": 167, "frame_idx": 18, "global_frame_idx": 29398, "task_index": 33}, {"db_idx": 29399, 
"episode_idx": 167, "frame_idx": 19, "global_frame_idx": 29399, "task_index": 33}, {"db_idx": 29400, "episode_idx": 167, "frame_idx": 20, "global_frame_idx": 29400, "task_index": 33}, {"db_idx": 29401, "episode_idx": 167, "frame_idx": 21, "global_frame_idx": 29401, "task_index": 33}, {"db_idx": 29402, "episode_idx": 167, "frame_idx": 22, "global_frame_idx": 29402, "task_index": 33}, {"db_idx": 29403, "episode_idx": 167, "frame_idx": 23, "global_frame_idx": 29403, "task_index": 33}, {"db_idx": 29404, "episode_idx": 167, "frame_idx": 24, "global_frame_idx": 29404, "task_index": 33}, {"db_idx": 29405, "episode_idx": 167, "frame_idx": 25, "global_frame_idx": 29405, "task_index": 33}, {"db_idx": 29406, "episode_idx": 167, "frame_idx": 26, "global_frame_idx": 29406, "task_index": 33}, {"db_idx": 29407, "episode_idx": 167, "frame_idx": 27, "global_frame_idx": 29407, "task_index": 33}, {"db_idx": 29408, "episode_idx": 167, "frame_idx": 28, "global_frame_idx": 29408, "task_index": 33}, {"db_idx": 29409, "episode_idx": 167, "frame_idx": 29, "global_frame_idx": 29409, "task_index": 33}, {"db_idx": 29410, "episode_idx": 167, "frame_idx": 30, "global_frame_idx": 29410, "task_index": 33}, {"db_idx": 29411, "episode_idx": 167, "frame_idx": 31, "global_frame_idx": 29411, "task_index": 33}, {"db_idx": 29412, "episode_idx": 167, "frame_idx": 32, "global_frame_idx": 29412, "task_index": 33}, {"db_idx": 29413, "episode_idx": 167, "frame_idx": 33, "global_frame_idx": 29413, "task_index": 33}, {"db_idx": 29414, "episode_idx": 167, "frame_idx": 34, "global_frame_idx": 29414, "task_index": 33}, {"db_idx": 29415, "episode_idx": 167, "frame_idx": 35, "global_frame_idx": 29415, "task_index": 33}, {"db_idx": 29416, "episode_idx": 167, "frame_idx": 36, "global_frame_idx": 29416, "task_index": 33}, {"db_idx": 29417, "episode_idx": 167, "frame_idx": 37, "global_frame_idx": 29417, "task_index": 33}, {"db_idx": 29418, "episode_idx": 167, "frame_idx": 38, "global_frame_idx": 29418, "task_index": 
33}, {"db_idx": 29419, "episode_idx": 167, "frame_idx": 39, "global_frame_idx": 29419, "task_index": 33}, {"db_idx": 29420, "episode_idx": 167, "frame_idx": 40, "global_frame_idx": 29420, "task_index": 33}, {"db_idx": 29421, "episode_idx": 167, "frame_idx": 41, "global_frame_idx": 29421, "task_index": 33}, {"db_idx": 29422, "episode_idx": 167, "frame_idx": 42, "global_frame_idx": 29422, "task_index": 33}, {"db_idx": 29423, "episode_idx": 167, "frame_idx": 43, "global_frame_idx": 29423, "task_index": 33}, {"db_idx": 29424, "episode_idx": 167, "frame_idx": 44, "global_frame_idx": 29424, "task_index": 33}, {"db_idx": 29425, "episode_idx": 167, "frame_idx": 45, "global_frame_idx": 29425, "task_index": 33}, {"db_idx": 29426, "episode_idx": 167, "frame_idx": 46, "global_frame_idx": 29426, "task_index": 33}, {"db_idx": 29427, "episode_idx": 167, "frame_idx": 47, "global_frame_idx": 29427, "task_index": 33}, {"db_idx": 29428, "episode_idx": 167, "frame_idx": 48, "global_frame_idx": 29428, "task_index": 33}, {"db_idx": 29429, "episode_idx": 167, "frame_idx": 49, "global_frame_idx": 29429, "task_index": 33}, {"db_idx": 29430, "episode_idx": 167, "frame_idx": 50, "global_frame_idx": 29430, "task_index": 33}, {"db_idx": 29431, "episode_idx": 167, "frame_idx": 51, "global_frame_idx": 29431, "task_index": 33}, {"db_idx": 29432, "episode_idx": 167, "frame_idx": 52, "global_frame_idx": 29432, "task_index": 33}, {"db_idx": 29433, "episode_idx": 167, "frame_idx": 53, "global_frame_idx": 29433, "task_index": 33}, {"db_idx": 29434, "episode_idx": 167, "frame_idx": 54, "global_frame_idx": 29434, "task_index": 33}, {"db_idx": 29435, "episode_idx": 167, "frame_idx": 55, "global_frame_idx": 29435, "task_index": 33}, {"db_idx": 29436, "episode_idx": 167, "frame_idx": 56, "global_frame_idx": 29436, "task_index": 33}, {"db_idx": 29437, "episode_idx": 167, "frame_idx": 57, "global_frame_idx": 29437, "task_index": 33}, {"db_idx": 29438, "episode_idx": 167, "frame_idx": 58, "global_frame_idx": 
29438, "task_index": 33}, {"db_idx": 29439, "episode_idx": 167, "frame_idx": 59, "global_frame_idx": 29439, "task_index": 33}, {"db_idx": 29440, "episode_idx": 167, "frame_idx": 60, "global_frame_idx": 29440, "task_index": 33}, {"db_idx": 29441, "episode_idx": 167, "frame_idx": 61, "global_frame_idx": 29441, "task_index": 33}, {"db_idx": 29442, "episode_idx": 167, "frame_idx": 62, "global_frame_idx": 29442, "task_index": 33}, {"db_idx": 29443, "episode_idx": 167, "frame_idx": 63, "global_frame_idx": 29443, "task_index": 33}, {"db_idx": 29444, "episode_idx": 167, "frame_idx": 64, "global_frame_idx": 29444, "task_index": 33}, {"db_idx": 29445, "episode_idx": 167, "frame_idx": 65, "global_frame_idx": 29445, "task_index": 33}, {"db_idx": 29446, "episode_idx": 167, "frame_idx": 66, "global_frame_idx": 29446, "task_index": 33}, {"db_idx": 29447, "episode_idx": 167, "frame_idx": 67, "global_frame_idx": 29447, "task_index": 33}, {"db_idx": 29448, "episode_idx": 167, "frame_idx": 68, "global_frame_idx": 29448, "task_index": 33}, {"db_idx": 29449, "episode_idx": 167, "frame_idx": 69, "global_frame_idx": 29449, "task_index": 33}, {"db_idx": 29450, "episode_idx": 167, "frame_idx": 70, "global_frame_idx": 29450, "task_index": 33}, {"db_idx": 29451, "episode_idx": 167, "frame_idx": 71, "global_frame_idx": 29451, "task_index": 33}, {"db_idx": 29452, "episode_idx": 167, "frame_idx": 72, "global_frame_idx": 29452, "task_index": 33}, {"db_idx": 29453, "episode_idx": 167, "frame_idx": 73, "global_frame_idx": 29453, "task_index": 33}, {"db_idx": 29454, "episode_idx": 167, "frame_idx": 74, "global_frame_idx": 29454, "task_index": 33}, {"db_idx": 29455, "episode_idx": 167, "frame_idx": 75, "global_frame_idx": 29455, "task_index": 33}, {"db_idx": 29456, "episode_idx": 167, "frame_idx": 76, "global_frame_idx": 29456, "task_index": 33}, {"db_idx": 29457, "episode_idx": 167, "frame_idx": 77, "global_frame_idx": 29457, "task_index": 33}, {"db_idx": 29458, "episode_idx": 167, "frame_idx": 78, 
"global_frame_idx": 29458, "task_index": 33}, {"db_idx": 29459, "episode_idx": 167, "frame_idx": 79, "global_frame_idx": 29459, "task_index": 33}, {"db_idx": 29460, "episode_idx": 167, "frame_idx": 80, "global_frame_idx": 29460, "task_index": 33}, {"db_idx": 29461, "episode_idx": 167, "frame_idx": 81, "global_frame_idx": 29461, "task_index": 33}, {"db_idx": 29462, "episode_idx": 167, "frame_idx": 82, "global_frame_idx": 29462, "task_index": 33}, {"db_idx": 29463, "episode_idx": 167, "frame_idx": 83, "global_frame_idx": 29463, "task_index": 33}, {"db_idx": 29464, "episode_idx": 167, "frame_idx": 84, "global_frame_idx": 29464, "task_index": 33}, {"db_idx": 29465, "episode_idx": 167, "frame_idx": 85, "global_frame_idx": 29465, "task_index": 33}, {"db_idx": 29466, "episode_idx": 167, "frame_idx": 86, "global_frame_idx": 29466, "task_index": 33}, {"db_idx": 29467, "episode_idx": 167, "frame_idx": 87, "global_frame_idx": 29467, "task_index": 33}, {"db_idx": 29468, "episode_idx": 167, "frame_idx": 88, "global_frame_idx": 29468, "task_index": 33}, {"db_idx": 29469, "episode_idx": 167, "frame_idx": 89, "global_frame_idx": 29469, "task_index": 33}, {"db_idx": 29470, "episode_idx": 167, "frame_idx": 90, "global_frame_idx": 29470, "task_index": 33}, {"db_idx": 29471, "episode_idx": 167, "frame_idx": 91, "global_frame_idx": 29471, "task_index": 33}, {"db_idx": 29472, "episode_idx": 167, "frame_idx": 92, "global_frame_idx": 29472, "task_index": 33}, {"db_idx": 29473, "episode_idx": 167, "frame_idx": 93, "global_frame_idx": 29473, "task_index": 33}, {"db_idx": 29474, "episode_idx": 167, "frame_idx": 94, "global_frame_idx": 29474, "task_index": 33}, {"db_idx": 29475, "episode_idx": 167, "frame_idx": 95, "global_frame_idx": 29475, "task_index": 33}, {"db_idx": 29476, "episode_idx": 167, "frame_idx": 96, "global_frame_idx": 29476, "task_index": 33}, {"db_idx": 29477, "episode_idx": 167, "frame_idx": 97, "global_frame_idx": 29477, "task_index": 33}, {"db_idx": 29478, "episode_idx": 
167, "frame_idx": 98, "global_frame_idx": 29478, "task_index": 33}, {"db_idx": 29479, "episode_idx": 167, "frame_idx": 99, "global_frame_idx": 29479, "task_index": 33}, {"db_idx": 29480, "episode_idx": 167, "frame_idx": 100, "global_frame_idx": 29480, "task_index": 33}, {"db_idx": 29481, "episode_idx": 167, "frame_idx": 101, "global_frame_idx": 29481, "task_index": 33}, {"db_idx": 29482, "episode_idx": 167, "frame_idx": 102, "global_frame_idx": 29482, "task_index": 33}, {"db_idx": 29483, "episode_idx": 167, "frame_idx": 103, "global_frame_idx": 29483, "task_index": 33}, {"db_idx": 29484, "episode_idx": 167, "frame_idx": 104, "global_frame_idx": 29484, "task_index": 33}, {"db_idx": 29485, "episode_idx": 167, "frame_idx": 105, "global_frame_idx": 29485, "task_index": 33}, {"db_idx": 29486, "episode_idx": 167, "frame_idx": 106, "global_frame_idx": 29486, "task_index": 33}, {"db_idx": 29487, "episode_idx": 167, "frame_idx": 107, "global_frame_idx": 29487, "task_index": 33}, {"db_idx": 29488, "episode_idx": 167, "frame_idx": 108, "global_frame_idx": 29488, "task_index": 33}, {"db_idx": 29489, "episode_idx": 167, "frame_idx": 109, "global_frame_idx": 29489, "task_index": 33}, {"db_idx": 29490, "episode_idx": 167, "frame_idx": 110, "global_frame_idx": 29490, "task_index": 33}, {"db_idx": 29491, "episode_idx": 167, "frame_idx": 111, "global_frame_idx": 29491, "task_index": 33}, {"db_idx": 29492, "episode_idx": 167, "frame_idx": 112, "global_frame_idx": 29492, "task_index": 33}, {"db_idx": 29493, "episode_idx": 167, "frame_idx": 113, "global_frame_idx": 29493, "task_index": 33}, {"db_idx": 29494, "episode_idx": 167, "frame_idx": 114, "global_frame_idx": 29494, "task_index": 33}, {"db_idx": 29495, "episode_idx": 167, "frame_idx": 115, "global_frame_idx": 29495, "task_index": 33}, {"db_idx": 29496, "episode_idx": 167, "frame_idx": 116, "global_frame_idx": 29496, "task_index": 33}, {"db_idx": 29497, "episode_idx": 167, "frame_idx": 117, "global_frame_idx": 29497, "task_index": 
33}, {"db_idx": 29498, "episode_idx": 167, "frame_idx": 118, "global_frame_idx": 29498, "task_index": 33}, {"db_idx": 29499, "episode_idx": 167, "frame_idx": 119, "global_frame_idx": 29499, "task_index": 33}, {"db_idx": 29500, "episode_idx": 167, "frame_idx": 120, "global_frame_idx": 29500, "task_index": 33}, {"db_idx": 29501, "episode_idx": 167, "frame_idx": 121, "global_frame_idx": 29501, "task_index": 33}, {"db_idx": 29502, "episode_idx": 167, "frame_idx": 122, "global_frame_idx": 29502, "task_index": 33}, {"db_idx": 29503, "episode_idx": 167, "frame_idx": 123, "global_frame_idx": 29503, "task_index": 33}, {"db_idx": 29504, "episode_idx": 167, "frame_idx": 124, "global_frame_idx": 29504, "task_index": 33}, {"db_idx": 29505, "episode_idx": 167, "frame_idx": 125, "global_frame_idx": 29505, "task_index": 33}, {"db_idx": 29506, "episode_idx": 168, "frame_idx": 0, "global_frame_idx": 29506, "task_index": 33}, {"db_idx": 29507, "episode_idx": 168, "frame_idx": 1, "global_frame_idx": 29507, "task_index": 33}, {"db_idx": 29508, "episode_idx": 168, "frame_idx": 2, "global_frame_idx": 29508, "task_index": 33}, {"db_idx": 29509, "episode_idx": 168, "frame_idx": 3, "global_frame_idx": 29509, "task_index": 33}, {"db_idx": 29510, "episode_idx": 168, "frame_idx": 4, "global_frame_idx": 29510, "task_index": 33}, {"db_idx": 29511, "episode_idx": 168, "frame_idx": 5, "global_frame_idx": 29511, "task_index": 33}, {"db_idx": 29512, "episode_idx": 168, "frame_idx": 6, "global_frame_idx": 29512, "task_index": 33}, {"db_idx": 29513, "episode_idx": 168, "frame_idx": 7, "global_frame_idx": 29513, "task_index": 33}, {"db_idx": 29514, "episode_idx": 168, "frame_idx": 8, "global_frame_idx": 29514, "task_index": 33}, {"db_idx": 29515, "episode_idx": 168, "frame_idx": 9, "global_frame_idx": 29515, "task_index": 33}, {"db_idx": 29516, "episode_idx": 168, "frame_idx": 10, "global_frame_idx": 29516, "task_index": 33}, {"db_idx": 29517, "episode_idx": 168, "frame_idx": 11, "global_frame_idx": 
29517, "task_index": 33}, {"db_idx": 29518, "episode_idx": 168, "frame_idx": 12, "global_frame_idx": 29518, "task_index": 33}, {"db_idx": 29519, "episode_idx": 168, "frame_idx": 13, "global_frame_idx": 29519, "task_index": 33}, {"db_idx": 29520, "episode_idx": 168, "frame_idx": 14, "global_frame_idx": 29520, "task_index": 33}, {"db_idx": 29521, "episode_idx": 168, "frame_idx": 15, "global_frame_idx": 29521, "task_index": 33}, {"db_idx": 29522, "episode_idx": 168, "frame_idx": 16, "global_frame_idx": 29522, "task_index": 33}, {"db_idx": 29523, "episode_idx": 168, "frame_idx": 17, "global_frame_idx": 29523, "task_index": 33}, {"db_idx": 29524, "episode_idx": 168, "frame_idx": 18, "global_frame_idx": 29524, "task_index": 33}, {"db_idx": 29525, "episode_idx": 168, "frame_idx": 19, "global_frame_idx": 29525, "task_index": 33}, {"db_idx": 29526, "episode_idx": 168, "frame_idx": 20, "global_frame_idx": 29526, "task_index": 33}, {"db_idx": 29527, "episode_idx": 168, "frame_idx": 21, "global_frame_idx": 29527, "task_index": 33}, {"db_idx": 29528, "episode_idx": 168, "frame_idx": 22, "global_frame_idx": 29528, "task_index": 33}, {"db_idx": 29529, "episode_idx": 168, "frame_idx": 23, "global_frame_idx": 29529, "task_index": 33}, {"db_idx": 29530, "episode_idx": 168, "frame_idx": 24, "global_frame_idx": 29530, "task_index": 33}, {"db_idx": 29531, "episode_idx": 168, "frame_idx": 25, "global_frame_idx": 29531, "task_index": 33}, {"db_idx": 29532, "episode_idx": 168, "frame_idx": 26, "global_frame_idx": 29532, "task_index": 33}, {"db_idx": 29533, "episode_idx": 168, "frame_idx": 27, "global_frame_idx": 29533, "task_index": 33}, {"db_idx": 29534, "episode_idx": 168, "frame_idx": 28, "global_frame_idx": 29534, "task_index": 33}, {"db_idx": 29535, "episode_idx": 168, "frame_idx": 29, "global_frame_idx": 29535, "task_index": 33}, {"db_idx": 29536, "episode_idx": 168, "frame_idx": 30, "global_frame_idx": 29536, "task_index": 33}, {"db_idx": 29537, "episode_idx": 168, "frame_idx": 31, 
"global_frame_idx": 29537, "task_index": 33}, {"db_idx": 29538, "episode_idx": 168, "frame_idx": 32, "global_frame_idx": 29538, "task_index": 33}, {"db_idx": 29539, "episode_idx": 168, "frame_idx": 33, "global_frame_idx": 29539, "task_index": 33}, {"db_idx": 29540, "episode_idx": 168, "frame_idx": 34, "global_frame_idx": 29540, "task_index": 33}, {"db_idx": 29541, "episode_idx": 168, "frame_idx": 35, "global_frame_idx": 29541, "task_index": 33}, {"db_idx": 29542, "episode_idx": 168, "frame_idx": 36, "global_frame_idx": 29542, "task_index": 33}, {"db_idx": 29543, "episode_idx": 168, "frame_idx": 37, "global_frame_idx": 29543, "task_index": 33}, {"db_idx": 29544, "episode_idx": 168, "frame_idx": 38, "global_frame_idx": 29544, "task_index": 33}, {"db_idx": 29545, "episode_idx": 168, "frame_idx": 39, "global_frame_idx": 29545, "task_index": 33}, {"db_idx": 29546, "episode_idx": 168, "frame_idx": 40, "global_frame_idx": 29546, "task_index": 33}, {"db_idx": 29547, "episode_idx": 168, "frame_idx": 41, "global_frame_idx": 29547, "task_index": 33}, {"db_idx": 29548, "episode_idx": 168, "frame_idx": 42, "global_frame_idx": 29548, "task_index": 33}, {"db_idx": 29549, "episode_idx": 168, "frame_idx": 43, "global_frame_idx": 29549, "task_index": 33}, {"db_idx": 29550, "episode_idx": 168, "frame_idx": 44, "global_frame_idx": 29550, "task_index": 33}, {"db_idx": 29551, "episode_idx": 168, "frame_idx": 45, "global_frame_idx": 29551, "task_index": 33}, {"db_idx": 29552, "episode_idx": 168, "frame_idx": 46, "global_frame_idx": 29552, "task_index": 33}, {"db_idx": 29553, "episode_idx": 168, "frame_idx": 47, "global_frame_idx": 29553, "task_index": 33}, {"db_idx": 29554, "episode_idx": 168, "frame_idx": 48, "global_frame_idx": 29554, "task_index": 33}, {"db_idx": 29555, "episode_idx": 168, "frame_idx": 49, "global_frame_idx": 29555, "task_index": 33}, {"db_idx": 29556, "episode_idx": 168, "frame_idx": 50, "global_frame_idx": 29556, "task_index": 33}, {"db_idx": 29557, "episode_idx": 
168, "frame_idx": 51, "global_frame_idx": 29557, "task_index": 33}, {"db_idx": 29558, "episode_idx": 168, "frame_idx": 52, "global_frame_idx": 29558, "task_index": 33}, {"db_idx": 29559, "episode_idx": 168, "frame_idx": 53, "global_frame_idx": 29559, "task_index": 33}, {"db_idx": 29560, "episode_idx": 168, "frame_idx": 54, "global_frame_idx": 29560, "task_index": 33}, {"db_idx": 29561, "episode_idx": 168, "frame_idx": 55, "global_frame_idx": 29561, "task_index": 33}, {"db_idx": 29562, "episode_idx": 168, "frame_idx": 56, "global_frame_idx": 29562, "task_index": 33}, {"db_idx": 29563, "episode_idx": 168, "frame_idx": 57, "global_frame_idx": 29563, "task_index": 33}, {"db_idx": 29564, "episode_idx": 168, "frame_idx": 58, "global_frame_idx": 29564, "task_index": 33}, {"db_idx": 29565, "episode_idx": 168, "frame_idx": 59, "global_frame_idx": 29565, "task_index": 33}, {"db_idx": 29566, "episode_idx": 168, "frame_idx": 60, "global_frame_idx": 29566, "task_index": 33}, {"db_idx": 29567, "episode_idx": 168, "frame_idx": 61, "global_frame_idx": 29567, "task_index": 33}, {"db_idx": 29568, "episode_idx": 168, "frame_idx": 62, "global_frame_idx": 29568, "task_index": 33}, {"db_idx": 29569, "episode_idx": 168, "frame_idx": 63, "global_frame_idx": 29569, "task_index": 33}, {"db_idx": 29570, "episode_idx": 168, "frame_idx": 64, "global_frame_idx": 29570, "task_index": 33}, {"db_idx": 29571, "episode_idx": 168, "frame_idx": 65, "global_frame_idx": 29571, "task_index": 33}, {"db_idx": 29572, "episode_idx": 168, "frame_idx": 66, "global_frame_idx": 29572, "task_index": 33}, {"db_idx": 29573, "episode_idx": 168, "frame_idx": 67, "global_frame_idx": 29573, "task_index": 33}, {"db_idx": 29574, "episode_idx": 168, "frame_idx": 68, "global_frame_idx": 29574, "task_index": 33}, {"db_idx": 29575, "episode_idx": 168, "frame_idx": 69, "global_frame_idx": 29575, "task_index": 33}, {"db_idx": 29576, "episode_idx": 168, "frame_idx": 70, "global_frame_idx": 29576, "task_index": 33}, {"db_idx": 
29577, "episode_idx": 168, "frame_idx": 71, "global_frame_idx": 29577, "task_index": 33}, {"db_idx": 29578, "episode_idx": 168, "frame_idx": 72, "global_frame_idx": 29578, "task_index": 33}, {"db_idx": 29579, "episode_idx": 168, "frame_idx": 73, "global_frame_idx": 29579, "task_index": 33}, {"db_idx": 29580, "episode_idx": 168, "frame_idx": 74, "global_frame_idx": 29580, "task_index": 33}, {"db_idx": 29581, "episode_idx": 168, "frame_idx": 75, "global_frame_idx": 29581, "task_index": 33}, {"db_idx": 29582, "episode_idx": 168, "frame_idx": 76, "global_frame_idx": 29582, "task_index": 33}, {"db_idx": 29583, "episode_idx": 168, "frame_idx": 77, "global_frame_idx": 29583, "task_index": 33}, {"db_idx": 29584, "episode_idx": 168, "frame_idx": 78, "global_frame_idx": 29584, "task_index": 33}, {"db_idx": 29585, "episode_idx": 168, "frame_idx": 79, "global_frame_idx": 29585, "task_index": 33}, {"db_idx": 29586, "episode_idx": 168, "frame_idx": 80, "global_frame_idx": 29586, "task_index": 33}, {"db_idx": 29587, "episode_idx": 168, "frame_idx": 81, "global_frame_idx": 29587, "task_index": 33}, {"db_idx": 29588, "episode_idx": 168, "frame_idx": 82, "global_frame_idx": 29588, "task_index": 33}, {"db_idx": 29589, "episode_idx": 168, "frame_idx": 83, "global_frame_idx": 29589, "task_index": 33}, {"db_idx": 29590, "episode_idx": 168, "frame_idx": 84, "global_frame_idx": 29590, "task_index": 33}, {"db_idx": 29591, "episode_idx": 168, "frame_idx": 85, "global_frame_idx": 29591, "task_index": 33}, {"db_idx": 29592, "episode_idx": 168, "frame_idx": 86, "global_frame_idx": 29592, "task_index": 33}, {"db_idx": 29593, "episode_idx": 168, "frame_idx": 87, "global_frame_idx": 29593, "task_index": 33}, {"db_idx": 29594, "episode_idx": 168, "frame_idx": 88, "global_frame_idx": 29594, "task_index": 33}, {"db_idx": 29595, "episode_idx": 168, "frame_idx": 89, "global_frame_idx": 29595, "task_index": 33}, {"db_idx": 29596, "episode_idx": 168, "frame_idx": 90, "global_frame_idx": 29596, 
"task_index": 33}, {"db_idx": 29597, "episode_idx": 168, "frame_idx": 91, "global_frame_idx": 29597, "task_index": 33}, {"db_idx": 29598, "episode_idx": 168, "frame_idx": 92, "global_frame_idx": 29598, "task_index": 33}, {"db_idx": 29599, "episode_idx": 168, "frame_idx": 93, "global_frame_idx": 29599, "task_index": 33}, {"db_idx": 29600, "episode_idx": 168, "frame_idx": 94, "global_frame_idx": 29600, "task_index": 33}, {"db_idx": 29601, "episode_idx": 168, "frame_idx": 95, "global_frame_idx": 29601, "task_index": 33}, {"db_idx": 29602, "episode_idx": 168, "frame_idx": 96, "global_frame_idx": 29602, "task_index": 33}, {"db_idx": 29603, "episode_idx": 168, "frame_idx": 97, "global_frame_idx": 29603, "task_index": 33}, {"db_idx": 29604, "episode_idx": 168, "frame_idx": 98, "global_frame_idx": 29604, "task_index": 33}, {"db_idx": 29605, "episode_idx": 168, "frame_idx": 99, "global_frame_idx": 29605, "task_index": 33}, {"db_idx": 29606, "episode_idx": 168, "frame_idx": 100, "global_frame_idx": 29606, "task_index": 33}, {"db_idx": 29607, "episode_idx": 168, "frame_idx": 101, "global_frame_idx": 29607, "task_index": 33}, {"db_idx": 29608, "episode_idx": 168, "frame_idx": 102, "global_frame_idx": 29608, "task_index": 33}, {"db_idx": 29609, "episode_idx": 168, "frame_idx": 103, "global_frame_idx": 29609, "task_index": 33}, {"db_idx": 29610, "episode_idx": 168, "frame_idx": 104, "global_frame_idx": 29610, "task_index": 33}, {"db_idx": 29611, "episode_idx": 168, "frame_idx": 105, "global_frame_idx": 29611, "task_index": 33}, {"db_idx": 29612, "episode_idx": 168, "frame_idx": 106, "global_frame_idx": 29612, "task_index": 33}, {"db_idx": 29613, "episode_idx": 168, "frame_idx": 107, "global_frame_idx": 29613, "task_index": 33}, {"db_idx": 29614, "episode_idx": 168, "frame_idx": 108, "global_frame_idx": 29614, "task_index": 33}, {"db_idx": 29615, "episode_idx": 168, "frame_idx": 109, "global_frame_idx": 29615, "task_index": 33}, {"db_idx": 29616, "episode_idx": 168, "frame_idx": 
110, "global_frame_idx": 29616, "task_index": 33}, {"db_idx": 29617, "episode_idx": 168, "frame_idx": 111, "global_frame_idx": 29617, "task_index": 33}, {"db_idx": 29618, "episode_idx": 168, "frame_idx": 112, "global_frame_idx": 29618, "task_index": 33}, {"db_idx": 29619, "episode_idx": 168, "frame_idx": 113, "global_frame_idx": 29619, "task_index": 33}, {"db_idx": 29620, "episode_idx": 168, "frame_idx": 114, "global_frame_idx": 29620, "task_index": 33}, {"db_idx": 29621, "episode_idx": 168, "frame_idx": 115, "global_frame_idx": 29621, "task_index": 33}, {"db_idx": 29622, "episode_idx": 168, "frame_idx": 116, "global_frame_idx": 29622, "task_index": 33}, {"db_idx": 29623, "episode_idx": 168, "frame_idx": 117, "global_frame_idx": 29623, "task_index": 33}, {"db_idx": 29624, "episode_idx": 168, "frame_idx": 118, "global_frame_idx": 29624, "task_index": 33}, {"db_idx": 29625, "episode_idx": 168, "frame_idx": 119, "global_frame_idx": 29625, "task_index": 33}, {"db_idx": 29626, "episode_idx": 168, "frame_idx": 120, "global_frame_idx": 29626, "task_index": 33}, {"db_idx": 29627, "episode_idx": 168, "frame_idx": 121, "global_frame_idx": 29627, "task_index": 33}, {"db_idx": 29628, "episode_idx": 168, "frame_idx": 122, "global_frame_idx": 29628, "task_index": 33}, {"db_idx": 29629, "episode_idx": 168, "frame_idx": 123, "global_frame_idx": 29629, "task_index": 33}, {"db_idx": 29630, "episode_idx": 168, "frame_idx": 124, "global_frame_idx": 29630, "task_index": 33}, {"db_idx": 29631, "episode_idx": 168, "frame_idx": 125, "global_frame_idx": 29631, "task_index": 33}, {"db_idx": 29632, "episode_idx": 168, "frame_idx": 126, "global_frame_idx": 29632, "task_index": 33}, {"db_idx": 29633, "episode_idx": 168, "frame_idx": 127, "global_frame_idx": 29633, "task_index": 33}, {"db_idx": 29634, "episode_idx": 168, "frame_idx": 128, "global_frame_idx": 29634, "task_index": 33}, {"db_idx": 29635, "episode_idx": 168, "frame_idx": 129, "global_frame_idx": 29635, "task_index": 33}, {"db_idx": 
29636, "episode_idx": 168, "frame_idx": 130, "global_frame_idx": 29636, "task_index": 33}, {"db_idx": 29637, "episode_idx": 168, "frame_idx": 131, "global_frame_idx": 29637, "task_index": 33}, {"db_idx": 29638, "episode_idx": 168, "frame_idx": 132, "global_frame_idx": 29638, "task_index": 33}, {"db_idx": 29639, "episode_idx": 168, "frame_idx": 133, "global_frame_idx": 29639, "task_index": 33}, {"db_idx": 29640, "episode_idx": 168, "frame_idx": 134, "global_frame_idx": 29640, "task_index": 33}, {"db_idx": 29641, "episode_idx": 168, "frame_idx": 135, "global_frame_idx": 29641, "task_index": 33}, {"db_idx": 29642, "episode_idx": 168, "frame_idx": 136, "global_frame_idx": 29642, "task_index": 33}, {"db_idx": 29643, "episode_idx": 168, "frame_idx": 137, "global_frame_idx": 29643, "task_index": 33}, {"db_idx": 29644, "episode_idx": 168, "frame_idx": 138, "global_frame_idx": 29644, "task_index": 33}, {"db_idx": 29645, "episode_idx": 168, "frame_idx": 139, "global_frame_idx": 29645, "task_index": 33}, {"db_idx": 29646, "episode_idx": 168, "frame_idx": 140, "global_frame_idx": 29646, "task_index": 33}, {"db_idx": 29647, "episode_idx": 169, "frame_idx": 0, "global_frame_idx": 29647, "task_index": 33}, {"db_idx": 29648, "episode_idx": 169, "frame_idx": 1, "global_frame_idx": 29648, "task_index": 33}, {"db_idx": 29649, "episode_idx": 169, "frame_idx": 2, "global_frame_idx": 29649, "task_index": 33}, {"db_idx": 29650, "episode_idx": 169, "frame_idx": 3, "global_frame_idx": 29650, "task_index": 33}, {"db_idx": 29651, "episode_idx": 169, "frame_idx": 4, "global_frame_idx": 29651, "task_index": 33}, {"db_idx": 29652, "episode_idx": 169, "frame_idx": 5, "global_frame_idx": 29652, "task_index": 33}, {"db_idx": 29653, "episode_idx": 169, "frame_idx": 6, "global_frame_idx": 29653, "task_index": 33}, {"db_idx": 29654, "episode_idx": 169, "frame_idx": 7, "global_frame_idx": 29654, "task_index": 33}, {"db_idx": 29655, "episode_idx": 169, "frame_idx": 8, "global_frame_idx": 29655, 
"task_index": 33}, {"db_idx": 29656, "episode_idx": 169, "frame_idx": 9, "global_frame_idx": 29656, "task_index": 33}, {"db_idx": 29657, "episode_idx": 169, "frame_idx": 10, "global_frame_idx": 29657, "task_index": 33}, {"db_idx": 29658, "episode_idx": 169, "frame_idx": 11, "global_frame_idx": 29658, "task_index": 33}, {"db_idx": 29659, "episode_idx": 169, "frame_idx": 12, "global_frame_idx": 29659, "task_index": 33}, {"db_idx": 29660, "episode_idx": 169, "frame_idx": 13, "global_frame_idx": 29660, "task_index": 33}, {"db_idx": 29661, "episode_idx": 169, "frame_idx": 14, "global_frame_idx": 29661, "task_index": 33}, {"db_idx": 29662, "episode_idx": 169, "frame_idx": 15, "global_frame_idx": 29662, "task_index": 33}, {"db_idx": 29663, "episode_idx": 169, "frame_idx": 16, "global_frame_idx": 29663, "task_index": 33}, {"db_idx": 29664, "episode_idx": 169, "frame_idx": 17, "global_frame_idx": 29664, "task_index": 33}, {"db_idx": 29665, "episode_idx": 169, "frame_idx": 18, "global_frame_idx": 29665, "task_index": 33}, {"db_idx": 29666, "episode_idx": 169, "frame_idx": 19, "global_frame_idx": 29666, "task_index": 33}, {"db_idx": 29667, "episode_idx": 169, "frame_idx": 20, "global_frame_idx": 29667, "task_index": 33}, {"db_idx": 29668, "episode_idx": 169, "frame_idx": 21, "global_frame_idx": 29668, "task_index": 33}, {"db_idx": 29669, "episode_idx": 169, "frame_idx": 22, "global_frame_idx": 29669, "task_index": 33}, {"db_idx": 29670, "episode_idx": 169, "frame_idx": 23, "global_frame_idx": 29670, "task_index": 33}, {"db_idx": 29671, "episode_idx": 169, "frame_idx": 24, "global_frame_idx": 29671, "task_index": 33}, {"db_idx": 29672, "episode_idx": 169, "frame_idx": 25, "global_frame_idx": 29672, "task_index": 33}, {"db_idx": 29673, "episode_idx": 169, "frame_idx": 26, "global_frame_idx": 29673, "task_index": 33}, {"db_idx": 29674, "episode_idx": 169, "frame_idx": 27, "global_frame_idx": 29674, "task_index": 33}, {"db_idx": 29675, "episode_idx": 169, "frame_idx": 28, 
"global_frame_idx": 29675, "task_index": 33}, {"db_idx": 29676, "episode_idx": 169, "frame_idx": 29, "global_frame_idx": 29676, "task_index": 33}, {"db_idx": 29677, "episode_idx": 169, "frame_idx": 30, "global_frame_idx": 29677, "task_index": 33}, {"db_idx": 29678, "episode_idx": 169, "frame_idx": 31, "global_frame_idx": 29678, "task_index": 33}, {"db_idx": 29679, "episode_idx": 169, "frame_idx": 32, "global_frame_idx": 29679, "task_index": 33}, {"db_idx": 29680, "episode_idx": 169, "frame_idx": 33, "global_frame_idx": 29680, "task_index": 33}, {"db_idx": 29681, "episode_idx": 169, "frame_idx": 34, "global_frame_idx": 29681, "task_index": 33}, {"db_idx": 29682, "episode_idx": 169, "frame_idx": 35, "global_frame_idx": 29682, "task_index": 33}, {"db_idx": 29683, "episode_idx": 169, "frame_idx": 36, "global_frame_idx": 29683, "task_index": 33}, {"db_idx": 29684, "episode_idx": 169, "frame_idx": 37, "global_frame_idx": 29684, "task_index": 33}, {"db_idx": 29685, "episode_idx": 169, "frame_idx": 38, "global_frame_idx": 29685, "task_index": 33}, {"db_idx": 29686, "episode_idx": 169, "frame_idx": 39, "global_frame_idx": 29686, "task_index": 33}, {"db_idx": 29687, "episode_idx": 169, "frame_idx": 40, "global_frame_idx": 29687, "task_index": 33}, {"db_idx": 29688, "episode_idx": 169, "frame_idx": 41, "global_frame_idx": 29688, "task_index": 33}, {"db_idx": 29689, "episode_idx": 169, "frame_idx": 42, "global_frame_idx": 29689, "task_index": 33}, {"db_idx": 29690, "episode_idx": 169, "frame_idx": 43, "global_frame_idx": 29690, "task_index": 33}, {"db_idx": 29691, "episode_idx": 169, "frame_idx": 44, "global_frame_idx": 29691, "task_index": 33}, {"db_idx": 29692, "episode_idx": 169, "frame_idx": 45, "global_frame_idx": 29692, "task_index": 33}, {"db_idx": 29693, "episode_idx": 169, "frame_idx": 46, "global_frame_idx": 29693, "task_index": 33}, {"db_idx": 29694, "episode_idx": 169, "frame_idx": 47, "global_frame_idx": 29694, "task_index": 33}, {"db_idx": 29695, "episode_idx": 
169, "frame_idx": 48, "global_frame_idx": 29695, "task_index": 33}, {"db_idx": 29696, "episode_idx": 169, "frame_idx": 49, "global_frame_idx": 29696, "task_index": 33}, {"db_idx": 29697, "episode_idx": 169, "frame_idx": 50, "global_frame_idx": 29697, "task_index": 33}, {"db_idx": 29698, "episode_idx": 169, "frame_idx": 51, "global_frame_idx": 29698, "task_index": 33}, {"db_idx": 29699, "episode_idx": 169, "frame_idx": 52, "global_frame_idx": 29699, "task_index": 33}, {"db_idx": 29700, "episode_idx": 169, "frame_idx": 53, "global_frame_idx": 29700, "task_index": 33}, {"db_idx": 29701, "episode_idx": 169, "frame_idx": 54, "global_frame_idx": 29701, "task_index": 33}, {"db_idx": 29702, "episode_idx": 169, "frame_idx": 55, "global_frame_idx": 29702, "task_index": 33}, {"db_idx": 29703, "episode_idx": 169, "frame_idx": 56, "global_frame_idx": 29703, "task_index": 33}, {"db_idx": 29704, "episode_idx": 169, "frame_idx": 57, "global_frame_idx": 29704, "task_index": 33}, {"db_idx": 29705, "episode_idx": 169, "frame_idx": 58, "global_frame_idx": 29705, "task_index": 33}, {"db_idx": 29706, "episode_idx": 169, "frame_idx": 59, "global_frame_idx": 29706, "task_index": 33}, {"db_idx": 29707, "episode_idx": 169, "frame_idx": 60, "global_frame_idx": 29707, "task_index": 33}, {"db_idx": 29708, "episode_idx": 169, "frame_idx": 61, "global_frame_idx": 29708, "task_index": 33}, {"db_idx": 29709, "episode_idx": 169, "frame_idx": 62, "global_frame_idx": 29709, "task_index": 33}, {"db_idx": 29710, "episode_idx": 169, "frame_idx": 63, "global_frame_idx": 29710, "task_index": 33}, {"db_idx": 29711, "episode_idx": 169, "frame_idx": 64, "global_frame_idx": 29711, "task_index": 33}, {"db_idx": 29712, "episode_idx": 169, "frame_idx": 65, "global_frame_idx": 29712, "task_index": 33}, {"db_idx": 29713, "episode_idx": 169, "frame_idx": 66, "global_frame_idx": 29713, "task_index": 33}, {"db_idx": 29714, "episode_idx": 169, "frame_idx": 67, "global_frame_idx": 29714, "task_index": 33}, {"db_idx": 
29715, "episode_idx": 169, "frame_idx": 68, "global_frame_idx": 29715, "task_index": 33}, {"db_idx": 29716, "episode_idx": 169, "frame_idx": 69, "global_frame_idx": 29716, "task_index": 33}, {"db_idx": 29717, "episode_idx": 169, "frame_idx": 70, "global_frame_idx": 29717, "task_index": 33}, {"db_idx": 29718, "episode_idx": 169, "frame_idx": 71, "global_frame_idx": 29718, "task_index": 33}, {"db_idx": 29719, "episode_idx": 169, "frame_idx": 72, "global_frame_idx": 29719, "task_index": 33}, {"db_idx": 29720, "episode_idx": 169, "frame_idx": 73, "global_frame_idx": 29720, "task_index": 33}, {"db_idx": 29721, "episode_idx": 169, "frame_idx": 74, "global_frame_idx": 29721, "task_index": 33}, {"db_idx": 29722, "episode_idx": 169, "frame_idx": 75, "global_frame_idx": 29722, "task_index": 33}, {"db_idx": 29723, "episode_idx": 169, "frame_idx": 76, "global_frame_idx": 29723, "task_index": 33}, {"db_idx": 29724, "episode_idx": 169, "frame_idx": 77, "global_frame_idx": 29724, "task_index": 33}, {"db_idx": 29725, "episode_idx": 169, "frame_idx": 78, "global_frame_idx": 29725, "task_index": 33}, {"db_idx": 29726, "episode_idx": 169, "frame_idx": 79, "global_frame_idx": 29726, "task_index": 33}, {"db_idx": 29727, "episode_idx": 169, "frame_idx": 80, "global_frame_idx": 29727, "task_index": 33}, {"db_idx": 29728, "episode_idx": 169, "frame_idx": 81, "global_frame_idx": 29728, "task_index": 33}, {"db_idx": 29729, "episode_idx": 169, "frame_idx": 82, "global_frame_idx": 29729, "task_index": 33}, {"db_idx": 29730, "episode_idx": 169, "frame_idx": 83, "global_frame_idx": 29730, "task_index": 33}, {"db_idx": 29731, "episode_idx": 169, "frame_idx": 84, "global_frame_idx": 29731, "task_index": 33}, {"db_idx": 29732, "episode_idx": 169, "frame_idx": 85, "global_frame_idx": 29732, "task_index": 33}, {"db_idx": 29733, "episode_idx": 169, "frame_idx": 86, "global_frame_idx": 29733, "task_index": 33}, {"db_idx": 29734, "episode_idx": 169, "frame_idx": 87, "global_frame_idx": 29734, 
"task_index": 33}, {"db_idx": 29735, "episode_idx": 169, "frame_idx": 88, "global_frame_idx": 29735, "task_index": 33}, {"db_idx": 29736, "episode_idx": 169, "frame_idx": 89, "global_frame_idx": 29736, "task_index": 33}, {"db_idx": 29737, "episode_idx": 169, "frame_idx": 90, "global_frame_idx": 29737, "task_index": 33}, {"db_idx": 29738, "episode_idx": 169, "frame_idx": 91, "global_frame_idx": 29738, "task_index": 33}, {"db_idx": 29739, "episode_idx": 169, "frame_idx": 92, "global_frame_idx": 29739, "task_index": 33}, {"db_idx": 29740, "episode_idx": 169, "frame_idx": 93, "global_frame_idx": 29740, "task_index": 33}, {"db_idx": 29741, "episode_idx": 169, "frame_idx": 94, "global_frame_idx": 29741, "task_index": 33}, {"db_idx": 29742, "episode_idx": 169, "frame_idx": 95, "global_frame_idx": 29742, "task_index": 33}, {"db_idx": 29743, "episode_idx": 169, "frame_idx": 96, "global_frame_idx": 29743, "task_index": 33}, {"db_idx": 29744, "episode_idx": 169, "frame_idx": 97, "global_frame_idx": 29744, "task_index": 33}, {"db_idx": 29745, "episode_idx": 169, "frame_idx": 98, "global_frame_idx": 29745, "task_index": 33}, {"db_idx": 29746, "episode_idx": 169, "frame_idx": 99, "global_frame_idx": 29746, "task_index": 33}, {"db_idx": 29747, "episode_idx": 169, "frame_idx": 100, "global_frame_idx": 29747, "task_index": 33}, {"db_idx": 29748, "episode_idx": 169, "frame_idx": 101, "global_frame_idx": 29748, "task_index": 33}, {"db_idx": 29749, "episode_idx": 169, "frame_idx": 102, "global_frame_idx": 29749, "task_index": 33}, {"db_idx": 29750, "episode_idx": 169, "frame_idx": 103, "global_frame_idx": 29750, "task_index": 33}, {"db_idx": 29751, "episode_idx": 169, "frame_idx": 104, "global_frame_idx": 29751, "task_index": 33}, {"db_idx": 29752, "episode_idx": 169, "frame_idx": 105, "global_frame_idx": 29752, "task_index": 33}, {"db_idx": 29753, "episode_idx": 169, "frame_idx": 106, "global_frame_idx": 29753, "task_index": 33}, {"db_idx": 29754, "episode_idx": 169, "frame_idx": 
107, "global_frame_idx": 29754, "task_index": 33}, {"db_idx": 29755, "episode_idx": 169, "frame_idx": 108, "global_frame_idx": 29755, "task_index": 33}, {"db_idx": 29756, "episode_idx": 169, "frame_idx": 109, "global_frame_idx": 29756, "task_index": 33}, {"db_idx": 29757, "episode_idx": 169, "frame_idx": 110, "global_frame_idx": 29757, "task_index": 33}, {"db_idx": 29758, "episode_idx": 169, "frame_idx": 111, "global_frame_idx": 29758, "task_index": 33}, {"db_idx": 29759, "episode_idx": 169, "frame_idx": 112, "global_frame_idx": 29759, "task_index": 33}, {"db_idx": 29760, "episode_idx": 169, "frame_idx": 113, "global_frame_idx": 29760, "task_index": 33}, {"db_idx": 29761, "episode_idx": 169, "frame_idx": 114, "global_frame_idx": 29761, "task_index": 33}, {"db_idx": 29762, "episode_idx": 169, "frame_idx": 115, "global_frame_idx": 29762, "task_index": 33}, {"db_idx": 29763, "episode_idx": 169, "frame_idx": 116, "global_frame_idx": 29763, "task_index": 33}, {"db_idx": 29764, "episode_idx": 169, "frame_idx": 117, "global_frame_idx": 29764, "task_index": 33}, {"db_idx": 29765, "episode_idx": 169, "frame_idx": 118, "global_frame_idx": 29765, "task_index": 33}, {"db_idx": 29766, "episode_idx": 169, "frame_idx": 119, "global_frame_idx": 29766, "task_index": 33}, {"db_idx": 29767, "episode_idx": 169, "frame_idx": 120, "global_frame_idx": 29767, "task_index": 33}, {"db_idx": 29768, "episode_idx": 169, "frame_idx": 121, "global_frame_idx": 29768, "task_index": 33}, {"db_idx": 29769, "episode_idx": 169, "frame_idx": 122, "global_frame_idx": 29769, "task_index": 33}, {"db_idx": 29770, "episode_idx": 169, "frame_idx": 123, "global_frame_idx": 29770, "task_index": 33}, {"db_idx": 29771, "episode_idx": 170, "frame_idx": 0, "global_frame_idx": 29771, "task_index": 34}, {"db_idx": 29772, "episode_idx": 170, "frame_idx": 1, "global_frame_idx": 29772, "task_index": 34}, {"db_idx": 29773, "episode_idx": 170, "frame_idx": 2, "global_frame_idx": 29773, "task_index": 34}, {"db_idx": 
29774, "episode_idx": 170, "frame_idx": 3, "global_frame_idx": 29774, "task_index": 34}, {"db_idx": 29775, "episode_idx": 170, "frame_idx": 4, "global_frame_idx": 29775, "task_index": 34}, {"db_idx": 29776, "episode_idx": 170, "frame_idx": 5, "global_frame_idx": 29776, "task_index": 34}, {"db_idx": 29777, "episode_idx": 170, "frame_idx": 6, "global_frame_idx": 29777, "task_index": 34}, {"db_idx": 29778, "episode_idx": 170, "frame_idx": 7, "global_frame_idx": 29778, "task_index": 34}, {"db_idx": 29779, "episode_idx": 170, "frame_idx": 8, "global_frame_idx": 29779, "task_index": 34}, {"db_idx": 29780, "episode_idx": 170, "frame_idx": 9, "global_frame_idx": 29780, "task_index": 34}, {"db_idx": 29781, "episode_idx": 170, "frame_idx": 10, "global_frame_idx": 29781, "task_index": 34}, {"db_idx": 29782, "episode_idx": 170, "frame_idx": 11, "global_frame_idx": 29782, "task_index": 34}, {"db_idx": 29783, "episode_idx": 170, "frame_idx": 12, "global_frame_idx": 29783, "task_index": 34}, {"db_idx": 29784, "episode_idx": 170, "frame_idx": 13, "global_frame_idx": 29784, "task_index": 34}, {"db_idx": 29785, "episode_idx": 170, "frame_idx": 14, "global_frame_idx": 29785, "task_index": 34}, {"db_idx": 29786, "episode_idx": 170, "frame_idx": 15, "global_frame_idx": 29786, "task_index": 34}, {"db_idx": 29787, "episode_idx": 170, "frame_idx": 16, "global_frame_idx": 29787, "task_index": 34}, {"db_idx": 29788, "episode_idx": 170, "frame_idx": 17, "global_frame_idx": 29788, "task_index": 34}, {"db_idx": 29789, "episode_idx": 170, "frame_idx": 18, "global_frame_idx": 29789, "task_index": 34}, {"db_idx": 29790, "episode_idx": 170, "frame_idx": 19, "global_frame_idx": 29790, "task_index": 34}, {"db_idx": 29791, "episode_idx": 170, "frame_idx": 20, "global_frame_idx": 29791, "task_index": 34}, {"db_idx": 29792, "episode_idx": 170, "frame_idx": 21, "global_frame_idx": 29792, "task_index": 34}, {"db_idx": 29793, "episode_idx": 170, "frame_idx": 22, "global_frame_idx": 29793, "task_index": 
34}, {"db_idx": 29794, "episode_idx": 170, "frame_idx": 23, "global_frame_idx": 29794, "task_index": 34}, {"db_idx": 29795, "episode_idx": 170, "frame_idx": 24, "global_frame_idx": 29795, "task_index": 34}, {"db_idx": 29796, "episode_idx": 170, "frame_idx": 25, "global_frame_idx": 29796, "task_index": 34}, {"db_idx": 29797, "episode_idx": 170, "frame_idx": 26, "global_frame_idx": 29797, "task_index": 34}, {"db_idx": 29798, "episode_idx": 170, "frame_idx": 27, "global_frame_idx": 29798, "task_index": 34}, {"db_idx": 29799, "episode_idx": 170, "frame_idx": 28, "global_frame_idx": 29799, "task_index": 34}, {"db_idx": 29800, "episode_idx": 170, "frame_idx": 29, "global_frame_idx": 29800, "task_index": 34}, {"db_idx": 29801, "episode_idx": 170, "frame_idx": 30, "global_frame_idx": 29801, "task_index": 34}, {"db_idx": 29802, "episode_idx": 170, "frame_idx": 31, "global_frame_idx": 29802, "task_index": 34}, {"db_idx": 29803, "episode_idx": 170, "frame_idx": 32, "global_frame_idx": 29803, "task_index": 34}, {"db_idx": 29804, "episode_idx": 170, "frame_idx": 33, "global_frame_idx": 29804, "task_index": 34}, {"db_idx": 29805, "episode_idx": 170, "frame_idx": 34, "global_frame_idx": 29805, "task_index": 34}, {"db_idx": 29806, "episode_idx": 170, "frame_idx": 35, "global_frame_idx": 29806, "task_index": 34}, {"db_idx": 29807, "episode_idx": 170, "frame_idx": 36, "global_frame_idx": 29807, "task_index": 34}, {"db_idx": 29808, "episode_idx": 170, "frame_idx": 37, "global_frame_idx": 29808, "task_index": 34}, {"db_idx": 29809, "episode_idx": 170, "frame_idx": 38, "global_frame_idx": 29809, "task_index": 34}, {"db_idx": 29810, "episode_idx": 170, "frame_idx": 39, "global_frame_idx": 29810, "task_index": 34}, {"db_idx": 29811, "episode_idx": 170, "frame_idx": 40, "global_frame_idx": 29811, "task_index": 34}, {"db_idx": 29812, "episode_idx": 170, "frame_idx": 41, "global_frame_idx": 29812, "task_index": 34}, {"db_idx": 29813, "episode_idx": 170, "frame_idx": 42, "global_frame_idx": 
29813, "task_index": 34}, {"db_idx": 29814, "episode_idx": 170, "frame_idx": 43, "global_frame_idx": 29814, "task_index": 34}, {"db_idx": 29815, "episode_idx": 170, "frame_idx": 44, "global_frame_idx": 29815, "task_index": 34}, {"db_idx": 29816, "episode_idx": 170, "frame_idx": 45, "global_frame_idx": 29816, "task_index": 34}, {"db_idx": 29817, "episode_idx": 170, "frame_idx": 46, "global_frame_idx": 29817, "task_index": 34}, {"db_idx": 29818, "episode_idx": 170, "frame_idx": 47, "global_frame_idx": 29818, "task_index": 34}, {"db_idx": 29819, "episode_idx": 170, "frame_idx": 48, "global_frame_idx": 29819, "task_index": 34}, {"db_idx": 29820, "episode_idx": 170, "frame_idx": 49, "global_frame_idx": 29820, "task_index": 34}, {"db_idx": 29821, "episode_idx": 170, "frame_idx": 50, "global_frame_idx": 29821, "task_index": 34}, {"db_idx": 29822, "episode_idx": 170, "frame_idx": 51, "global_frame_idx": 29822, "task_index": 34}, {"db_idx": 29823, "episode_idx": 170, "frame_idx": 52, "global_frame_idx": 29823, "task_index": 34}, {"db_idx": 29824, "episode_idx": 170, "frame_idx": 53, "global_frame_idx": 29824, "task_index": 34}, {"db_idx": 29825, "episode_idx": 170, "frame_idx": 54, "global_frame_idx": 29825, "task_index": 34}, {"db_idx": 29826, "episode_idx": 170, "frame_idx": 55, "global_frame_idx": 29826, "task_index": 34}, {"db_idx": 29827, "episode_idx": 170, "frame_idx": 56, "global_frame_idx": 29827, "task_index": 34}, {"db_idx": 29828, "episode_idx": 170, "frame_idx": 57, "global_frame_idx": 29828, "task_index": 34}, {"db_idx": 29829, "episode_idx": 170, "frame_idx": 58, "global_frame_idx": 29829, "task_index": 34}, {"db_idx": 29830, "episode_idx": 170, "frame_idx": 59, "global_frame_idx": 29830, "task_index": 34}, {"db_idx": 29831, "episode_idx": 170, "frame_idx": 60, "global_frame_idx": 29831, "task_index": 34}, {"db_idx": 29832, "episode_idx": 170, "frame_idx": 61, "global_frame_idx": 29832, "task_index": 34}, {"db_idx": 29833, "episode_idx": 170, "frame_idx": 62, 
"global_frame_idx": 29833, "task_index": 34}, {"db_idx": 29834, "episode_idx": 170, "frame_idx": 63, "global_frame_idx": 29834, "task_index": 34}, {"db_idx": 29835, "episode_idx": 170, "frame_idx": 64, "global_frame_idx": 29835, "task_index": 34}, {"db_idx": 29836, "episode_idx": 170, "frame_idx": 65, "global_frame_idx": 29836, "task_index": 34}, {"db_idx": 29837, "episode_idx": 170, "frame_idx": 66, "global_frame_idx": 29837, "task_index": 34}, {"db_idx": 29838, "episode_idx": 170, "frame_idx": 67, "global_frame_idx": 29838, "task_index": 34}, {"db_idx": 29839, "episode_idx": 170, "frame_idx": 68, "global_frame_idx": 29839, "task_index": 34}, {"db_idx": 29840, "episode_idx": 170, "frame_idx": 69, "global_frame_idx": 29840, "task_index": 34}, {"db_idx": 29841, "episode_idx": 170, "frame_idx": 70, "global_frame_idx": 29841, "task_index": 34}, {"db_idx": 29842, "episode_idx": 170, "frame_idx": 71, "global_frame_idx": 29842, "task_index": 34}, {"db_idx": 29843, "episode_idx": 170, "frame_idx": 72, "global_frame_idx": 29843, "task_index": 34}, {"db_idx": 29844, "episode_idx": 170, "frame_idx": 73, "global_frame_idx": 29844, "task_index": 34}, {"db_idx": 29845, "episode_idx": 170, "frame_idx": 74, "global_frame_idx": 29845, "task_index": 34}, {"db_idx": 29846, "episode_idx": 170, "frame_idx": 75, "global_frame_idx": 29846, "task_index": 34}, {"db_idx": 29847, "episode_idx": 170, "frame_idx": 76, "global_frame_idx": 29847, "task_index": 34}, {"db_idx": 29848, "episode_idx": 170, "frame_idx": 77, "global_frame_idx": 29848, "task_index": 34}, {"db_idx": 29849, "episode_idx": 170, "frame_idx": 78, "global_frame_idx": 29849, "task_index": 34}, {"db_idx": 29850, "episode_idx": 170, "frame_idx": 79, "global_frame_idx": 29850, "task_index": 34}, {"db_idx": 29851, "episode_idx": 170, "frame_idx": 80, "global_frame_idx": 29851, "task_index": 34}, {"db_idx": 29852, "episode_idx": 170, "frame_idx": 81, "global_frame_idx": 29852, "task_index": 34}, {"db_idx": 29853, "episode_idx": 
170, "frame_idx": 82, "global_frame_idx": 29853, "task_index": 34}, {"db_idx": 29854, "episode_idx": 170, "frame_idx": 83, "global_frame_idx": 29854, "task_index": 34}, {"db_idx": 29855, "episode_idx": 170, "frame_idx": 84, "global_frame_idx": 29855, "task_index": 34}, {"db_idx": 29856, "episode_idx": 170, "frame_idx": 85, "global_frame_idx": 29856, "task_index": 34}, {"db_idx": 29857, "episode_idx": 170, "frame_idx": 86, "global_frame_idx": 29857, "task_index": 34}, {"db_idx": 29858, "episode_idx": 170, "frame_idx": 87, "global_frame_idx": 29858, "task_index": 34}, {"db_idx": 29859, "episode_idx": 170, "frame_idx": 88, "global_frame_idx": 29859, "task_index": 34}, {"db_idx": 29860, "episode_idx": 170, "frame_idx": 89, "global_frame_idx": 29860, "task_index": 34}, {"db_idx": 29861, "episode_idx": 170, "frame_idx": 90, "global_frame_idx": 29861, "task_index": 34}, {"db_idx": 29862, "episode_idx": 170, "frame_idx": 91, "global_frame_idx": 29862, "task_index": 34}, {"db_idx": 29863, "episode_idx": 170, "frame_idx": 92, "global_frame_idx": 29863, "task_index": 34}, {"db_idx": 29864, "episode_idx": 170, "frame_idx": 93, "global_frame_idx": 29864, "task_index": 34}, {"db_idx": 29865, "episode_idx": 170, "frame_idx": 94, "global_frame_idx": 29865, "task_index": 34}, {"db_idx": 29866, "episode_idx": 170, "frame_idx": 95, "global_frame_idx": 29866, "task_index": 34}, {"db_idx": 29867, "episode_idx": 170, "frame_idx": 96, "global_frame_idx": 29867, "task_index": 34}, {"db_idx": 29868, "episode_idx": 170, "frame_idx": 97, "global_frame_idx": 29868, "task_index": 34}, {"db_idx": 29869, "episode_idx": 170, "frame_idx": 98, "global_frame_idx": 29869, "task_index": 34}, {"db_idx": 29870, "episode_idx": 170, "frame_idx": 99, "global_frame_idx": 29870, "task_index": 34}, {"db_idx": 29871, "episode_idx": 170, "frame_idx": 100, "global_frame_idx": 29871, "task_index": 34}, {"db_idx": 29872, "episode_idx": 170, "frame_idx": 101, "global_frame_idx": 29872, "task_index": 34}, {"db_idx": 
29873, "episode_idx": 170, "frame_idx": 102, "global_frame_idx": 29873, "task_index": 34}, {"db_idx": 29874, "episode_idx": 170, "frame_idx": 103, "global_frame_idx": 29874, "task_index": 34}, {"db_idx": 29875, "episode_idx": 170, "frame_idx": 104, "global_frame_idx": 29875, "task_index": 34}, {"db_idx": 29876, "episode_idx": 170, "frame_idx": 105, "global_frame_idx": 29876, "task_index": 34}, {"db_idx": 29877, "episode_idx": 170, "frame_idx": 106, "global_frame_idx": 29877, "task_index": 34}, {"db_idx": 29878, "episode_idx": 170, "frame_idx": 107, "global_frame_idx": 29878, "task_index": 34}, {"db_idx": 29879, "episode_idx": 170, "frame_idx": 108, "global_frame_idx": 29879, "task_index": 34}, {"db_idx": 29880, "episode_idx": 170, "frame_idx": 109, "global_frame_idx": 29880, "task_index": 34}, {"db_idx": 29881, "episode_idx": 170, "frame_idx": 110, "global_frame_idx": 29881, "task_index": 34}, {"db_idx": 29882, "episode_idx": 170, "frame_idx": 111, "global_frame_idx": 29882, "task_index": 34}, {"db_idx": 29883, "episode_idx": 170, "frame_idx": 112, "global_frame_idx": 29883, "task_index": 34}, {"db_idx": 29884, "episode_idx": 170, "frame_idx": 113, "global_frame_idx": 29884, "task_index": 34}, {"db_idx": 29885, "episode_idx": 170, "frame_idx": 114, "global_frame_idx": 29885, "task_index": 34}, {"db_idx": 29886, "episode_idx": 170, "frame_idx": 115, "global_frame_idx": 29886, "task_index": 34}, {"db_idx": 29887, "episode_idx": 170, "frame_idx": 116, "global_frame_idx": 29887, "task_index": 34}, {"db_idx": 29888, "episode_idx": 170, "frame_idx": 117, "global_frame_idx": 29888, "task_index": 34}, {"db_idx": 29889, "episode_idx": 170, "frame_idx": 118, "global_frame_idx": 29889, "task_index": 34}, {"db_idx": 29890, "episode_idx": 170, "frame_idx": 119, "global_frame_idx": 29890, "task_index": 34}, {"db_idx": 29891, "episode_idx": 170, "frame_idx": 120, "global_frame_idx": 29891, "task_index": 34}, {"db_idx": 29892, "episode_idx": 170, "frame_idx": 121, 
"global_frame_idx": 29892, "task_index": 34}, {"db_idx": 29893, "episode_idx": 170, "frame_idx": 122, "global_frame_idx": 29893, "task_index": 34}, {"db_idx": 29894, "episode_idx": 170, "frame_idx": 123, "global_frame_idx": 29894, "task_index": 34}, {"db_idx": 29895, "episode_idx": 170, "frame_idx": 124, "global_frame_idx": 29895, "task_index": 34}, {"db_idx": 29896, "episode_idx": 170, "frame_idx": 125, "global_frame_idx": 29896, "task_index": 34}, {"db_idx": 29897, "episode_idx": 170, "frame_idx": 126, "global_frame_idx": 29897, "task_index": 34}, {"db_idx": 29898, "episode_idx": 170, "frame_idx": 127, "global_frame_idx": 29898, "task_index": 34}, {"db_idx": 29899, "episode_idx": 170, "frame_idx": 128, "global_frame_idx": 29899, "task_index": 34}, {"db_idx": 29900, "episode_idx": 170, "frame_idx": 129, "global_frame_idx": 29900, "task_index": 34}, {"db_idx": 29901, "episode_idx": 170, "frame_idx": 130, "global_frame_idx": 29901, "task_index": 34}, {"db_idx": 29902, "episode_idx": 170, "frame_idx": 131, "global_frame_idx": 29902, "task_index": 34}, {"db_idx": 29903, "episode_idx": 170, "frame_idx": 132, "global_frame_idx": 29903, "task_index": 34}, {"db_idx": 29904, "episode_idx": 170, "frame_idx": 133, "global_frame_idx": 29904, "task_index": 34}, {"db_idx": 29905, "episode_idx": 170, "frame_idx": 134, "global_frame_idx": 29905, "task_index": 34}, {"db_idx": 29906, "episode_idx": 170, "frame_idx": 135, "global_frame_idx": 29906, "task_index": 34}, {"db_idx": 29907, "episode_idx": 170, "frame_idx": 136, "global_frame_idx": 29907, "task_index": 34}, {"db_idx": 29908, "episode_idx": 170, "frame_idx": 137, "global_frame_idx": 29908, "task_index": 34}, {"db_idx": 29909, "episode_idx": 170, "frame_idx": 138, "global_frame_idx": 29909, "task_index": 34}, {"db_idx": 29910, "episode_idx": 170, "frame_idx": 139, "global_frame_idx": 29910, "task_index": 34}, {"db_idx": 29911, "episode_idx": 170, "frame_idx": 140, "global_frame_idx": 29911, "task_index": 34}, {"db_idx": 
29912, "episode_idx": 170, "frame_idx": 141, "global_frame_idx": 29912, "task_index": 34}, {"db_idx": 29913, "episode_idx": 170, "frame_idx": 142, "global_frame_idx": 29913, "task_index": 34}, {"db_idx": 29914, "episode_idx": 170, "frame_idx": 143, "global_frame_idx": 29914, "task_index": 34}, {"db_idx": 29915, "episode_idx": 170, "frame_idx": 144, "global_frame_idx": 29915, "task_index": 34}, {"db_idx": 29916, "episode_idx": 170, "frame_idx": 145, "global_frame_idx": 29916, "task_index": 34}, {"db_idx": 29917, "episode_idx": 170, "frame_idx": 146, "global_frame_idx": 29917, "task_index": 34}, {"db_idx": 29918, "episode_idx": 170, "frame_idx": 147, "global_frame_idx": 29918, "task_index": 34}, {"db_idx": 29919, "episode_idx": 170, "frame_idx": 148, "global_frame_idx": 29919, "task_index": 34}, {"db_idx": 29920, "episode_idx": 170, "frame_idx": 149, "global_frame_idx": 29920, "task_index": 34}, {"db_idx": 29921, "episode_idx": 170, "frame_idx": 150, "global_frame_idx": 29921, "task_index": 34}, {"db_idx": 29922, "episode_idx": 170, "frame_idx": 151, "global_frame_idx": 29922, "task_index": 34}, {"db_idx": 29923, "episode_idx": 170, "frame_idx": 152, "global_frame_idx": 29923, "task_index": 34}, {"db_idx": 29924, "episode_idx": 170, "frame_idx": 153, "global_frame_idx": 29924, "task_index": 34}, {"db_idx": 29925, "episode_idx": 170, "frame_idx": 154, "global_frame_idx": 29925, "task_index": 34}, {"db_idx": 29926, "episode_idx": 170, "frame_idx": 155, "global_frame_idx": 29926, "task_index": 34}, {"db_idx": 29927, "episode_idx": 170, "frame_idx": 156, "global_frame_idx": 29927, "task_index": 34}, {"db_idx": 29928, "episode_idx": 170, "frame_idx": 157, "global_frame_idx": 29928, "task_index": 34}, {"db_idx": 29929, "episode_idx": 170, "frame_idx": 158, "global_frame_idx": 29929, "task_index": 34}, {"db_idx": 29930, "episode_idx": 170, "frame_idx": 159, "global_frame_idx": 29930, "task_index": 34}, {"db_idx": 29931, "episode_idx": 170, "frame_idx": 160, 
"global_frame_idx": 29931, "task_index": 34}, {"db_idx": 29932, "episode_idx": 170, "frame_idx": 161, "global_frame_idx": 29932, "task_index": 34}, {"db_idx": 29933, "episode_idx": 170, "frame_idx": 162, "global_frame_idx": 29933, "task_index": 34}, {"db_idx": 29934, "episode_idx": 170, "frame_idx": 163, "global_frame_idx": 29934, "task_index": 34}, {"db_idx": 29935, "episode_idx": 170, "frame_idx": 164, "global_frame_idx": 29935, "task_index": 34}, {"db_idx": 29936, "episode_idx": 170, "frame_idx": 165, "global_frame_idx": 29936, "task_index": 34}, {"db_idx": 29937, "episode_idx": 170, "frame_idx": 166, "global_frame_idx": 29937, "task_index": 34}, {"db_idx": 29938, "episode_idx": 170, "frame_idx": 167, "global_frame_idx": 29938, "task_index": 34}, {"db_idx": 29939, "episode_idx": 170, "frame_idx": 168, "global_frame_idx": 29939, "task_index": 34}, {"db_idx": 29940, "episode_idx": 170, "frame_idx": 169, "global_frame_idx": 29940, "task_index": 34}, {"db_idx": 29941, "episode_idx": 170, "frame_idx": 170, "global_frame_idx": 29941, "task_index": 34}, {"db_idx": 29942, "episode_idx": 170, "frame_idx": 171, "global_frame_idx": 29942, "task_index": 34}, {"db_idx": 29943, "episode_idx": 170, "frame_idx": 172, "global_frame_idx": 29943, "task_index": 34}, {"db_idx": 29944, "episode_idx": 170, "frame_idx": 173, "global_frame_idx": 29944, "task_index": 34}, {"db_idx": 29945, "episode_idx": 170, "frame_idx": 174, "global_frame_idx": 29945, "task_index": 34}, {"db_idx": 29946, "episode_idx": 170, "frame_idx": 175, "global_frame_idx": 29946, "task_index": 34}, {"db_idx": 29947, "episode_idx": 170, "frame_idx": 176, "global_frame_idx": 29947, "task_index": 34}, {"db_idx": 29948, "episode_idx": 170, "frame_idx": 177, "global_frame_idx": 29948, "task_index": 34}, {"db_idx": 29949, "episode_idx": 170, "frame_idx": 178, "global_frame_idx": 29949, "task_index": 34}, {"db_idx": 29950, "episode_idx": 170, "frame_idx": 179, "global_frame_idx": 29950, "task_index": 34}, {"db_idx": 
29951, "episode_idx": 170, "frame_idx": 180, "global_frame_idx": 29951, "task_index": 34}, {"db_idx": 29952, "episode_idx": 170, "frame_idx": 181, "global_frame_idx": 29952, "task_index": 34}, {"db_idx": 29953, "episode_idx": 171, "frame_idx": 0, "global_frame_idx": 29953, "task_index": 34}, {"db_idx": 29954, "episode_idx": 171, "frame_idx": 1, "global_frame_idx": 29954, "task_index": 34}, {"db_idx": 29955, "episode_idx": 171, "frame_idx": 2, "global_frame_idx": 29955, "task_index": 34}, {"db_idx": 29956, "episode_idx": 171, "frame_idx": 3, "global_frame_idx": 29956, "task_index": 34}, {"db_idx": 29957, "episode_idx": 171, "frame_idx": 4, "global_frame_idx": 29957, "task_index": 34}, {"db_idx": 29958, "episode_idx": 171, "frame_idx": 5, "global_frame_idx": 29958, "task_index": 34}, {"db_idx": 29959, "episode_idx": 171, "frame_idx": 6, "global_frame_idx": 29959, "task_index": 34}, {"db_idx": 29960, "episode_idx": 171, "frame_idx": 7, "global_frame_idx": 29960, "task_index": 34}, {"db_idx": 29961, "episode_idx": 171, "frame_idx": 8, "global_frame_idx": 29961, "task_index": 34}, {"db_idx": 29962, "episode_idx": 171, "frame_idx": 9, "global_frame_idx": 29962, "task_index": 34}, {"db_idx": 29963, "episode_idx": 171, "frame_idx": 10, "global_frame_idx": 29963, "task_index": 34}, {"db_idx": 29964, "episode_idx": 171, "frame_idx": 11, "global_frame_idx": 29964, "task_index": 34}, {"db_idx": 29965, "episode_idx": 171, "frame_idx": 12, "global_frame_idx": 29965, "task_index": 34}, {"db_idx": 29966, "episode_idx": 171, "frame_idx": 13, "global_frame_idx": 29966, "task_index": 34}, {"db_idx": 29967, "episode_idx": 171, "frame_idx": 14, "global_frame_idx": 29967, "task_index": 34}, {"db_idx": 29968, "episode_idx": 171, "frame_idx": 15, "global_frame_idx": 29968, "task_index": 34}, {"db_idx": 29969, "episode_idx": 171, "frame_idx": 16, "global_frame_idx": 29969, "task_index": 34}, {"db_idx": 29970, "episode_idx": 171, "frame_idx": 17, "global_frame_idx": 29970, "task_index": 
34}, {"db_idx": 29971, "episode_idx": 171, "frame_idx": 18, "global_frame_idx": 29971, "task_index": 34}, {"db_idx": 29972, "episode_idx": 171, "frame_idx": 19, "global_frame_idx": 29972, "task_index": 34}, {"db_idx": 29973, "episode_idx": 171, "frame_idx": 20, "global_frame_idx": 29973, "task_index": 34}, {"db_idx": 29974, "episode_idx": 171, "frame_idx": 21, "global_frame_idx": 29974, "task_index": 34}, {"db_idx": 29975, "episode_idx": 171, "frame_idx": 22, "global_frame_idx": 29975, "task_index": 34}, {"db_idx": 29976, "episode_idx": 171, "frame_idx": 23, "global_frame_idx": 29976, "task_index": 34}, {"db_idx": 29977, "episode_idx": 171, "frame_idx": 24, "global_frame_idx": 29977, "task_index": 34}, {"db_idx": 29978, "episode_idx": 171, "frame_idx": 25, "global_frame_idx": 29978, "task_index": 34}, {"db_idx": 29979, "episode_idx": 171, "frame_idx": 26, "global_frame_idx": 29979, "task_index": 34}, {"db_idx": 29980, "episode_idx": 171, "frame_idx": 27, "global_frame_idx": 29980, "task_index": 34}, {"db_idx": 29981, "episode_idx": 171, "frame_idx": 28, "global_frame_idx": 29981, "task_index": 34}, {"db_idx": 29982, "episode_idx": 171, "frame_idx": 29, "global_frame_idx": 29982, "task_index": 34}, {"db_idx": 29983, "episode_idx": 171, "frame_idx": 30, "global_frame_idx": 29983, "task_index": 34}, {"db_idx": 29984, "episode_idx": 171, "frame_idx": 31, "global_frame_idx": 29984, "task_index": 34}, {"db_idx": 29985, "episode_idx": 171, "frame_idx": 32, "global_frame_idx": 29985, "task_index": 34}, {"db_idx": 29986, "episode_idx": 171, "frame_idx": 33, "global_frame_idx": 29986, "task_index": 34}, {"db_idx": 29987, "episode_idx": 171, "frame_idx": 34, "global_frame_idx": 29987, "task_index": 34}, {"db_idx": 29988, "episode_idx": 171, "frame_idx": 35, "global_frame_idx": 29988, "task_index": 34}, {"db_idx": 29989, "episode_idx": 171, "frame_idx": 36, "global_frame_idx": 29989, "task_index": 34}, {"db_idx": 29990, "episode_idx": 171, "frame_idx": 37, "global_frame_idx": 
29990, "task_index": 34}, {"db_idx": 29991, "episode_idx": 171, "frame_idx": 38, "global_frame_idx": 29991, "task_index": 34}, {"db_idx": 29992, "episode_idx": 171, "frame_idx": 39, "global_frame_idx": 29992, "task_index": 34}, {"db_idx": 29993, "episode_idx": 171, "frame_idx": 40, "global_frame_idx": 29993, "task_index": 34}, {"db_idx": 29994, "episode_idx": 171, "frame_idx": 41, "global_frame_idx": 29994, "task_index": 34}, {"db_idx": 29995, "episode_idx": 171, "frame_idx": 42, "global_frame_idx": 29995, "task_index": 34}, {"db_idx": 29996, "episode_idx": 171, "frame_idx": 43, "global_frame_idx": 29996, "task_index": 34}, {"db_idx": 29997, "episode_idx": 171, "frame_idx": 44, "global_frame_idx": 29997, "task_index": 34}, {"db_idx": 29998, "episode_idx": 171, "frame_idx": 45, "global_frame_idx": 29998, "task_index": 34}, {"db_idx": 29999, "episode_idx": 171, "frame_idx": 46, "global_frame_idx": 29999, "task_index": 34}, {"db_idx": 30000, "episode_idx": 171, "frame_idx": 47, "global_frame_idx": 30000, "task_index": 34}, {"db_idx": 30001, "episode_idx": 171, "frame_idx": 48, "global_frame_idx": 30001, "task_index": 34}, {"db_idx": 30002, "episode_idx": 171, "frame_idx": 49, "global_frame_idx": 30002, "task_index": 34}, {"db_idx": 30003, "episode_idx": 171, "frame_idx": 50, "global_frame_idx": 30003, "task_index": 34}, {"db_idx": 30004, "episode_idx": 171, "frame_idx": 51, "global_frame_idx": 30004, "task_index": 34}, {"db_idx": 30005, "episode_idx": 171, "frame_idx": 52, "global_frame_idx": 30005, "task_index": 34}, {"db_idx": 30006, "episode_idx": 171, "frame_idx": 53, "global_frame_idx": 30006, "task_index": 34}, {"db_idx": 30007, "episode_idx": 171, "frame_idx": 54, "global_frame_idx": 30007, "task_index": 34}, {"db_idx": 30008, "episode_idx": 171, "frame_idx": 55, "global_frame_idx": 30008, "task_index": 34}, {"db_idx": 30009, "episode_idx": 171, "frame_idx": 56, "global_frame_idx": 30009, "task_index": 34}, {"db_idx": 30010, "episode_idx": 171, "frame_idx": 57, 
"global_frame_idx": 30010, "task_index": 34}, {"db_idx": 30011, "episode_idx": 171, "frame_idx": 58, "global_frame_idx": 30011, "task_index": 34}, {"db_idx": 30012, "episode_idx": 171, "frame_idx": 59, "global_frame_idx": 30012, "task_index": 34}, {"db_idx": 30013, "episode_idx": 171, "frame_idx": 60, "global_frame_idx": 30013, "task_index": 34}, {"db_idx": 30014, "episode_idx": 171, "frame_idx": 61, "global_frame_idx": 30014, "task_index": 34}, {"db_idx": 30015, "episode_idx": 171, "frame_idx": 62, "global_frame_idx": 30015, "task_index": 34}, {"db_idx": 30016, "episode_idx": 171, "frame_idx": 63, "global_frame_idx": 30016, "task_index": 34}, {"db_idx": 30017, "episode_idx": 171, "frame_idx": 64, "global_frame_idx": 30017, "task_index": 34}, {"db_idx": 30018, "episode_idx": 171, "frame_idx": 65, "global_frame_idx": 30018, "task_index": 34}, {"db_idx": 30019, "episode_idx": 171, "frame_idx": 66, "global_frame_idx": 30019, "task_index": 34}, {"db_idx": 30020, "episode_idx": 171, "frame_idx": 67, "global_frame_idx": 30020, "task_index": 34}, {"db_idx": 30021, "episode_idx": 171, "frame_idx": 68, "global_frame_idx": 30021, "task_index": 34}, {"db_idx": 30022, "episode_idx": 171, "frame_idx": 69, "global_frame_idx": 30022, "task_index": 34}, {"db_idx": 30023, "episode_idx": 171, "frame_idx": 70, "global_frame_idx": 30023, "task_index": 34}, {"db_idx": 30024, "episode_idx": 171, "frame_idx": 71, "global_frame_idx": 30024, "task_index": 34}, {"db_idx": 30025, "episode_idx": 171, "frame_idx": 72, "global_frame_idx": 30025, "task_index": 34}, {"db_idx": 30026, "episode_idx": 171, "frame_idx": 73, "global_frame_idx": 30026, "task_index": 34}, {"db_idx": 30027, "episode_idx": 171, "frame_idx": 74, "global_frame_idx": 30027, "task_index": 34}, {"db_idx": 30028, "episode_idx": 171, "frame_idx": 75, "global_frame_idx": 30028, "task_index": 34}, {"db_idx": 30029, "episode_idx": 171, "frame_idx": 76, "global_frame_idx": 30029, "task_index": 34}, {"db_idx": 30030, "episode_idx": 
171, "frame_idx": 77, "global_frame_idx": 30030, "task_index": 34}, {"db_idx": 30031, "episode_idx": 171, "frame_idx": 78, "global_frame_idx": 30031, "task_index": 34}, {"db_idx": 30032, "episode_idx": 171, "frame_idx": 79, "global_frame_idx": 30032, "task_index": 34}, {"db_idx": 30033, "episode_idx": 171, "frame_idx": 80, "global_frame_idx": 30033, "task_index": 34}, {"db_idx": 30034, "episode_idx": 171, "frame_idx": 81, "global_frame_idx": 30034, "task_index": 34}, {"db_idx": 30035, "episode_idx": 171, "frame_idx": 82, "global_frame_idx": 30035, "task_index": 34}, {"db_idx": 30036, "episode_idx": 171, "frame_idx": 83, "global_frame_idx": 30036, "task_index": 34}, {"db_idx": 30037, "episode_idx": 171, "frame_idx": 84, "global_frame_idx": 30037, "task_index": 34}, {"db_idx": 30038, "episode_idx": 171, "frame_idx": 85, "global_frame_idx": 30038, "task_index": 34}, {"db_idx": 30039, "episode_idx": 171, "frame_idx": 86, "global_frame_idx": 30039, "task_index": 34}, {"db_idx": 30040, "episode_idx": 171, "frame_idx": 87, "global_frame_idx": 30040, "task_index": 34}, {"db_idx": 30041, "episode_idx": 171, "frame_idx": 88, "global_frame_idx": 30041, "task_index": 34}, {"db_idx": 30042, "episode_idx": 171, "frame_idx": 89, "global_frame_idx": 30042, "task_index": 34}, {"db_idx": 30043, "episode_idx": 171, "frame_idx": 90, "global_frame_idx": 30043, "task_index": 34}, {"db_idx": 30044, "episode_idx": 171, "frame_idx": 91, "global_frame_idx": 30044, "task_index": 34}, {"db_idx": 30045, "episode_idx": 171, "frame_idx": 92, "global_frame_idx": 30045, "task_index": 34}, {"db_idx": 30046, "episode_idx": 171, "frame_idx": 93, "global_frame_idx": 30046, "task_index": 34}, {"db_idx": 30047, "episode_idx": 171, "frame_idx": 94, "global_frame_idx": 30047, "task_index": 34}, {"db_idx": 30048, "episode_idx": 171, "frame_idx": 95, "global_frame_idx": 30048, "task_index": 34}, {"db_idx": 30049, "episode_idx": 171, "frame_idx": 96, "global_frame_idx": 30049, "task_index": 34}, {"db_idx": 
30050, "episode_idx": 171, "frame_idx": 97, "global_frame_idx": 30050, "task_index": 34}, {"db_idx": 30051, "episode_idx": 171, "frame_idx": 98, "global_frame_idx": 30051, "task_index": 34}, {"db_idx": 30052, "episode_idx": 171, "frame_idx": 99, "global_frame_idx": 30052, "task_index": 34}, {"db_idx": 30053, "episode_idx": 171, "frame_idx": 100, "global_frame_idx": 30053, "task_index": 34}, {"db_idx": 30054, "episode_idx": 171, "frame_idx": 101, "global_frame_idx": 30054, "task_index": 34}, {"db_idx": 30055, "episode_idx": 171, "frame_idx": 102, "global_frame_idx": 30055, "task_index": 34}, {"db_idx": 30056, "episode_idx": 171, "frame_idx": 103, "global_frame_idx": 30056, "task_index": 34}, {"db_idx": 30057, "episode_idx": 171, "frame_idx": 104, "global_frame_idx": 30057, "task_index": 34}, {"db_idx": 30058, "episode_idx": 171, "frame_idx": 105, "global_frame_idx": 30058, "task_index": 34}, {"db_idx": 30059, "episode_idx": 171, "frame_idx": 106, "global_frame_idx": 30059, "task_index": 34}, {"db_idx": 30060, "episode_idx": 171, "frame_idx": 107, "global_frame_idx": 30060, "task_index": 34}, {"db_idx": 30061, "episode_idx": 171, "frame_idx": 108, "global_frame_idx": 30061, "task_index": 34}, {"db_idx": 30062, "episode_idx": 171, "frame_idx": 109, "global_frame_idx": 30062, "task_index": 34}, {"db_idx": 30063, "episode_idx": 171, "frame_idx": 110, "global_frame_idx": 30063, "task_index": 34}, {"db_idx": 30064, "episode_idx": 171, "frame_idx": 111, "global_frame_idx": 30064, "task_index": 34}, {"db_idx": 30065, "episode_idx": 171, "frame_idx": 112, "global_frame_idx": 30065, "task_index": 34}, {"db_idx": 30066, "episode_idx": 171, "frame_idx": 113, "global_frame_idx": 30066, "task_index": 34}, {"db_idx": 30067, "episode_idx": 171, "frame_idx": 114, "global_frame_idx": 30067, "task_index": 34}, {"db_idx": 30068, "episode_idx": 171, "frame_idx": 115, "global_frame_idx": 30068, "task_index": 34}, {"db_idx": 30069, "episode_idx": 171, "frame_idx": 116, "global_frame_idx": 
30069, "task_index": 34}, {"db_idx": 30070, "episode_idx": 171, "frame_idx": 117, "global_frame_idx": 30070, "task_index": 34}, {"db_idx": 30071, "episode_idx": 171, "frame_idx": 118, "global_frame_idx": 30071, "task_index": 34}, {"db_idx": 30072, "episode_idx": 171, "frame_idx": 119, "global_frame_idx": 30072, "task_index": 34}, {"db_idx": 30073, "episode_idx": 171, "frame_idx": 120, "global_frame_idx": 30073, "task_index": 34}, {"db_idx": 30074, "episode_idx": 171, "frame_idx": 121, "global_frame_idx": 30074, "task_index": 34}, {"db_idx": 30075, "episode_idx": 171, "frame_idx": 122, "global_frame_idx": 30075, "task_index": 34}, {"db_idx": 30076, "episode_idx": 171, "frame_idx": 123, "global_frame_idx": 30076, "task_index": 34}, {"db_idx": 30077, "episode_idx": 171, "frame_idx": 124, "global_frame_idx": 30077, "task_index": 34}, {"db_idx": 30078, "episode_idx": 171, "frame_idx": 125, "global_frame_idx": 30078, "task_index": 34}, {"db_idx": 30079, "episode_idx": 171, "frame_idx": 126, "global_frame_idx": 30079, "task_index": 34}, {"db_idx": 30080, "episode_idx": 171, "frame_idx": 127, "global_frame_idx": 30080, "task_index": 34}, {"db_idx": 30081, "episode_idx": 171, "frame_idx": 128, "global_frame_idx": 30081, "task_index": 34}, {"db_idx": 30082, "episode_idx": 171, "frame_idx": 129, "global_frame_idx": 30082, "task_index": 34}, {"db_idx": 30083, "episode_idx": 171, "frame_idx": 130, "global_frame_idx": 30083, "task_index": 34}, {"db_idx": 30084, "episode_idx": 171, "frame_idx": 131, "global_frame_idx": 30084, "task_index": 34}, {"db_idx": 30085, "episode_idx": 171, "frame_idx": 132, "global_frame_idx": 30085, "task_index": 34}, {"db_idx": 30086, "episode_idx": 171, "frame_idx": 133, "global_frame_idx": 30086, "task_index": 34}, {"db_idx": 30087, "episode_idx": 171, "frame_idx": 134, "global_frame_idx": 30087, "task_index": 34}, {"db_idx": 30088, "episode_idx": 171, "frame_idx": 135, "global_frame_idx": 30088, "task_index": 34}, {"db_idx": 30089, "episode_idx": 
171, "frame_idx": 136, "global_frame_idx": 30089, "task_index": 34}, {"db_idx": 30090, "episode_idx": 171, "frame_idx": 137, "global_frame_idx": 30090, "task_index": 34}, {"db_idx": 30091, "episode_idx": 171, "frame_idx": 138, "global_frame_idx": 30091, "task_index": 34}, {"db_idx": 30092, "episode_idx": 172, "frame_idx": 0, "global_frame_idx": 30092, "task_index": 34}, {"db_idx": 30093, "episode_idx": 172, "frame_idx": 1, "global_frame_idx": 30093, "task_index": 34}, {"db_idx": 30094, "episode_idx": 172, "frame_idx": 2, "global_frame_idx": 30094, "task_index": 34}, {"db_idx": 30095, "episode_idx": 172, "frame_idx": 3, "global_frame_idx": 30095, "task_index": 34}, {"db_idx": 30096, "episode_idx": 172, "frame_idx": 4, "global_frame_idx": 30096, "task_index": 34}, {"db_idx": 30097, "episode_idx": 172, "frame_idx": 5, "global_frame_idx": 30097, "task_index": 34}, {"db_idx": 30098, "episode_idx": 172, "frame_idx": 6, "global_frame_idx": 30098, "task_index": 34}, {"db_idx": 30099, "episode_idx": 172, "frame_idx": 7, "global_frame_idx": 30099, "task_index": 34}, {"db_idx": 30100, "episode_idx": 172, "frame_idx": 8, "global_frame_idx": 30100, "task_index": 34}, {"db_idx": 30101, "episode_idx": 172, "frame_idx": 9, "global_frame_idx": 30101, "task_index": 34}, {"db_idx": 30102, "episode_idx": 172, "frame_idx": 10, "global_frame_idx": 30102, "task_index": 34}, {"db_idx": 30103, "episode_idx": 172, "frame_idx": 11, "global_frame_idx": 30103, "task_index": 34}, {"db_idx": 30104, "episode_idx": 172, "frame_idx": 12, "global_frame_idx": 30104, "task_index": 34}, {"db_idx": 30105, "episode_idx": 172, "frame_idx": 13, "global_frame_idx": 30105, "task_index": 34}, {"db_idx": 30106, "episode_idx": 172, "frame_idx": 14, "global_frame_idx": 30106, "task_index": 34}, {"db_idx": 30107, "episode_idx": 172, "frame_idx": 15, "global_frame_idx": 30107, "task_index": 34}, {"db_idx": 30108, "episode_idx": 172, "frame_idx": 16, "global_frame_idx": 30108, "task_index": 34}, {"db_idx": 30109, 
"episode_idx": 172, "frame_idx": 17, "global_frame_idx": 30109, "task_index": 34}, {"db_idx": 30110, "episode_idx": 172, "frame_idx": 18, "global_frame_idx": 30110, "task_index": 34}, {"db_idx": 30111, "episode_idx": 172, "frame_idx": 19, "global_frame_idx": 30111, "task_index": 34}, {"db_idx": 30112, "episode_idx": 172, "frame_idx": 20, "global_frame_idx": 30112, "task_index": 34}, {"db_idx": 30113, "episode_idx": 172, "frame_idx": 21, "global_frame_idx": 30113, "task_index": 34}, {"db_idx": 30114, "episode_idx": 172, "frame_idx": 22, "global_frame_idx": 30114, "task_index": 34}, {"db_idx": 30115, "episode_idx": 172, "frame_idx": 23, "global_frame_idx": 30115, "task_index": 34}, {"db_idx": 30116, "episode_idx": 172, "frame_idx": 24, "global_frame_idx": 30116, "task_index": 34}, {"db_idx": 30117, "episode_idx": 172, "frame_idx": 25, "global_frame_idx": 30117, "task_index": 34}, {"db_idx": 30118, "episode_idx": 172, "frame_idx": 26, "global_frame_idx": 30118, "task_index": 34}, {"db_idx": 30119, "episode_idx": 172, "frame_idx": 27, "global_frame_idx": 30119, "task_index": 34}, {"db_idx": 30120, "episode_idx": 172, "frame_idx": 28, "global_frame_idx": 30120, "task_index": 34}, {"db_idx": 30121, "episode_idx": 172, "frame_idx": 29, "global_frame_idx": 30121, "task_index": 34}, {"db_idx": 30122, "episode_idx": 172, "frame_idx": 30, "global_frame_idx": 30122, "task_index": 34}, {"db_idx": 30123, "episode_idx": 172, "frame_idx": 31, "global_frame_idx": 30123, "task_index": 34}, {"db_idx": 30124, "episode_idx": 172, "frame_idx": 32, "global_frame_idx": 30124, "task_index": 34}, {"db_idx": 30125, "episode_idx": 172, "frame_idx": 33, "global_frame_idx": 30125, "task_index": 34}, {"db_idx": 30126, "episode_idx": 172, "frame_idx": 34, "global_frame_idx": 30126, "task_index": 34}, {"db_idx": 30127, "episode_idx": 172, "frame_idx": 35, "global_frame_idx": 30127, "task_index": 34}, {"db_idx": 30128, "episode_idx": 172, "frame_idx": 36, "global_frame_idx": 30128, "task_index": 
34}, {"db_idx": 30129, "episode_idx": 172, "frame_idx": 37, "global_frame_idx": 30129, "task_index": 34}, {"db_idx": 30130, "episode_idx": 172, "frame_idx": 38, "global_frame_idx": 30130, "task_index": 34}, {"db_idx": 30131, "episode_idx": 172, "frame_idx": 39, "global_frame_idx": 30131, "task_index": 34}, {"db_idx": 30132, "episode_idx": 172, "frame_idx": 40, "global_frame_idx": 30132, "task_index": 34}, {"db_idx": 30133, "episode_idx": 172, "frame_idx": 41, "global_frame_idx": 30133, "task_index": 34}, {"db_idx": 30134, "episode_idx": 172, "frame_idx": 42, "global_frame_idx": 30134, "task_index": 34}, {"db_idx": 30135, "episode_idx": 172, "frame_idx": 43, "global_frame_idx": 30135, "task_index": 34}, {"db_idx": 30136, "episode_idx": 172, "frame_idx": 44, "global_frame_idx": 30136, "task_index": 34}, {"db_idx": 30137, "episode_idx": 172, "frame_idx": 45, "global_frame_idx": 30137, "task_index": 34}, {"db_idx": 30138, "episode_idx": 172, "frame_idx": 46, "global_frame_idx": 30138, "task_index": 34}, {"db_idx": 30139, "episode_idx": 172, "frame_idx": 47, "global_frame_idx": 30139, "task_index": 34}, {"db_idx": 30140, "episode_idx": 172, "frame_idx": 48, "global_frame_idx": 30140, "task_index": 34}, {"db_idx": 30141, "episode_idx": 172, "frame_idx": 49, "global_frame_idx": 30141, "task_index": 34}, {"db_idx": 30142, "episode_idx": 172, "frame_idx": 50, "global_frame_idx": 30142, "task_index": 34}, {"db_idx": 30143, "episode_idx": 172, "frame_idx": 51, "global_frame_idx": 30143, "task_index": 34}, {"db_idx": 30144, "episode_idx": 172, "frame_idx": 52, "global_frame_idx": 30144, "task_index": 34}, {"db_idx": 30145, "episode_idx": 172, "frame_idx": 53, "global_frame_idx": 30145, "task_index": 34}, {"db_idx": 30146, "episode_idx": 172, "frame_idx": 54, "global_frame_idx": 30146, "task_index": 34}, {"db_idx": 30147, "episode_idx": 172, "frame_idx": 55, "global_frame_idx": 30147, "task_index": 34}, {"db_idx": 30148, "episode_idx": 172, "frame_idx": 56, "global_frame_idx": 
30148, "task_index": 34}, {"db_idx": 30149, "episode_idx": 172, "frame_idx": 57, "global_frame_idx": 30149, "task_index": 34}, {"db_idx": 30150, "episode_idx": 172, "frame_idx": 58, "global_frame_idx": 30150, "task_index": 34}, {"db_idx": 30151, "episode_idx": 172, "frame_idx": 59, "global_frame_idx": 30151, "task_index": 34}, {"db_idx": 30152, "episode_idx": 172, "frame_idx": 60, "global_frame_idx": 30152, "task_index": 34}, {"db_idx": 30153, "episode_idx": 172, "frame_idx": 61, "global_frame_idx": 30153, "task_index": 34}, {"db_idx": 30154, "episode_idx": 172, "frame_idx": 62, "global_frame_idx": 30154, "task_index": 34}, {"db_idx": 30155, "episode_idx": 172, "frame_idx": 63, "global_frame_idx": 30155, "task_index": 34}, {"db_idx": 30156, "episode_idx": 172, "frame_idx": 64, "global_frame_idx": 30156, "task_index": 34}, {"db_idx": 30157, "episode_idx": 172, "frame_idx": 65, "global_frame_idx": 30157, "task_index": 34}, {"db_idx": 30158, "episode_idx": 172, "frame_idx": 66, "global_frame_idx": 30158, "task_index": 34}, {"db_idx": 30159, "episode_idx": 172, "frame_idx": 67, "global_frame_idx": 30159, "task_index": 34}, {"db_idx": 30160, "episode_idx": 172, "frame_idx": 68, "global_frame_idx": 30160, "task_index": 34}, {"db_idx": 30161, "episode_idx": 172, "frame_idx": 69, "global_frame_idx": 30161, "task_index": 34}, {"db_idx": 30162, "episode_idx": 172, "frame_idx": 70, "global_frame_idx": 30162, "task_index": 34}, {"db_idx": 30163, "episode_idx": 172, "frame_idx": 71, "global_frame_idx": 30163, "task_index": 34}, {"db_idx": 30164, "episode_idx": 172, "frame_idx": 72, "global_frame_idx": 30164, "task_index": 34}, {"db_idx": 30165, "episode_idx": 172, "frame_idx": 73, "global_frame_idx": 30165, "task_index": 34}, {"db_idx": 30166, "episode_idx": 172, "frame_idx": 74, "global_frame_idx": 30166, "task_index": 34}, {"db_idx": 30167, "episode_idx": 172, "frame_idx": 75, "global_frame_idx": 30167, "task_index": 34}, {"db_idx": 30168, "episode_idx": 172, "frame_idx": 76, 
"global_frame_idx": 30168, "task_index": 34}, {"db_idx": 30169, "episode_idx": 172, "frame_idx": 77, "global_frame_idx": 30169, "task_index": 34}, {"db_idx": 30170, "episode_idx": 172, "frame_idx": 78, "global_frame_idx": 30170, "task_index": 34}, {"db_idx": 30171, "episode_idx": 172, "frame_idx": 79, "global_frame_idx": 30171, "task_index": 34}, {"db_idx": 30172, "episode_idx": 172, "frame_idx": 80, "global_frame_idx": 30172, "task_index": 34}, {"db_idx": 30173, "episode_idx": 172, "frame_idx": 81, "global_frame_idx": 30173, "task_index": 34}, {"db_idx": 30174, "episode_idx": 172, "frame_idx": 82, "global_frame_idx": 30174, "task_index": 34}, {"db_idx": 30175, "episode_idx": 172, "frame_idx": 83, "global_frame_idx": 30175, "task_index": 34}, {"db_idx": 30176, "episode_idx": 172, "frame_idx": 84, "global_frame_idx": 30176, "task_index": 34}, {"db_idx": 30177, "episode_idx": 172, "frame_idx": 85, "global_frame_idx": 30177, "task_index": 34}, {"db_idx": 30178, "episode_idx": 172, "frame_idx": 86, "global_frame_idx": 30178, "task_index": 34}, {"db_idx": 30179, "episode_idx": 172, "frame_idx": 87, "global_frame_idx": 30179, "task_index": 34}, {"db_idx": 30180, "episode_idx": 172, "frame_idx": 88, "global_frame_idx": 30180, "task_index": 34}, {"db_idx": 30181, "episode_idx": 172, "frame_idx": 89, "global_frame_idx": 30181, "task_index": 34}, {"db_idx": 30182, "episode_idx": 172, "frame_idx": 90, "global_frame_idx": 30182, "task_index": 34}, {"db_idx": 30183, "episode_idx": 172, "frame_idx": 91, "global_frame_idx": 30183, "task_index": 34}, {"db_idx": 30184, "episode_idx": 172, "frame_idx": 92, "global_frame_idx": 30184, "task_index": 34}, {"db_idx": 30185, "episode_idx": 172, "frame_idx": 93, "global_frame_idx": 30185, "task_index": 34}, {"db_idx": 30186, "episode_idx": 172, "frame_idx": 94, "global_frame_idx": 30186, "task_index": 34}, {"db_idx": 30187, "episode_idx": 172, "frame_idx": 95, "global_frame_idx": 30187, "task_index": 34}, {"db_idx": 30188, "episode_idx": 
172, "frame_idx": 96, "global_frame_idx": 30188, "task_index": 34}, {"db_idx": 30189, "episode_idx": 172, "frame_idx": 97, "global_frame_idx": 30189, "task_index": 34}, {"db_idx": 30190, "episode_idx": 172, "frame_idx": 98, "global_frame_idx": 30190, "task_index": 34}, {"db_idx": 30191, "episode_idx": 172, "frame_idx": 99, "global_frame_idx": 30191, "task_index": 34}, {"db_idx": 30192, "episode_idx": 172, "frame_idx": 100, "global_frame_idx": 30192, "task_index": 34}, {"db_idx": 30193, "episode_idx": 172, "frame_idx": 101, "global_frame_idx": 30193, "task_index": 34}, {"db_idx": 30194, "episode_idx": 172, "frame_idx": 102, "global_frame_idx": 30194, "task_index": 34}, {"db_idx": 30195, "episode_idx": 172, "frame_idx": 103, "global_frame_idx": 30195, "task_index": 34}, {"db_idx": 30196, "episode_idx": 172, "frame_idx": 104, "global_frame_idx": 30196, "task_index": 34}, {"db_idx": 30197, "episode_idx": 172, "frame_idx": 105, "global_frame_idx": 30197, "task_index": 34}, {"db_idx": 30198, "episode_idx": 172, "frame_idx": 106, "global_frame_idx": 30198, "task_index": 34}, {"db_idx": 30199, "episode_idx": 172, "frame_idx": 107, "global_frame_idx": 30199, "task_index": 34}, {"db_idx": 30200, "episode_idx": 172, "frame_idx": 108, "global_frame_idx": 30200, "task_index": 34}, {"db_idx": 30201, "episode_idx": 172, "frame_idx": 109, "global_frame_idx": 30201, "task_index": 34}, {"db_idx": 30202, "episode_idx": 172, "frame_idx": 110, "global_frame_idx": 30202, "task_index": 34}, {"db_idx": 30203, "episode_idx": 172, "frame_idx": 111, "global_frame_idx": 30203, "task_index": 34}, {"db_idx": 30204, "episode_idx": 172, "frame_idx": 112, "global_frame_idx": 30204, "task_index": 34}, {"db_idx": 30205, "episode_idx": 172, "frame_idx": 113, "global_frame_idx": 30205, "task_index": 34}, {"db_idx": 30206, "episode_idx": 172, "frame_idx": 114, "global_frame_idx": 30206, "task_index": 34}, {"db_idx": 30207, "episode_idx": 172, "frame_idx": 115, "global_frame_idx": 30207, "task_index": 
34}, {"db_idx": 30208, "episode_idx": 172, "frame_idx": 116, "global_frame_idx": 30208, "task_index": 34}, {"db_idx": 30209, "episode_idx": 172, "frame_idx": 117, "global_frame_idx": 30209, "task_index": 34}, {"db_idx": 30210, "episode_idx": 172, "frame_idx": 118, "global_frame_idx": 30210, "task_index": 34}, {"db_idx": 30211, "episode_idx": 172, "frame_idx": 119, "global_frame_idx": 30211, "task_index": 34}, {"db_idx": 30212, "episode_idx": 172, "frame_idx": 120, "global_frame_idx": 30212, "task_index": 34}, {"db_idx": 30213, "episode_idx": 172, "frame_idx": 121, "global_frame_idx": 30213, "task_index": 34}, {"db_idx": 30214, "episode_idx": 172, "frame_idx": 122, "global_frame_idx": 30214, "task_index": 34}, {"db_idx": 30215, "episode_idx": 172, "frame_idx": 123, "global_frame_idx": 30215, "task_index": 34}, {"db_idx": 30216, "episode_idx": 172, "frame_idx": 124, "global_frame_idx": 30216, "task_index": 34}, {"db_idx": 30217, "episode_idx": 172, "frame_idx": 125, "global_frame_idx": 30217, "task_index": 34}, {"db_idx": 30218, "episode_idx": 172, "frame_idx": 126, "global_frame_idx": 30218, "task_index": 34}, {"db_idx": 30219, "episode_idx": 172, "frame_idx": 127, "global_frame_idx": 30219, "task_index": 34}, {"db_idx": 30220, "episode_idx": 172, "frame_idx": 128, "global_frame_idx": 30220, "task_index": 34}, {"db_idx": 30221, "episode_idx": 172, "frame_idx": 129, "global_frame_idx": 30221, "task_index": 34}, {"db_idx": 30222, "episode_idx": 172, "frame_idx": 130, "global_frame_idx": 30222, "task_index": 34}, {"db_idx": 30223, "episode_idx": 172, "frame_idx": 131, "global_frame_idx": 30223, "task_index": 34}, {"db_idx": 30224, "episode_idx": 172, "frame_idx": 132, "global_frame_idx": 30224, "task_index": 34}, {"db_idx": 30225, "episode_idx": 172, "frame_idx": 133, "global_frame_idx": 30225, "task_index": 34}, {"db_idx": 30226, "episode_idx": 172, "frame_idx": 134, "global_frame_idx": 30226, "task_index": 34}, {"db_idx": 30227, "episode_idx": 172, "frame_idx": 135, 
"global_frame_idx": 30227, "task_index": 34}, {"db_idx": 30228, "episode_idx": 172, "frame_idx": 136, "global_frame_idx": 30228, "task_index": 34}, {"db_idx": 30229, "episode_idx": 172, "frame_idx": 137, "global_frame_idx": 30229, "task_index": 34}, {"db_idx": 30230, "episode_idx": 173, "frame_idx": 0, "global_frame_idx": 30230, "task_index": 34}, {"db_idx": 30231, "episode_idx": 173, "frame_idx": 1, "global_frame_idx": 30231, "task_index": 34}, {"db_idx": 30232, "episode_idx": 173, "frame_idx": 2, "global_frame_idx": 30232, "task_index": 34}, {"db_idx": 30233, "episode_idx": 173, "frame_idx": 3, "global_frame_idx": 30233, "task_index": 34}, {"db_idx": 30234, "episode_idx": 173, "frame_idx": 4, "global_frame_idx": 30234, "task_index": 34}, {"db_idx": 30235, "episode_idx": 173, "frame_idx": 5, "global_frame_idx": 30235, "task_index": 34}, {"db_idx": 30236, "episode_idx": 173, "frame_idx": 6, "global_frame_idx": 30236, "task_index": 34}, {"db_idx": 30237, "episode_idx": 173, "frame_idx": 7, "global_frame_idx": 30237, "task_index": 34}, {"db_idx": 30238, "episode_idx": 173, "frame_idx": 8, "global_frame_idx": 30238, "task_index": 34}, {"db_idx": 30239, "episode_idx": 173, "frame_idx": 9, "global_frame_idx": 30239, "task_index": 34}, {"db_idx": 30240, "episode_idx": 173, "frame_idx": 10, "global_frame_idx": 30240, "task_index": 34}, {"db_idx": 30241, "episode_idx": 173, "frame_idx": 11, "global_frame_idx": 30241, "task_index": 34}, {"db_idx": 30242, "episode_idx": 173, "frame_idx": 12, "global_frame_idx": 30242, "task_index": 34}, {"db_idx": 30243, "episode_idx": 173, "frame_idx": 13, "global_frame_idx": 30243, "task_index": 34}, {"db_idx": 30244, "episode_idx": 173, "frame_idx": 14, "global_frame_idx": 30244, "task_index": 34}, {"db_idx": 30245, "episode_idx": 173, "frame_idx": 15, "global_frame_idx": 30245, "task_index": 34}, {"db_idx": 30246, "episode_idx": 173, "frame_idx": 16, "global_frame_idx": 30246, "task_index": 34}, {"db_idx": 30247, "episode_idx": 173, 
"frame_idx": 17, "global_frame_idx": 30247, "task_index": 34}, {"db_idx": 30248, "episode_idx": 173, "frame_idx": 18, "global_frame_idx": 30248, "task_index": 34}, {"db_idx": 30249, "episode_idx": 173, "frame_idx": 19, "global_frame_idx": 30249, "task_index": 34}, {"db_idx": 30250, "episode_idx": 173, "frame_idx": 20, "global_frame_idx": 30250, "task_index": 34}, {"db_idx": 30251, "episode_idx": 173, "frame_idx": 21, "global_frame_idx": 30251, "task_index": 34}, {"db_idx": 30252, "episode_idx": 173, "frame_idx": 22, "global_frame_idx": 30252, "task_index": 34}, {"db_idx": 30253, "episode_idx": 173, "frame_idx": 23, "global_frame_idx": 30253, "task_index": 34}, {"db_idx": 30254, "episode_idx": 173, "frame_idx": 24, "global_frame_idx": 30254, "task_index": 34}, {"db_idx": 30255, "episode_idx": 173, "frame_idx": 25, "global_frame_idx": 30255, "task_index": 34}, {"db_idx": 30256, "episode_idx": 173, "frame_idx": 26, "global_frame_idx": 30256, "task_index": 34}, {"db_idx": 30257, "episode_idx": 173, "frame_idx": 27, "global_frame_idx": 30257, "task_index": 34}, {"db_idx": 30258, "episode_idx": 173, "frame_idx": 28, "global_frame_idx": 30258, "task_index": 34}, {"db_idx": 30259, "episode_idx": 173, "frame_idx": 29, "global_frame_idx": 30259, "task_index": 34}, {"db_idx": 30260, "episode_idx": 173, "frame_idx": 30, "global_frame_idx": 30260, "task_index": 34}, {"db_idx": 30261, "episode_idx": 173, "frame_idx": 31, "global_frame_idx": 30261, "task_index": 34}, {"db_idx": 30262, "episode_idx": 173, "frame_idx": 32, "global_frame_idx": 30262, "task_index": 34}, {"db_idx": 30263, "episode_idx": 173, "frame_idx": 33, "global_frame_idx": 30263, "task_index": 34}, {"db_idx": 30264, "episode_idx": 173, "frame_idx": 34, "global_frame_idx": 30264, "task_index": 34}, {"db_idx": 30265, "episode_idx": 173, "frame_idx": 35, "global_frame_idx": 30265, "task_index": 34}, {"db_idx": 30266, "episode_idx": 173, "frame_idx": 36, "global_frame_idx": 30266, "task_index": 34}, {"db_idx": 30267, 
"episode_idx": 173, "frame_idx": 37, "global_frame_idx": 30267, "task_index": 34}, {"db_idx": 30268, "episode_idx": 173, "frame_idx": 38, "global_frame_idx": 30268, "task_index": 34}, {"db_idx": 30269, "episode_idx": 173, "frame_idx": 39, "global_frame_idx": 30269, "task_index": 34}, {"db_idx": 30270, "episode_idx": 173, "frame_idx": 40, "global_frame_idx": 30270, "task_index": 34}, {"db_idx": 30271, "episode_idx": 173, "frame_idx": 41, "global_frame_idx": 30271, "task_index": 34}, {"db_idx": 30272, "episode_idx": 173, "frame_idx": 42, "global_frame_idx": 30272, "task_index": 34}, {"db_idx": 30273, "episode_idx": 173, "frame_idx": 43, "global_frame_idx": 30273, "task_index": 34}, {"db_idx": 30274, "episode_idx": 173, "frame_idx": 44, "global_frame_idx": 30274, "task_index": 34}, {"db_idx": 30275, "episode_idx": 173, "frame_idx": 45, "global_frame_idx": 30275, "task_index": 34}, {"db_idx": 30276, "episode_idx": 173, "frame_idx": 46, "global_frame_idx": 30276, "task_index": 34}, {"db_idx": 30277, "episode_idx": 173, "frame_idx": 47, "global_frame_idx": 30277, "task_index": 34}, {"db_idx": 30278, "episode_idx": 173, "frame_idx": 48, "global_frame_idx": 30278, "task_index": 34}, {"db_idx": 30279, "episode_idx": 173, "frame_idx": 49, "global_frame_idx": 30279, "task_index": 34}, {"db_idx": 30280, "episode_idx": 173, "frame_idx": 50, "global_frame_idx": 30280, "task_index": 34}, {"db_idx": 30281, "episode_idx": 173, "frame_idx": 51, "global_frame_idx": 30281, "task_index": 34}, {"db_idx": 30282, "episode_idx": 173, "frame_idx": 52, "global_frame_idx": 30282, "task_index": 34}, {"db_idx": 30283, "episode_idx": 173, "frame_idx": 53, "global_frame_idx": 30283, "task_index": 34}, {"db_idx": 30284, "episode_idx": 173, "frame_idx": 54, "global_frame_idx": 30284, "task_index": 34}, {"db_idx": 30285, "episode_idx": 173, "frame_idx": 55, "global_frame_idx": 30285, "task_index": 34}, {"db_idx": 30286, "episode_idx": 173, "frame_idx": 56, "global_frame_idx": 30286, "task_index": 
34}, {"db_idx": 30287, "episode_idx": 173, "frame_idx": 57, "global_frame_idx": 30287, "task_index": 34}, {"db_idx": 30288, "episode_idx": 173, "frame_idx": 58, "global_frame_idx": 30288, "task_index": 34}, {"db_idx": 30289, "episode_idx": 173, "frame_idx": 59, "global_frame_idx": 30289, "task_index": 34}, {"db_idx": 30290, "episode_idx": 173, "frame_idx": 60, "global_frame_idx": 30290, "task_index": 34}, {"db_idx": 30291, "episode_idx": 173, "frame_idx": 61, "global_frame_idx": 30291, "task_index": 34}, {"db_idx": 30292, "episode_idx": 173, "frame_idx": 62, "global_frame_idx": 30292, "task_index": 34}, {"db_idx": 30293, "episode_idx": 173, "frame_idx": 63, "global_frame_idx": 30293, "task_index": 34}, {"db_idx": 30294, "episode_idx": 173, "frame_idx": 64, "global_frame_idx": 30294, "task_index": 34}, {"db_idx": 30295, "episode_idx": 173, "frame_idx": 65, "global_frame_idx": 30295, "task_index": 34}, {"db_idx": 30296, "episode_idx": 173, "frame_idx": 66, "global_frame_idx": 30296, "task_index": 34}, {"db_idx": 30297, "episode_idx": 173, "frame_idx": 67, "global_frame_idx": 30297, "task_index": 34}, {"db_idx": 30298, "episode_idx": 173, "frame_idx": 68, "global_frame_idx": 30298, "task_index": 34}, {"db_idx": 30299, "episode_idx": 173, "frame_idx": 69, "global_frame_idx": 30299, "task_index": 34}, {"db_idx": 30300, "episode_idx": 173, "frame_idx": 70, "global_frame_idx": 30300, "task_index": 34}, {"db_idx": 30301, "episode_idx": 173, "frame_idx": 71, "global_frame_idx": 30301, "task_index": 34}, {"db_idx": 30302, "episode_idx": 173, "frame_idx": 72, "global_frame_idx": 30302, "task_index": 34}, {"db_idx": 30303, "episode_idx": 173, "frame_idx": 73, "global_frame_idx": 30303, "task_index": 34}, {"db_idx": 30304, "episode_idx": 173, "frame_idx": 74, "global_frame_idx": 30304, "task_index": 34}, {"db_idx": 30305, "episode_idx": 173, "frame_idx": 75, "global_frame_idx": 30305, "task_index": 34}, {"db_idx": 30306, "episode_idx": 173, "frame_idx": 76, "global_frame_idx": 
30306, "task_index": 34}, {"db_idx": 30307, "episode_idx": 173, "frame_idx": 77, "global_frame_idx": 30307, "task_index": 34}, {"db_idx": 30308, "episode_idx": 173, "frame_idx": 78, "global_frame_idx": 30308, "task_index": 34}, {"db_idx": 30309, "episode_idx": 173, "frame_idx": 79, "global_frame_idx": 30309, "task_index": 34}, {"db_idx": 30310, "episode_idx": 173, "frame_idx": 80, "global_frame_idx": 30310, "task_index": 34}, {"db_idx": 30311, "episode_idx": 173, "frame_idx": 81, "global_frame_idx": 30311, "task_index": 34}, {"db_idx": 30312, "episode_idx": 173, "frame_idx": 82, "global_frame_idx": 30312, "task_index": 34}, {"db_idx": 30313, "episode_idx": 173, "frame_idx": 83, "global_frame_idx": 30313, "task_index": 34}, {"db_idx": 30314, "episode_idx": 173, "frame_idx": 84, "global_frame_idx": 30314, "task_index": 34}, {"db_idx": 30315, "episode_idx": 173, "frame_idx": 85, "global_frame_idx": 30315, "task_index": 34}, {"db_idx": 30316, "episode_idx": 173, "frame_idx": 86, "global_frame_idx": 30316, "task_index": 34}, {"db_idx": 30317, "episode_idx": 173, "frame_idx": 87, "global_frame_idx": 30317, "task_index": 34}, {"db_idx": 30318, "episode_idx": 173, "frame_idx": 88, "global_frame_idx": 30318, "task_index": 34}, {"db_idx": 30319, "episode_idx": 173, "frame_idx": 89, "global_frame_idx": 30319, "task_index": 34}, {"db_idx": 30320, "episode_idx": 173, "frame_idx": 90, "global_frame_idx": 30320, "task_index": 34}, {"db_idx": 30321, "episode_idx": 173, "frame_idx": 91, "global_frame_idx": 30321, "task_index": 34}, {"db_idx": 30322, "episode_idx": 173, "frame_idx": 92, "global_frame_idx": 30322, "task_index": 34}, {"db_idx": 30323, "episode_idx": 173, "frame_idx": 93, "global_frame_idx": 30323, "task_index": 34}, {"db_idx": 30324, "episode_idx": 173, "frame_idx": 94, "global_frame_idx": 30324, "task_index": 34}, {"db_idx": 30325, "episode_idx": 173, "frame_idx": 95, "global_frame_idx": 30325, "task_index": 34}, {"db_idx": 30326, "episode_idx": 173, "frame_idx": 96, 
"global_frame_idx": 30326, "task_index": 34}, {"db_idx": 30327, "episode_idx": 173, "frame_idx": 97, "global_frame_idx": 30327, "task_index": 34}, {"db_idx": 30328, "episode_idx": 173, "frame_idx": 98, "global_frame_idx": 30328, "task_index": 34}, {"db_idx": 30329, "episode_idx": 173, "frame_idx": 99, "global_frame_idx": 30329, "task_index": 34}, {"db_idx": 30330, "episode_idx": 173, "frame_idx": 100, "global_frame_idx": 30330, "task_index": 34}, {"db_idx": 30331, "episode_idx": 173, "frame_idx": 101, "global_frame_idx": 30331, "task_index": 34}, {"db_idx": 30332, "episode_idx": 173, "frame_idx": 102, "global_frame_idx": 30332, "task_index": 34}, {"db_idx": 30333, "episode_idx": 173, "frame_idx": 103, "global_frame_idx": 30333, "task_index": 34}, {"db_idx": 30334, "episode_idx": 173, "frame_idx": 104, "global_frame_idx": 30334, "task_index": 34}, {"db_idx": 30335, "episode_idx": 173, "frame_idx": 105, "global_frame_idx": 30335, "task_index": 34}, {"db_idx": 30336, "episode_idx": 173, "frame_idx": 106, "global_frame_idx": 30336, "task_index": 34}, {"db_idx": 30337, "episode_idx": 173, "frame_idx": 107, "global_frame_idx": 30337, "task_index": 34}, {"db_idx": 30338, "episode_idx": 173, "frame_idx": 108, "global_frame_idx": 30338, "task_index": 34}, {"db_idx": 30339, "episode_idx": 173, "frame_idx": 109, "global_frame_idx": 30339, "task_index": 34}, {"db_idx": 30340, "episode_idx": 173, "frame_idx": 110, "global_frame_idx": 30340, "task_index": 34}, {"db_idx": 30341, "episode_idx": 173, "frame_idx": 111, "global_frame_idx": 30341, "task_index": 34}, {"db_idx": 30342, "episode_idx": 173, "frame_idx": 112, "global_frame_idx": 30342, "task_index": 34}, {"db_idx": 30343, "episode_idx": 173, "frame_idx": 113, "global_frame_idx": 30343, "task_index": 34}, {"db_idx": 30344, "episode_idx": 173, "frame_idx": 114, "global_frame_idx": 30344, "task_index": 34}, {"db_idx": 30345, "episode_idx": 173, "frame_idx": 115, "global_frame_idx": 30345, "task_index": 34}, {"db_idx": 30346, 
"episode_idx": 173, "frame_idx": 116, "global_frame_idx": 30346, "task_index": 34}, {"db_idx": 30347, "episode_idx": 173, "frame_idx": 117, "global_frame_idx": 30347, "task_index": 34}, {"db_idx": 30348, "episode_idx": 173, "frame_idx": 118, "global_frame_idx": 30348, "task_index": 34}, {"db_idx": 30349, "episode_idx": 173, "frame_idx": 119, "global_frame_idx": 30349, "task_index": 34}, {"db_idx": 30350, "episode_idx": 173, "frame_idx": 120, "global_frame_idx": 30350, "task_index": 34}, {"db_idx": 30351, "episode_idx": 173, "frame_idx": 121, "global_frame_idx": 30351, "task_index": 34}, {"db_idx": 30352, "episode_idx": 173, "frame_idx": 122, "global_frame_idx": 30352, "task_index": 34}, {"db_idx": 30353, "episode_idx": 173, "frame_idx": 123, "global_frame_idx": 30353, "task_index": 34}, {"db_idx": 30354, "episode_idx": 173, "frame_idx": 124, "global_frame_idx": 30354, "task_index": 34}, {"db_idx": 30355, "episode_idx": 174, "frame_idx": 0, "global_frame_idx": 30355, "task_index": 34}, {"db_idx": 30356, "episode_idx": 174, "frame_idx": 1, "global_frame_idx": 30356, "task_index": 34}, {"db_idx": 30357, "episode_idx": 174, "frame_idx": 2, "global_frame_idx": 30357, "task_index": 34}, {"db_idx": 30358, "episode_idx": 174, "frame_idx": 3, "global_frame_idx": 30358, "task_index": 34}, {"db_idx": 30359, "episode_idx": 174, "frame_idx": 4, "global_frame_idx": 30359, "task_index": 34}, {"db_idx": 30360, "episode_idx": 174, "frame_idx": 5, "global_frame_idx": 30360, "task_index": 34}, {"db_idx": 30361, "episode_idx": 174, "frame_idx": 6, "global_frame_idx": 30361, "task_index": 34}, {"db_idx": 30362, "episode_idx": 174, "frame_idx": 7, "global_frame_idx": 30362, "task_index": 34}, {"db_idx": 30363, "episode_idx": 174, "frame_idx": 8, "global_frame_idx": 30363, "task_index": 34}, {"db_idx": 30364, "episode_idx": 174, "frame_idx": 9, "global_frame_idx": 30364, "task_index": 34}, {"db_idx": 30365, "episode_idx": 174, "frame_idx": 10, "global_frame_idx": 30365, "task_index": 
34}, {"db_idx": 30366, "episode_idx": 174, "frame_idx": 11, "global_frame_idx": 30366, "task_index": 34}, {"db_idx": 30367, "episode_idx": 174, "frame_idx": 12, "global_frame_idx": 30367, "task_index": 34}, {"db_idx": 30368, "episode_idx": 174, "frame_idx": 13, "global_frame_idx": 30368, "task_index": 34}, {"db_idx": 30369, "episode_idx": 174, "frame_idx": 14, "global_frame_idx": 30369, "task_index": 34}, {"db_idx": 30370, "episode_idx": 174, "frame_idx": 15, "global_frame_idx": 30370, "task_index": 34}, {"db_idx": 30371, "episode_idx": 174, "frame_idx": 16, "global_frame_idx": 30371, "task_index": 34}, {"db_idx": 30372, "episode_idx": 174, "frame_idx": 17, "global_frame_idx": 30372, "task_index": 34}, {"db_idx": 30373, "episode_idx": 174, "frame_idx": 18, "global_frame_idx": 30373, "task_index": 34}, {"db_idx": 30374, "episode_idx": 174, "frame_idx": 19, "global_frame_idx": 30374, "task_index": 34}, {"db_idx": 30375, "episode_idx": 174, "frame_idx": 20, "global_frame_idx": 30375, "task_index": 34}, {"db_idx": 30376, "episode_idx": 174, "frame_idx": 21, "global_frame_idx": 30376, "task_index": 34}, {"db_idx": 30377, "episode_idx": 174, "frame_idx": 22, "global_frame_idx": 30377, "task_index": 34}, {"db_idx": 30378, "episode_idx": 174, "frame_idx": 23, "global_frame_idx": 30378, "task_index": 34}, {"db_idx": 30379, "episode_idx": 174, "frame_idx": 24, "global_frame_idx": 30379, "task_index": 34}, {"db_idx": 30380, "episode_idx": 174, "frame_idx": 25, "global_frame_idx": 30380, "task_index": 34}, {"db_idx": 30381, "episode_idx": 174, "frame_idx": 26, "global_frame_idx": 30381, "task_index": 34}, {"db_idx": 30382, "episode_idx": 174, "frame_idx": 27, "global_frame_idx": 30382, "task_index": 34}, {"db_idx": 30383, "episode_idx": 174, "frame_idx": 28, "global_frame_idx": 30383, "task_index": 34}, {"db_idx": 30384, "episode_idx": 174, "frame_idx": 29, "global_frame_idx": 30384, "task_index": 34}, {"db_idx": 30385, "episode_idx": 174, "frame_idx": 30, "global_frame_idx": 
30385, "task_index": 34}, {"db_idx": 30386, "episode_idx": 174, "frame_idx": 31, "global_frame_idx": 30386, "task_index": 34}, {"db_idx": 30387, "episode_idx": 174, "frame_idx": 32, "global_frame_idx": 30387, "task_index": 34}, {"db_idx": 30388, "episode_idx": 174, "frame_idx": 33, "global_frame_idx": 30388, "task_index": 34}, {"db_idx": 30389, "episode_idx": 174, "frame_idx": 34, "global_frame_idx": 30389, "task_index": 34}, {"db_idx": 30390, "episode_idx": 174, "frame_idx": 35, "global_frame_idx": 30390, "task_index": 34}, {"db_idx": 30391, "episode_idx": 174, "frame_idx": 36, "global_frame_idx": 30391, "task_index": 34}, {"db_idx": 30392, "episode_idx": 174, "frame_idx": 37, "global_frame_idx": 30392, "task_index": 34}, {"db_idx": 30393, "episode_idx": 174, "frame_idx": 38, "global_frame_idx": 30393, "task_index": 34}, {"db_idx": 30394, "episode_idx": 174, "frame_idx": 39, "global_frame_idx": 30394, "task_index": 34}, {"db_idx": 30395, "episode_idx": 174, "frame_idx": 40, "global_frame_idx": 30395, "task_index": 34}, {"db_idx": 30396, "episode_idx": 174, "frame_idx": 41, "global_frame_idx": 30396, "task_index": 34}, {"db_idx": 30397, "episode_idx": 174, "frame_idx": 42, "global_frame_idx": 30397, "task_index": 34}, {"db_idx": 30398, "episode_idx": 174, "frame_idx": 43, "global_frame_idx": 30398, "task_index": 34}, {"db_idx": 30399, "episode_idx": 174, "frame_idx": 44, "global_frame_idx": 30399, "task_index": 34}, {"db_idx": 30400, "episode_idx": 174, "frame_idx": 45, "global_frame_idx": 30400, "task_index": 34}, {"db_idx": 30401, "episode_idx": 174, "frame_idx": 46, "global_frame_idx": 30401, "task_index": 34}, {"db_idx": 30402, "episode_idx": 174, "frame_idx": 47, "global_frame_idx": 30402, "task_index": 34}, {"db_idx": 30403, "episode_idx": 174, "frame_idx": 48, "global_frame_idx": 30403, "task_index": 34}, {"db_idx": 30404, "episode_idx": 174, "frame_idx": 49, "global_frame_idx": 30404, "task_index": 34}, {"db_idx": 30405, "episode_idx": 174, "frame_idx": 50, 
"global_frame_idx": 30405, "task_index": 34}, {"db_idx": 30406, "episode_idx": 174, "frame_idx": 51, "global_frame_idx": 30406, "task_index": 34}, {"db_idx": 30407, "episode_idx": 174, "frame_idx": 52, "global_frame_idx": 30407, "task_index": 34}, {"db_idx": 30408, "episode_idx": 174, "frame_idx": 53, "global_frame_idx": 30408, "task_index": 34}, {"db_idx": 30409, "episode_idx": 174, "frame_idx": 54, "global_frame_idx": 30409, "task_index": 34}, {"db_idx": 30410, "episode_idx": 174, "frame_idx": 55, "global_frame_idx": 30410, "task_index": 34}, {"db_idx": 30411, "episode_idx": 174, "frame_idx": 56, "global_frame_idx": 30411, "task_index": 34}, {"db_idx": 30412, "episode_idx": 174, "frame_idx": 57, "global_frame_idx": 30412, "task_index": 34}, {"db_idx": 30413, "episode_idx": 174, "frame_idx": 58, "global_frame_idx": 30413, "task_index": 34}, {"db_idx": 30414, "episode_idx": 174, "frame_idx": 59, "global_frame_idx": 30414, "task_index": 34}, {"db_idx": 30415, "episode_idx": 174, "frame_idx": 60, "global_frame_idx": 30415, "task_index": 34}, {"db_idx": 30416, "episode_idx": 174, "frame_idx": 61, "global_frame_idx": 30416, "task_index": 34}, {"db_idx": 30417, "episode_idx": 174, "frame_idx": 62, "global_frame_idx": 30417, "task_index": 34}, {"db_idx": 30418, "episode_idx": 174, "frame_idx": 63, "global_frame_idx": 30418, "task_index": 34}, {"db_idx": 30419, "episode_idx": 174, "frame_idx": 64, "global_frame_idx": 30419, "task_index": 34}, {"db_idx": 30420, "episode_idx": 174, "frame_idx": 65, "global_frame_idx": 30420, "task_index": 34}, {"db_idx": 30421, "episode_idx": 174, "frame_idx": 66, "global_frame_idx": 30421, "task_index": 34}, {"db_idx": 30422, "episode_idx": 174, "frame_idx": 67, "global_frame_idx": 30422, "task_index": 34}, {"db_idx": 30423, "episode_idx": 174, "frame_idx": 68, "global_frame_idx": 30423, "task_index": 34}, {"db_idx": 30424, "episode_idx": 174, "frame_idx": 69, "global_frame_idx": 30424, "task_index": 34}, {"db_idx": 30425, "episode_idx": 
174, "frame_idx": 70, "global_frame_idx": 30425, "task_index": 34}, {"db_idx": 30426, "episode_idx": 174, "frame_idx": 71, "global_frame_idx": 30426, "task_index": 34}, {"db_idx": 30427, "episode_idx": 174, "frame_idx": 72, "global_frame_idx": 30427, "task_index": 34}, {"db_idx": 30428, "episode_idx": 174, "frame_idx": 73, "global_frame_idx": 30428, "task_index": 34}, {"db_idx": 30429, "episode_idx": 174, "frame_idx": 74, "global_frame_idx": 30429, "task_index": 34}, {"db_idx": 30430, "episode_idx": 174, "frame_idx": 75, "global_frame_idx": 30430, "task_index": 34}, {"db_idx": 30431, "episode_idx": 174, "frame_idx": 76, "global_frame_idx": 30431, "task_index": 34}, {"db_idx": 30432, "episode_idx": 174, "frame_idx": 77, "global_frame_idx": 30432, "task_index": 34}, {"db_idx": 30433, "episode_idx": 174, "frame_idx": 78, "global_frame_idx": 30433, "task_index": 34}, {"db_idx": 30434, "episode_idx": 174, "frame_idx": 79, "global_frame_idx": 30434, "task_index": 34}, {"db_idx": 30435, "episode_idx": 174, "frame_idx": 80, "global_frame_idx": 30435, "task_index": 34}, {"db_idx": 30436, "episode_idx": 174, "frame_idx": 81, "global_frame_idx": 30436, "task_index": 34}, {"db_idx": 30437, "episode_idx": 174, "frame_idx": 82, "global_frame_idx": 30437, "task_index": 34}, {"db_idx": 30438, "episode_idx": 174, "frame_idx": 83, "global_frame_idx": 30438, "task_index": 34}, {"db_idx": 30439, "episode_idx": 174, "frame_idx": 84, "global_frame_idx": 30439, "task_index": 34}, {"db_idx": 30440, "episode_idx": 174, "frame_idx": 85, "global_frame_idx": 30440, "task_index": 34}, {"db_idx": 30441, "episode_idx": 174, "frame_idx": 86, "global_frame_idx": 30441, "task_index": 34}, {"db_idx": 30442, "episode_idx": 174, "frame_idx": 87, "global_frame_idx": 30442, "task_index": 34}, {"db_idx": 30443, "episode_idx": 174, "frame_idx": 88, "global_frame_idx": 30443, "task_index": 34}, {"db_idx": 30444, "episode_idx": 174, "frame_idx": 89, "global_frame_idx": 30444, "task_index": 34}, {"db_idx": 
30445, "episode_idx": 174, "frame_idx": 90, "global_frame_idx": 30445, "task_index": 34}, {"db_idx": 30446, "episode_idx": 174, "frame_idx": 91, "global_frame_idx": 30446, "task_index": 34}, {"db_idx": 30447, "episode_idx": 174, "frame_idx": 92, "global_frame_idx": 30447, "task_index": 34}, {"db_idx": 30448, "episode_idx": 174, "frame_idx": 93, "global_frame_idx": 30448, "task_index": 34}, {"db_idx": 30449, "episode_idx": 174, "frame_idx": 94, "global_frame_idx": 30449, "task_index": 34}, {"db_idx": 30450, "episode_idx": 174, "frame_idx": 95, "global_frame_idx": 30450, "task_index": 34}, {"db_idx": 30451, "episode_idx": 174, "frame_idx": 96, "global_frame_idx": 30451, "task_index": 34}, {"db_idx": 30452, "episode_idx": 174, "frame_idx": 97, "global_frame_idx": 30452, "task_index": 34}, {"db_idx": 30453, "episode_idx": 174, "frame_idx": 98, "global_frame_idx": 30453, "task_index": 34}, {"db_idx": 30454, "episode_idx": 174, "frame_idx": 99, "global_frame_idx": 30454, "task_index": 34}, {"db_idx": 30455, "episode_idx": 174, "frame_idx": 100, "global_frame_idx": 30455, "task_index": 34}, {"db_idx": 30456, "episode_idx": 174, "frame_idx": 101, "global_frame_idx": 30456, "task_index": 34}, {"db_idx": 30457, "episode_idx": 174, "frame_idx": 102, "global_frame_idx": 30457, "task_index": 34}, {"db_idx": 30458, "episode_idx": 174, "frame_idx": 103, "global_frame_idx": 30458, "task_index": 34}, {"db_idx": 30459, "episode_idx": 174, "frame_idx": 104, "global_frame_idx": 30459, "task_index": 34}, {"db_idx": 30460, "episode_idx": 174, "frame_idx": 105, "global_frame_idx": 30460, "task_index": 34}, {"db_idx": 30461, "episode_idx": 174, "frame_idx": 106, "global_frame_idx": 30461, "task_index": 34}, {"db_idx": 30462, "episode_idx": 174, "frame_idx": 107, "global_frame_idx": 30462, "task_index": 34}, {"db_idx": 30463, "episode_idx": 174, "frame_idx": 108, "global_frame_idx": 30463, "task_index": 34}, {"db_idx": 30464, "episode_idx": 174, "frame_idx": 109, "global_frame_idx": 30464, 
"task_index": 34}, {"db_idx": 30465, "episode_idx": 174, "frame_idx": 110, "global_frame_idx": 30465, "task_index": 34}, {"db_idx": 30466, "episode_idx": 174, "frame_idx": 111, "global_frame_idx": 30466, "task_index": 34}, {"db_idx": 30467, "episode_idx": 174, "frame_idx": 112, "global_frame_idx": 30467, "task_index": 34}, {"db_idx": 30468, "episode_idx": 174, "frame_idx": 113, "global_frame_idx": 30468, "task_index": 34}, {"db_idx": 30469, "episode_idx": 174, "frame_idx": 114, "global_frame_idx": 30469, "task_index": 34}, {"db_idx": 30470, "episode_idx": 174, "frame_idx": 115, "global_frame_idx": 30470, "task_index": 34}, {"db_idx": 30471, "episode_idx": 174, "frame_idx": 116, "global_frame_idx": 30471, "task_index": 34}, {"db_idx": 30472, "episode_idx": 174, "frame_idx": 117, "global_frame_idx": 30472, "task_index": 34}, {"db_idx": 30473, "episode_idx": 174, "frame_idx": 118, "global_frame_idx": 30473, "task_index": 34}, {"db_idx": 30474, "episode_idx": 174, "frame_idx": 119, "global_frame_idx": 30474, "task_index": 34}, {"db_idx": 30475, "episode_idx": 174, "frame_idx": 120, "global_frame_idx": 30475, "task_index": 34}, {"db_idx": 30476, "episode_idx": 174, "frame_idx": 121, "global_frame_idx": 30476, "task_index": 34}, {"db_idx": 30477, "episode_idx": 174, "frame_idx": 122, "global_frame_idx": 30477, "task_index": 34}, {"db_idx": 30478, "episode_idx": 174, "frame_idx": 123, "global_frame_idx": 30478, "task_index": 34}, {"db_idx": 30479, "episode_idx": 174, "frame_idx": 124, "global_frame_idx": 30479, "task_index": 34}, {"db_idx": 30480, "episode_idx": 174, "frame_idx": 125, "global_frame_idx": 30480, "task_index": 34}, {"db_idx": 30481, "episode_idx": 174, "frame_idx": 126, "global_frame_idx": 30481, "task_index": 34}, {"db_idx": 30482, "episode_idx": 174, "frame_idx": 127, "global_frame_idx": 30482, "task_index": 34}, {"db_idx": 30483, "episode_idx": 174, "frame_idx": 128, "global_frame_idx": 30483, "task_index": 34}, {"db_idx": 30484, "episode_idx": 174, 
"frame_idx": 129, "global_frame_idx": 30484, "task_index": 34}, {"db_idx": 30485, "episode_idx": 174, "frame_idx": 130, "global_frame_idx": 30485, "task_index": 34}, {"db_idx": 30486, "episode_idx": 174, "frame_idx": 131, "global_frame_idx": 30486, "task_index": 34}, {"db_idx": 30487, "episode_idx": 174, "frame_idx": 132, "global_frame_idx": 30487, "task_index": 34}, {"db_idx": 30488, "episode_idx": 174, "frame_idx": 133, "global_frame_idx": 30488, "task_index": 34}, {"db_idx": 30489, "episode_idx": 174, "frame_idx": 134, "global_frame_idx": 30489, "task_index": 34}, {"db_idx": 30490, "episode_idx": 174, "frame_idx": 135, "global_frame_idx": 30490, "task_index": 34}, {"db_idx": 30491, "episode_idx": 174, "frame_idx": 136, "global_frame_idx": 30491, "task_index": 34}, {"db_idx": 30492, "episode_idx": 174, "frame_idx": 137, "global_frame_idx": 30492, "task_index": 34}, {"db_idx": 30493, "episode_idx": 174, "frame_idx": 138, "global_frame_idx": 30493, "task_index": 34}, {"db_idx": 30494, "episode_idx": 174, "frame_idx": 139, "global_frame_idx": 30494, "task_index": 34}, {"db_idx": 30495, "episode_idx": 174, "frame_idx": 140, "global_frame_idx": 30495, "task_index": 34}, {"db_idx": 30496, "episode_idx": 175, "frame_idx": 0, "global_frame_idx": 30496, "task_index": 35}, {"db_idx": 30497, "episode_idx": 175, "frame_idx": 1, "global_frame_idx": 30497, "task_index": 35}, {"db_idx": 30498, "episode_idx": 175, "frame_idx": 2, "global_frame_idx": 30498, "task_index": 35}, {"db_idx": 30499, "episode_idx": 175, "frame_idx": 3, "global_frame_idx": 30499, "task_index": 35}, {"db_idx": 30500, "episode_idx": 175, "frame_idx": 4, "global_frame_idx": 30500, "task_index": 35}, {"db_idx": 30501, "episode_idx": 175, "frame_idx": 5, "global_frame_idx": 30501, "task_index": 35}, {"db_idx": 30502, "episode_idx": 175, "frame_idx": 6, "global_frame_idx": 30502, "task_index": 35}, {"db_idx": 30503, "episode_idx": 175, "frame_idx": 7, "global_frame_idx": 30503, "task_index": 35}, {"db_idx": 
30504, "episode_idx": 175, "frame_idx": 8, "global_frame_idx": 30504, "task_index": 35}, {"db_idx": 30505, "episode_idx": 175, "frame_idx": 9, "global_frame_idx": 30505, "task_index": 35}, {"db_idx": 30506, "episode_idx": 175, "frame_idx": 10, "global_frame_idx": 30506, "task_index": 35}, {"db_idx": 30507, "episode_idx": 175, "frame_idx": 11, "global_frame_idx": 30507, "task_index": 35}, {"db_idx": 30508, "episode_idx": 175, "frame_idx": 12, "global_frame_idx": 30508, "task_index": 35}, {"db_idx": 30509, "episode_idx": 175, "frame_idx": 13, "global_frame_idx": 30509, "task_index": 35}, {"db_idx": 30510, "episode_idx": 175, "frame_idx": 14, "global_frame_idx": 30510, "task_index": 35}, {"db_idx": 30511, "episode_idx": 175, "frame_idx": 15, "global_frame_idx": 30511, "task_index": 35}, {"db_idx": 30512, "episode_idx": 175, "frame_idx": 16, "global_frame_idx": 30512, "task_index": 35}, {"db_idx": 30513, "episode_idx": 175, "frame_idx": 17, "global_frame_idx": 30513, "task_index": 35}, {"db_idx": 30514, "episode_idx": 175, "frame_idx": 18, "global_frame_idx": 30514, "task_index": 35}, {"db_idx": 30515, "episode_idx": 175, "frame_idx": 19, "global_frame_idx": 30515, "task_index": 35}, {"db_idx": 30516, "episode_idx": 175, "frame_idx": 20, "global_frame_idx": 30516, "task_index": 35}, {"db_idx": 30517, "episode_idx": 175, "frame_idx": 21, "global_frame_idx": 30517, "task_index": 35}, {"db_idx": 30518, "episode_idx": 175, "frame_idx": 22, "global_frame_idx": 30518, "task_index": 35}, {"db_idx": 30519, "episode_idx": 175, "frame_idx": 23, "global_frame_idx": 30519, "task_index": 35}, {"db_idx": 30520, "episode_idx": 175, "frame_idx": 24, "global_frame_idx": 30520, "task_index": 35}, {"db_idx": 30521, "episode_idx": 175, "frame_idx": 25, "global_frame_idx": 30521, "task_index": 35}, {"db_idx": 30522, "episode_idx": 175, "frame_idx": 26, "global_frame_idx": 30522, "task_index": 35}, {"db_idx": 30523, "episode_idx": 175, "frame_idx": 27, "global_frame_idx": 30523, 
"task_index": 35}, {"db_idx": 30524, "episode_idx": 175, "frame_idx": 28, "global_frame_idx": 30524, "task_index": 35}, {"db_idx": 30525, "episode_idx": 175, "frame_idx": 29, "global_frame_idx": 30525, "task_index": 35}, {"db_idx": 30526, "episode_idx": 175, "frame_idx": 30, "global_frame_idx": 30526, "task_index": 35}, {"db_idx": 30527, "episode_idx": 175, "frame_idx": 31, "global_frame_idx": 30527, "task_index": 35}, {"db_idx": 30528, "episode_idx": 175, "frame_idx": 32, "global_frame_idx": 30528, "task_index": 35}, {"db_idx": 30529, "episode_idx": 175, "frame_idx": 33, "global_frame_idx": 30529, "task_index": 35}, {"db_idx": 30530, "episode_idx": 175, "frame_idx": 34, "global_frame_idx": 30530, "task_index": 35}, {"db_idx": 30531, "episode_idx": 175, "frame_idx": 35, "global_frame_idx": 30531, "task_index": 35}, {"db_idx": 30532, "episode_idx": 175, "frame_idx": 36, "global_frame_idx": 30532, "task_index": 35}, {"db_idx": 30533, "episode_idx": 175, "frame_idx": 37, "global_frame_idx": 30533, "task_index": 35}, {"db_idx": 30534, "episode_idx": 175, "frame_idx": 38, "global_frame_idx": 30534, "task_index": 35}, {"db_idx": 30535, "episode_idx": 175, "frame_idx": 39, "global_frame_idx": 30535, "task_index": 35}, {"db_idx": 30536, "episode_idx": 175, "frame_idx": 40, "global_frame_idx": 30536, "task_index": 35}, {"db_idx": 30537, "episode_idx": 175, "frame_idx": 41, "global_frame_idx": 30537, "task_index": 35}, {"db_idx": 30538, "episode_idx": 175, "frame_idx": 42, "global_frame_idx": 30538, "task_index": 35}, {"db_idx": 30539, "episode_idx": 175, "frame_idx": 43, "global_frame_idx": 30539, "task_index": 35}, {"db_idx": 30540, "episode_idx": 175, "frame_idx": 44, "global_frame_idx": 30540, "task_index": 35}, {"db_idx": 30541, "episode_idx": 175, "frame_idx": 45, "global_frame_idx": 30541, "task_index": 35}, {"db_idx": 30542, "episode_idx": 175, "frame_idx": 46, "global_frame_idx": 30542, "task_index": 35}, {"db_idx": 30543, "episode_idx": 175, "frame_idx": 47, 
"global_frame_idx": 30543, "task_index": 35}, {"db_idx": 30544, "episode_idx": 175, "frame_idx": 48, "global_frame_idx": 30544, "task_index": 35}, {"db_idx": 30545, "episode_idx": 175, "frame_idx": 49, "global_frame_idx": 30545, "task_index": 35}, {"db_idx": 30546, "episode_idx": 175, "frame_idx": 50, "global_frame_idx": 30546, "task_index": 35}, {"db_idx": 30547, "episode_idx": 175, "frame_idx": 51, "global_frame_idx": 30547, "task_index": 35}, {"db_idx": 30548, "episode_idx": 175, "frame_idx": 52, "global_frame_idx": 30548, "task_index": 35}, {"db_idx": 30549, "episode_idx": 175, "frame_idx": 53, "global_frame_idx": 30549, "task_index": 35}, {"db_idx": 30550, "episode_idx": 175, "frame_idx": 54, "global_frame_idx": 30550, "task_index": 35}, {"db_idx": 30551, "episode_idx": 175, "frame_idx": 55, "global_frame_idx": 30551, "task_index": 35}, {"db_idx": 30552, "episode_idx": 175, "frame_idx": 56, "global_frame_idx": 30552, "task_index": 35}, {"db_idx": 30553, "episode_idx": 175, "frame_idx": 57, "global_frame_idx": 30553, "task_index": 35}, {"db_idx": 30554, "episode_idx": 175, "frame_idx": 58, "global_frame_idx": 30554, "task_index": 35}, {"db_idx": 30555, "episode_idx": 175, "frame_idx": 59, "global_frame_idx": 30555, "task_index": 35}, {"db_idx": 30556, "episode_idx": 175, "frame_idx": 60, "global_frame_idx": 30556, "task_index": 35}, {"db_idx": 30557, "episode_idx": 175, "frame_idx": 61, "global_frame_idx": 30557, "task_index": 35}, {"db_idx": 30558, "episode_idx": 175, "frame_idx": 62, "global_frame_idx": 30558, "task_index": 35}, {"db_idx": 30559, "episode_idx": 175, "frame_idx": 63, "global_frame_idx": 30559, "task_index": 35}, {"db_idx": 30560, "episode_idx": 175, "frame_idx": 64, "global_frame_idx": 30560, "task_index": 35}, {"db_idx": 30561, "episode_idx": 175, "frame_idx": 65, "global_frame_idx": 30561, "task_index": 35}, {"db_idx": 30562, "episode_idx": 175, "frame_idx": 66, "global_frame_idx": 30562, "task_index": 35}, {"db_idx": 30563, "episode_idx": 
175, "frame_idx": 67, "global_frame_idx": 30563, "task_index": 35}, {"db_idx": 30564, "episode_idx": 175, "frame_idx": 68, "global_frame_idx": 30564, "task_index": 35}, {"db_idx": 30565, "episode_idx": 175, "frame_idx": 69, "global_frame_idx": 30565, "task_index": 35}, {"db_idx": 30566, "episode_idx": 175, "frame_idx": 70, "global_frame_idx": 30566, "task_index": 35}, {"db_idx": 30567, "episode_idx": 175, "frame_idx": 71, "global_frame_idx": 30567, "task_index": 35}, {"db_idx": 30568, "episode_idx": 175, "frame_idx": 72, "global_frame_idx": 30568, "task_index": 35}, {"db_idx": 30569, "episode_idx": 175, "frame_idx": 73, "global_frame_idx": 30569, "task_index": 35}, {"db_idx": 30570, "episode_idx": 175, "frame_idx": 74, "global_frame_idx": 30570, "task_index": 35}, {"db_idx": 30571, "episode_idx": 175, "frame_idx": 75, "global_frame_idx": 30571, "task_index": 35}, {"db_idx": 30572, "episode_idx": 175, "frame_idx": 76, "global_frame_idx": 30572, "task_index": 35}, {"db_idx": 30573, "episode_idx": 175, "frame_idx": 77, "global_frame_idx": 30573, "task_index": 35}, {"db_idx": 30574, "episode_idx": 175, "frame_idx": 78, "global_frame_idx": 30574, "task_index": 35}, {"db_idx": 30575, "episode_idx": 175, "frame_idx": 79, "global_frame_idx": 30575, "task_index": 35}, {"db_idx": 30576, "episode_idx": 175, "frame_idx": 80, "global_frame_idx": 30576, "task_index": 35}, {"db_idx": 30577, "episode_idx": 175, "frame_idx": 81, "global_frame_idx": 30577, "task_index": 35}, {"db_idx": 30578, "episode_idx": 175, "frame_idx": 82, "global_frame_idx": 30578, "task_index": 35}, {"db_idx": 30579, "episode_idx": 175, "frame_idx": 83, "global_frame_idx": 30579, "task_index": 35}, {"db_idx": 30580, "episode_idx": 175, "frame_idx": 84, "global_frame_idx": 30580, "task_index": 35}, {"db_idx": 30581, "episode_idx": 175, "frame_idx": 85, "global_frame_idx": 30581, "task_index": 35}, {"db_idx": 30582, "episode_idx": 175, "frame_idx": 86, "global_frame_idx": 30582, "task_index": 35}, {"db_idx": 
30583, "episode_idx": 175, "frame_idx": 87, "global_frame_idx": 30583, "task_index": 35}, {"db_idx": 30584, "episode_idx": 175, "frame_idx": 88, "global_frame_idx": 30584, "task_index": 35}, {"db_idx": 30585, "episode_idx": 175, "frame_idx": 89, "global_frame_idx": 30585, "task_index": 35}, {"db_idx": 30586, "episode_idx": 175, "frame_idx": 90, "global_frame_idx": 30586, "task_index": 35}, {"db_idx": 30587, "episode_idx": 175, "frame_idx": 91, "global_frame_idx": 30587, "task_index": 35}, {"db_idx": 30588, "episode_idx": 175, "frame_idx": 92, "global_frame_idx": 30588, "task_index": 35}, {"db_idx": 30589, "episode_idx": 175, "frame_idx": 93, "global_frame_idx": 30589, "task_index": 35}, {"db_idx": 30590, "episode_idx": 175, "frame_idx": 94, "global_frame_idx": 30590, "task_index": 35}, {"db_idx": 30591, "episode_idx": 175, "frame_idx": 95, "global_frame_idx": 30591, "task_index": 35}, {"db_idx": 30592, "episode_idx": 176, "frame_idx": 0, "global_frame_idx": 30592, "task_index": 35}, {"db_idx": 30593, "episode_idx": 176, "frame_idx": 1, "global_frame_idx": 30593, "task_index": 35}, {"db_idx": 30594, "episode_idx": 176, "frame_idx": 2, "global_frame_idx": 30594, "task_index": 35}, {"db_idx": 30595, "episode_idx": 176, "frame_idx": 3, "global_frame_idx": 30595, "task_index": 35}, {"db_idx": 30596, "episode_idx": 176, "frame_idx": 4, "global_frame_idx": 30596, "task_index": 35}, {"db_idx": 30597, "episode_idx": 176, "frame_idx": 5, "global_frame_idx": 30597, "task_index": 35}, {"db_idx": 30598, "episode_idx": 176, "frame_idx": 6, "global_frame_idx": 30598, "task_index": 35}, {"db_idx": 30599, "episode_idx": 176, "frame_idx": 7, "global_frame_idx": 30599, "task_index": 35}, {"db_idx": 30600, "episode_idx": 176, "frame_idx": 8, "global_frame_idx": 30600, "task_index": 35}, {"db_idx": 30601, "episode_idx": 176, "frame_idx": 9, "global_frame_idx": 30601, "task_index": 35}, {"db_idx": 30602, "episode_idx": 176, "frame_idx": 10, "global_frame_idx": 30602, "task_index": 35}, 
{"db_idx": 30603, "episode_idx": 176, "frame_idx": 11, "global_frame_idx": 30603, "task_index": 35}, {"db_idx": 30604, "episode_idx": 176, "frame_idx": 12, "global_frame_idx": 30604, "task_index": 35}, {"db_idx": 30605, "episode_idx": 176, "frame_idx": 13, "global_frame_idx": 30605, "task_index": 35}, {"db_idx": 30606, "episode_idx": 176, "frame_idx": 14, "global_frame_idx": 30606, "task_index": 35}, {"db_idx": 30607, "episode_idx": 176, "frame_idx": 15, "global_frame_idx": 30607, "task_index": 35}, {"db_idx": 30608, "episode_idx": 176, "frame_idx": 16, "global_frame_idx": 30608, "task_index": 35}, {"db_idx": 30609, "episode_idx": 176, "frame_idx": 17, "global_frame_idx": 30609, "task_index": 35}, {"db_idx": 30610, "episode_idx": 176, "frame_idx": 18, "global_frame_idx": 30610, "task_index": 35}, {"db_idx": 30611, "episode_idx": 176, "frame_idx": 19, "global_frame_idx": 30611, "task_index": 35}, {"db_idx": 30612, "episode_idx": 176, "frame_idx": 20, "global_frame_idx": 30612, "task_index": 35}, {"db_idx": 30613, "episode_idx": 176, "frame_idx": 21, "global_frame_idx": 30613, "task_index": 35}, {"db_idx": 30614, "episode_idx": 176, "frame_idx": 22, "global_frame_idx": 30614, "task_index": 35}, {"db_idx": 30615, "episode_idx": 176, "frame_idx": 23, "global_frame_idx": 30615, "task_index": 35}, {"db_idx": 30616, "episode_idx": 176, "frame_idx": 24, "global_frame_idx": 30616, "task_index": 35}, {"db_idx": 30617, "episode_idx": 176, "frame_idx": 25, "global_frame_idx": 30617, "task_index": 35}, {"db_idx": 30618, "episode_idx": 176, "frame_idx": 26, "global_frame_idx": 30618, "task_index": 35}, {"db_idx": 30619, "episode_idx": 176, "frame_idx": 27, "global_frame_idx": 30619, "task_index": 35}, {"db_idx": 30620, "episode_idx": 176, "frame_idx": 28, "global_frame_idx": 30620, "task_index": 35}, {"db_idx": 30621, "episode_idx": 176, "frame_idx": 29, "global_frame_idx": 30621, "task_index": 35}, {"db_idx": 30622, "episode_idx": 176, "frame_idx": 30, "global_frame_idx": 
30622, "task_index": 35}, {"db_idx": 30623, "episode_idx": 176, "frame_idx": 31, "global_frame_idx": 30623, "task_index": 35}, {"db_idx": 30624, "episode_idx": 176, "frame_idx": 32, "global_frame_idx": 30624, "task_index": 35}, {"db_idx": 30625, "episode_idx": 176, "frame_idx": 33, "global_frame_idx": 30625, "task_index": 35}, {"db_idx": 30626, "episode_idx": 176, "frame_idx": 34, "global_frame_idx": 30626, "task_index": 35}, {"db_idx": 30627, "episode_idx": 176, "frame_idx": 35, "global_frame_idx": 30627, "task_index": 35}, {"db_idx": 30628, "episode_idx": 176, "frame_idx": 36, "global_frame_idx": 30628, "task_index": 35}, {"db_idx": 30629, "episode_idx": 176, "frame_idx": 37, "global_frame_idx": 30629, "task_index": 35}, {"db_idx": 30630, "episode_idx": 176, "frame_idx": 38, "global_frame_idx": 30630, "task_index": 35}, {"db_idx": 30631, "episode_idx": 176, "frame_idx": 39, "global_frame_idx": 30631, "task_index": 35}, {"db_idx": 30632, "episode_idx": 176, "frame_idx": 40, "global_frame_idx": 30632, "task_index": 35}, {"db_idx": 30633, "episode_idx": 176, "frame_idx": 41, "global_frame_idx": 30633, "task_index": 35}, {"db_idx": 30634, "episode_idx": 176, "frame_idx": 42, "global_frame_idx": 30634, "task_index": 35}, {"db_idx": 30635, "episode_idx": 176, "frame_idx": 43, "global_frame_idx": 30635, "task_index": 35}, {"db_idx": 30636, "episode_idx": 176, "frame_idx": 44, "global_frame_idx": 30636, "task_index": 35}, {"db_idx": 30637, "episode_idx": 176, "frame_idx": 45, "global_frame_idx": 30637, "task_index": 35}, {"db_idx": 30638, "episode_idx": 176, "frame_idx": 46, "global_frame_idx": 30638, "task_index": 35}, {"db_idx": 30639, "episode_idx": 176, "frame_idx": 47, "global_frame_idx": 30639, "task_index": 35}, {"db_idx": 30640, "episode_idx": 176, "frame_idx": 48, "global_frame_idx": 30640, "task_index": 35}, {"db_idx": 30641, "episode_idx": 176, "frame_idx": 49, "global_frame_idx": 30641, "task_index": 35}, {"db_idx": 30642, "episode_idx": 176, "frame_idx": 50, 
"global_frame_idx": 30642, "task_index": 35}, {"db_idx": 30643, "episode_idx": 176, "frame_idx": 51, "global_frame_idx": 30643, "task_index": 35}, {"db_idx": 30644, "episode_idx": 176, "frame_idx": 52, "global_frame_idx": 30644, "task_index": 35}, {"db_idx": 30645, "episode_idx": 176, "frame_idx": 53, "global_frame_idx": 30645, "task_index": 35}, {"db_idx": 30646, "episode_idx": 176, "frame_idx": 54, "global_frame_idx": 30646, "task_index": 35}, {"db_idx": 30647, "episode_idx": 176, "frame_idx": 55, "global_frame_idx": 30647, "task_index": 35}, {"db_idx": 30648, "episode_idx": 176, "frame_idx": 56, "global_frame_idx": 30648, "task_index": 35}, {"db_idx": 30649, "episode_idx": 176, "frame_idx": 57, "global_frame_idx": 30649, "task_index": 35}, {"db_idx": 30650, "episode_idx": 176, "frame_idx": 58, "global_frame_idx": 30650, "task_index": 35}, {"db_idx": 30651, "episode_idx": 176, "frame_idx": 59, "global_frame_idx": 30651, "task_index": 35}, {"db_idx": 30652, "episode_idx": 176, "frame_idx": 60, "global_frame_idx": 30652, "task_index": 35}, {"db_idx": 30653, "episode_idx": 176, "frame_idx": 61, "global_frame_idx": 30653, "task_index": 35}, {"db_idx": 30654, "episode_idx": 176, "frame_idx": 62, "global_frame_idx": 30654, "task_index": 35}, {"db_idx": 30655, "episode_idx": 176, "frame_idx": 63, "global_frame_idx": 30655, "task_index": 35}, {"db_idx": 30656, "episode_idx": 176, "frame_idx": 64, "global_frame_idx": 30656, "task_index": 35}, {"db_idx": 30657, "episode_idx": 176, "frame_idx": 65, "global_frame_idx": 30657, "task_index": 35}, {"db_idx": 30658, "episode_idx": 176, "frame_idx": 66, "global_frame_idx": 30658, "task_index": 35}, {"db_idx": 30659, "episode_idx": 176, "frame_idx": 67, "global_frame_idx": 30659, "task_index": 35}, {"db_idx": 30660, "episode_idx": 176, "frame_idx": 68, "global_frame_idx": 30660, "task_index": 35}, {"db_idx": 30661, "episode_idx": 176, "frame_idx": 69, "global_frame_idx": 30661, "task_index": 35}, {"db_idx": 30662, "episode_idx": 
176, "frame_idx": 70, "global_frame_idx": 30662, "task_index": 35}, {"db_idx": 30663, "episode_idx": 176, "frame_idx": 71, "global_frame_idx": 30663, "task_index": 35}, {"db_idx": 30664, "episode_idx": 176, "frame_idx": 72, "global_frame_idx": 30664, "task_index": 35}, {"db_idx": 30665, "episode_idx": 176, "frame_idx": 73, "global_frame_idx": 30665, "task_index": 35}, {"db_idx": 30666, "episode_idx": 176, "frame_idx": 74, "global_frame_idx": 30666, "task_index": 35}, {"db_idx": 30667, "episode_idx": 176, "frame_idx": 75, "global_frame_idx": 30667, "task_index": 35}, {"db_idx": 30668, "episode_idx": 176, "frame_idx": 76, "global_frame_idx": 30668, "task_index": 35}, {"db_idx": 30669, "episode_idx": 176, "frame_idx": 77, "global_frame_idx": 30669, "task_index": 35}, {"db_idx": 30670, "episode_idx": 176, "frame_idx": 78, "global_frame_idx": 30670, "task_index": 35}, {"db_idx": 30671, "episode_idx": 176, "frame_idx": 79, "global_frame_idx": 30671, "task_index": 35}, {"db_idx": 30672, "episode_idx": 176, "frame_idx": 80, "global_frame_idx": 30672, "task_index": 35}, {"db_idx": 30673, "episode_idx": 176, "frame_idx": 81, "global_frame_idx": 30673, "task_index": 35}, {"db_idx": 30674, "episode_idx": 176, "frame_idx": 82, "global_frame_idx": 30674, "task_index": 35}, {"db_idx": 30675, "episode_idx": 176, "frame_idx": 83, "global_frame_idx": 30675, "task_index": 35}, {"db_idx": 30676, "episode_idx": 176, "frame_idx": 84, "global_frame_idx": 30676, "task_index": 35}, {"db_idx": 30677, "episode_idx": 176, "frame_idx": 85, "global_frame_idx": 30677, "task_index": 35}, {"db_idx": 30678, "episode_idx": 176, "frame_idx": 86, "global_frame_idx": 30678, "task_index": 35}, {"db_idx": 30679, "episode_idx": 176, "frame_idx": 87, "global_frame_idx": 30679, "task_index": 35}, {"db_idx": 30680, "episode_idx": 176, "frame_idx": 88, "global_frame_idx": 30680, "task_index": 35}, {"db_idx": 30681, "episode_idx": 176, "frame_idx": 89, "global_frame_idx": 30681, "task_index": 35}, {"db_idx": 
30682, "episode_idx": 176, "frame_idx": 90, "global_frame_idx": 30682, "task_index": 35}, {"db_idx": 30683, "episode_idx": 176, "frame_idx": 91, "global_frame_idx": 30683, "task_index": 35}, {"db_idx": 30684, "episode_idx": 176, "frame_idx": 92, "global_frame_idx": 30684, "task_index": 35}, {"db_idx": 30685, "episode_idx": 176, "frame_idx": 93, "global_frame_idx": 30685, "task_index": 35}, {"db_idx": 30686, "episode_idx": 176, "frame_idx": 94, "global_frame_idx": 30686, "task_index": 35}, {"db_idx": 30687, "episode_idx": 176, "frame_idx": 95, "global_frame_idx": 30687, "task_index": 35}, {"db_idx": 30688, "episode_idx": 176, "frame_idx": 96, "global_frame_idx": 30688, "task_index": 35}, {"db_idx": 30689, "episode_idx": 177, "frame_idx": 0, "global_frame_idx": 30689, "task_index": 35}, {"db_idx": 30690, "episode_idx": 177, "frame_idx": 1, "global_frame_idx": 30690, "task_index": 35}, {"db_idx": 30691, "episode_idx": 177, "frame_idx": 2, "global_frame_idx": 30691, "task_index": 35}, {"db_idx": 30692, "episode_idx": 177, "frame_idx": 3, "global_frame_idx": 30692, "task_index": 35}, {"db_idx": 30693, "episode_idx": 177, "frame_idx": 4, "global_frame_idx": 30693, "task_index": 35}, {"db_idx": 30694, "episode_idx": 177, "frame_idx": 5, "global_frame_idx": 30694, "task_index": 35}, {"db_idx": 30695, "episode_idx": 177, "frame_idx": 6, "global_frame_idx": 30695, "task_index": 35}, {"db_idx": 30696, "episode_idx": 177, "frame_idx": 7, "global_frame_idx": 30696, "task_index": 35}, {"db_idx": 30697, "episode_idx": 177, "frame_idx": 8, "global_frame_idx": 30697, "task_index": 35}, {"db_idx": 30698, "episode_idx": 177, "frame_idx": 9, "global_frame_idx": 30698, "task_index": 35}, {"db_idx": 30699, "episode_idx": 177, "frame_idx": 10, "global_frame_idx": 30699, "task_index": 35}, {"db_idx": 30700, "episode_idx": 177, "frame_idx": 11, "global_frame_idx": 30700, "task_index": 35}, {"db_idx": 30701, "episode_idx": 177, "frame_idx": 12, "global_frame_idx": 30701, "task_index": 35}, 
{"db_idx": 30702, "episode_idx": 177, "frame_idx": 13, "global_frame_idx": 30702, "task_index": 35}, {"db_idx": 30703, "episode_idx": 177, "frame_idx": 14, "global_frame_idx": 30703, "task_index": 35}, {"db_idx": 30704, "episode_idx": 177, "frame_idx": 15, "global_frame_idx": 30704, "task_index": 35}, {"db_idx": 30705, "episode_idx": 177, "frame_idx": 16, "global_frame_idx": 30705, "task_index": 35}, {"db_idx": 30706, "episode_idx": 177, "frame_idx": 17, "global_frame_idx": 30706, "task_index": 35}, {"db_idx": 30707, "episode_idx": 177, "frame_idx": 18, "global_frame_idx": 30707, "task_index": 35}, {"db_idx": 30708, "episode_idx": 177, "frame_idx": 19, "global_frame_idx": 30708, "task_index": 35}, {"db_idx": 30709, "episode_idx": 177, "frame_idx": 20, "global_frame_idx": 30709, "task_index": 35}, {"db_idx": 30710, "episode_idx": 177, "frame_idx": 21, "global_frame_idx": 30710, "task_index": 35}, {"db_idx": 30711, "episode_idx": 177, "frame_idx": 22, "global_frame_idx": 30711, "task_index": 35}, {"db_idx": 30712, "episode_idx": 177, "frame_idx": 23, "global_frame_idx": 30712, "task_index": 35}, {"db_idx": 30713, "episode_idx": 177, "frame_idx": 24, "global_frame_idx": 30713, "task_index": 35}, {"db_idx": 30714, "episode_idx": 177, "frame_idx": 25, "global_frame_idx": 30714, "task_index": 35}, {"db_idx": 30715, "episode_idx": 177, "frame_idx": 26, "global_frame_idx": 30715, "task_index": 35}, {"db_idx": 30716, "episode_idx": 177, "frame_idx": 27, "global_frame_idx": 30716, "task_index": 35}, {"db_idx": 30717, "episode_idx": 177, "frame_idx": 28, "global_frame_idx": 30717, "task_index": 35}, {"db_idx": 30718, "episode_idx": 177, "frame_idx": 29, "global_frame_idx": 30718, "task_index": 35}, {"db_idx": 30719, "episode_idx": 177, "frame_idx": 30, "global_frame_idx": 30719, "task_index": 35}, {"db_idx": 30720, "episode_idx": 177, "frame_idx": 31, "global_frame_idx": 30720, "task_index": 35}, {"db_idx": 30721, "episode_idx": 177, "frame_idx": 32, "global_frame_idx": 
30721, "task_index": 35}, {"db_idx": 30722, "episode_idx": 177, "frame_idx": 33, "global_frame_idx": 30722, "task_index": 35}, {"db_idx": 30723, "episode_idx": 177, "frame_idx": 34, "global_frame_idx": 30723, "task_index": 35}, {"db_idx": 30724, "episode_idx": 177, "frame_idx": 35, "global_frame_idx": 30724, "task_index": 35}, {"db_idx": 30725, "episode_idx": 177, "frame_idx": 36, "global_frame_idx": 30725, "task_index": 35}, {"db_idx": 30726, "episode_idx": 177, "frame_idx": 37, "global_frame_idx": 30726, "task_index": 35}, {"db_idx": 30727, "episode_idx": 177, "frame_idx": 38, "global_frame_idx": 30727, "task_index": 35}, {"db_idx": 30728, "episode_idx": 177, "frame_idx": 39, "global_frame_idx": 30728, "task_index": 35}, {"db_idx": 30729, "episode_idx": 177, "frame_idx": 40, "global_frame_idx": 30729, "task_index": 35}, {"db_idx": 30730, "episode_idx": 177, "frame_idx": 41, "global_frame_idx": 30730, "task_index": 35}, {"db_idx": 30731, "episode_idx": 177, "frame_idx": 42, "global_frame_idx": 30731, "task_index": 35}, {"db_idx": 30732, "episode_idx": 177, "frame_idx": 43, "global_frame_idx": 30732, "task_index": 35}, {"db_idx": 30733, "episode_idx": 177, "frame_idx": 44, "global_frame_idx": 30733, "task_index": 35}, {"db_idx": 30734, "episode_idx": 177, "frame_idx": 45, "global_frame_idx": 30734, "task_index": 35}, {"db_idx": 30735, "episode_idx": 177, "frame_idx": 46, "global_frame_idx": 30735, "task_index": 35}, {"db_idx": 30736, "episode_idx": 177, "frame_idx": 47, "global_frame_idx": 30736, "task_index": 35}, {"db_idx": 30737, "episode_idx": 177, "frame_idx": 48, "global_frame_idx": 30737, "task_index": 35}, {"db_idx": 30738, "episode_idx": 177, "frame_idx": 49, "global_frame_idx": 30738, "task_index": 35}, {"db_idx": 30739, "episode_idx": 177, "frame_idx": 50, "global_frame_idx": 30739, "task_index": 35}, {"db_idx": 30740, "episode_idx": 177, "frame_idx": 51, "global_frame_idx": 30740, "task_index": 35}, {"db_idx": 30741, "episode_idx": 177, "frame_idx": 52, 
"global_frame_idx": 30741, "task_index": 35}, {"db_idx": 30742, "episode_idx": 177, "frame_idx": 53, "global_frame_idx": 30742, "task_index": 35}, {"db_idx": 30743, "episode_idx": 177, "frame_idx": 54, "global_frame_idx": 30743, "task_index": 35}, {"db_idx": 30744, "episode_idx": 177, "frame_idx": 55, "global_frame_idx": 30744, "task_index": 35}, {"db_idx": 30745, "episode_idx": 177, "frame_idx": 56, "global_frame_idx": 30745, "task_index": 35}, {"db_idx": 30746, "episode_idx": 177, "frame_idx": 57, "global_frame_idx": 30746, "task_index": 35}, {"db_idx": 30747, "episode_idx": 177, "frame_idx": 58, "global_frame_idx": 30747, "task_index": 35}, {"db_idx": 30748, "episode_idx": 177, "frame_idx": 59, "global_frame_idx": 30748, "task_index": 35}, {"db_idx": 30749, "episode_idx": 177, "frame_idx": 60, "global_frame_idx": 30749, "task_index": 35}, {"db_idx": 30750, "episode_idx": 177, "frame_idx": 61, "global_frame_idx": 30750, "task_index": 35}, {"db_idx": 30751, "episode_idx": 177, "frame_idx": 62, "global_frame_idx": 30751, "task_index": 35}, {"db_idx": 30752, "episode_idx": 177, "frame_idx": 63, "global_frame_idx": 30752, "task_index": 35}, {"db_idx": 30753, "episode_idx": 177, "frame_idx": 64, "global_frame_idx": 30753, "task_index": 35}, {"db_idx": 30754, "episode_idx": 177, "frame_idx": 65, "global_frame_idx": 30754, "task_index": 35}, {"db_idx": 30755, "episode_idx": 177, "frame_idx": 66, "global_frame_idx": 30755, "task_index": 35}, {"db_idx": 30756, "episode_idx": 177, "frame_idx": 67, "global_frame_idx": 30756, "task_index": 35}, {"db_idx": 30757, "episode_idx": 177, "frame_idx": 68, "global_frame_idx": 30757, "task_index": 35}, {"db_idx": 30758, "episode_idx": 177, "frame_idx": 69, "global_frame_idx": 30758, "task_index": 35}, {"db_idx": 30759, "episode_idx": 177, "frame_idx": 70, "global_frame_idx": 30759, "task_index": 35}, {"db_idx": 30760, "episode_idx": 177, "frame_idx": 71, "global_frame_idx": 30760, "task_index": 35}, {"db_idx": 30761, "episode_idx": 
177, "frame_idx": 72, "global_frame_idx": 30761, "task_index": 35}, {"db_idx": 30762, "episode_idx": 177, "frame_idx": 73, "global_frame_idx": 30762, "task_index": 35}, {"db_idx": 30763, "episode_idx": 177, "frame_idx": 74, "global_frame_idx": 30763, "task_index": 35}, {"db_idx": 30764, "episode_idx": 177, "frame_idx": 75, "global_frame_idx": 30764, "task_index": 35}, {"db_idx": 30765, "episode_idx": 177, "frame_idx": 76, "global_frame_idx": 30765, "task_index": 35}, {"db_idx": 30766, "episode_idx": 177, "frame_idx": 77, "global_frame_idx": 30766, "task_index": 35}, {"db_idx": 30767, "episode_idx": 177, "frame_idx": 78, "global_frame_idx": 30767, "task_index": 35}, {"db_idx": 30768, "episode_idx": 177, "frame_idx": 79, "global_frame_idx": 30768, "task_index": 35}, {"db_idx": 30769, "episode_idx": 177, "frame_idx": 80, "global_frame_idx": 30769, "task_index": 35}, {"db_idx": 30770, "episode_idx": 177, "frame_idx": 81, "global_frame_idx": 30770, "task_index": 35}, {"db_idx": 30771, "episode_idx": 177, "frame_idx": 82, "global_frame_idx": 30771, "task_index": 35}, {"db_idx": 30772, "episode_idx": 177, "frame_idx": 83, "global_frame_idx": 30772, "task_index": 35}, {"db_idx": 30773, "episode_idx": 177, "frame_idx": 84, "global_frame_idx": 30773, "task_index": 35}, {"db_idx": 30774, "episode_idx": 177, "frame_idx": 85, "global_frame_idx": 30774, "task_index": 35}, {"db_idx": 30775, "episode_idx": 177, "frame_idx": 86, "global_frame_idx": 30775, "task_index": 35}, {"db_idx": 30776, "episode_idx": 177, "frame_idx": 87, "global_frame_idx": 30776, "task_index": 35}, {"db_idx": 30777, "episode_idx": 177, "frame_idx": 88, "global_frame_idx": 30777, "task_index": 35}, {"db_idx": 30778, "episode_idx": 177, "frame_idx": 89, "global_frame_idx": 30778, "task_index": 35}, {"db_idx": 30779, "episode_idx": 177, "frame_idx": 90, "global_frame_idx": 30779, "task_index": 35}, {"db_idx": 30780, "episode_idx": 177, "frame_idx": 91, "global_frame_idx": 30780, "task_index": 35}, {"db_idx": 
30781, "episode_idx": 177, "frame_idx": 92, "global_frame_idx": 30781, "task_index": 35}, {"db_idx": 30782, "episode_idx": 177, "frame_idx": 93, "global_frame_idx": 30782, "task_index": 35}, {"db_idx": 30783, "episode_idx": 177, "frame_idx": 94, "global_frame_idx": 30783, "task_index": 35}, {"db_idx": 30784, "episode_idx": 177, "frame_idx": 95, "global_frame_idx": 30784, "task_index": 35}, {"db_idx": 30785, "episode_idx": 177, "frame_idx": 96, "global_frame_idx": 30785, "task_index": 35}, {"db_idx": 30786, "episode_idx": 177, "frame_idx": 97, "global_frame_idx": 30786, "task_index": 35}, {"db_idx": 30787, "episode_idx": 177, "frame_idx": 98, "global_frame_idx": 30787, "task_index": 35}, {"db_idx": 30788, "episode_idx": 178, "frame_idx": 0, "global_frame_idx": 30788, "task_index": 35}, {"db_idx": 30789, "episode_idx": 178, "frame_idx": 1, "global_frame_idx": 30789, "task_index": 35}, {"db_idx": 30790, "episode_idx": 178, "frame_idx": 2, "global_frame_idx": 30790, "task_index": 35}, {"db_idx": 30791, "episode_idx": 178, "frame_idx": 3, "global_frame_idx": 30791, "task_index": 35}, {"db_idx": 30792, "episode_idx": 178, "frame_idx": 4, "global_frame_idx": 30792, "task_index": 35}, {"db_idx": 30793, "episode_idx": 178, "frame_idx": 5, "global_frame_idx": 30793, "task_index": 35}, {"db_idx": 30794, "episode_idx": 178, "frame_idx": 6, "global_frame_idx": 30794, "task_index": 35}, {"db_idx": 30795, "episode_idx": 178, "frame_idx": 7, "global_frame_idx": 30795, "task_index": 35}, {"db_idx": 30796, "episode_idx": 178, "frame_idx": 8, "global_frame_idx": 30796, "task_index": 35}, {"db_idx": 30797, "episode_idx": 178, "frame_idx": 9, "global_frame_idx": 30797, "task_index": 35}, {"db_idx": 30798, "episode_idx": 178, "frame_idx": 10, "global_frame_idx": 30798, "task_index": 35}, {"db_idx": 30799, "episode_idx": 178, "frame_idx": 11, "global_frame_idx": 30799, "task_index": 35}, {"db_idx": 30800, "episode_idx": 178, "frame_idx": 12, "global_frame_idx": 30800, "task_index": 35}, 
{"db_idx": 30801, "episode_idx": 178, "frame_idx": 13, "global_frame_idx": 30801, "task_index": 35}, {"db_idx": 30802, "episode_idx": 178, "frame_idx": 14, "global_frame_idx": 30802, "task_index": 35}, {"db_idx": 30803, "episode_idx": 178, "frame_idx": 15, "global_frame_idx": 30803, "task_index": 35}, {"db_idx": 30804, "episode_idx": 178, "frame_idx": 16, "global_frame_idx": 30804, "task_index": 35}, {"db_idx": 30805, "episode_idx": 178, "frame_idx": 17, "global_frame_idx": 30805, "task_index": 35}, {"db_idx": 30806, "episode_idx": 178, "frame_idx": 18, "global_frame_idx": 30806, "task_index": 35}, {"db_idx": 30807, "episode_idx": 178, "frame_idx": 19, "global_frame_idx": 30807, "task_index": 35}, {"db_idx": 30808, "episode_idx": 178, "frame_idx": 20, "global_frame_idx": 30808, "task_index": 35}, {"db_idx": 30809, "episode_idx": 178, "frame_idx": 21, "global_frame_idx": 30809, "task_index": 35}, {"db_idx": 30810, "episode_idx": 178, "frame_idx": 22, "global_frame_idx": 30810, "task_index": 35}, {"db_idx": 30811, "episode_idx": 178, "frame_idx": 23, "global_frame_idx": 30811, "task_index": 35}, {"db_idx": 30812, "episode_idx": 178, "frame_idx": 24, "global_frame_idx": 30812, "task_index": 35}, {"db_idx": 30813, "episode_idx": 178, "frame_idx": 25, "global_frame_idx": 30813, "task_index": 35}, {"db_idx": 30814, "episode_idx": 178, "frame_idx": 26, "global_frame_idx": 30814, "task_index": 35}, {"db_idx": 30815, "episode_idx": 178, "frame_idx": 27, "global_frame_idx": 30815, "task_index": 35}, {"db_idx": 30816, "episode_idx": 178, "frame_idx": 28, "global_frame_idx": 30816, "task_index": 35}, {"db_idx": 30817, "episode_idx": 178, "frame_idx": 29, "global_frame_idx": 30817, "task_index": 35}, {"db_idx": 30818, "episode_idx": 178, "frame_idx": 30, "global_frame_idx": 30818, "task_index": 35}, {"db_idx": 30819, "episode_idx": 178, "frame_idx": 31, "global_frame_idx": 30819, "task_index": 35}, {"db_idx": 30820, "episode_idx": 178, "frame_idx": 32, "global_frame_idx": 
30820, "task_index": 35}, {"db_idx": 30821, "episode_idx": 178, "frame_idx": 33, "global_frame_idx": 30821, "task_index": 35}, {"db_idx": 30822, "episode_idx": 178, "frame_idx": 34, "global_frame_idx": 30822, "task_index": 35}, {"db_idx": 30823, "episode_idx": 178, "frame_idx": 35, "global_frame_idx": 30823, "task_index": 35}, {"db_idx": 30824, "episode_idx": 178, "frame_idx": 36, "global_frame_idx": 30824, "task_index": 35}, {"db_idx": 30825, "episode_idx": 178, "frame_idx": 37, "global_frame_idx": 30825, "task_index": 35}, {"db_idx": 30826, "episode_idx": 178, "frame_idx": 38, "global_frame_idx": 30826, "task_index": 35}, {"db_idx": 30827, "episode_idx": 178, "frame_idx": 39, "global_frame_idx": 30827, "task_index": 35}, {"db_idx": 30828, "episode_idx": 178, "frame_idx": 40, "global_frame_idx": 30828, "task_index": 35}, {"db_idx": 30829, "episode_idx": 178, "frame_idx": 41, "global_frame_idx": 30829, "task_index": 35}, {"db_idx": 30830, "episode_idx": 178, "frame_idx": 42, "global_frame_idx": 30830, "task_index": 35}, {"db_idx": 30831, "episode_idx": 178, "frame_idx": 43, "global_frame_idx": 30831, "task_index": 35}, {"db_idx": 30832, "episode_idx": 178, "frame_idx": 44, "global_frame_idx": 30832, "task_index": 35}, {"db_idx": 30833, "episode_idx": 178, "frame_idx": 45, "global_frame_idx": 30833, "task_index": 35}, {"db_idx": 30834, "episode_idx": 178, "frame_idx": 46, "global_frame_idx": 30834, "task_index": 35}, {"db_idx": 30835, "episode_idx": 178, "frame_idx": 47, "global_frame_idx": 30835, "task_index": 35}, {"db_idx": 30836, "episode_idx": 178, "frame_idx": 48, "global_frame_idx": 30836, "task_index": 35}, {"db_idx": 30837, "episode_idx": 178, "frame_idx": 49, "global_frame_idx": 30837, "task_index": 35}, {"db_idx": 30838, "episode_idx": 178, "frame_idx": 50, "global_frame_idx": 30838, "task_index": 35}, {"db_idx": 30839, "episode_idx": 178, "frame_idx": 51, "global_frame_idx": 30839, "task_index": 35}, {"db_idx": 30840, "episode_idx": 178, "frame_idx": 52, 
"global_frame_idx": 30840, "task_index": 35}, {"db_idx": 30841, "episode_idx": 178, "frame_idx": 53, "global_frame_idx": 30841, "task_index": 35}, {"db_idx": 30842, "episode_idx": 178, "frame_idx": 54, "global_frame_idx": 30842, "task_index": 35}, {"db_idx": 30843, "episode_idx": 178, "frame_idx": 55, "global_frame_idx": 30843, "task_index": 35}, {"db_idx": 30844, "episode_idx": 178, "frame_idx": 56, "global_frame_idx": 30844, "task_index": 35}, {"db_idx": 30845, "episode_idx": 178, "frame_idx": 57, "global_frame_idx": 30845, "task_index": 35}, {"db_idx": 30846, "episode_idx": 178, "frame_idx": 58, "global_frame_idx": 30846, "task_index": 35}, {"db_idx": 30847, "episode_idx": 178, "frame_idx": 59, "global_frame_idx": 30847, "task_index": 35}, {"db_idx": 30848, "episode_idx": 178, "frame_idx": 60, "global_frame_idx": 30848, "task_index": 35}, {"db_idx": 30849, "episode_idx": 178, "frame_idx": 61, "global_frame_idx": 30849, "task_index": 35}, {"db_idx": 30850, "episode_idx": 178, "frame_idx": 62, "global_frame_idx": 30850, "task_index": 35}, {"db_idx": 30851, "episode_idx": 178, "frame_idx": 63, "global_frame_idx": 30851, "task_index": 35}, {"db_idx": 30852, "episode_idx": 178, "frame_idx": 64, "global_frame_idx": 30852, "task_index": 35}, {"db_idx": 30853, "episode_idx": 178, "frame_idx": 65, "global_frame_idx": 30853, "task_index": 35}, {"db_idx": 30854, "episode_idx": 178, "frame_idx": 66, "global_frame_idx": 30854, "task_index": 35}, {"db_idx": 30855, "episode_idx": 178, "frame_idx": 67, "global_frame_idx": 30855, "task_index": 35}, {"db_idx": 30856, "episode_idx": 178, "frame_idx": 68, "global_frame_idx": 30856, "task_index": 35}, {"db_idx": 30857, "episode_idx": 178, "frame_idx": 69, "global_frame_idx": 30857, "task_index": 35}, {"db_idx": 30858, "episode_idx": 178, "frame_idx": 70, "global_frame_idx": 30858, "task_index": 35}, {"db_idx": 30859, "episode_idx": 178, "frame_idx": 71, "global_frame_idx": 30859, "task_index": 35}, {"db_idx": 30860, "episode_idx": 
178, "frame_idx": 72, "global_frame_idx": 30860, "task_index": 35}, {"db_idx": 30861, "episode_idx": 178, "frame_idx": 73, "global_frame_idx": 30861, "task_index": 35}, {"db_idx": 30862, "episode_idx": 178, "frame_idx": 74, "global_frame_idx": 30862, "task_index": 35}, {"db_idx": 30863, "episode_idx": 178, "frame_idx": 75, "global_frame_idx": 30863, "task_index": 35}, {"db_idx": 30864, "episode_idx": 178, "frame_idx": 76, "global_frame_idx": 30864, "task_index": 35}, {"db_idx": 30865, "episode_idx": 178, "frame_idx": 77, "global_frame_idx": 30865, "task_index": 35}, {"db_idx": 30866, "episode_idx": 178, "frame_idx": 78, "global_frame_idx": 30866, "task_index": 35}, {"db_idx": 30867, "episode_idx": 178, "frame_idx": 79, "global_frame_idx": 30867, "task_index": 35}, {"db_idx": 30868, "episode_idx": 178, "frame_idx": 80, "global_frame_idx": 30868, "task_index": 35}, {"db_idx": 30869, "episode_idx": 178, "frame_idx": 81, "global_frame_idx": 30869, "task_index": 35}, {"db_idx": 30870, "episode_idx": 178, "frame_idx": 82, "global_frame_idx": 30870, "task_index": 35}, {"db_idx": 30871, "episode_idx": 178, "frame_idx": 83, "global_frame_idx": 30871, "task_index": 35}, {"db_idx": 30872, "episode_idx": 178, "frame_idx": 84, "global_frame_idx": 30872, "task_index": 35}, {"db_idx": 30873, "episode_idx": 178, "frame_idx": 85, "global_frame_idx": 30873, "task_index": 35}, {"db_idx": 30874, "episode_idx": 178, "frame_idx": 86, "global_frame_idx": 30874, "task_index": 35}, {"db_idx": 30875, "episode_idx": 178, "frame_idx": 87, "global_frame_idx": 30875, "task_index": 35}, {"db_idx": 30876, "episode_idx": 178, "frame_idx": 88, "global_frame_idx": 30876, "task_index": 35}, {"db_idx": 30877, "episode_idx": 178, "frame_idx": 89, "global_frame_idx": 30877, "task_index": 35}, {"db_idx": 30878, "episode_idx": 178, "frame_idx": 90, "global_frame_idx": 30878, "task_index": 35}, {"db_idx": 30879, "episode_idx": 178, "frame_idx": 91, "global_frame_idx": 30879, "task_index": 35}, {"db_idx": 
30880, "episode_idx": 178, "frame_idx": 92, "global_frame_idx": 30880, "task_index": 35}, {"db_idx": 30881, "episode_idx": 178, "frame_idx": 93, "global_frame_idx": 30881, "task_index": 35}, {"db_idx": 30882, "episode_idx": 178, "frame_idx": 94, "global_frame_idx": 30882, "task_index": 35}, {"db_idx": 30883, "episode_idx": 178, "frame_idx": 95, "global_frame_idx": 30883, "task_index": 35}, {"db_idx": 30884, "episode_idx": 178, "frame_idx": 96, "global_frame_idx": 30884, "task_index": 35}, {"db_idx": 30885, "episode_idx": 178, "frame_idx": 97, "global_frame_idx": 30885, "task_index": 35}, {"db_idx": 30886, "episode_idx": 178, "frame_idx": 98, "global_frame_idx": 30886, "task_index": 35}, {"db_idx": 30887, "episode_idx": 179, "frame_idx": 0, "global_frame_idx": 30887, "task_index": 35}, {"db_idx": 30888, "episode_idx": 179, "frame_idx": 1, "global_frame_idx": 30888, "task_index": 35}, {"db_idx": 30889, "episode_idx": 179, "frame_idx": 2, "global_frame_idx": 30889, "task_index": 35}, {"db_idx": 30890, "episode_idx": 179, "frame_idx": 3, "global_frame_idx": 30890, "task_index": 35}, {"db_idx": 30891, "episode_idx": 179, "frame_idx": 4, "global_frame_idx": 30891, "task_index": 35}, {"db_idx": 30892, "episode_idx": 179, "frame_idx": 5, "global_frame_idx": 30892, "task_index": 35}, {"db_idx": 30893, "episode_idx": 179, "frame_idx": 6, "global_frame_idx": 30893, "task_index": 35}, {"db_idx": 30894, "episode_idx": 179, "frame_idx": 7, "global_frame_idx": 30894, "task_index": 35}, {"db_idx": 30895, "episode_idx": 179, "frame_idx": 8, "global_frame_idx": 30895, "task_index": 35}, {"db_idx": 30896, "episode_idx": 179, "frame_idx": 9, "global_frame_idx": 30896, "task_index": 35}, {"db_idx": 30897, "episode_idx": 179, "frame_idx": 10, "global_frame_idx": 30897, "task_index": 35}, {"db_idx": 30898, "episode_idx": 179, "frame_idx": 11, "global_frame_idx": 30898, "task_index": 35}, {"db_idx": 30899, "episode_idx": 179, "frame_idx": 12, "global_frame_idx": 30899, "task_index": 35}, 
{"db_idx": 30900, "episode_idx": 179, "frame_idx": 13, "global_frame_idx": 30900, "task_index": 35}, {"db_idx": 30901, "episode_idx": 179, "frame_idx": 14, "global_frame_idx": 30901, "task_index": 35}, {"db_idx": 30902, "episode_idx": 179, "frame_idx": 15, "global_frame_idx": 30902, "task_index": 35}, {"db_idx": 30903, "episode_idx": 179, "frame_idx": 16, "global_frame_idx": 30903, "task_index": 35}, {"db_idx": 30904, "episode_idx": 179, "frame_idx": 17, "global_frame_idx": 30904, "task_index": 35}, {"db_idx": 30905, "episode_idx": 179, "frame_idx": 18, "global_frame_idx": 30905, "task_index": 35}, {"db_idx": 30906, "episode_idx": 179, "frame_idx": 19, "global_frame_idx": 30906, "task_index": 35}, {"db_idx": 30907, "episode_idx": 179, "frame_idx": 20, "global_frame_idx": 30907, "task_index": 35}, {"db_idx": 30908, "episode_idx": 179, "frame_idx": 21, "global_frame_idx": 30908, "task_index": 35}, {"db_idx": 30909, "episode_idx": 179, "frame_idx": 22, "global_frame_idx": 30909, "task_index": 35}, {"db_idx": 30910, "episode_idx": 179, "frame_idx": 23, "global_frame_idx": 30910, "task_index": 35}, {"db_idx": 30911, "episode_idx": 179, "frame_idx": 24, "global_frame_idx": 30911, "task_index": 35}, {"db_idx": 30912, "episode_idx": 179, "frame_idx": 25, "global_frame_idx": 30912, "task_index": 35}, {"db_idx": 30913, "episode_idx": 179, "frame_idx": 26, "global_frame_idx": 30913, "task_index": 35}, {"db_idx": 30914, "episode_idx": 179, "frame_idx": 27, "global_frame_idx": 30914, "task_index": 35}, {"db_idx": 30915, "episode_idx": 179, "frame_idx": 28, "global_frame_idx": 30915, "task_index": 35}, {"db_idx": 30916, "episode_idx": 179, "frame_idx": 29, "global_frame_idx": 30916, "task_index": 35}, {"db_idx": 30917, "episode_idx": 179, "frame_idx": 30, "global_frame_idx": 30917, "task_index": 35}, {"db_idx": 30918, "episode_idx": 179, "frame_idx": 31, "global_frame_idx": 30918, "task_index": 35}, {"db_idx": 30919, "episode_idx": 179, "frame_idx": 32, "global_frame_idx": 
30919, "task_index": 35}, {"db_idx": 30920, "episode_idx": 179, "frame_idx": 33, "global_frame_idx": 30920, "task_index": 35}, {"db_idx": 30921, "episode_idx": 179, "frame_idx": 34, "global_frame_idx": 30921, "task_index": 35}, {"db_idx": 30922, "episode_idx": 179, "frame_idx": 35, "global_frame_idx": 30922, "task_index": 35}, {"db_idx": 30923, "episode_idx": 179, "frame_idx": 36, "global_frame_idx": 30923, "task_index": 35}, {"db_idx": 30924, "episode_idx": 179, "frame_idx": 37, "global_frame_idx": 30924, "task_index": 35}, {"db_idx": 30925, "episode_idx": 179, "frame_idx": 38, "global_frame_idx": 30925, "task_index": 35}, {"db_idx": 30926, "episode_idx": 179, "frame_idx": 39, "global_frame_idx": 30926, "task_index": 35}, {"db_idx": 30927, "episode_idx": 179, "frame_idx": 40, "global_frame_idx": 30927, "task_index": 35}, {"db_idx": 30928, "episode_idx": 179, "frame_idx": 41, "global_frame_idx": 30928, "task_index": 35}, {"db_idx": 30929, "episode_idx": 179, "frame_idx": 42, "global_frame_idx": 30929, "task_index": 35}, {"db_idx": 30930, "episode_idx": 179, "frame_idx": 43, "global_frame_idx": 30930, "task_index": 35}, {"db_idx": 30931, "episode_idx": 179, "frame_idx": 44, "global_frame_idx": 30931, "task_index": 35}, {"db_idx": 30932, "episode_idx": 179, "frame_idx": 45, "global_frame_idx": 30932, "task_index": 35}, {"db_idx": 30933, "episode_idx": 179, "frame_idx": 46, "global_frame_idx": 30933, "task_index": 35}, {"db_idx": 30934, "episode_idx": 179, "frame_idx": 47, "global_frame_idx": 30934, "task_index": 35}, {"db_idx": 30935, "episode_idx": 179, "frame_idx": 48, "global_frame_idx": 30935, "task_index": 35}, {"db_idx": 30936, "episode_idx": 179, "frame_idx": 49, "global_frame_idx": 30936, "task_index": 35}, {"db_idx": 30937, "episode_idx": 179, "frame_idx": 50, "global_frame_idx": 30937, "task_index": 35}, {"db_idx": 30938, "episode_idx": 179, "frame_idx": 51, "global_frame_idx": 30938, "task_index": 35}, {"db_idx": 30939, "episode_idx": 179, "frame_idx": 52, 
"global_frame_idx": 30939, "task_index": 35}, {"db_idx": 30940, "episode_idx": 179, "frame_idx": 53, "global_frame_idx": 30940, "task_index": 35}, {"db_idx": 30941, "episode_idx": 179, "frame_idx": 54, "global_frame_idx": 30941, "task_index": 35}, {"db_idx": 30942, "episode_idx": 179, "frame_idx": 55, "global_frame_idx": 30942, "task_index": 35}, {"db_idx": 30943, "episode_idx": 179, "frame_idx": 56, "global_frame_idx": 30943, "task_index": 35}, {"db_idx": 30944, "episode_idx": 179, "frame_idx": 57, "global_frame_idx": 30944, "task_index": 35}, {"db_idx": 30945, "episode_idx": 179, "frame_idx": 58, "global_frame_idx": 30945, "task_index": 35}, {"db_idx": 30946, "episode_idx": 179, "frame_idx": 59, "global_frame_idx": 30946, "task_index": 35}, {"db_idx": 30947, "episode_idx": 179, "frame_idx": 60, "global_frame_idx": 30947, "task_index": 35}, {"db_idx": 30948, "episode_idx": 179, "frame_idx": 61, "global_frame_idx": 30948, "task_index": 35}, {"db_idx": 30949, "episode_idx": 179, "frame_idx": 62, "global_frame_idx": 30949, "task_index": 35}, {"db_idx": 30950, "episode_idx": 179, "frame_idx": 63, "global_frame_idx": 30950, "task_index": 35}, {"db_idx": 30951, "episode_idx": 179, "frame_idx": 64, "global_frame_idx": 30951, "task_index": 35}, {"db_idx": 30952, "episode_idx": 179, "frame_idx": 65, "global_frame_idx": 30952, "task_index": 35}, {"db_idx": 30953, "episode_idx": 179, "frame_idx": 66, "global_frame_idx": 30953, "task_index": 35}, {"db_idx": 30954, "episode_idx": 179, "frame_idx": 67, "global_frame_idx": 30954, "task_index": 35}, {"db_idx": 30955, "episode_idx": 179, "frame_idx": 68, "global_frame_idx": 30955, "task_index": 35}, {"db_idx": 30956, "episode_idx": 179, "frame_idx": 69, "global_frame_idx": 30956, "task_index": 35}, {"db_idx": 30957, "episode_idx": 179, "frame_idx": 70, "global_frame_idx": 30957, "task_index": 35}, {"db_idx": 30958, "episode_idx": 179, "frame_idx": 71, "global_frame_idx": 30958, "task_index": 35}, {"db_idx": 30959, "episode_idx": 
179, "frame_idx": 72, "global_frame_idx": 30959, "task_index": 35}, {"db_idx": 30960, "episode_idx": 179, "frame_idx": 73, "global_frame_idx": 30960, "task_index": 35}, {"db_idx": 30961, "episode_idx": 179, "frame_idx": 74, "global_frame_idx": 30961, "task_index": 35}, {"db_idx": 30962, "episode_idx": 179, "frame_idx": 75, "global_frame_idx": 30962, "task_index": 35}, {"db_idx": 30963, "episode_idx": 179, "frame_idx": 76, "global_frame_idx": 30963, "task_index": 35}, {"db_idx": 30964, "episode_idx": 179, "frame_idx": 77, "global_frame_idx": 30964, "task_index": 35}, {"db_idx": 30965, "episode_idx": 179, "frame_idx": 78, "global_frame_idx": 30965, "task_index": 35}, {"db_idx": 30966, "episode_idx": 179, "frame_idx": 79, "global_frame_idx": 30966, "task_index": 35}, {"db_idx": 30967, "episode_idx": 179, "frame_idx": 80, "global_frame_idx": 30967, "task_index": 35}, {"db_idx": 30968, "episode_idx": 179, "frame_idx": 81, "global_frame_idx": 30968, "task_index": 35}, {"db_idx": 30969, "episode_idx": 179, "frame_idx": 82, "global_frame_idx": 30969, "task_index": 35}, {"db_idx": 30970, "episode_idx": 179, "frame_idx": 83, "global_frame_idx": 30970, "task_index": 35}, {"db_idx": 30971, "episode_idx": 179, "frame_idx": 84, "global_frame_idx": 30971, "task_index": 35}, {"db_idx": 30972, "episode_idx": 179, "frame_idx": 85, "global_frame_idx": 30972, "task_index": 35}, {"db_idx": 30973, "episode_idx": 179, "frame_idx": 86, "global_frame_idx": 30973, "task_index": 35}, {"db_idx": 30974, "episode_idx": 179, "frame_idx": 87, "global_frame_idx": 30974, "task_index": 35}, {"db_idx": 30975, "episode_idx": 179, "frame_idx": 88, "global_frame_idx": 30975, "task_index": 35}, {"db_idx": 30976, "episode_idx": 179, "frame_idx": 89, "global_frame_idx": 30976, "task_index": 35}, {"db_idx": 30977, "episode_idx": 179, "frame_idx": 90, "global_frame_idx": 30977, "task_index": 35}, {"db_idx": 30978, "episode_idx": 179, "frame_idx": 91, "global_frame_idx": 30978, "task_index": 35}, {"db_idx": 
30979, "episode_idx": 180, "frame_idx": 0, "global_frame_idx": 30979, "task_index": 36}, {"db_idx": 30980, "episode_idx": 180, "frame_idx": 1, "global_frame_idx": 30980, "task_index": 36}, {"db_idx": 30981, "episode_idx": 180, "frame_idx": 2, "global_frame_idx": 30981, "task_index": 36}, {"db_idx": 30982, "episode_idx": 180, "frame_idx": 3, "global_frame_idx": 30982, "task_index": 36}, {"db_idx": 30983, "episode_idx": 180, "frame_idx": 4, "global_frame_idx": 30983, "task_index": 36}, {"db_idx": 30984, "episode_idx": 180, "frame_idx": 5, "global_frame_idx": 30984, "task_index": 36}, {"db_idx": 30985, "episode_idx": 180, "frame_idx": 6, "global_frame_idx": 30985, "task_index": 36}, {"db_idx": 30986, "episode_idx": 180, "frame_idx": 7, "global_frame_idx": 30986, "task_index": 36}, {"db_idx": 30987, "episode_idx": 180, "frame_idx": 8, "global_frame_idx": 30987, "task_index": 36}, {"db_idx": 30988, "episode_idx": 180, "frame_idx": 9, "global_frame_idx": 30988, "task_index": 36}, {"db_idx": 30989, "episode_idx": 180, "frame_idx": 10, "global_frame_idx": 30989, "task_index": 36}, {"db_idx": 30990, "episode_idx": 180, "frame_idx": 11, "global_frame_idx": 30990, "task_index": 36}, {"db_idx": 30991, "episode_idx": 180, "frame_idx": 12, "global_frame_idx": 30991, "task_index": 36}, {"db_idx": 30992, "episode_idx": 180, "frame_idx": 13, "global_frame_idx": 30992, "task_index": 36}, {"db_idx": 30993, "episode_idx": 180, "frame_idx": 14, "global_frame_idx": 30993, "task_index": 36}, {"db_idx": 30994, "episode_idx": 180, "frame_idx": 15, "global_frame_idx": 30994, "task_index": 36}, {"db_idx": 30995, "episode_idx": 180, "frame_idx": 16, "global_frame_idx": 30995, "task_index": 36}, {"db_idx": 30996, "episode_idx": 180, "frame_idx": 17, "global_frame_idx": 30996, "task_index": 36}, {"db_idx": 30997, "episode_idx": 180, "frame_idx": 18, "global_frame_idx": 30997, "task_index": 36}, {"db_idx": 30998, "episode_idx": 180, "frame_idx": 19, "global_frame_idx": 30998, "task_index": 36}, 
{"db_idx": 30999, "episode_idx": 180, "frame_idx": 20, "global_frame_idx": 30999, "task_index": 36}, {"db_idx": 31000, "episode_idx": 180, "frame_idx": 21, "global_frame_idx": 31000, "task_index": 36}, {"db_idx": 31001, "episode_idx": 180, "frame_idx": 22, "global_frame_idx": 31001, "task_index": 36}, {"db_idx": 31002, "episode_idx": 180, "frame_idx": 23, "global_frame_idx": 31002, "task_index": 36}, {"db_idx": 31003, "episode_idx": 180, "frame_idx": 24, "global_frame_idx": 31003, "task_index": 36}, {"db_idx": 31004, "episode_idx": 180, "frame_idx": 25, "global_frame_idx": 31004, "task_index": 36}, {"db_idx": 31005, "episode_idx": 180, "frame_idx": 26, "global_frame_idx": 31005, "task_index": 36}, {"db_idx": 31006, "episode_idx": 180, "frame_idx": 27, "global_frame_idx": 31006, "task_index": 36}, {"db_idx": 31007, "episode_idx": 180, "frame_idx": 28, "global_frame_idx": 31007, "task_index": 36}, {"db_idx": 31008, "episode_idx": 180, "frame_idx": 29, "global_frame_idx": 31008, "task_index": 36}, {"db_idx": 31009, "episode_idx": 180, "frame_idx": 30, "global_frame_idx": 31009, "task_index": 36}, {"db_idx": 31010, "episode_idx": 180, "frame_idx": 31, "global_frame_idx": 31010, "task_index": 36}, {"db_idx": 31011, "episode_idx": 180, "frame_idx": 32, "global_frame_idx": 31011, "task_index": 36}, {"db_idx": 31012, "episode_idx": 180, "frame_idx": 33, "global_frame_idx": 31012, "task_index": 36}, {"db_idx": 31013, "episode_idx": 180, "frame_idx": 34, "global_frame_idx": 31013, "task_index": 36}, {"db_idx": 31014, "episode_idx": 180, "frame_idx": 35, "global_frame_idx": 31014, "task_index": 36}, {"db_idx": 31015, "episode_idx": 180, "frame_idx": 36, "global_frame_idx": 31015, "task_index": 36}, {"db_idx": 31016, "episode_idx": 180, "frame_idx": 37, "global_frame_idx": 31016, "task_index": 36}, {"db_idx": 31017, "episode_idx": 180, "frame_idx": 38, "global_frame_idx": 31017, "task_index": 36}, {"db_idx": 31018, "episode_idx": 180, "frame_idx": 39, "global_frame_idx": 
31018, "task_index": 36}, {"db_idx": 31019, "episode_idx": 180, "frame_idx": 40, "global_frame_idx": 31019, "task_index": 36}, {"db_idx": 31020, "episode_idx": 180, "frame_idx": 41, "global_frame_idx": 31020, "task_index": 36}, {"db_idx": 31021, "episode_idx": 180, "frame_idx": 42, "global_frame_idx": 31021, "task_index": 36}, {"db_idx": 31022, "episode_idx": 180, "frame_idx": 43, "global_frame_idx": 31022, "task_index": 36}, {"db_idx": 31023, "episode_idx": 180, "frame_idx": 44, "global_frame_idx": 31023, "task_index": 36}, {"db_idx": 31024, "episode_idx": 180, "frame_idx": 45, "global_frame_idx": 31024, "task_index": 36}, {"db_idx": 31025, "episode_idx": 180, "frame_idx": 46, "global_frame_idx": 31025, "task_index": 36}, {"db_idx": 31026, "episode_idx": 180, "frame_idx": 47, "global_frame_idx": 31026, "task_index": 36}, {"db_idx": 31027, "episode_idx": 180, "frame_idx": 48, "global_frame_idx": 31027, "task_index": 36}, {"db_idx": 31028, "episode_idx": 180, "frame_idx": 49, "global_frame_idx": 31028, "task_index": 36}, {"db_idx": 31029, "episode_idx": 180, "frame_idx": 50, "global_frame_idx": 31029, "task_index": 36}, {"db_idx": 31030, "episode_idx": 180, "frame_idx": 51, "global_frame_idx": 31030, "task_index": 36}, {"db_idx": 31031, "episode_idx": 180, "frame_idx": 52, "global_frame_idx": 31031, "task_index": 36}, {"db_idx": 31032, "episode_idx": 180, "frame_idx": 53, "global_frame_idx": 31032, "task_index": 36}, {"db_idx": 31033, "episode_idx": 180, "frame_idx": 54, "global_frame_idx": 31033, "task_index": 36}, {"db_idx": 31034, "episode_idx": 180, "frame_idx": 55, "global_frame_idx": 31034, "task_index": 36}, {"db_idx": 31035, "episode_idx": 180, "frame_idx": 56, "global_frame_idx": 31035, "task_index": 36}, {"db_idx": 31036, "episode_idx": 180, "frame_idx": 57, "global_frame_idx": 31036, "task_index": 36}, {"db_idx": 31037, "episode_idx": 180, "frame_idx": 58, "global_frame_idx": 31037, "task_index": 36}, {"db_idx": 31038, "episode_idx": 180, "frame_idx": 59, 
"global_frame_idx": 31038, "task_index": 36}, {"db_idx": 31039, "episode_idx": 180, "frame_idx": 60, "global_frame_idx": 31039, "task_index": 36}, {"db_idx": 31040, "episode_idx": 180, "frame_idx": 61, "global_frame_idx": 31040, "task_index": 36}, {"db_idx": 31041, "episode_idx": 180, "frame_idx": 62, "global_frame_idx": 31041, "task_index": 36}, {"db_idx": 31042, "episode_idx": 180, "frame_idx": 63, "global_frame_idx": 31042, "task_index": 36}, {"db_idx": 31043, "episode_idx": 180, "frame_idx": 64, "global_frame_idx": 31043, "task_index": 36}, {"db_idx": 31044, "episode_idx": 180, "frame_idx": 65, "global_frame_idx": 31044, "task_index": 36}, {"db_idx": 31045, "episode_idx": 180, "frame_idx": 66, "global_frame_idx": 31045, "task_index": 36}, {"db_idx": 31046, "episode_idx": 180, "frame_idx": 67, "global_frame_idx": 31046, "task_index": 36}, {"db_idx": 31047, "episode_idx": 180, "frame_idx": 68, "global_frame_idx": 31047, "task_index": 36}, {"db_idx": 31048, "episode_idx": 180, "frame_idx": 69, "global_frame_idx": 31048, "task_index": 36}, {"db_idx": 31049, "episode_idx": 180, "frame_idx": 70, "global_frame_idx": 31049, "task_index": 36}, {"db_idx": 31050, "episode_idx": 180, "frame_idx": 71, "global_frame_idx": 31050, "task_index": 36}, {"db_idx": 31051, "episode_idx": 180, "frame_idx": 72, "global_frame_idx": 31051, "task_index": 36}, {"db_idx": 31052, "episode_idx": 180, "frame_idx": 73, "global_frame_idx": 31052, "task_index": 36}, {"db_idx": 31053, "episode_idx": 180, "frame_idx": 74, "global_frame_idx": 31053, "task_index": 36}, {"db_idx": 31054, "episode_idx": 180, "frame_idx": 75, "global_frame_idx": 31054, "task_index": 36}, {"db_idx": 31055, "episode_idx": 180, "frame_idx": 76, "global_frame_idx": 31055, "task_index": 36}, {"db_idx": 31056, "episode_idx": 180, "frame_idx": 77, "global_frame_idx": 31056, "task_index": 36}, {"db_idx": 31057, "episode_idx": 180, "frame_idx": 78, "global_frame_idx": 31057, "task_index": 36}, {"db_idx": 31058, "episode_idx": 
180, "frame_idx": 79, "global_frame_idx": 31058, "task_index": 36}, {"db_idx": 31059, "episode_idx": 180, "frame_idx": 80, "global_frame_idx": 31059, "task_index": 36}, {"db_idx": 31060, "episode_idx": 180, "frame_idx": 81, "global_frame_idx": 31060, "task_index": 36}, {"db_idx": 31061, "episode_idx": 180, "frame_idx": 82, "global_frame_idx": 31061, "task_index": 36}, {"db_idx": 31062, "episode_idx": 180, "frame_idx": 83, "global_frame_idx": 31062, "task_index": 36}, {"db_idx": 31063, "episode_idx": 180, "frame_idx": 84, "global_frame_idx": 31063, "task_index": 36}, {"db_idx": 31064, "episode_idx": 180, "frame_idx": 85, "global_frame_idx": 31064, "task_index": 36}, {"db_idx": 31065, "episode_idx": 180, "frame_idx": 86, "global_frame_idx": 31065, "task_index": 36}, {"db_idx": 31066, "episode_idx": 180, "frame_idx": 87, "global_frame_idx": 31066, "task_index": 36}, {"db_idx": 31067, "episode_idx": 180, "frame_idx": 88, "global_frame_idx": 31067, "task_index": 36}, {"db_idx": 31068, "episode_idx": 180, "frame_idx": 89, "global_frame_idx": 31068, "task_index": 36}, {"db_idx": 31069, "episode_idx": 180, "frame_idx": 90, "global_frame_idx": 31069, "task_index": 36}, {"db_idx": 31070, "episode_idx": 180, "frame_idx": 91, "global_frame_idx": 31070, "task_index": 36}, {"db_idx": 31071, "episode_idx": 180, "frame_idx": 92, "global_frame_idx": 31071, "task_index": 36}, {"db_idx": 31072, "episode_idx": 180, "frame_idx": 93, "global_frame_idx": 31072, "task_index": 36}, {"db_idx": 31073, "episode_idx": 180, "frame_idx": 94, "global_frame_idx": 31073, "task_index": 36}, {"db_idx": 31074, "episode_idx": 180, "frame_idx": 95, "global_frame_idx": 31074, "task_index": 36}, {"db_idx": 31075, "episode_idx": 180, "frame_idx": 96, "global_frame_idx": 31075, "task_index": 36}, {"db_idx": 31076, "episode_idx": 180, "frame_idx": 97, "global_frame_idx": 31076, "task_index": 36}, {"db_idx": 31077, "episode_idx": 180, "frame_idx": 98, "global_frame_idx": 31077, "task_index": 36}, {"db_idx": 
31078, "episode_idx": 180, "frame_idx": 99, "global_frame_idx": 31078, "task_index": 36}, {"db_idx": 31079, "episode_idx": 180, "frame_idx": 100, "global_frame_idx": 31079, "task_index": 36}, {"db_idx": 31080, "episode_idx": 180, "frame_idx": 101, "global_frame_idx": 31080, "task_index": 36}, {"db_idx": 31081, "episode_idx": 180, "frame_idx": 102, "global_frame_idx": 31081, "task_index": 36}, {"db_idx": 31082, "episode_idx": 180, "frame_idx": 103, "global_frame_idx": 31082, "task_index": 36}, {"db_idx": 31083, "episode_idx": 180, "frame_idx": 104, "global_frame_idx": 31083, "task_index": 36}, {"db_idx": 31084, "episode_idx": 180, "frame_idx": 105, "global_frame_idx": 31084, "task_index": 36}, {"db_idx": 31085, "episode_idx": 180, "frame_idx": 106, "global_frame_idx": 31085, "task_index": 36}, {"db_idx": 31086, "episode_idx": 180, "frame_idx": 107, "global_frame_idx": 31086, "task_index": 36}, {"db_idx": 31087, "episode_idx": 180, "frame_idx": 108, "global_frame_idx": 31087, "task_index": 36}, {"db_idx": 31088, "episode_idx": 180, "frame_idx": 109, "global_frame_idx": 31088, "task_index": 36}, {"db_idx": 31089, "episode_idx": 180, "frame_idx": 110, "global_frame_idx": 31089, "task_index": 36}, {"db_idx": 31090, "episode_idx": 180, "frame_idx": 111, "global_frame_idx": 31090, "task_index": 36}, {"db_idx": 31091, "episode_idx": 180, "frame_idx": 112, "global_frame_idx": 31091, "task_index": 36}, {"db_idx": 31092, "episode_idx": 180, "frame_idx": 113, "global_frame_idx": 31092, "task_index": 36}, {"db_idx": 31093, "episode_idx": 180, "frame_idx": 114, "global_frame_idx": 31093, "task_index": 36}, {"db_idx": 31094, "episode_idx": 180, "frame_idx": 115, "global_frame_idx": 31094, "task_index": 36}, {"db_idx": 31095, "episode_idx": 180, "frame_idx": 116, "global_frame_idx": 31095, "task_index": 36}, {"db_idx": 31096, "episode_idx": 180, "frame_idx": 117, "global_frame_idx": 31096, "task_index": 36}, {"db_idx": 31097, "episode_idx": 180, "frame_idx": 118, 
"global_frame_idx": 31097, "task_index": 36}, {"db_idx": 31098, "episode_idx": 180, "frame_idx": 119, "global_frame_idx": 31098, "task_index": 36}, {"db_idx": 31099, "episode_idx": 180, "frame_idx": 120, "global_frame_idx": 31099, "task_index": 36}, {"db_idx": 31100, "episode_idx": 180, "frame_idx": 121, "global_frame_idx": 31100, "task_index": 36}, {"db_idx": 31101, "episode_idx": 180, "frame_idx": 122, "global_frame_idx": 31101, "task_index": 36}, {"db_idx": 31102, "episode_idx": 180, "frame_idx": 123, "global_frame_idx": 31102, "task_index": 36}, {"db_idx": 31103, "episode_idx": 180, "frame_idx": 124, "global_frame_idx": 31103, "task_index": 36}, {"db_idx": 31104, "episode_idx": 180, "frame_idx": 125, "global_frame_idx": 31104, "task_index": 36}, {"db_idx": 31105, "episode_idx": 180, "frame_idx": 126, "global_frame_idx": 31105, "task_index": 36}, {"db_idx": 31106, "episode_idx": 180, "frame_idx": 127, "global_frame_idx": 31106, "task_index": 36}, {"db_idx": 31107, "episode_idx": 180, "frame_idx": 128, "global_frame_idx": 31107, "task_index": 36}, {"db_idx": 31108, "episode_idx": 180, "frame_idx": 129, "global_frame_idx": 31108, "task_index": 36}, {"db_idx": 31109, "episode_idx": 180, "frame_idx": 130, "global_frame_idx": 31109, "task_index": 36}, {"db_idx": 31110, "episode_idx": 180, "frame_idx": 131, "global_frame_idx": 31110, "task_index": 36}, {"db_idx": 31111, "episode_idx": 180, "frame_idx": 132, "global_frame_idx": 31111, "task_index": 36}, {"db_idx": 31112, "episode_idx": 180, "frame_idx": 133, "global_frame_idx": 31112, "task_index": 36}, {"db_idx": 31113, "episode_idx": 180, "frame_idx": 134, "global_frame_idx": 31113, "task_index": 36}, {"db_idx": 31114, "episode_idx": 180, "frame_idx": 135, "global_frame_idx": 31114, "task_index": 36}, {"db_idx": 31115, "episode_idx": 180, "frame_idx": 136, "global_frame_idx": 31115, "task_index": 36}, {"db_idx": 31116, "episode_idx": 180, "frame_idx": 137, "global_frame_idx": 31116, "task_index": 36}, {"db_idx": 
31117, "episode_idx": 180, "frame_idx": 138, "global_frame_idx": 31117, "task_index": 36}, {"db_idx": 31118, "episode_idx": 180, "frame_idx": 139, "global_frame_idx": 31118, "task_index": 36}, {"db_idx": 31119, "episode_idx": 180, "frame_idx": 140, "global_frame_idx": 31119, "task_index": 36}, {"db_idx": 31120, "episode_idx": 180, "frame_idx": 141, "global_frame_idx": 31120, "task_index": 36}, {"db_idx": 31121, "episode_idx": 180, "frame_idx": 142, "global_frame_idx": 31121, "task_index": 36}, {"db_idx": 31122, "episode_idx": 180, "frame_idx": 143, "global_frame_idx": 31122, "task_index": 36}, {"db_idx": 31123, "episode_idx": 180, "frame_idx": 144, "global_frame_idx": 31123, "task_index": 36}, {"db_idx": 31124, "episode_idx": 180, "frame_idx": 145, "global_frame_idx": 31124, "task_index": 36}, {"db_idx": 31125, "episode_idx": 180, "frame_idx": 146, "global_frame_idx": 31125, "task_index": 36}, {"db_idx": 31126, "episode_idx": 180, "frame_idx": 147, "global_frame_idx": 31126, "task_index": 36}, {"db_idx": 31127, "episode_idx": 180, "frame_idx": 148, "global_frame_idx": 31127, "task_index": 36}, {"db_idx": 31128, "episode_idx": 180, "frame_idx": 149, "global_frame_idx": 31128, "task_index": 36}, {"db_idx": 31129, "episode_idx": 180, "frame_idx": 150, "global_frame_idx": 31129, "task_index": 36}, {"db_idx": 31130, "episode_idx": 181, "frame_idx": 0, "global_frame_idx": 31130, "task_index": 36}, {"db_idx": 31131, "episode_idx": 181, "frame_idx": 1, "global_frame_idx": 31131, "task_index": 36}, {"db_idx": 31132, "episode_idx": 181, "frame_idx": 2, "global_frame_idx": 31132, "task_index": 36}, {"db_idx": 31133, "episode_idx": 181, "frame_idx": 3, "global_frame_idx": 31133, "task_index": 36}, {"db_idx": 31134, "episode_idx": 181, "frame_idx": 4, "global_frame_idx": 31134, "task_index": 36}, {"db_idx": 31135, "episode_idx": 181, "frame_idx": 5, "global_frame_idx": 31135, "task_index": 36}, {"db_idx": 31136, "episode_idx": 181, "frame_idx": 6, "global_frame_idx": 31136, 
"task_index": 36}, {"db_idx": 31137, "episode_idx": 181, "frame_idx": 7, "global_frame_idx": 31137, "task_index": 36}, {"db_idx": 31138, "episode_idx": 181, "frame_idx": 8, "global_frame_idx": 31138, "task_index": 36}, {"db_idx": 31139, "episode_idx": 181, "frame_idx": 9, "global_frame_idx": 31139, "task_index": 36}, {"db_idx": 31140, "episode_idx": 181, "frame_idx": 10, "global_frame_idx": 31140, "task_index": 36}, {"db_idx": 31141, "episode_idx": 181, "frame_idx": 11, "global_frame_idx": 31141, "task_index": 36}, {"db_idx": 31142, "episode_idx": 181, "frame_idx": 12, "global_frame_idx": 31142, "task_index": 36}, {"db_idx": 31143, "episode_idx": 181, "frame_idx": 13, "global_frame_idx": 31143, "task_index": 36}, {"db_idx": 31144, "episode_idx": 181, "frame_idx": 14, "global_frame_idx": 31144, "task_index": 36}, {"db_idx": 31145, "episode_idx": 181, "frame_idx": 15, "global_frame_idx": 31145, "task_index": 36}, {"db_idx": 31146, "episode_idx": 181, "frame_idx": 16, "global_frame_idx": 31146, "task_index": 36}, {"db_idx": 31147, "episode_idx": 181, "frame_idx": 17, "global_frame_idx": 31147, "task_index": 36}, {"db_idx": 31148, "episode_idx": 181, "frame_idx": 18, "global_frame_idx": 31148, "task_index": 36}, {"db_idx": 31149, "episode_idx": 181, "frame_idx": 19, "global_frame_idx": 31149, "task_index": 36}, {"db_idx": 31150, "episode_idx": 181, "frame_idx": 20, "global_frame_idx": 31150, "task_index": 36}, {"db_idx": 31151, "episode_idx": 181, "frame_idx": 21, "global_frame_idx": 31151, "task_index": 36}, {"db_idx": 31152, "episode_idx": 181, "frame_idx": 22, "global_frame_idx": 31152, "task_index": 36}, {"db_idx": 31153, "episode_idx": 181, "frame_idx": 23, "global_frame_idx": 31153, "task_index": 36}, {"db_idx": 31154, "episode_idx": 181, "frame_idx": 24, "global_frame_idx": 31154, "task_index": 36}, {"db_idx": 31155, "episode_idx": 181, "frame_idx": 25, "global_frame_idx": 31155, "task_index": 36}, {"db_idx": 31156, "episode_idx": 181, "frame_idx": 26, 
"global_frame_idx": 31156, "task_index": 36}, {"db_idx": 31157, "episode_idx": 181, "frame_idx": 27, "global_frame_idx": 31157, "task_index": 36}, {"db_idx": 31158, "episode_idx": 181, "frame_idx": 28, "global_frame_idx": 31158, "task_index": 36}, {"db_idx": 31159, "episode_idx": 181, "frame_idx": 29, "global_frame_idx": 31159, "task_index": 36}, {"db_idx": 31160, "episode_idx": 181, "frame_idx": 30, "global_frame_idx": 31160, "task_index": 36}, {"db_idx": 31161, "episode_idx": 181, "frame_idx": 31, "global_frame_idx": 31161, "task_index": 36}, {"db_idx": 31162, "episode_idx": 181, "frame_idx": 32, "global_frame_idx": 31162, "task_index": 36}, {"db_idx": 31163, "episode_idx": 181, "frame_idx": 33, "global_frame_idx": 31163, "task_index": 36}, {"db_idx": 31164, "episode_idx": 181, "frame_idx": 34, "global_frame_idx": 31164, "task_index": 36}, {"db_idx": 31165, "episode_idx": 181, "frame_idx": 35, "global_frame_idx": 31165, "task_index": 36}, {"db_idx": 31166, "episode_idx": 181, "frame_idx": 36, "global_frame_idx": 31166, "task_index": 36}, {"db_idx": 31167, "episode_idx": 181, "frame_idx": 37, "global_frame_idx": 31167, "task_index": 36}, {"db_idx": 31168, "episode_idx": 181, "frame_idx": 38, "global_frame_idx": 31168, "task_index": 36}, {"db_idx": 31169, "episode_idx": 181, "frame_idx": 39, "global_frame_idx": 31169, "task_index": 36}, {"db_idx": 31170, "episode_idx": 181, "frame_idx": 40, "global_frame_idx": 31170, "task_index": 36}, {"db_idx": 31171, "episode_idx": 181, "frame_idx": 41, "global_frame_idx": 31171, "task_index": 36}, {"db_idx": 31172, "episode_idx": 181, "frame_idx": 42, "global_frame_idx": 31172, "task_index": 36}, {"db_idx": 31173, "episode_idx": 181, "frame_idx": 43, "global_frame_idx": 31173, "task_index": 36}, {"db_idx": 31174, "episode_idx": 181, "frame_idx": 44, "global_frame_idx": 31174, "task_index": 36}, {"db_idx": 31175, "episode_idx": 181, "frame_idx": 45, "global_frame_idx": 31175, "task_index": 36}, {"db_idx": 31176, "episode_idx": 
181, "frame_idx": 46, "global_frame_idx": 31176, "task_index": 36}, {"db_idx": 31177, "episode_idx": 181, "frame_idx": 47, "global_frame_idx": 31177, "task_index": 36}, {"db_idx": 31178, "episode_idx": 181, "frame_idx": 48, "global_frame_idx": 31178, "task_index": 36}, {"db_idx": 31179, "episode_idx": 181, "frame_idx": 49, "global_frame_idx": 31179, "task_index": 36}, {"db_idx": 31180, "episode_idx": 181, "frame_idx": 50, "global_frame_idx": 31180, "task_index": 36}, {"db_idx": 31181, "episode_idx": 181, "frame_idx": 51, "global_frame_idx": 31181, "task_index": 36}, {"db_idx": 31182, "episode_idx": 181, "frame_idx": 52, "global_frame_idx": 31182, "task_index": 36}, {"db_idx": 31183, "episode_idx": 181, "frame_idx": 53, "global_frame_idx": 31183, "task_index": 36}, {"db_idx": 31184, "episode_idx": 181, "frame_idx": 54, "global_frame_idx": 31184, "task_index": 36}, {"db_idx": 31185, "episode_idx": 181, "frame_idx": 55, "global_frame_idx": 31185, "task_index": 36}, {"db_idx": 31186, "episode_idx": 181, "frame_idx": 56, "global_frame_idx": 31186, "task_index": 36}, {"db_idx": 31187, "episode_idx": 181, "frame_idx": 57, "global_frame_idx": 31187, "task_index": 36}, {"db_idx": 31188, "episode_idx": 181, "frame_idx": 58, "global_frame_idx": 31188, "task_index": 36}, {"db_idx": 31189, "episode_idx": 181, "frame_idx": 59, "global_frame_idx": 31189, "task_index": 36}, {"db_idx": 31190, "episode_idx": 181, "frame_idx": 60, "global_frame_idx": 31190, "task_index": 36}, {"db_idx": 31191, "episode_idx": 181, "frame_idx": 61, "global_frame_idx": 31191, "task_index": 36}, {"db_idx": 31192, "episode_idx": 181, "frame_idx": 62, "global_frame_idx": 31192, "task_index": 36}, {"db_idx": 31193, "episode_idx": 181, "frame_idx": 63, "global_frame_idx": 31193, "task_index": 36}, {"db_idx": 31194, "episode_idx": 181, "frame_idx": 64, "global_frame_idx": 31194, "task_index": 36}, {"db_idx": 31195, "episode_idx": 181, "frame_idx": 65, "global_frame_idx": 31195, "task_index": 36}, {"db_idx": 
31196, "episode_idx": 181, "frame_idx": 66, "global_frame_idx": 31196, "task_index": 36}, {"db_idx": 31197, "episode_idx": 181, "frame_idx": 67, "global_frame_idx": 31197, "task_index": 36}, {"db_idx": 31198, "episode_idx": 181, "frame_idx": 68, "global_frame_idx": 31198, "task_index": 36}, {"db_idx": 31199, "episode_idx": 181, "frame_idx": 69, "global_frame_idx": 31199, "task_index": 36}, {"db_idx": 31200, "episode_idx": 181, "frame_idx": 70, "global_frame_idx": 31200, "task_index": 36}, {"db_idx": 31201, "episode_idx": 181, "frame_idx": 71, "global_frame_idx": 31201, "task_index": 36}, {"db_idx": 31202, "episode_idx": 181, "frame_idx": 72, "global_frame_idx": 31202, "task_index": 36}, {"db_idx": 31203, "episode_idx": 181, "frame_idx": 73, "global_frame_idx": 31203, "task_index": 36}, {"db_idx": 31204, "episode_idx": 181, "frame_idx": 74, "global_frame_idx": 31204, "task_index": 36}, {"db_idx": 31205, "episode_idx": 181, "frame_idx": 75, "global_frame_idx": 31205, "task_index": 36}, {"db_idx": 31206, "episode_idx": 181, "frame_idx": 76, "global_frame_idx": 31206, "task_index": 36}, {"db_idx": 31207, "episode_idx": 181, "frame_idx": 77, "global_frame_idx": 31207, "task_index": 36}, {"db_idx": 31208, "episode_idx": 181, "frame_idx": 78, "global_frame_idx": 31208, "task_index": 36}, {"db_idx": 31209, "episode_idx": 181, "frame_idx": 79, "global_frame_idx": 31209, "task_index": 36}, {"db_idx": 31210, "episode_idx": 181, "frame_idx": 80, "global_frame_idx": 31210, "task_index": 36}, {"db_idx": 31211, "episode_idx": 181, "frame_idx": 81, "global_frame_idx": 31211, "task_index": 36}, {"db_idx": 31212, "episode_idx": 181, "frame_idx": 82, "global_frame_idx": 31212, "task_index": 36}, {"db_idx": 31213, "episode_idx": 181, "frame_idx": 83, "global_frame_idx": 31213, "task_index": 36}, {"db_idx": 31214, "episode_idx": 181, "frame_idx": 84, "global_frame_idx": 31214, "task_index": 36}, {"db_idx": 31215, "episode_idx": 181, "frame_idx": 85, "global_frame_idx": 31215, 
"task_index": 36}, {"db_idx": 31216, "episode_idx": 181, "frame_idx": 86, "global_frame_idx": 31216, "task_index": 36}, {"db_idx": 31217, "episode_idx": 181, "frame_idx": 87, "global_frame_idx": 31217, "task_index": 36}, {"db_idx": 31218, "episode_idx": 181, "frame_idx": 88, "global_frame_idx": 31218, "task_index": 36}, {"db_idx": 31219, "episode_idx": 181, "frame_idx": 89, "global_frame_idx": 31219, "task_index": 36}, {"db_idx": 31220, "episode_idx": 181, "frame_idx": 90, "global_frame_idx": 31220, "task_index": 36}, {"db_idx": 31221, "episode_idx": 181, "frame_idx": 91, "global_frame_idx": 31221, "task_index": 36}, {"db_idx": 31222, "episode_idx": 181, "frame_idx": 92, "global_frame_idx": 31222, "task_index": 36}, {"db_idx": 31223, "episode_idx": 181, "frame_idx": 93, "global_frame_idx": 31223, "task_index": 36}, {"db_idx": 31224, "episode_idx": 181, "frame_idx": 94, "global_frame_idx": 31224, "task_index": 36}, {"db_idx": 31225, "episode_idx": 181, "frame_idx": 95, "global_frame_idx": 31225, "task_index": 36}, {"db_idx": 31226, "episode_idx": 181, "frame_idx": 96, "global_frame_idx": 31226, "task_index": 36}, {"db_idx": 31227, "episode_idx": 181, "frame_idx": 97, "global_frame_idx": 31227, "task_index": 36}, {"db_idx": 31228, "episode_idx": 181, "frame_idx": 98, "global_frame_idx": 31228, "task_index": 36}, {"db_idx": 31229, "episode_idx": 181, "frame_idx": 99, "global_frame_idx": 31229, "task_index": 36}, {"db_idx": 31230, "episode_idx": 181, "frame_idx": 100, "global_frame_idx": 31230, "task_index": 36}, {"db_idx": 31231, "episode_idx": 181, "frame_idx": 101, "global_frame_idx": 31231, "task_index": 36}, {"db_idx": 31232, "episode_idx": 181, "frame_idx": 102, "global_frame_idx": 31232, "task_index": 36}, {"db_idx": 31233, "episode_idx": 181, "frame_idx": 103, "global_frame_idx": 31233, "task_index": 36}, {"db_idx": 31234, "episode_idx": 181, "frame_idx": 104, "global_frame_idx": 31234, "task_index": 36}, {"db_idx": 31235, "episode_idx": 181, "frame_idx": 105, 
"global_frame_idx": 31235, "task_index": 36}, {"db_idx": 31236, "episode_idx": 181, "frame_idx": 106, "global_frame_idx": 31236, "task_index": 36}, {"db_idx": 31237, "episode_idx": 181, "frame_idx": 107, "global_frame_idx": 31237, "task_index": 36}, {"db_idx": 31238, "episode_idx": 181, "frame_idx": 108, "global_frame_idx": 31238, "task_index": 36}, {"db_idx": 31239, "episode_idx": 181, "frame_idx": 109, "global_frame_idx": 31239, "task_index": 36}, {"db_idx": 31240, "episode_idx": 181, "frame_idx": 110, "global_frame_idx": 31240, "task_index": 36}, {"db_idx": 31241, "episode_idx": 181, "frame_idx": 111, "global_frame_idx": 31241, "task_index": 36}, {"db_idx": 31242, "episode_idx": 181, "frame_idx": 112, "global_frame_idx": 31242, "task_index": 36}, {"db_idx": 31243, "episode_idx": 181, "frame_idx": 113, "global_frame_idx": 31243, "task_index": 36}, {"db_idx": 31244, "episode_idx": 181, "frame_idx": 114, "global_frame_idx": 31244, "task_index": 36}, {"db_idx": 31245, "episode_idx": 181, "frame_idx": 115, "global_frame_idx": 31245, "task_index": 36}, {"db_idx": 31246, "episode_idx": 181, "frame_idx": 116, "global_frame_idx": 31246, "task_index": 36}, {"db_idx": 31247, "episode_idx": 181, "frame_idx": 117, "global_frame_idx": 31247, "task_index": 36}, {"db_idx": 31248, "episode_idx": 181, "frame_idx": 118, "global_frame_idx": 31248, "task_index": 36}, {"db_idx": 31249, "episode_idx": 181, "frame_idx": 119, "global_frame_idx": 31249, "task_index": 36}, {"db_idx": 31250, "episode_idx": 181, "frame_idx": 120, "global_frame_idx": 31250, "task_index": 36}, {"db_idx": 31251, "episode_idx": 181, "frame_idx": 121, "global_frame_idx": 31251, "task_index": 36}, {"db_idx": 31252, "episode_idx": 181, "frame_idx": 122, "global_frame_idx": 31252, "task_index": 36}, {"db_idx": 31253, "episode_idx": 181, "frame_idx": 123, "global_frame_idx": 31253, "task_index": 36}, {"db_idx": 31254, "episode_idx": 181, "frame_idx": 124, "global_frame_idx": 31254, "task_index": 36}, {"db_idx": 
31255, "episode_idx": 181, "frame_idx": 125, "global_frame_idx": 31255, "task_index": 36}, {"db_idx": 31256, "episode_idx": 181, "frame_idx": 126, "global_frame_idx": 31256, "task_index": 36}, {"db_idx": 31257, "episode_idx": 181, "frame_idx": 127, "global_frame_idx": 31257, "task_index": 36}, {"db_idx": 31258, "episode_idx": 181, "frame_idx": 128, "global_frame_idx": 31258, "task_index": 36}, {"db_idx": 31259, "episode_idx": 181, "frame_idx": 129, "global_frame_idx": 31259, "task_index": 36}, {"db_idx": 31260, "episode_idx": 181, "frame_idx": 130, "global_frame_idx": 31260, "task_index": 36}, {"db_idx": 31261, "episode_idx": 181, "frame_idx": 131, "global_frame_idx": 31261, "task_index": 36}, {"db_idx": 31262, "episode_idx": 181, "frame_idx": 132, "global_frame_idx": 31262, "task_index": 36}, {"db_idx": 31263, "episode_idx": 181, "frame_idx": 133, "global_frame_idx": 31263, "task_index": 36}, {"db_idx": 31264, "episode_idx": 181, "frame_idx": 134, "global_frame_idx": 31264, "task_index": 36}, {"db_idx": 31265, "episode_idx": 181, "frame_idx": 135, "global_frame_idx": 31265, "task_index": 36}, {"db_idx": 31266, "episode_idx": 181, "frame_idx": 136, "global_frame_idx": 31266, "task_index": 36}, {"db_idx": 31267, "episode_idx": 181, "frame_idx": 137, "global_frame_idx": 31267, "task_index": 36}, {"db_idx": 31268, "episode_idx": 181, "frame_idx": 138, "global_frame_idx": 31268, "task_index": 36}, {"db_idx": 31269, "episode_idx": 181, "frame_idx": 139, "global_frame_idx": 31269, "task_index": 36}, {"db_idx": 31270, "episode_idx": 181, "frame_idx": 140, "global_frame_idx": 31270, "task_index": 36}, {"db_idx": 31271, "episode_idx": 181, "frame_idx": 141, "global_frame_idx": 31271, "task_index": 36}, {"db_idx": 31272, "episode_idx": 181, "frame_idx": 142, "global_frame_idx": 31272, "task_index": 36}, {"db_idx": 31273, "episode_idx": 181, "frame_idx": 143, "global_frame_idx": 31273, "task_index": 36}, {"db_idx": 31274, "episode_idx": 181, "frame_idx": 144, 
"global_frame_idx": 31274, "task_index": 36}, {"db_idx": 31275, "episode_idx": 181, "frame_idx": 145, "global_frame_idx": 31275, "task_index": 36}, {"db_idx": 31276, "episode_idx": 181, "frame_idx": 146, "global_frame_idx": 31276, "task_index": 36}, {"db_idx": 31277, "episode_idx": 182, "frame_idx": 0, "global_frame_idx": 31277, "task_index": 36}, {"db_idx": 31278, "episode_idx": 182, "frame_idx": 1, "global_frame_idx": 31278, "task_index": 36}, {"db_idx": 31279, "episode_idx": 182, "frame_idx": 2, "global_frame_idx": 31279, "task_index": 36}, {"db_idx": 31280, "episode_idx": 182, "frame_idx": 3, "global_frame_idx": 31280, "task_index": 36}, {"db_idx": 31281, "episode_idx": 182, "frame_idx": 4, "global_frame_idx": 31281, "task_index": 36}, {"db_idx": 31282, "episode_idx": 182, "frame_idx": 5, "global_frame_idx": 31282, "task_index": 36}, {"db_idx": 31283, "episode_idx": 182, "frame_idx": 6, "global_frame_idx": 31283, "task_index": 36}, {"db_idx": 31284, "episode_idx": 182, "frame_idx": 7, "global_frame_idx": 31284, "task_index": 36}, {"db_idx": 31285, "episode_idx": 182, "frame_idx": 8, "global_frame_idx": 31285, "task_index": 36}, {"db_idx": 31286, "episode_idx": 182, "frame_idx": 9, "global_frame_idx": 31286, "task_index": 36}, {"db_idx": 31287, "episode_idx": 182, "frame_idx": 10, "global_frame_idx": 31287, "task_index": 36}, {"db_idx": 31288, "episode_idx": 182, "frame_idx": 11, "global_frame_idx": 31288, "task_index": 36}, {"db_idx": 31289, "episode_idx": 182, "frame_idx": 12, "global_frame_idx": 31289, "task_index": 36}, {"db_idx": 31290, "episode_idx": 182, "frame_idx": 13, "global_frame_idx": 31290, "task_index": 36}, {"db_idx": 31291, "episode_idx": 182, "frame_idx": 14, "global_frame_idx": 31291, "task_index": 36}, {"db_idx": 31292, "episode_idx": 182, "frame_idx": 15, "global_frame_idx": 31292, "task_index": 36}, {"db_idx": 31293, "episode_idx": 182, "frame_idx": 16, "global_frame_idx": 31293, "task_index": 36}, {"db_idx": 31294, "episode_idx": 182, 
"frame_idx": 17, "global_frame_idx": 31294, "task_index": 36}, {"db_idx": 31295, "episode_idx": 182, "frame_idx": 18, "global_frame_idx": 31295, "task_index": 36}, {"db_idx": 31296, "episode_idx": 182, "frame_idx": 19, "global_frame_idx": 31296, "task_index": 36}, {"db_idx": 31297, "episode_idx": 182, "frame_idx": 20, "global_frame_idx": 31297, "task_index": 36}, {"db_idx": 31298, "episode_idx": 182, "frame_idx": 21, "global_frame_idx": 31298, "task_index": 36}, {"db_idx": 31299, "episode_idx": 182, "frame_idx": 22, "global_frame_idx": 31299, "task_index": 36}, {"db_idx": 31300, "episode_idx": 182, "frame_idx": 23, "global_frame_idx": 31300, "task_index": 36}, {"db_idx": 31301, "episode_idx": 182, "frame_idx": 24, "global_frame_idx": 31301, "task_index": 36}, {"db_idx": 31302, "episode_idx": 182, "frame_idx": 25, "global_frame_idx": 31302, "task_index": 36}, {"db_idx": 31303, "episode_idx": 182, "frame_idx": 26, "global_frame_idx": 31303, "task_index": 36}, {"db_idx": 31304, "episode_idx": 182, "frame_idx": 27, "global_frame_idx": 31304, "task_index": 36}, {"db_idx": 31305, "episode_idx": 182, "frame_idx": 28, "global_frame_idx": 31305, "task_index": 36}, {"db_idx": 31306, "episode_idx": 182, "frame_idx": 29, "global_frame_idx": 31306, "task_index": 36}, {"db_idx": 31307, "episode_idx": 182, "frame_idx": 30, "global_frame_idx": 31307, "task_index": 36}, {"db_idx": 31308, "episode_idx": 182, "frame_idx": 31, "global_frame_idx": 31308, "task_index": 36}, {"db_idx": 31309, "episode_idx": 182, "frame_idx": 32, "global_frame_idx": 31309, "task_index": 36}, {"db_idx": 31310, "episode_idx": 182, "frame_idx": 33, "global_frame_idx": 31310, "task_index": 36}, {"db_idx": 31311, "episode_idx": 182, "frame_idx": 34, "global_frame_idx": 31311, "task_index": 36}, {"db_idx": 31312, "episode_idx": 182, "frame_idx": 35, "global_frame_idx": 31312, "task_index": 36}, {"db_idx": 31313, "episode_idx": 182, "frame_idx": 36, "global_frame_idx": 31313, "task_index": 36}, {"db_idx": 31314, 
"episode_idx": 182, "frame_idx": 37, "global_frame_idx": 31314, "task_index": 36}, {"db_idx": 31315, "episode_idx": 182, "frame_idx": 38, "global_frame_idx": 31315, "task_index": 36}, {"db_idx": 31316, "episode_idx": 182, "frame_idx": 39, "global_frame_idx": 31316, "task_index": 36}, {"db_idx": 31317, "episode_idx": 182, "frame_idx": 40, "global_frame_idx": 31317, "task_index": 36}, {"db_idx": 31318, "episode_idx": 182, "frame_idx": 41, "global_frame_idx": 31318, "task_index": 36}, {"db_idx": 31319, "episode_idx": 182, "frame_idx": 42, "global_frame_idx": 31319, "task_index": 36}, {"db_idx": 31320, "episode_idx": 182, "frame_idx": 43, "global_frame_idx": 31320, "task_index": 36}, {"db_idx": 31321, "episode_idx": 182, "frame_idx": 44, "global_frame_idx": 31321, "task_index": 36}, {"db_idx": 31322, "episode_idx": 182, "frame_idx": 45, "global_frame_idx": 31322, "task_index": 36}, {"db_idx": 31323, "episode_idx": 182, "frame_idx": 46, "global_frame_idx": 31323, "task_index": 36}, {"db_idx": 31324, "episode_idx": 182, "frame_idx": 47, "global_frame_idx": 31324, "task_index": 36}, {"db_idx": 31325, "episode_idx": 182, "frame_idx": 48, "global_frame_idx": 31325, "task_index": 36}, {"db_idx": 31326, "episode_idx": 182, "frame_idx": 49, "global_frame_idx": 31326, "task_index": 36}, {"db_idx": 31327, "episode_idx": 182, "frame_idx": 50, "global_frame_idx": 31327, "task_index": 36}, {"db_idx": 31328, "episode_idx": 182, "frame_idx": 51, "global_frame_idx": 31328, "task_index": 36}, {"db_idx": 31329, "episode_idx": 182, "frame_idx": 52, "global_frame_idx": 31329, "task_index": 36}, {"db_idx": 31330, "episode_idx": 182, "frame_idx": 53, "global_frame_idx": 31330, "task_index": 36}, {"db_idx": 31331, "episode_idx": 182, "frame_idx": 54, "global_frame_idx": 31331, "task_index": 36}, {"db_idx": 31332, "episode_idx": 182, "frame_idx": 55, "global_frame_idx": 31332, "task_index": 36}, {"db_idx": 31333, "episode_idx": 182, "frame_idx": 56, "global_frame_idx": 31333, "task_index": 
36}, {"db_idx": 31334, "episode_idx": 182, "frame_idx": 57, "global_frame_idx": 31334, "task_index": 36}, {"db_idx": 31335, "episode_idx": 182, "frame_idx": 58, "global_frame_idx": 31335, "task_index": 36}, {"db_idx": 31336, "episode_idx": 182, "frame_idx": 59, "global_frame_idx": 31336, "task_index": 36}, {"db_idx": 31337, "episode_idx": 182, "frame_idx": 60, "global_frame_idx": 31337, "task_index": 36}, {"db_idx": 31338, "episode_idx": 182, "frame_idx": 61, "global_frame_idx": 31338, "task_index": 36}, {"db_idx": 31339, "episode_idx": 182, "frame_idx": 62, "global_frame_idx": 31339, "task_index": 36}, {"db_idx": 31340, "episode_idx": 182, "frame_idx": 63, "global_frame_idx": 31340, "task_index": 36}, {"db_idx": 31341, "episode_idx": 182, "frame_idx": 64, "global_frame_idx": 31341, "task_index": 36}, {"db_idx": 31342, "episode_idx": 182, "frame_idx": 65, "global_frame_idx": 31342, "task_index": 36}, {"db_idx": 31343, "episode_idx": 182, "frame_idx": 66, "global_frame_idx": 31343, "task_index": 36}, {"db_idx": 31344, "episode_idx": 182, "frame_idx": 67, "global_frame_idx": 31344, "task_index": 36}, {"db_idx": 31345, "episode_idx": 182, "frame_idx": 68, "global_frame_idx": 31345, "task_index": 36}, {"db_idx": 31346, "episode_idx": 182, "frame_idx": 69, "global_frame_idx": 31346, "task_index": 36}, {"db_idx": 31347, "episode_idx": 182, "frame_idx": 70, "global_frame_idx": 31347, "task_index": 36}, {"db_idx": 31348, "episode_idx": 182, "frame_idx": 71, "global_frame_idx": 31348, "task_index": 36}, {"db_idx": 31349, "episode_idx": 182, "frame_idx": 72, "global_frame_idx": 31349, "task_index": 36}, {"db_idx": 31350, "episode_idx": 182, "frame_idx": 73, "global_frame_idx": 31350, "task_index": 36}, {"db_idx": 31351, "episode_idx": 182, "frame_idx": 74, "global_frame_idx": 31351, "task_index": 36}, {"db_idx": 31352, "episode_idx": 182, "frame_idx": 75, "global_frame_idx": 31352, "task_index": 36}, {"db_idx": 31353, "episode_idx": 182, "frame_idx": 76, "global_frame_idx": 
31353, "task_index": 36}, {"db_idx": 31354, "episode_idx": 182, "frame_idx": 77, "global_frame_idx": 31354, "task_index": 36}, {"db_idx": 31355, "episode_idx": 182, "frame_idx": 78, "global_frame_idx": 31355, "task_index": 36}, {"db_idx": 31356, "episode_idx": 182, "frame_idx": 79, "global_frame_idx": 31356, "task_index": 36}, {"db_idx": 31357, "episode_idx": 182, "frame_idx": 80, "global_frame_idx": 31357, "task_index": 36}, {"db_idx": 31358, "episode_idx": 182, "frame_idx": 81, "global_frame_idx": 31358, "task_index": 36}, {"db_idx": 31359, "episode_idx": 182, "frame_idx": 82, "global_frame_idx": 31359, "task_index": 36}, {"db_idx": 31360, "episode_idx": 182, "frame_idx": 83, "global_frame_idx": 31360, "task_index": 36}, {"db_idx": 31361, "episode_idx": 182, "frame_idx": 84, "global_frame_idx": 31361, "task_index": 36}, {"db_idx": 31362, "episode_idx": 182, "frame_idx": 85, "global_frame_idx": 31362, "task_index": 36}, {"db_idx": 31363, "episode_idx": 182, "frame_idx": 86, "global_frame_idx": 31363, "task_index": 36}, {"db_idx": 31364, "episode_idx": 182, "frame_idx": 87, "global_frame_idx": 31364, "task_index": 36}, {"db_idx": 31365, "episode_idx": 182, "frame_idx": 88, "global_frame_idx": 31365, "task_index": 36}, {"db_idx": 31366, "episode_idx": 182, "frame_idx": 89, "global_frame_idx": 31366, "task_index": 36}, {"db_idx": 31367, "episode_idx": 182, "frame_idx": 90, "global_frame_idx": 31367, "task_index": 36}, {"db_idx": 31368, "episode_idx": 182, "frame_idx": 91, "global_frame_idx": 31368, "task_index": 36}, {"db_idx": 31369, "episode_idx": 182, "frame_idx": 92, "global_frame_idx": 31369, "task_index": 36}, {"db_idx": 31370, "episode_idx": 182, "frame_idx": 93, "global_frame_idx": 31370, "task_index": 36}, {"db_idx": 31371, "episode_idx": 182, "frame_idx": 94, "global_frame_idx": 31371, "task_index": 36}, {"db_idx": 31372, "episode_idx": 182, "frame_idx": 95, "global_frame_idx": 31372, "task_index": 36}, {"db_idx": 31373, "episode_idx": 182, "frame_idx": 96, 
"global_frame_idx": 31373, "task_index": 36}, {"db_idx": 31374, "episode_idx": 182, "frame_idx": 97, "global_frame_idx": 31374, "task_index": 36}, {"db_idx": 31375, "episode_idx": 182, "frame_idx": 98, "global_frame_idx": 31375, "task_index": 36}, {"db_idx": 31376, "episode_idx": 182, "frame_idx": 99, "global_frame_idx": 31376, "task_index": 36}, {"db_idx": 31377, "episode_idx": 182, "frame_idx": 100, "global_frame_idx": 31377, "task_index": 36}, {"db_idx": 31378, "episode_idx": 182, "frame_idx": 101, "global_frame_idx": 31378, "task_index": 36}, {"db_idx": 31379, "episode_idx": 182, "frame_idx": 102, "global_frame_idx": 31379, "task_index": 36}, {"db_idx": 31380, "episode_idx": 182, "frame_idx": 103, "global_frame_idx": 31380, "task_index": 36}, {"db_idx": 31381, "episode_idx": 182, "frame_idx": 104, "global_frame_idx": 31381, "task_index": 36}, {"db_idx": 31382, "episode_idx": 182, "frame_idx": 105, "global_frame_idx": 31382, "task_index": 36}, {"db_idx": 31383, "episode_idx": 182, "frame_idx": 106, "global_frame_idx": 31383, "task_index": 36}, {"db_idx": 31384, "episode_idx": 182, "frame_idx": 107, "global_frame_idx": 31384, "task_index": 36}, {"db_idx": 31385, "episode_idx": 182, "frame_idx": 108, "global_frame_idx": 31385, "task_index": 36}, {"db_idx": 31386, "episode_idx": 182, "frame_idx": 109, "global_frame_idx": 31386, "task_index": 36}, {"db_idx": 31387, "episode_idx": 182, "frame_idx": 110, "global_frame_idx": 31387, "task_index": 36}, {"db_idx": 31388, "episode_idx": 182, "frame_idx": 111, "global_frame_idx": 31388, "task_index": 36}, {"db_idx": 31389, "episode_idx": 182, "frame_idx": 112, "global_frame_idx": 31389, "task_index": 36}, {"db_idx": 31390, "episode_idx": 182, "frame_idx": 113, "global_frame_idx": 31390, "task_index": 36}, {"db_idx": 31391, "episode_idx": 182, "frame_idx": 114, "global_frame_idx": 31391, "task_index": 36}, {"db_idx": 31392, "episode_idx": 182, "frame_idx": 115, "global_frame_idx": 31392, "task_index": 36}, {"db_idx": 31393, 
"episode_idx": 182, "frame_idx": 116, "global_frame_idx": 31393, "task_index": 36}, {"db_idx": 31394, "episode_idx": 182, "frame_idx": 117, "global_frame_idx": 31394, "task_index": 36}, {"db_idx": 31395, "episode_idx": 182, "frame_idx": 118, "global_frame_idx": 31395, "task_index": 36}, {"db_idx": 31396, "episode_idx": 182, "frame_idx": 119, "global_frame_idx": 31396, "task_index": 36}, {"db_idx": 31397, "episode_idx": 182, "frame_idx": 120, "global_frame_idx": 31397, "task_index": 36}, {"db_idx": 31398, "episode_idx": 182, "frame_idx": 121, "global_frame_idx": 31398, "task_index": 36}, {"db_idx": 31399, "episode_idx": 182, "frame_idx": 122, "global_frame_idx": 31399, "task_index": 36}, {"db_idx": 31400, "episode_idx": 182, "frame_idx": 123, "global_frame_idx": 31400, "task_index": 36}, {"db_idx": 31401, "episode_idx": 182, "frame_idx": 124, "global_frame_idx": 31401, "task_index": 36}, {"db_idx": 31402, "episode_idx": 182, "frame_idx": 125, "global_frame_idx": 31402, "task_index": 36}, {"db_idx": 31403, "episode_idx": 182, "frame_idx": 126, "global_frame_idx": 31403, "task_index": 36}, {"db_idx": 31404, "episode_idx": 182, "frame_idx": 127, "global_frame_idx": 31404, "task_index": 36}, {"db_idx": 31405, "episode_idx": 182, "frame_idx": 128, "global_frame_idx": 31405, "task_index": 36}, {"db_idx": 31406, "episode_idx": 182, "frame_idx": 129, "global_frame_idx": 31406, "task_index": 36}, {"db_idx": 31407, "episode_idx": 182, "frame_idx": 130, "global_frame_idx": 31407, "task_index": 36}, {"db_idx": 31408, "episode_idx": 182, "frame_idx": 131, "global_frame_idx": 31408, "task_index": 36}, {"db_idx": 31409, "episode_idx": 182, "frame_idx": 132, "global_frame_idx": 31409, "task_index": 36}, {"db_idx": 31410, "episode_idx": 182, "frame_idx": 133, "global_frame_idx": 31410, "task_index": 36}, {"db_idx": 31411, "episode_idx": 182, "frame_idx": 134, "global_frame_idx": 31411, "task_index": 36}, {"db_idx": 31412, "episode_idx": 182, "frame_idx": 135, "global_frame_idx": 
31412, "task_index": 36}, {"db_idx": 31413, "episode_idx": 182, "frame_idx": 136, "global_frame_idx": 31413, "task_index": 36}, {"db_idx": 31414, "episode_idx": 182, "frame_idx": 137, "global_frame_idx": 31414, "task_index": 36}, {"db_idx": 31415, "episode_idx": 182, "frame_idx": 138, "global_frame_idx": 31415, "task_index": 36}, {"db_idx": 31416, "episode_idx": 182, "frame_idx": 139, "global_frame_idx": 31416, "task_index": 36}, {"db_idx": 31417, "episode_idx": 182, "frame_idx": 140, "global_frame_idx": 31417, "task_index": 36}, {"db_idx": 31418, "episode_idx": 182, "frame_idx": 141, "global_frame_idx": 31418, "task_index": 36}, {"db_idx": 31419, "episode_idx": 183, "frame_idx": 0, "global_frame_idx": 31419, "task_index": 36}, {"db_idx": 31420, "episode_idx": 183, "frame_idx": 1, "global_frame_idx": 31420, "task_index": 36}, {"db_idx": 31421, "episode_idx": 183, "frame_idx": 2, "global_frame_idx": 31421, "task_index": 36}, {"db_idx": 31422, "episode_idx": 183, "frame_idx": 3, "global_frame_idx": 31422, "task_index": 36}, {"db_idx": 31423, "episode_idx": 183, "frame_idx": 4, "global_frame_idx": 31423, "task_index": 36}, {"db_idx": 31424, "episode_idx": 183, "frame_idx": 5, "global_frame_idx": 31424, "task_index": 36}, {"db_idx": 31425, "episode_idx": 183, "frame_idx": 6, "global_frame_idx": 31425, "task_index": 36}, {"db_idx": 31426, "episode_idx": 183, "frame_idx": 7, "global_frame_idx": 31426, "task_index": 36}, {"db_idx": 31427, "episode_idx": 183, "frame_idx": 8, "global_frame_idx": 31427, "task_index": 36}, {"db_idx": 31428, "episode_idx": 183, "frame_idx": 9, "global_frame_idx": 31428, "task_index": 36}, {"db_idx": 31429, "episode_idx": 183, "frame_idx": 10, "global_frame_idx": 31429, "task_index": 36}, {"db_idx": 31430, "episode_idx": 183, "frame_idx": 11, "global_frame_idx": 31430, "task_index": 36}, {"db_idx": 31431, "episode_idx": 183, "frame_idx": 12, "global_frame_idx": 31431, "task_index": 36}, {"db_idx": 31432, "episode_idx": 183, "frame_idx": 13, 
"global_frame_idx": 31432, "task_index": 36}, {"db_idx": 31433, "episode_idx": 183, "frame_idx": 14, "global_frame_idx": 31433, "task_index": 36}, {"db_idx": 31434, "episode_idx": 183, "frame_idx": 15, "global_frame_idx": 31434, "task_index": 36}, {"db_idx": 31435, "episode_idx": 183, "frame_idx": 16, "global_frame_idx": 31435, "task_index": 36}, {"db_idx": 31436, "episode_idx": 183, "frame_idx": 17, "global_frame_idx": 31436, "task_index": 36}, {"db_idx": 31437, "episode_idx": 183, "frame_idx": 18, "global_frame_idx": 31437, "task_index": 36}, {"db_idx": 31438, "episode_idx": 183, "frame_idx": 19, "global_frame_idx": 31438, "task_index": 36}, {"db_idx": 31439, "episode_idx": 183, "frame_idx": 20, "global_frame_idx": 31439, "task_index": 36}, {"db_idx": 31440, "episode_idx": 183, "frame_idx": 21, "global_frame_idx": 31440, "task_index": 36}, {"db_idx": 31441, "episode_idx": 183, "frame_idx": 22, "global_frame_idx": 31441, "task_index": 36}, {"db_idx": 31442, "episode_idx": 183, "frame_idx": 23, "global_frame_idx": 31442, "task_index": 36}, {"db_idx": 31443, "episode_idx": 183, "frame_idx": 24, "global_frame_idx": 31443, "task_index": 36}, {"db_idx": 31444, "episode_idx": 183, "frame_idx": 25, "global_frame_idx": 31444, "task_index": 36}, {"db_idx": 31445, "episode_idx": 183, "frame_idx": 26, "global_frame_idx": 31445, "task_index": 36}, {"db_idx": 31446, "episode_idx": 183, "frame_idx": 27, "global_frame_idx": 31446, "task_index": 36}, {"db_idx": 31447, "episode_idx": 183, "frame_idx": 28, "global_frame_idx": 31447, "task_index": 36}, {"db_idx": 31448, "episode_idx": 183, "frame_idx": 29, "global_frame_idx": 31448, "task_index": 36}, {"db_idx": 31449, "episode_idx": 183, "frame_idx": 30, "global_frame_idx": 31449, "task_index": 36}, {"db_idx": 31450, "episode_idx": 183, "frame_idx": 31, "global_frame_idx": 31450, "task_index": 36}, {"db_idx": 31451, "episode_idx": 183, "frame_idx": 32, "global_frame_idx": 31451, "task_index": 36}, {"db_idx": 31452, "episode_idx": 
183, "frame_idx": 33, "global_frame_idx": 31452, "task_index": 36}, {"db_idx": 31453, "episode_idx": 183, "frame_idx": 34, "global_frame_idx": 31453, "task_index": 36}, {"db_idx": 31454, "episode_idx": 183, "frame_idx": 35, "global_frame_idx": 31454, "task_index": 36}, {"db_idx": 31455, "episode_idx": 183, "frame_idx": 36, "global_frame_idx": 31455, "task_index": 36}, {"db_idx": 31456, "episode_idx": 183, "frame_idx": 37, "global_frame_idx": 31456, "task_index": 36}, {"db_idx": 31457, "episode_idx": 183, "frame_idx": 38, "global_frame_idx": 31457, "task_index": 36}, {"db_idx": 31458, "episode_idx": 183, "frame_idx": 39, "global_frame_idx": 31458, "task_index": 36}, {"db_idx": 31459, "episode_idx": 183, "frame_idx": 40, "global_frame_idx": 31459, "task_index": 36}, {"db_idx": 31460, "episode_idx": 183, "frame_idx": 41, "global_frame_idx": 31460, "task_index": 36}, {"db_idx": 31461, "episode_idx": 183, "frame_idx": 42, "global_frame_idx": 31461, "task_index": 36}, {"db_idx": 31462, "episode_idx": 183, "frame_idx": 43, "global_frame_idx": 31462, "task_index": 36}, {"db_idx": 31463, "episode_idx": 183, "frame_idx": 44, "global_frame_idx": 31463, "task_index": 36}, {"db_idx": 31464, "episode_idx": 183, "frame_idx": 45, "global_frame_idx": 31464, "task_index": 36}, {"db_idx": 31465, "episode_idx": 183, "frame_idx": 46, "global_frame_idx": 31465, "task_index": 36}, {"db_idx": 31466, "episode_idx": 183, "frame_idx": 47, "global_frame_idx": 31466, "task_index": 36}, {"db_idx": 31467, "episode_idx": 183, "frame_idx": 48, "global_frame_idx": 31467, "task_index": 36}, {"db_idx": 31468, "episode_idx": 183, "frame_idx": 49, "global_frame_idx": 31468, "task_index": 36}, {"db_idx": 31469, "episode_idx": 183, "frame_idx": 50, "global_frame_idx": 31469, "task_index": 36}, {"db_idx": 31470, "episode_idx": 183, "frame_idx": 51, "global_frame_idx": 31470, "task_index": 36}, {"db_idx": 31471, "episode_idx": 183, "frame_idx": 52, "global_frame_idx": 31471, "task_index": 36}, {"db_idx": 
31472, "episode_idx": 183, "frame_idx": 53, "global_frame_idx": 31472, "task_index": 36}, {"db_idx": 31473, "episode_idx": 183, "frame_idx": 54, "global_frame_idx": 31473, "task_index": 36}, {"db_idx": 31474, "episode_idx": 183, "frame_idx": 55, "global_frame_idx": 31474, "task_index": 36}, {"db_idx": 31475, "episode_idx": 183, "frame_idx": 56, "global_frame_idx": 31475, "task_index": 36}, {"db_idx": 31476, "episode_idx": 183, "frame_idx": 57, "global_frame_idx": 31476, "task_index": 36}, {"db_idx": 31477, "episode_idx": 183, "frame_idx": 58, "global_frame_idx": 31477, "task_index": 36}, {"db_idx": 31478, "episode_idx": 183, "frame_idx": 59, "global_frame_idx": 31478, "task_index": 36}, {"db_idx": 31479, "episode_idx": 183, "frame_idx": 60, "global_frame_idx": 31479, "task_index": 36}, {"db_idx": 31480, "episode_idx": 183, "frame_idx": 61, "global_frame_idx": 31480, "task_index": 36}, {"db_idx": 31481, "episode_idx": 183, "frame_idx": 62, "global_frame_idx": 31481, "task_index": 36}, {"db_idx": 31482, "episode_idx": 183, "frame_idx": 63, "global_frame_idx": 31482, "task_index": 36}, {"db_idx": 31483, "episode_idx": 183, "frame_idx": 64, "global_frame_idx": 31483, "task_index": 36}, {"db_idx": 31484, "episode_idx": 183, "frame_idx": 65, "global_frame_idx": 31484, "task_index": 36}, {"db_idx": 31485, "episode_idx": 183, "frame_idx": 66, "global_frame_idx": 31485, "task_index": 36}, {"db_idx": 31486, "episode_idx": 183, "frame_idx": 67, "global_frame_idx": 31486, "task_index": 36}, {"db_idx": 31487, "episode_idx": 183, "frame_idx": 68, "global_frame_idx": 31487, "task_index": 36}, {"db_idx": 31488, "episode_idx": 183, "frame_idx": 69, "global_frame_idx": 31488, "task_index": 36}, {"db_idx": 31489, "episode_idx": 183, "frame_idx": 70, "global_frame_idx": 31489, "task_index": 36}, {"db_idx": 31490, "episode_idx": 183, "frame_idx": 71, "global_frame_idx": 31490, "task_index": 36}, {"db_idx": 31491, "episode_idx": 183, "frame_idx": 72, "global_frame_idx": 31491, 
"task_index": 36}, {"db_idx": 31492, "episode_idx": 183, "frame_idx": 73, "global_frame_idx": 31492, "task_index": 36}, {"db_idx": 31493, "episode_idx": 183, "frame_idx": 74, "global_frame_idx": 31493, "task_index": 36}, {"db_idx": 31494, "episode_idx": 183, "frame_idx": 75, "global_frame_idx": 31494, "task_index": 36}, {"db_idx": 31495, "episode_idx": 183, "frame_idx": 76, "global_frame_idx": 31495, "task_index": 36}, {"db_idx": 31496, "episode_idx": 183, "frame_idx": 77, "global_frame_idx": 31496, "task_index": 36}, {"db_idx": 31497, "episode_idx": 183, "frame_idx": 78, "global_frame_idx": 31497, "task_index": 36}, {"db_idx": 31498, "episode_idx": 183, "frame_idx": 79, "global_frame_idx": 31498, "task_index": 36}, {"db_idx": 31499, "episode_idx": 183, "frame_idx": 80, "global_frame_idx": 31499, "task_index": 36}, {"db_idx": 31500, "episode_idx": 183, "frame_idx": 81, "global_frame_idx": 31500, "task_index": 36}, {"db_idx": 31501, "episode_idx": 183, "frame_idx": 82, "global_frame_idx": 31501, "task_index": 36}, {"db_idx": 31502, "episode_idx": 183, "frame_idx": 83, "global_frame_idx": 31502, "task_index": 36}, {"db_idx": 31503, "episode_idx": 183, "frame_idx": 84, "global_frame_idx": 31503, "task_index": 36}, {"db_idx": 31504, "episode_idx": 183, "frame_idx": 85, "global_frame_idx": 31504, "task_index": 36}, {"db_idx": 31505, "episode_idx": 183, "frame_idx": 86, "global_frame_idx": 31505, "task_index": 36}, {"db_idx": 31506, "episode_idx": 183, "frame_idx": 87, "global_frame_idx": 31506, "task_index": 36}, {"db_idx": 31507, "episode_idx": 183, "frame_idx": 88, "global_frame_idx": 31507, "task_index": 36}, {"db_idx": 31508, "episode_idx": 183, "frame_idx": 89, "global_frame_idx": 31508, "task_index": 36}, {"db_idx": 31509, "episode_idx": 183, "frame_idx": 90, "global_frame_idx": 31509, "task_index": 36}, {"db_idx": 31510, "episode_idx": 183, "frame_idx": 91, "global_frame_idx": 31510, "task_index": 36}, {"db_idx": 31511, "episode_idx": 183, "frame_idx": 92, 
"global_frame_idx": 31511, "task_index": 36}, {"db_idx": 31512, "episode_idx": 183, "frame_idx": 93, "global_frame_idx": 31512, "task_index": 36}, {"db_idx": 31513, "episode_idx": 183, "frame_idx": 94, "global_frame_idx": 31513, "task_index": 36}, {"db_idx": 31514, "episode_idx": 183, "frame_idx": 95, "global_frame_idx": 31514, "task_index": 36}, {"db_idx": 31515, "episode_idx": 183, "frame_idx": 96, "global_frame_idx": 31515, "task_index": 36}, {"db_idx": 31516, "episode_idx": 183, "frame_idx": 97, "global_frame_idx": 31516, "task_index": 36}, {"db_idx": 31517, "episode_idx": 183, "frame_idx": 98, "global_frame_idx": 31517, "task_index": 36}, {"db_idx": 31518, "episode_idx": 183, "frame_idx": 99, "global_frame_idx": 31518, "task_index": 36}, {"db_idx": 31519, "episode_idx": 183, "frame_idx": 100, "global_frame_idx": 31519, "task_index": 36}, {"db_idx": 31520, "episode_idx": 183, "frame_idx": 101, "global_frame_idx": 31520, "task_index": 36}, {"db_idx": 31521, "episode_idx": 183, "frame_idx": 102, "global_frame_idx": 31521, "task_index": 36}, {"db_idx": 31522, "episode_idx": 183, "frame_idx": 103, "global_frame_idx": 31522, "task_index": 36}, {"db_idx": 31523, "episode_idx": 183, "frame_idx": 104, "global_frame_idx": 31523, "task_index": 36}, {"db_idx": 31524, "episode_idx": 183, "frame_idx": 105, "global_frame_idx": 31524, "task_index": 36}, {"db_idx": 31525, "episode_idx": 183, "frame_idx": 106, "global_frame_idx": 31525, "task_index": 36}, {"db_idx": 31526, "episode_idx": 183, "frame_idx": 107, "global_frame_idx": 31526, "task_index": 36}, {"db_idx": 31527, "episode_idx": 183, "frame_idx": 108, "global_frame_idx": 31527, "task_index": 36}, {"db_idx": 31528, "episode_idx": 183, "frame_idx": 109, "global_frame_idx": 31528, "task_index": 36}, {"db_idx": 31529, "episode_idx": 183, "frame_idx": 110, "global_frame_idx": 31529, "task_index": 36}, {"db_idx": 31530, "episode_idx": 183, "frame_idx": 111, "global_frame_idx": 31530, "task_index": 36}, {"db_idx": 31531, 
"episode_idx": 183, "frame_idx": 112, "global_frame_idx": 31531, "task_index": 36}, {"db_idx": 31532, "episode_idx": 183, "frame_idx": 113, "global_frame_idx": 31532, "task_index": 36}, {"db_idx": 31533, "episode_idx": 183, "frame_idx": 114, "global_frame_idx": 31533, "task_index": 36}, {"db_idx": 31534, "episode_idx": 183, "frame_idx": 115, "global_frame_idx": 31534, "task_index": 36}, {"db_idx": 31535, "episode_idx": 183, "frame_idx": 116, "global_frame_idx": 31535, "task_index": 36}, {"db_idx": 31536, "episode_idx": 183, "frame_idx": 117, "global_frame_idx": 31536, "task_index": 36}, {"db_idx": 31537, "episode_idx": 183, "frame_idx": 118, "global_frame_idx": 31537, "task_index": 36}, {"db_idx": 31538, "episode_idx": 183, "frame_idx": 119, "global_frame_idx": 31538, "task_index": 36}, {"db_idx": 31539, "episode_idx": 183, "frame_idx": 120, "global_frame_idx": 31539, "task_index": 36}, {"db_idx": 31540, "episode_idx": 183, "frame_idx": 121, "global_frame_idx": 31540, "task_index": 36}, {"db_idx": 31541, "episode_idx": 183, "frame_idx": 122, "global_frame_idx": 31541, "task_index": 36}, {"db_idx": 31542, "episode_idx": 183, "frame_idx": 123, "global_frame_idx": 31542, "task_index": 36}, {"db_idx": 31543, "episode_idx": 183, "frame_idx": 124, "global_frame_idx": 31543, "task_index": 36}, {"db_idx": 31544, "episode_idx": 183, "frame_idx": 125, "global_frame_idx": 31544, "task_index": 36}, {"db_idx": 31545, "episode_idx": 183, "frame_idx": 126, "global_frame_idx": 31545, "task_index": 36}, {"db_idx": 31546, "episode_idx": 183, "frame_idx": 127, "global_frame_idx": 31546, "task_index": 36}, {"db_idx": 31547, "episode_idx": 183, "frame_idx": 128, "global_frame_idx": 31547, "task_index": 36}, {"db_idx": 31548, "episode_idx": 183, "frame_idx": 129, "global_frame_idx": 31548, "task_index": 36}, {"db_idx": 31549, "episode_idx": 183, "frame_idx": 130, "global_frame_idx": 31549, "task_index": 36}, {"db_idx": 31550, "episode_idx": 183, "frame_idx": 131, "global_frame_idx": 
31550, "task_index": 36}, {"db_idx": 31551, "episode_idx": 183, "frame_idx": 132, "global_frame_idx": 31551, "task_index": 36}, {"db_idx": 31552, "episode_idx": 183, "frame_idx": 133, "global_frame_idx": 31552, "task_index": 36}, {"db_idx": 31553, "episode_idx": 183, "frame_idx": 134, "global_frame_idx": 31553, "task_index": 36}, {"db_idx": 31554, "episode_idx": 183, "frame_idx": 135, "global_frame_idx": 31554, "task_index": 36}, {"db_idx": 31555, "episode_idx": 183, "frame_idx": 136, "global_frame_idx": 31555, "task_index": 36}, {"db_idx": 31556, "episode_idx": 183, "frame_idx": 137, "global_frame_idx": 31556, "task_index": 36}, {"db_idx": 31557, "episode_idx": 183, "frame_idx": 138, "global_frame_idx": 31557, "task_index": 36}, {"db_idx": 31558, "episode_idx": 183, "frame_idx": 139, "global_frame_idx": 31558, "task_index": 36}, {"db_idx": 31559, "episode_idx": 183, "frame_idx": 140, "global_frame_idx": 31559, "task_index": 36}, {"db_idx": 31560, "episode_idx": 183, "frame_idx": 141, "global_frame_idx": 31560, "task_index": 36}, {"db_idx": 31561, "episode_idx": 183, "frame_idx": 142, "global_frame_idx": 31561, "task_index": 36}, {"db_idx": 31562, "episode_idx": 183, "frame_idx": 143, "global_frame_idx": 31562, "task_index": 36}, {"db_idx": 31563, "episode_idx": 183, "frame_idx": 144, "global_frame_idx": 31563, "task_index": 36}, {"db_idx": 31564, "episode_idx": 183, "frame_idx": 145, "global_frame_idx": 31564, "task_index": 36}, {"db_idx": 31565, "episode_idx": 183, "frame_idx": 146, "global_frame_idx": 31565, "task_index": 36}, {"db_idx": 31566, "episode_idx": 183, "frame_idx": 147, "global_frame_idx": 31566, "task_index": 36}, {"db_idx": 31567, "episode_idx": 183, "frame_idx": 148, "global_frame_idx": 31567, "task_index": 36}, {"db_idx": 31568, "episode_idx": 183, "frame_idx": 149, "global_frame_idx": 31568, "task_index": 36}, {"db_idx": 31569, "episode_idx": 183, "frame_idx": 150, "global_frame_idx": 31569, "task_index": 36}, {"db_idx": 31570, "episode_idx": 
183, "frame_idx": 151, "global_frame_idx": 31570, "task_index": 36}, {"db_idx": 31571, "episode_idx": 183, "frame_idx": 152, "global_frame_idx": 31571, "task_index": 36}, {"db_idx": 31572, "episode_idx": 183, "frame_idx": 153, "global_frame_idx": 31572, "task_index": 36}, {"db_idx": 31573, "episode_idx": 183, "frame_idx": 154, "global_frame_idx": 31573, "task_index": 36}, {"db_idx": 31574, "episode_idx": 183, "frame_idx": 155, "global_frame_idx": 31574, "task_index": 36}, {"db_idx": 31575, "episode_idx": 183, "frame_idx": 156, "global_frame_idx": 31575, "task_index": 36}, {"db_idx": 31576, "episode_idx": 183, "frame_idx": 157, "global_frame_idx": 31576, "task_index": 36}, {"db_idx": 31577, "episode_idx": 183, "frame_idx": 158, "global_frame_idx": 31577, "task_index": 36}, {"db_idx": 31578, "episode_idx": 183, "frame_idx": 159, "global_frame_idx": 31578, "task_index": 36}, {"db_idx": 31579, "episode_idx": 183, "frame_idx": 160, "global_frame_idx": 31579, "task_index": 36}, {"db_idx": 31580, "episode_idx": 183, "frame_idx": 161, "global_frame_idx": 31580, "task_index": 36}, {"db_idx": 31581, "episode_idx": 183, "frame_idx": 162, "global_frame_idx": 31581, "task_index": 36}, {"db_idx": 31582, "episode_idx": 183, "frame_idx": 163, "global_frame_idx": 31582, "task_index": 36}, {"db_idx": 31583, "episode_idx": 183, "frame_idx": 164, "global_frame_idx": 31583, "task_index": 36}, {"db_idx": 31584, "episode_idx": 183, "frame_idx": 165, "global_frame_idx": 31584, "task_index": 36}, {"db_idx": 31585, "episode_idx": 183, "frame_idx": 166, "global_frame_idx": 31585, "task_index": 36}, {"db_idx": 31586, "episode_idx": 183, "frame_idx": 167, "global_frame_idx": 31586, "task_index": 36}, {"db_idx": 31587, "episode_idx": 183, "frame_idx": 168, "global_frame_idx": 31587, "task_index": 36}, {"db_idx": 31588, "episode_idx": 184, "frame_idx": 0, "global_frame_idx": 31588, "task_index": 36}, {"db_idx": 31589, "episode_idx": 184, "frame_idx": 1, "global_frame_idx": 31589, "task_index": 
36}, {"db_idx": 31590, "episode_idx": 184, "frame_idx": 2, "global_frame_idx": 31590, "task_index": 36}, {"db_idx": 31591, "episode_idx": 184, "frame_idx": 3, "global_frame_idx": 31591, "task_index": 36}, {"db_idx": 31592, "episode_idx": 184, "frame_idx": 4, "global_frame_idx": 31592, "task_index": 36}, {"db_idx": 31593, "episode_idx": 184, "frame_idx": 5, "global_frame_idx": 31593, "task_index": 36}, {"db_idx": 31594, "episode_idx": 184, "frame_idx": 6, "global_frame_idx": 31594, "task_index": 36}, {"db_idx": 31595, "episode_idx": 184, "frame_idx": 7, "global_frame_idx": 31595, "task_index": 36}, {"db_idx": 31596, "episode_idx": 184, "frame_idx": 8, "global_frame_idx": 31596, "task_index": 36}, {"db_idx": 31597, "episode_idx": 184, "frame_idx": 9, "global_frame_idx": 31597, "task_index": 36}, {"db_idx": 31598, "episode_idx": 184, "frame_idx": 10, "global_frame_idx": 31598, "task_index": 36}, {"db_idx": 31599, "episode_idx": 184, "frame_idx": 11, "global_frame_idx": 31599, "task_index": 36}, {"db_idx": 31600, "episode_idx": 184, "frame_idx": 12, "global_frame_idx": 31600, "task_index": 36}, {"db_idx": 31601, "episode_idx": 184, "frame_idx": 13, "global_frame_idx": 31601, "task_index": 36}, {"db_idx": 31602, "episode_idx": 184, "frame_idx": 14, "global_frame_idx": 31602, "task_index": 36}, {"db_idx": 31603, "episode_idx": 184, "frame_idx": 15, "global_frame_idx": 31603, "task_index": 36}, {"db_idx": 31604, "episode_idx": 184, "frame_idx": 16, "global_frame_idx": 31604, "task_index": 36}, {"db_idx": 31605, "episode_idx": 184, "frame_idx": 17, "global_frame_idx": 31605, "task_index": 36}, {"db_idx": 31606, "episode_idx": 184, "frame_idx": 18, "global_frame_idx": 31606, "task_index": 36}, {"db_idx": 31607, "episode_idx": 184, "frame_idx": 19, "global_frame_idx": 31607, "task_index": 36}, {"db_idx": 31608, "episode_idx": 184, "frame_idx": 20, "global_frame_idx": 31608, "task_index": 36}, {"db_idx": 31609, "episode_idx": 184, "frame_idx": 21, "global_frame_idx": 31609, 
"task_index": 36}, {"db_idx": 31610, "episode_idx": 184, "frame_idx": 22, "global_frame_idx": 31610, "task_index": 36}, {"db_idx": 31611, "episode_idx": 184, "frame_idx": 23, "global_frame_idx": 31611, "task_index": 36}, {"db_idx": 31612, "episode_idx": 184, "frame_idx": 24, "global_frame_idx": 31612, "task_index": 36}, {"db_idx": 31613, "episode_idx": 184, "frame_idx": 25, "global_frame_idx": 31613, "task_index": 36}, {"db_idx": 31614, "episode_idx": 184, "frame_idx": 26, "global_frame_idx": 31614, "task_index": 36}, {"db_idx": 31615, "episode_idx": 184, "frame_idx": 27, "global_frame_idx": 31615, "task_index": 36}, {"db_idx": 31616, "episode_idx": 184, "frame_idx": 28, "global_frame_idx": 31616, "task_index": 36}, {"db_idx": 31617, "episode_idx": 184, "frame_idx": 29, "global_frame_idx": 31617, "task_index": 36}, {"db_idx": 31618, "episode_idx": 184, "frame_idx": 30, "global_frame_idx": 31618, "task_index": 36}, {"db_idx": 31619, "episode_idx": 184, "frame_idx": 31, "global_frame_idx": 31619, "task_index": 36}, {"db_idx": 31620, "episode_idx": 184, "frame_idx": 32, "global_frame_idx": 31620, "task_index": 36}, {"db_idx": 31621, "episode_idx": 184, "frame_idx": 33, "global_frame_idx": 31621, "task_index": 36}, {"db_idx": 31622, "episode_idx": 184, "frame_idx": 34, "global_frame_idx": 31622, "task_index": 36}, {"db_idx": 31623, "episode_idx": 184, "frame_idx": 35, "global_frame_idx": 31623, "task_index": 36}, {"db_idx": 31624, "episode_idx": 184, "frame_idx": 36, "global_frame_idx": 31624, "task_index": 36}, {"db_idx": 31625, "episode_idx": 184, "frame_idx": 37, "global_frame_idx": 31625, "task_index": 36}, {"db_idx": 31626, "episode_idx": 184, "frame_idx": 38, "global_frame_idx": 31626, "task_index": 36}, {"db_idx": 31627, "episode_idx": 184, "frame_idx": 39, "global_frame_idx": 31627, "task_index": 36}, {"db_idx": 31628, "episode_idx": 184, "frame_idx": 40, "global_frame_idx": 31628, "task_index": 36}, {"db_idx": 31629, "episode_idx": 184, "frame_idx": 41, 
"global_frame_idx": 31629, "task_index": 36}, {"db_idx": 31630, "episode_idx": 184, "frame_idx": 42, "global_frame_idx": 31630, "task_index": 36}, {"db_idx": 31631, "episode_idx": 184, "frame_idx": 43, "global_frame_idx": 31631, "task_index": 36}, {"db_idx": 31632, "episode_idx": 184, "frame_idx": 44, "global_frame_idx": 31632, "task_index": 36}, {"db_idx": 31633, "episode_idx": 184, "frame_idx": 45, "global_frame_idx": 31633, "task_index": 36}, {"db_idx": 31634, "episode_idx": 184, "frame_idx": 46, "global_frame_idx": 31634, "task_index": 36}, {"db_idx": 31635, "episode_idx": 184, "frame_idx": 47, "global_frame_idx": 31635, "task_index": 36}, {"db_idx": 31636, "episode_idx": 184, "frame_idx": 48, "global_frame_idx": 31636, "task_index": 36}, {"db_idx": 31637, "episode_idx": 184, "frame_idx": 49, "global_frame_idx": 31637, "task_index": 36}, {"db_idx": 31638, "episode_idx": 184, "frame_idx": 50, "global_frame_idx": 31638, "task_index": 36}, {"db_idx": 31639, "episode_idx": 184, "frame_idx": 51, "global_frame_idx": 31639, "task_index": 36}, {"db_idx": 31640, "episode_idx": 184, "frame_idx": 52, "global_frame_idx": 31640, "task_index": 36}, {"db_idx": 31641, "episode_idx": 184, "frame_idx": 53, "global_frame_idx": 31641, "task_index": 36}, {"db_idx": 31642, "episode_idx": 184, "frame_idx": 54, "global_frame_idx": 31642, "task_index": 36}, {"db_idx": 31643, "episode_idx": 184, "frame_idx": 55, "global_frame_idx": 31643, "task_index": 36}, {"db_idx": 31644, "episode_idx": 184, "frame_idx": 56, "global_frame_idx": 31644, "task_index": 36}, {"db_idx": 31645, "episode_idx": 184, "frame_idx": 57, "global_frame_idx": 31645, "task_index": 36}, {"db_idx": 31646, "episode_idx": 184, "frame_idx": 58, "global_frame_idx": 31646, "task_index": 36}, {"db_idx": 31647, "episode_idx": 184, "frame_idx": 59, "global_frame_idx": 31647, "task_index": 36}, {"db_idx": 31648, "episode_idx": 184, "frame_idx": 60, "global_frame_idx": 31648, "task_index": 36}, {"db_idx": 31649, "episode_idx": 
184, "frame_idx": 61, "global_frame_idx": 31649, "task_index": 36}, {"db_idx": 31650, "episode_idx": 184, "frame_idx": 62, "global_frame_idx": 31650, "task_index": 36}, {"db_idx": 31651, "episode_idx": 184, "frame_idx": 63, "global_frame_idx": 31651, "task_index": 36}, {"db_idx": 31652, "episode_idx": 184, "frame_idx": 64, "global_frame_idx": 31652, "task_index": 36}, {"db_idx": 31653, "episode_idx": 184, "frame_idx": 65, "global_frame_idx": 31653, "task_index": 36}, {"db_idx": 31654, "episode_idx": 184, "frame_idx": 66, "global_frame_idx": 31654, "task_index": 36}, {"db_idx": 31655, "episode_idx": 184, "frame_idx": 67, "global_frame_idx": 31655, "task_index": 36}, {"db_idx": 31656, "episode_idx": 184, "frame_idx": 68, "global_frame_idx": 31656, "task_index": 36}, {"db_idx": 31657, "episode_idx": 184, "frame_idx": 69, "global_frame_idx": 31657, "task_index": 36}, {"db_idx": 31658, "episode_idx": 184, "frame_idx": 70, "global_frame_idx": 31658, "task_index": 36}, {"db_idx": 31659, "episode_idx": 184, "frame_idx": 71, "global_frame_idx": 31659, "task_index": 36}, {"db_idx": 31660, "episode_idx": 184, "frame_idx": 72, "global_frame_idx": 31660, "task_index": 36}, {"db_idx": 31661, "episode_idx": 184, "frame_idx": 73, "global_frame_idx": 31661, "task_index": 36}, {"db_idx": 31662, "episode_idx": 184, "frame_idx": 74, "global_frame_idx": 31662, "task_index": 36}, {"db_idx": 31663, "episode_idx": 184, "frame_idx": 75, "global_frame_idx": 31663, "task_index": 36}, {"db_idx": 31664, "episode_idx": 184, "frame_idx": 76, "global_frame_idx": 31664, "task_index": 36}, {"db_idx": 31665, "episode_idx": 184, "frame_idx": 77, "global_frame_idx": 31665, "task_index": 36}, {"db_idx": 31666, "episode_idx": 184, "frame_idx": 78, "global_frame_idx": 31666, "task_index": 36}, {"db_idx": 31667, "episode_idx": 184, "frame_idx": 79, "global_frame_idx": 31667, "task_index": 36}, {"db_idx": 31668, "episode_idx": 184, "frame_idx": 80, "global_frame_idx": 31668, "task_index": 36}, {"db_idx": 
31669, "episode_idx": 184, "frame_idx": 81, "global_frame_idx": 31669, "task_index": 36}, {"db_idx": 31670, "episode_idx": 184, "frame_idx": 82, "global_frame_idx": 31670, "task_index": 36}, {"db_idx": 31671, "episode_idx": 184, "frame_idx": 83, "global_frame_idx": 31671, "task_index": 36}, {"db_idx": 31672, "episode_idx": 184, "frame_idx": 84, "global_frame_idx": 31672, "task_index": 36}, {"db_idx": 31673, "episode_idx": 184, "frame_idx": 85, "global_frame_idx": 31673, "task_index": 36}, {"db_idx": 31674, "episode_idx": 184, "frame_idx": 86, "global_frame_idx": 31674, "task_index": 36}, {"db_idx": 31675, "episode_idx": 184, "frame_idx": 87, "global_frame_idx": 31675, "task_index": 36}, {"db_idx": 31676, "episode_idx": 184, "frame_idx": 88, "global_frame_idx": 31676, "task_index": 36}, {"db_idx": 31677, "episode_idx": 184, "frame_idx": 89, "global_frame_idx": 31677, "task_index": 36}, {"db_idx": 31678, "episode_idx": 184, "frame_idx": 90, "global_frame_idx": 31678, "task_index": 36}, {"db_idx": 31679, "episode_idx": 184, "frame_idx": 91, "global_frame_idx": 31679, "task_index": 36}, {"db_idx": 31680, "episode_idx": 184, "frame_idx": 92, "global_frame_idx": 31680, "task_index": 36}, {"db_idx": 31681, "episode_idx": 184, "frame_idx": 93, "global_frame_idx": 31681, "task_index": 36}, {"db_idx": 31682, "episode_idx": 184, "frame_idx": 94, "global_frame_idx": 31682, "task_index": 36}, {"db_idx": 31683, "episode_idx": 184, "frame_idx": 95, "global_frame_idx": 31683, "task_index": 36}, {"db_idx": 31684, "episode_idx": 184, "frame_idx": 96, "global_frame_idx": 31684, "task_index": 36}, {"db_idx": 31685, "episode_idx": 184, "frame_idx": 97, "global_frame_idx": 31685, "task_index": 36}, {"db_idx": 31686, "episode_idx": 184, "frame_idx": 98, "global_frame_idx": 31686, "task_index": 36}, {"db_idx": 31687, "episode_idx": 184, "frame_idx": 99, "global_frame_idx": 31687, "task_index": 36}, {"db_idx": 31688, "episode_idx": 184, "frame_idx": 100, "global_frame_idx": 31688, 
"task_index": 36}, {"db_idx": 31689, "episode_idx": 184, "frame_idx": 101, "global_frame_idx": 31689, "task_index": 36}, {"db_idx": 31690, "episode_idx": 184, "frame_idx": 102, "global_frame_idx": 31690, "task_index": 36}, {"db_idx": 31691, "episode_idx": 184, "frame_idx": 103, "global_frame_idx": 31691, "task_index": 36}, {"db_idx": 31692, "episode_idx": 184, "frame_idx": 104, "global_frame_idx": 31692, "task_index": 36}, {"db_idx": 31693, "episode_idx": 184, "frame_idx": 105, "global_frame_idx": 31693, "task_index": 36}, {"db_idx": 31694, "episode_idx": 184, "frame_idx": 106, "global_frame_idx": 31694, "task_index": 36}, {"db_idx": 31695, "episode_idx": 184, "frame_idx": 107, "global_frame_idx": 31695, "task_index": 36}, {"db_idx": 31696, "episode_idx": 184, "frame_idx": 108, "global_frame_idx": 31696, "task_index": 36}, {"db_idx": 31697, "episode_idx": 184, "frame_idx": 109, "global_frame_idx": 31697, "task_index": 36}, {"db_idx": 31698, "episode_idx": 184, "frame_idx": 110, "global_frame_idx": 31698, "task_index": 36}, {"db_idx": 31699, "episode_idx": 184, "frame_idx": 111, "global_frame_idx": 31699, "task_index": 36}, {"db_idx": 31700, "episode_idx": 184, "frame_idx": 112, "global_frame_idx": 31700, "task_index": 36}, {"db_idx": 31701, "episode_idx": 184, "frame_idx": 113, "global_frame_idx": 31701, "task_index": 36}, {"db_idx": 31702, "episode_idx": 184, "frame_idx": 114, "global_frame_idx": 31702, "task_index": 36}, {"db_idx": 31703, "episode_idx": 184, "frame_idx": 115, "global_frame_idx": 31703, "task_index": 36}, {"db_idx": 31704, "episode_idx": 184, "frame_idx": 116, "global_frame_idx": 31704, "task_index": 36}, {"db_idx": 31705, "episode_idx": 184, "frame_idx": 117, "global_frame_idx": 31705, "task_index": 36}, {"db_idx": 31706, "episode_idx": 184, "frame_idx": 118, "global_frame_idx": 31706, "task_index": 36}, {"db_idx": 31707, "episode_idx": 184, "frame_idx": 119, "global_frame_idx": 31707, "task_index": 36}, {"db_idx": 31708, "episode_idx": 184, 
"frame_idx": 120, "global_frame_idx": 31708, "task_index": 36}, {"db_idx": 31709, "episode_idx": 184, "frame_idx": 121, "global_frame_idx": 31709, "task_index": 36}, {"db_idx": 31710, "episode_idx": 184, "frame_idx": 122, "global_frame_idx": 31710, "task_index": 36}, {"db_idx": 31711, "episode_idx": 184, "frame_idx": 123, "global_frame_idx": 31711, "task_index": 36}, {"db_idx": 31712, "episode_idx": 184, "frame_idx": 124, "global_frame_idx": 31712, "task_index": 36}, {"db_idx": 31713, "episode_idx": 184, "frame_idx": 125, "global_frame_idx": 31713, "task_index": 36}, {"db_idx": 31714, "episode_idx": 184, "frame_idx": 126, "global_frame_idx": 31714, "task_index": 36}, {"db_idx": 31715, "episode_idx": 184, "frame_idx": 127, "global_frame_idx": 31715, "task_index": 36}, {"db_idx": 31716, "episode_idx": 184, "frame_idx": 128, "global_frame_idx": 31716, "task_index": 36}, {"db_idx": 31717, "episode_idx": 184, "frame_idx": 129, "global_frame_idx": 31717, "task_index": 36}, {"db_idx": 31718, "episode_idx": 184, "frame_idx": 130, "global_frame_idx": 31718, "task_index": 36}, {"db_idx": 31719, "episode_idx": 184, "frame_idx": 131, "global_frame_idx": 31719, "task_index": 36}, {"db_idx": 31720, "episode_idx": 184, "frame_idx": 132, "global_frame_idx": 31720, "task_index": 36}, {"db_idx": 31721, "episode_idx": 184, "frame_idx": 133, "global_frame_idx": 31721, "task_index": 36}, {"db_idx": 31722, "episode_idx": 184, "frame_idx": 134, "global_frame_idx": 31722, "task_index": 36}, {"db_idx": 31723, "episode_idx": 184, "frame_idx": 135, "global_frame_idx": 31723, "task_index": 36}, {"db_idx": 31724, "episode_idx": 184, "frame_idx": 136, "global_frame_idx": 31724, "task_index": 36}, {"db_idx": 31725, "episode_idx": 184, "frame_idx": 137, "global_frame_idx": 31725, "task_index": 36}, {"db_idx": 31726, "episode_idx": 184, "frame_idx": 138, "global_frame_idx": 31726, "task_index": 36}, {"db_idx": 31727, "episode_idx": 184, "frame_idx": 139, "global_frame_idx": 31727, "task_index": 
36}, {"db_idx": 31728, "episode_idx": 184, "frame_idx": 140, "global_frame_idx": 31728, "task_index": 36}, {"db_idx": 31729, "episode_idx": 184, "frame_idx": 141, "global_frame_idx": 31729, "task_index": 36}, {"db_idx": 31730, "episode_idx": 184, "frame_idx": 142, "global_frame_idx": 31730, "task_index": 36}, {"db_idx": 31731, "episode_idx": 184, "frame_idx": 143, "global_frame_idx": 31731, "task_index": 36}, {"db_idx": 31732, "episode_idx": 184, "frame_idx": 144, "global_frame_idx": 31732, "task_index": 36}, {"db_idx": 31733, "episode_idx": 184, "frame_idx": 145, "global_frame_idx": 31733, "task_index": 36}, {"db_idx": 31734, "episode_idx": 184, "frame_idx": 146, "global_frame_idx": 31734, "task_index": 36}, {"db_idx": 31735, "episode_idx": 184, "frame_idx": 147, "global_frame_idx": 31735, "task_index": 36}, {"db_idx": 31736, "episode_idx": 184, "frame_idx": 148, "global_frame_idx": 31736, "task_index": 36}, {"db_idx": 31737, "episode_idx": 185, "frame_idx": 0, "global_frame_idx": 31737, "task_index": 37}, {"db_idx": 31738, "episode_idx": 185, "frame_idx": 1, "global_frame_idx": 31738, "task_index": 37}, {"db_idx": 31739, "episode_idx": 185, "frame_idx": 2, "global_frame_idx": 31739, "task_index": 37}, {"db_idx": 31740, "episode_idx": 185, "frame_idx": 3, "global_frame_idx": 31740, "task_index": 37}, {"db_idx": 31741, "episode_idx": 185, "frame_idx": 4, "global_frame_idx": 31741, "task_index": 37}, {"db_idx": 31742, "episode_idx": 185, "frame_idx": 5, "global_frame_idx": 31742, "task_index": 37}, {"db_idx": 31743, "episode_idx": 185, "frame_idx": 6, "global_frame_idx": 31743, "task_index": 37}, {"db_idx": 31744, "episode_idx": 185, "frame_idx": 7, "global_frame_idx": 31744, "task_index": 37}, {"db_idx": 31745, "episode_idx": 185, "frame_idx": 8, "global_frame_idx": 31745, "task_index": 37}, {"db_idx": 31746, "episode_idx": 185, "frame_idx": 9, "global_frame_idx": 31746, "task_index": 37}, {"db_idx": 31747, "episode_idx": 185, "frame_idx": 10, "global_frame_idx": 
31747, "task_index": 37}, {"db_idx": 31748, "episode_idx": 185, "frame_idx": 11, "global_frame_idx": 31748, "task_index": 37}, {"db_idx": 31749, "episode_idx": 185, "frame_idx": 12, "global_frame_idx": 31749, "task_index": 37}, {"db_idx": 31750, "episode_idx": 185, "frame_idx": 13, "global_frame_idx": 31750, "task_index": 37}, {"db_idx": 31751, "episode_idx": 185, "frame_idx": 14, "global_frame_idx": 31751, "task_index": 37}, {"db_idx": 31752, "episode_idx": 185, "frame_idx": 15, "global_frame_idx": 31752, "task_index": 37}, {"db_idx": 31753, "episode_idx": 185, "frame_idx": 16, "global_frame_idx": 31753, "task_index": 37}, {"db_idx": 31754, "episode_idx": 185, "frame_idx": 17, "global_frame_idx": 31754, "task_index": 37}, {"db_idx": 31755, "episode_idx": 185, "frame_idx": 18, "global_frame_idx": 31755, "task_index": 37}, {"db_idx": 31756, "episode_idx": 185, "frame_idx": 19, "global_frame_idx": 31756, "task_index": 37}, {"db_idx": 31757, "episode_idx": 185, "frame_idx": 20, "global_frame_idx": 31757, "task_index": 37}, {"db_idx": 31758, "episode_idx": 185, "frame_idx": 21, "global_frame_idx": 31758, "task_index": 37}, {"db_idx": 31759, "episode_idx": 185, "frame_idx": 22, "global_frame_idx": 31759, "task_index": 37}, {"db_idx": 31760, "episode_idx": 185, "frame_idx": 23, "global_frame_idx": 31760, "task_index": 37}, {"db_idx": 31761, "episode_idx": 185, "frame_idx": 24, "global_frame_idx": 31761, "task_index": 37}, {"db_idx": 31762, "episode_idx": 185, "frame_idx": 25, "global_frame_idx": 31762, "task_index": 37}, {"db_idx": 31763, "episode_idx": 185, "frame_idx": 26, "global_frame_idx": 31763, "task_index": 37}, {"db_idx": 31764, "episode_idx": 185, "frame_idx": 27, "global_frame_idx": 31764, "task_index": 37}, {"db_idx": 31765, "episode_idx": 185, "frame_idx": 28, "global_frame_idx": 31765, "task_index": 37}, {"db_idx": 31766, "episode_idx": 185, "frame_idx": 29, "global_frame_idx": 31766, "task_index": 37}, {"db_idx": 31767, "episode_idx": 185, "frame_idx": 30, 
"global_frame_idx": 31767, "task_index": 37}, {"db_idx": 31768, "episode_idx": 185, "frame_idx": 31, "global_frame_idx": 31768, "task_index": 37}, {"db_idx": 31769, "episode_idx": 185, "frame_idx": 32, "global_frame_idx": 31769, "task_index": 37}, {"db_idx": 31770, "episode_idx": 185, "frame_idx": 33, "global_frame_idx": 31770, "task_index": 37}, {"db_idx": 31771, "episode_idx": 185, "frame_idx": 34, "global_frame_idx": 31771, "task_index": 37}, {"db_idx": 31772, "episode_idx": 185, "frame_idx": 35, "global_frame_idx": 31772, "task_index": 37}, {"db_idx": 31773, "episode_idx": 185, "frame_idx": 36, "global_frame_idx": 31773, "task_index": 37}, {"db_idx": 31774, "episode_idx": 185, "frame_idx": 37, "global_frame_idx": 31774, "task_index": 37}, {"db_idx": 31775, "episode_idx": 185, "frame_idx": 38, "global_frame_idx": 31775, "task_index": 37}, {"db_idx": 31776, "episode_idx": 185, "frame_idx": 39, "global_frame_idx": 31776, "task_index": 37}, {"db_idx": 31777, "episode_idx": 185, "frame_idx": 40, "global_frame_idx": 31777, "task_index": 37}, {"db_idx": 31778, "episode_idx": 185, "frame_idx": 41, "global_frame_idx": 31778, "task_index": 37}, {"db_idx": 31779, "episode_idx": 185, "frame_idx": 42, "global_frame_idx": 31779, "task_index": 37}, {"db_idx": 31780, "episode_idx": 185, "frame_idx": 43, "global_frame_idx": 31780, "task_index": 37}, {"db_idx": 31781, "episode_idx": 185, "frame_idx": 44, "global_frame_idx": 31781, "task_index": 37}, {"db_idx": 31782, "episode_idx": 185, "frame_idx": 45, "global_frame_idx": 31782, "task_index": 37}, {"db_idx": 31783, "episode_idx": 185, "frame_idx": 46, "global_frame_idx": 31783, "task_index": 37}, {"db_idx": 31784, "episode_idx": 185, "frame_idx": 47, "global_frame_idx": 31784, "task_index": 37}, {"db_idx": 31785, "episode_idx": 185, "frame_idx": 48, "global_frame_idx": 31785, "task_index": 37}, {"db_idx": 31786, "episode_idx": 185, "frame_idx": 49, "global_frame_idx": 31786, "task_index": 37}, {"db_idx": 31787, "episode_idx": 
185, "frame_idx": 50, "global_frame_idx": 31787, "task_index": 37}, {"db_idx": 31788, "episode_idx": 185, "frame_idx": 51, "global_frame_idx": 31788, "task_index": 37}, {"db_idx": 31789, "episode_idx": 185, "frame_idx": 52, "global_frame_idx": 31789, "task_index": 37}, {"db_idx": 31790, "episode_idx": 185, "frame_idx": 53, "global_frame_idx": 31790, "task_index": 37}, {"db_idx": 31791, "episode_idx": 185, "frame_idx": 54, "global_frame_idx": 31791, "task_index": 37}, {"db_idx": 31792, "episode_idx": 185, "frame_idx": 55, "global_frame_idx": 31792, "task_index": 37}, {"db_idx": 31793, "episode_idx": 185, "frame_idx": 56, "global_frame_idx": 31793, "task_index": 37}, {"db_idx": 31794, "episode_idx": 185, "frame_idx": 57, "global_frame_idx": 31794, "task_index": 37}, {"db_idx": 31795, "episode_idx": 185, "frame_idx": 58, "global_frame_idx": 31795, "task_index": 37}, {"db_idx": 31796, "episode_idx": 185, "frame_idx": 59, "global_frame_idx": 31796, "task_index": 37}, {"db_idx": 31797, "episode_idx": 185, "frame_idx": 60, "global_frame_idx": 31797, "task_index": 37}, {"db_idx": 31798, "episode_idx": 185, "frame_idx": 61, "global_frame_idx": 31798, "task_index": 37}, {"db_idx": 31799, "episode_idx": 185, "frame_idx": 62, "global_frame_idx": 31799, "task_index": 37}, {"db_idx": 31800, "episode_idx": 185, "frame_idx": 63, "global_frame_idx": 31800, "task_index": 37}, {"db_idx": 31801, "episode_idx": 185, "frame_idx": 64, "global_frame_idx": 31801, "task_index": 37}, {"db_idx": 31802, "episode_idx": 185, "frame_idx": 65, "global_frame_idx": 31802, "task_index": 37}, {"db_idx": 31803, "episode_idx": 185, "frame_idx": 66, "global_frame_idx": 31803, "task_index": 37}, {"db_idx": 31804, "episode_idx": 185, "frame_idx": 67, "global_frame_idx": 31804, "task_index": 37}, {"db_idx": 31805, "episode_idx": 185, "frame_idx": 68, "global_frame_idx": 31805, "task_index": 37}, {"db_idx": 31806, "episode_idx": 185, "frame_idx": 69, "global_frame_idx": 31806, "task_index": 37}, {"db_idx": 
31807, "episode_idx": 185, "frame_idx": 70, "global_frame_idx": 31807, "task_index": 37}, {"db_idx": 31808, "episode_idx": 185, "frame_idx": 71, "global_frame_idx": 31808, "task_index": 37}, {"db_idx": 31809, "episode_idx": 185, "frame_idx": 72, "global_frame_idx": 31809, "task_index": 37}, {"db_idx": 31810, "episode_idx": 185, "frame_idx": 73, "global_frame_idx": 31810, "task_index": 37}, {"db_idx": 31811, "episode_idx": 185, "frame_idx": 74, "global_frame_idx": 31811, "task_index": 37}, {"db_idx": 31812, "episode_idx": 185, "frame_idx": 75, "global_frame_idx": 31812, "task_index": 37}, {"db_idx": 31813, "episode_idx": 185, "frame_idx": 76, "global_frame_idx": 31813, "task_index": 37}, {"db_idx": 31814, "episode_idx": 185, "frame_idx": 77, "global_frame_idx": 31814, "task_index": 37}, {"db_idx": 31815, "episode_idx": 185, "frame_idx": 78, "global_frame_idx": 31815, "task_index": 37}, {"db_idx": 31816, "episode_idx": 185, "frame_idx": 79, "global_frame_idx": 31816, "task_index": 37}, {"db_idx": 31817, "episode_idx": 185, "frame_idx": 80, "global_frame_idx": 31817, "task_index": 37}, {"db_idx": 31818, "episode_idx": 185, "frame_idx": 81, "global_frame_idx": 31818, "task_index": 37}, {"db_idx": 31819, "episode_idx": 185, "frame_idx": 82, "global_frame_idx": 31819, "task_index": 37}, {"db_idx": 31820, "episode_idx": 185, "frame_idx": 83, "global_frame_idx": 31820, "task_index": 37}, {"db_idx": 31821, "episode_idx": 185, "frame_idx": 84, "global_frame_idx": 31821, "task_index": 37}, {"db_idx": 31822, "episode_idx": 185, "frame_idx": 85, "global_frame_idx": 31822, "task_index": 37}, {"db_idx": 31823, "episode_idx": 185, "frame_idx": 86, "global_frame_idx": 31823, "task_index": 37}, {"db_idx": 31824, "episode_idx": 185, "frame_idx": 87, "global_frame_idx": 31824, "task_index": 37}, {"db_idx": 31825, "episode_idx": 185, "frame_idx": 88, "global_frame_idx": 31825, "task_index": 37}, {"db_idx": 31826, "episode_idx": 185, "frame_idx": 89, "global_frame_idx": 31826, 
"task_index": 37}, {"db_idx": 31827, "episode_idx": 185, "frame_idx": 90, "global_frame_idx": 31827, "task_index": 37}, {"db_idx": 31828, "episode_idx": 185, "frame_idx": 91, "global_frame_idx": 31828, "task_index": 37}, {"db_idx": 31829, "episode_idx": 185, "frame_idx": 92, "global_frame_idx": 31829, "task_index": 37}, {"db_idx": 31830, "episode_idx": 185, "frame_idx": 93, "global_frame_idx": 31830, "task_index": 37}, {"db_idx": 31831, "episode_idx": 185, "frame_idx": 94, "global_frame_idx": 31831, "task_index": 37}, {"db_idx": 31832, "episode_idx": 185, "frame_idx": 95, "global_frame_idx": 31832, "task_index": 37}, {"db_idx": 31833, "episode_idx": 185, "frame_idx": 96, "global_frame_idx": 31833, "task_index": 37}, {"db_idx": 31834, "episode_idx": 185, "frame_idx": 97, "global_frame_idx": 31834, "task_index": 37}, {"db_idx": 31835, "episode_idx": 185, "frame_idx": 98, "global_frame_idx": 31835, "task_index": 37}, {"db_idx": 31836, "episode_idx": 185, "frame_idx": 99, "global_frame_idx": 31836, "task_index": 37}, {"db_idx": 31837, "episode_idx": 185, "frame_idx": 100, "global_frame_idx": 31837, "task_index": 37}, {"db_idx": 31838, "episode_idx": 185, "frame_idx": 101, "global_frame_idx": 31838, "task_index": 37}, {"db_idx": 31839, "episode_idx": 185, "frame_idx": 102, "global_frame_idx": 31839, "task_index": 37}, {"db_idx": 31840, "episode_idx": 185, "frame_idx": 103, "global_frame_idx": 31840, "task_index": 37}, {"db_idx": 31841, "episode_idx": 185, "frame_idx": 104, "global_frame_idx": 31841, "task_index": 37}, {"db_idx": 31842, "episode_idx": 185, "frame_idx": 105, "global_frame_idx": 31842, "task_index": 37}, {"db_idx": 31843, "episode_idx": 185, "frame_idx": 106, "global_frame_idx": 31843, "task_index": 37}, {"db_idx": 31844, "episode_idx": 185, "frame_idx": 107, "global_frame_idx": 31844, "task_index": 37}, {"db_idx": 31845, "episode_idx": 185, "frame_idx": 108, "global_frame_idx": 31845, "task_index": 37}, {"db_idx": 31846, "episode_idx": 185, "frame_idx": 
109, "global_frame_idx": 31846, "task_index": 37}, {"db_idx": 31847, "episode_idx": 185, "frame_idx": 110, "global_frame_idx": 31847, "task_index": 37}, {"db_idx": 31848, "episode_idx": 185, "frame_idx": 111, "global_frame_idx": 31848, "task_index": 37}, {"db_idx": 31849, "episode_idx": 185, "frame_idx": 112, "global_frame_idx": 31849, "task_index": 37}, {"db_idx": 31850, "episode_idx": 185, "frame_idx": 113, "global_frame_idx": 31850, "task_index": 37}, {"db_idx": 31851, "episode_idx": 185, "frame_idx": 114, "global_frame_idx": 31851, "task_index": 37}, {"db_idx": 31852, "episode_idx": 185, "frame_idx": 115, "global_frame_idx": 31852, "task_index": 37}, {"db_idx": 31853, "episode_idx": 185, "frame_idx": 116, "global_frame_idx": 31853, "task_index": 37}, {"db_idx": 31854, "episode_idx": 185, "frame_idx": 117, "global_frame_idx": 31854, "task_index": 37}, {"db_idx": 31855, "episode_idx": 185, "frame_idx": 118, "global_frame_idx": 31855, "task_index": 37}, {"db_idx": 31856, "episode_idx": 185, "frame_idx": 119, "global_frame_idx": 31856, "task_index": 37}, {"db_idx": 31857, "episode_idx": 185, "frame_idx": 120, "global_frame_idx": 31857, "task_index": 37}, {"db_idx": 31858, "episode_idx": 186, "frame_idx": 0, "global_frame_idx": 31858, "task_index": 37}, {"db_idx": 31859, "episode_idx": 186, "frame_idx": 1, "global_frame_idx": 31859, "task_index": 37}, {"db_idx": 31860, "episode_idx": 186, "frame_idx": 2, "global_frame_idx": 31860, "task_index": 37}, {"db_idx": 31861, "episode_idx": 186, "frame_idx": 3, "global_frame_idx": 31861, "task_index": 37}, {"db_idx": 31862, "episode_idx": 186, "frame_idx": 4, "global_frame_idx": 31862, "task_index": 37}, {"db_idx": 31863, "episode_idx": 186, "frame_idx": 5, "global_frame_idx": 31863, "task_index": 37}, {"db_idx": 31864, "episode_idx": 186, "frame_idx": 6, "global_frame_idx": 31864, "task_index": 37}, {"db_idx": 31865, "episode_idx": 186, "frame_idx": 7, "global_frame_idx": 31865, "task_index": 37}, {"db_idx": 31866, 
"episode_idx": 186, "frame_idx": 8, "global_frame_idx": 31866, "task_index": 37}, {"db_idx": 31867, "episode_idx": 186, "frame_idx": 9, "global_frame_idx": 31867, "task_index": 37}, {"db_idx": 31868, "episode_idx": 186, "frame_idx": 10, "global_frame_idx": 31868, "task_index": 37}, {"db_idx": 31869, "episode_idx": 186, "frame_idx": 11, "global_frame_idx": 31869, "task_index": 37}, {"db_idx": 31870, "episode_idx": 186, "frame_idx": 12, "global_frame_idx": 31870, "task_index": 37}, {"db_idx": 31871, "episode_idx": 186, "frame_idx": 13, "global_frame_idx": 31871, "task_index": 37}, {"db_idx": 31872, "episode_idx": 186, "frame_idx": 14, "global_frame_idx": 31872, "task_index": 37}, {"db_idx": 31873, "episode_idx": 186, "frame_idx": 15, "global_frame_idx": 31873, "task_index": 37}, {"db_idx": 31874, "episode_idx": 186, "frame_idx": 16, "global_frame_idx": 31874, "task_index": 37}, {"db_idx": 31875, "episode_idx": 186, "frame_idx": 17, "global_frame_idx": 31875, "task_index": 37}, {"db_idx": 31876, "episode_idx": 186, "frame_idx": 18, "global_frame_idx": 31876, "task_index": 37}, {"db_idx": 31877, "episode_idx": 186, "frame_idx": 19, "global_frame_idx": 31877, "task_index": 37}, {"db_idx": 31878, "episode_idx": 186, "frame_idx": 20, "global_frame_idx": 31878, "task_index": 37}, {"db_idx": 31879, "episode_idx": 186, "frame_idx": 21, "global_frame_idx": 31879, "task_index": 37}, {"db_idx": 31880, "episode_idx": 186, "frame_idx": 22, "global_frame_idx": 31880, "task_index": 37}, {"db_idx": 31881, "episode_idx": 186, "frame_idx": 23, "global_frame_idx": 31881, "task_index": 37}, {"db_idx": 31882, "episode_idx": 186, "frame_idx": 24, "global_frame_idx": 31882, "task_index": 37}, {"db_idx": 31883, "episode_idx": 186, "frame_idx": 25, "global_frame_idx": 31883, "task_index": 37}, {"db_idx": 31884, "episode_idx": 186, "frame_idx": 26, "global_frame_idx": 31884, "task_index": 37}, {"db_idx": 31885, "episode_idx": 186, "frame_idx": 27, "global_frame_idx": 31885, "task_index": 37}, 
{"db_idx": 31886, "episode_idx": 186, "frame_idx": 28, "global_frame_idx": 31886, "task_index": 37}, {"db_idx": 31887, "episode_idx": 186, "frame_idx": 29, "global_frame_idx": 31887, "task_index": 37}, {"db_idx": 31888, "episode_idx": 186, "frame_idx": 30, "global_frame_idx": 31888, "task_index": 37}, {"db_idx": 31889, "episode_idx": 186, "frame_idx": 31, "global_frame_idx": 31889, "task_index": 37}, {"db_idx": 31890, "episode_idx": 186, "frame_idx": 32, "global_frame_idx": 31890, "task_index": 37}, {"db_idx": 31891, "episode_idx": 186, "frame_idx": 33, "global_frame_idx": 31891, "task_index": 37}, {"db_idx": 31892, "episode_idx": 186, "frame_idx": 34, "global_frame_idx": 31892, "task_index": 37}, {"db_idx": 31893, "episode_idx": 186, "frame_idx": 35, "global_frame_idx": 31893, "task_index": 37}, {"db_idx": 31894, "episode_idx": 186, "frame_idx": 36, "global_frame_idx": 31894, "task_index": 37}, {"db_idx": 31895, "episode_idx": 186, "frame_idx": 37, "global_frame_idx": 31895, "task_index": 37}, {"db_idx": 31896, "episode_idx": 186, "frame_idx": 38, "global_frame_idx": 31896, "task_index": 37}, {"db_idx": 31897, "episode_idx": 186, "frame_idx": 39, "global_frame_idx": 31897, "task_index": 37}, {"db_idx": 31898, "episode_idx": 186, "frame_idx": 40, "global_frame_idx": 31898, "task_index": 37}, {"db_idx": 31899, "episode_idx": 186, "frame_idx": 41, "global_frame_idx": 31899, "task_index": 37}, {"db_idx": 31900, "episode_idx": 186, "frame_idx": 42, "global_frame_idx": 31900, "task_index": 37}, {"db_idx": 31901, "episode_idx": 186, "frame_idx": 43, "global_frame_idx": 31901, "task_index": 37}, {"db_idx": 31902, "episode_idx": 186, "frame_idx": 44, "global_frame_idx": 31902, "task_index": 37}, {"db_idx": 31903, "episode_idx": 186, "frame_idx": 45, "global_frame_idx": 31903, "task_index": 37}, {"db_idx": 31904, "episode_idx": 186, "frame_idx": 46, "global_frame_idx": 31904, "task_index": 37}, {"db_idx": 31905, "episode_idx": 186, "frame_idx": 47, "global_frame_idx": 
31905, "task_index": 37}, {"db_idx": 31906, "episode_idx": 186, "frame_idx": 48, "global_frame_idx": 31906, "task_index": 37}, {"db_idx": 31907, "episode_idx": 186, "frame_idx": 49, "global_frame_idx": 31907, "task_index": 37}, {"db_idx": 31908, "episode_idx": 186, "frame_idx": 50, "global_frame_idx": 31908, "task_index": 37}, {"db_idx": 31909, "episode_idx": 186, "frame_idx": 51, "global_frame_idx": 31909, "task_index": 37}, {"db_idx": 31910, "episode_idx": 186, "frame_idx": 52, "global_frame_idx": 31910, "task_index": 37}, {"db_idx": 31911, "episode_idx": 186, "frame_idx": 53, "global_frame_idx": 31911, "task_index": 37}, {"db_idx": 31912, "episode_idx": 186, "frame_idx": 54, "global_frame_idx": 31912, "task_index": 37}, {"db_idx": 31913, "episode_idx": 186, "frame_idx": 55, "global_frame_idx": 31913, "task_index": 37}, {"db_idx": 31914, "episode_idx": 186, "frame_idx": 56, "global_frame_idx": 31914, "task_index": 37}, {"db_idx": 31915, "episode_idx": 186, "frame_idx": 57, "global_frame_idx": 31915, "task_index": 37}, {"db_idx": 31916, "episode_idx": 186, "frame_idx": 58, "global_frame_idx": 31916, "task_index": 37}, {"db_idx": 31917, "episode_idx": 186, "frame_idx": 59, "global_frame_idx": 31917, "task_index": 37}, {"db_idx": 31918, "episode_idx": 186, "frame_idx": 60, "global_frame_idx": 31918, "task_index": 37}, {"db_idx": 31919, "episode_idx": 186, "frame_idx": 61, "global_frame_idx": 31919, "task_index": 37}, {"db_idx": 31920, "episode_idx": 186, "frame_idx": 62, "global_frame_idx": 31920, "task_index": 37}, {"db_idx": 31921, "episode_idx": 186, "frame_idx": 63, "global_frame_idx": 31921, "task_index": 37}, {"db_idx": 31922, "episode_idx": 186, "frame_idx": 64, "global_frame_idx": 31922, "task_index": 37}, {"db_idx": 31923, "episode_idx": 186, "frame_idx": 65, "global_frame_idx": 31923, "task_index": 37}, {"db_idx": 31924, "episode_idx": 186, "frame_idx": 66, "global_frame_idx": 31924, "task_index": 37}, {"db_idx": 31925, "episode_idx": 186, "frame_idx": 67, 
"global_frame_idx": 31925, "task_index": 37}, {"db_idx": 31926, "episode_idx": 186, "frame_idx": 68, "global_frame_idx": 31926, "task_index": 37}, {"db_idx": 31927, "episode_idx": 186, "frame_idx": 69, "global_frame_idx": 31927, "task_index": 37}, {"db_idx": 31928, "episode_idx": 186, "frame_idx": 70, "global_frame_idx": 31928, "task_index": 37}, {"db_idx": 31929, "episode_idx": 186, "frame_idx": 71, "global_frame_idx": 31929, "task_index": 37}, {"db_idx": 31930, "episode_idx": 186, "frame_idx": 72, "global_frame_idx": 31930, "task_index": 37}, {"db_idx": 31931, "episode_idx": 186, "frame_idx": 73, "global_frame_idx": 31931, "task_index": 37}, {"db_idx": 31932, "episode_idx": 186, "frame_idx": 74, "global_frame_idx": 31932, "task_index": 37}, {"db_idx": 31933, "episode_idx": 186, "frame_idx": 75, "global_frame_idx": 31933, "task_index": 37}, {"db_idx": 31934, "episode_idx": 186, "frame_idx": 76, "global_frame_idx": 31934, "task_index": 37}, {"db_idx": 31935, "episode_idx": 186, "frame_idx": 77, "global_frame_idx": 31935, "task_index": 37}, {"db_idx": 31936, "episode_idx": 186, "frame_idx": 78, "global_frame_idx": 31936, "task_index": 37}, {"db_idx": 31937, "episode_idx": 186, "frame_idx": 79, "global_frame_idx": 31937, "task_index": 37}, {"db_idx": 31938, "episode_idx": 186, "frame_idx": 80, "global_frame_idx": 31938, "task_index": 37}, {"db_idx": 31939, "episode_idx": 186, "frame_idx": 81, "global_frame_idx": 31939, "task_index": 37}, {"db_idx": 31940, "episode_idx": 186, "frame_idx": 82, "global_frame_idx": 31940, "task_index": 37}, {"db_idx": 31941, "episode_idx": 186, "frame_idx": 83, "global_frame_idx": 31941, "task_index": 37}, {"db_idx": 31942, "episode_idx": 186, "frame_idx": 84, "global_frame_idx": 31942, "task_index": 37}, {"db_idx": 31943, "episode_idx": 186, "frame_idx": 85, "global_frame_idx": 31943, "task_index": 37}, {"db_idx": 31944, "episode_idx": 186, "frame_idx": 86, "global_frame_idx": 31944, "task_index": 37}, {"db_idx": 31945, "episode_idx": 
186, "frame_idx": 87, "global_frame_idx": 31945, "task_index": 37}, {"db_idx": 31946, "episode_idx": 186, "frame_idx": 88, "global_frame_idx": 31946, "task_index": 37}, {"db_idx": 31947, "episode_idx": 186, "frame_idx": 89, "global_frame_idx": 31947, "task_index": 37}, {"db_idx": 31948, "episode_idx": 186, "frame_idx": 90, "global_frame_idx": 31948, "task_index": 37}, {"db_idx": 31949, "episode_idx": 186, "frame_idx": 91, "global_frame_idx": 31949, "task_index": 37}, {"db_idx": 31950, "episode_idx": 186, "frame_idx": 92, "global_frame_idx": 31950, "task_index": 37}, {"db_idx": 31951, "episode_idx": 186, "frame_idx": 93, "global_frame_idx": 31951, "task_index": 37}, {"db_idx": 31952, "episode_idx": 186, "frame_idx": 94, "global_frame_idx": 31952, "task_index": 37}, {"db_idx": 31953, "episode_idx": 186, "frame_idx": 95, "global_frame_idx": 31953, "task_index": 37}, {"db_idx": 31954, "episode_idx": 186, "frame_idx": 96, "global_frame_idx": 31954, "task_index": 37}, {"db_idx": 31955, "episode_idx": 186, "frame_idx": 97, "global_frame_idx": 31955, "task_index": 37}, {"db_idx": 31956, "episode_idx": 186, "frame_idx": 98, "global_frame_idx": 31956, "task_index": 37}, {"db_idx": 31957, "episode_idx": 186, "frame_idx": 99, "global_frame_idx": 31957, "task_index": 37}, {"db_idx": 31958, "episode_idx": 186, "frame_idx": 100, "global_frame_idx": 31958, "task_index": 37}, {"db_idx": 31959, "episode_idx": 186, "frame_idx": 101, "global_frame_idx": 31959, "task_index": 37}, {"db_idx": 31960, "episode_idx": 186, "frame_idx": 102, "global_frame_idx": 31960, "task_index": 37}, {"db_idx": 31961, "episode_idx": 186, "frame_idx": 103, "global_frame_idx": 31961, "task_index": 37}, {"db_idx": 31962, "episode_idx": 186, "frame_idx": 104, "global_frame_idx": 31962, "task_index": 37}, {"db_idx": 31963, "episode_idx": 186, "frame_idx": 105, "global_frame_idx": 31963, "task_index": 37}, {"db_idx": 31964, "episode_idx": 186, "frame_idx": 106, "global_frame_idx": 31964, "task_index": 37}, 
{"db_idx": 31965, "episode_idx": 186, "frame_idx": 107, "global_frame_idx": 31965, "task_index": 37}, {"db_idx": 31966, "episode_idx": 186, "frame_idx": 108, "global_frame_idx": 31966, "task_index": 37}, {"db_idx": 31967, "episode_idx": 186, "frame_idx": 109, "global_frame_idx": 31967, "task_index": 37}, {"db_idx": 31968, "episode_idx": 186, "frame_idx": 110, "global_frame_idx": 31968, "task_index": 37}, {"db_idx": 31969, "episode_idx": 186, "frame_idx": 111, "global_frame_idx": 31969, "task_index": 37}, {"db_idx": 31970, "episode_idx": 186, "frame_idx": 112, "global_frame_idx": 31970, "task_index": 37}, {"db_idx": 31971, "episode_idx": 186, "frame_idx": 113, "global_frame_idx": 31971, "task_index": 37}, {"db_idx": 31972, "episode_idx": 187, "frame_idx": 0, "global_frame_idx": 31972, "task_index": 37}, {"db_idx": 31973, "episode_idx": 187, "frame_idx": 1, "global_frame_idx": 31973, "task_index": 37}, {"db_idx": 31974, "episode_idx": 187, "frame_idx": 2, "global_frame_idx": 31974, "task_index": 37}, {"db_idx": 31975, "episode_idx": 187, "frame_idx": 3, "global_frame_idx": 31975, "task_index": 37}, {"db_idx": 31976, "episode_idx": 187, "frame_idx": 4, "global_frame_idx": 31976, "task_index": 37}, {"db_idx": 31977, "episode_idx": 187, "frame_idx": 5, "global_frame_idx": 31977, "task_index": 37}, {"db_idx": 31978, "episode_idx": 187, "frame_idx": 6, "global_frame_idx": 31978, "task_index": 37}, {"db_idx": 31979, "episode_idx": 187, "frame_idx": 7, "global_frame_idx": 31979, "task_index": 37}, {"db_idx": 31980, "episode_idx": 187, "frame_idx": 8, "global_frame_idx": 31980, "task_index": 37}, {"db_idx": 31981, "episode_idx": 187, "frame_idx": 9, "global_frame_idx": 31981, "task_index": 37}, {"db_idx": 31982, "episode_idx": 187, "frame_idx": 10, "global_frame_idx": 31982, "task_index": 37}, {"db_idx": 31983, "episode_idx": 187, "frame_idx": 11, "global_frame_idx": 31983, "task_index": 37}, {"db_idx": 31984, "episode_idx": 187, "frame_idx": 12, "global_frame_idx": 31984, 
"task_index": 37}, {"db_idx": 31985, "episode_idx": 187, "frame_idx": 13, "global_frame_idx": 31985, "task_index": 37}, {"db_idx": 31986, "episode_idx": 187, "frame_idx": 14, "global_frame_idx": 31986, "task_index": 37}, {"db_idx": 31987, "episode_idx": 187, "frame_idx": 15, "global_frame_idx": 31987, "task_index": 37}, {"db_idx": 31988, "episode_idx": 187, "frame_idx": 16, "global_frame_idx": 31988, "task_index": 37}, {"db_idx": 31989, "episode_idx": 187, "frame_idx": 17, "global_frame_idx": 31989, "task_index": 37}, {"db_idx": 31990, "episode_idx": 187, "frame_idx": 18, "global_frame_idx": 31990, "task_index": 37}, {"db_idx": 31991, "episode_idx": 187, "frame_idx": 19, "global_frame_idx": 31991, "task_index": 37}, {"db_idx": 31992, "episode_idx": 187, "frame_idx": 20, "global_frame_idx": 31992, "task_index": 37}, {"db_idx": 31993, "episode_idx": 187, "frame_idx": 21, "global_frame_idx": 31993, "task_index": 37}, {"db_idx": 31994, "episode_idx": 187, "frame_idx": 22, "global_frame_idx": 31994, "task_index": 37}, {"db_idx": 31995, "episode_idx": 187, "frame_idx": 23, "global_frame_idx": 31995, "task_index": 37}, {"db_idx": 31996, "episode_idx": 187, "frame_idx": 24, "global_frame_idx": 31996, "task_index": 37}, {"db_idx": 31997, "episode_idx": 187, "frame_idx": 25, "global_frame_idx": 31997, "task_index": 37}, {"db_idx": 31998, "episode_idx": 187, "frame_idx": 26, "global_frame_idx": 31998, "task_index": 37}, {"db_idx": 31999, "episode_idx": 187, "frame_idx": 27, "global_frame_idx": 31999, "task_index": 37}, {"db_idx": 32000, "episode_idx": 187, "frame_idx": 28, "global_frame_idx": 32000, "task_index": 37}, {"db_idx": 32001, "episode_idx": 187, "frame_idx": 29, "global_frame_idx": 32001, "task_index": 37}, {"db_idx": 32002, "episode_idx": 187, "frame_idx": 30, "global_frame_idx": 32002, "task_index": 37}, {"db_idx": 32003, "episode_idx": 187, "frame_idx": 31, "global_frame_idx": 32003, "task_index": 37}, {"db_idx": 32004, "episode_idx": 187, "frame_idx": 32, 
"global_frame_idx": 32004, "task_index": 37}, {"db_idx": 32005, "episode_idx": 187, "frame_idx": 33, "global_frame_idx": 32005, "task_index": 37}, {"db_idx": 32006, "episode_idx": 187, "frame_idx": 34, "global_frame_idx": 32006, "task_index": 37}, {"db_idx": 32007, "episode_idx": 187, "frame_idx": 35, "global_frame_idx": 32007, "task_index": 37}, {"db_idx": 32008, "episode_idx": 187, "frame_idx": 36, "global_frame_idx": 32008, "task_index": 37}, {"db_idx": 32009, "episode_idx": 187, "frame_idx": 37, "global_frame_idx": 32009, "task_index": 37}, {"db_idx": 32010, "episode_idx": 187, "frame_idx": 38, "global_frame_idx": 32010, "task_index": 37}, {"db_idx": 32011, "episode_idx": 187, "frame_idx": 39, "global_frame_idx": 32011, "task_index": 37}, {"db_idx": 32012, "episode_idx": 187, "frame_idx": 40, "global_frame_idx": 32012, "task_index": 37}, {"db_idx": 32013, "episode_idx": 187, "frame_idx": 41, "global_frame_idx": 32013, "task_index": 37}, {"db_idx": 32014, "episode_idx": 187, "frame_idx": 42, "global_frame_idx": 32014, "task_index": 37}, {"db_idx": 32015, "episode_idx": 187, "frame_idx": 43, "global_frame_idx": 32015, "task_index": 37}, {"db_idx": 32016, "episode_idx": 187, "frame_idx": 44, "global_frame_idx": 32016, "task_index": 37}, {"db_idx": 32017, "episode_idx": 187, "frame_idx": 45, "global_frame_idx": 32017, "task_index": 37}, {"db_idx": 32018, "episode_idx": 187, "frame_idx": 46, "global_frame_idx": 32018, "task_index": 37}, {"db_idx": 32019, "episode_idx": 187, "frame_idx": 47, "global_frame_idx": 32019, "task_index": 37}, {"db_idx": 32020, "episode_idx": 187, "frame_idx": 48, "global_frame_idx": 32020, "task_index": 37}, {"db_idx": 32021, "episode_idx": 187, "frame_idx": 49, "global_frame_idx": 32021, "task_index": 37}, {"db_idx": 32022, "episode_idx": 187, "frame_idx": 50, "global_frame_idx": 32022, "task_index": 37}, {"db_idx": 32023, "episode_idx": 187, "frame_idx": 51, "global_frame_idx": 32023, "task_index": 37}, {"db_idx": 32024, "episode_idx": 
187, "frame_idx": 52, "global_frame_idx": 32024, "task_index": 37}, {"db_idx": 32025, "episode_idx": 187, "frame_idx": 53, "global_frame_idx": 32025, "task_index": 37}, {"db_idx": 32026, "episode_idx": 187, "frame_idx": 54, "global_frame_idx": 32026, "task_index": 37}, {"db_idx": 32027, "episode_idx": 187, "frame_idx": 55, "global_frame_idx": 32027, "task_index": 37}, {"db_idx": 32028, "episode_idx": 187, "frame_idx": 56, "global_frame_idx": 32028, "task_index": 37}, {"db_idx": 32029, "episode_idx": 187, "frame_idx": 57, "global_frame_idx": 32029, "task_index": 37}, {"db_idx": 32030, "episode_idx": 187, "frame_idx": 58, "global_frame_idx": 32030, "task_index": 37}, {"db_idx": 32031, "episode_idx": 187, "frame_idx": 59, "global_frame_idx": 32031, "task_index": 37}, {"db_idx": 32032, "episode_idx": 187, "frame_idx": 60, "global_frame_idx": 32032, "task_index": 37}, {"db_idx": 32033, "episode_idx": 187, "frame_idx": 61, "global_frame_idx": 32033, "task_index": 37}, {"db_idx": 32034, "episode_idx": 187, "frame_idx": 62, "global_frame_idx": 32034, "task_index": 37}, {"db_idx": 32035, "episode_idx": 187, "frame_idx": 63, "global_frame_idx": 32035, "task_index": 37}, {"db_idx": 32036, "episode_idx": 187, "frame_idx": 64, "global_frame_idx": 32036, "task_index": 37}, {"db_idx": 32037, "episode_idx": 187, "frame_idx": 65, "global_frame_idx": 32037, "task_index": 37}, {"db_idx": 32038, "episode_idx": 187, "frame_idx": 66, "global_frame_idx": 32038, "task_index": 37}, {"db_idx": 32039, "episode_idx": 187, "frame_idx": 67, "global_frame_idx": 32039, "task_index": 37}, {"db_idx": 32040, "episode_idx": 187, "frame_idx": 68, "global_frame_idx": 32040, "task_index": 37}, {"db_idx": 32041, "episode_idx": 187, "frame_idx": 69, "global_frame_idx": 32041, "task_index": 37}, {"db_idx": 32042, "episode_idx": 187, "frame_idx": 70, "global_frame_idx": 32042, "task_index": 37}, {"db_idx": 32043, "episode_idx": 187, "frame_idx": 71, "global_frame_idx": 32043, "task_index": 37}, {"db_idx": 
32044, "episode_idx": 187, "frame_idx": 72, "global_frame_idx": 32044, "task_index": 37}, {"db_idx": 32045, "episode_idx": 187, "frame_idx": 73, "global_frame_idx": 32045, "task_index": 37}, {"db_idx": 32046, "episode_idx": 187, "frame_idx": 74, "global_frame_idx": 32046, "task_index": 37}, {"db_idx": 32047, "episode_idx": 187, "frame_idx": 75, "global_frame_idx": 32047, "task_index": 37}, {"db_idx": 32048, "episode_idx": 187, "frame_idx": 76, "global_frame_idx": 32048, "task_index": 37}, {"db_idx": 32049, "episode_idx": 187, "frame_idx": 77, "global_frame_idx": 32049, "task_index": 37}, {"db_idx": 32050, "episode_idx": 187, "frame_idx": 78, "global_frame_idx": 32050, "task_index": 37}, {"db_idx": 32051, "episode_idx": 187, "frame_idx": 79, "global_frame_idx": 32051, "task_index": 37}, {"db_idx": 32052, "episode_idx": 187, "frame_idx": 80, "global_frame_idx": 32052, "task_index": 37}, {"db_idx": 32053, "episode_idx": 187, "frame_idx": 81, "global_frame_idx": 32053, "task_index": 37}, {"db_idx": 32054, "episode_idx": 187, "frame_idx": 82, "global_frame_idx": 32054, "task_index": 37}, {"db_idx": 32055, "episode_idx": 187, "frame_idx": 83, "global_frame_idx": 32055, "task_index": 37}, {"db_idx": 32056, "episode_idx": 187, "frame_idx": 84, "global_frame_idx": 32056, "task_index": 37}, {"db_idx": 32057, "episode_idx": 187, "frame_idx": 85, "global_frame_idx": 32057, "task_index": 37}, {"db_idx": 32058, "episode_idx": 187, "frame_idx": 86, "global_frame_idx": 32058, "task_index": 37}, {"db_idx": 32059, "episode_idx": 187, "frame_idx": 87, "global_frame_idx": 32059, "task_index": 37}, {"db_idx": 32060, "episode_idx": 187, "frame_idx": 88, "global_frame_idx": 32060, "task_index": 37}, {"db_idx": 32061, "episode_idx": 187, "frame_idx": 89, "global_frame_idx": 32061, "task_index": 37}, {"db_idx": 32062, "episode_idx": 187, "frame_idx": 90, "global_frame_idx": 32062, "task_index": 37}, {"db_idx": 32063, "episode_idx": 187, "frame_idx": 91, "global_frame_idx": 32063, 
"task_index": 37}, {"db_idx": 32064, "episode_idx": 187, "frame_idx": 92, "global_frame_idx": 32064, "task_index": 37}, {"db_idx": 32065, "episode_idx": 187, "frame_idx": 93, "global_frame_idx": 32065, "task_index": 37}, {"db_idx": 32066, "episode_idx": 187, "frame_idx": 94, "global_frame_idx": 32066, "task_index": 37}, {"db_idx": 32067, "episode_idx": 187, "frame_idx": 95, "global_frame_idx": 32067, "task_index": 37}, {"db_idx": 32068, "episode_idx": 187, "frame_idx": 96, "global_frame_idx": 32068, "task_index": 37}, {"db_idx": 32069, "episode_idx": 187, "frame_idx": 97, "global_frame_idx": 32069, "task_index": 37}, {"db_idx": 32070, "episode_idx": 187, "frame_idx": 98, "global_frame_idx": 32070, "task_index": 37}, {"db_idx": 32071, "episode_idx": 187, "frame_idx": 99, "global_frame_idx": 32071, "task_index": 37}, {"db_idx": 32072, "episode_idx": 187, "frame_idx": 100, "global_frame_idx": 32072, "task_index": 37}, {"db_idx": 32073, "episode_idx": 187, "frame_idx": 101, "global_frame_idx": 32073, "task_index": 37}, {"db_idx": 32074, "episode_idx": 187, "frame_idx": 102, "global_frame_idx": 32074, "task_index": 37}, {"db_idx": 32075, "episode_idx": 187, "frame_idx": 103, "global_frame_idx": 32075, "task_index": 37}, {"db_idx": 32076, "episode_idx": 187, "frame_idx": 104, "global_frame_idx": 32076, "task_index": 37}, {"db_idx": 32077, "episode_idx": 187, "frame_idx": 105, "global_frame_idx": 32077, "task_index": 37}, {"db_idx": 32078, "episode_idx": 187, "frame_idx": 106, "global_frame_idx": 32078, "task_index": 37}, {"db_idx": 32079, "episode_idx": 187, "frame_idx": 107, "global_frame_idx": 32079, "task_index": 37}, {"db_idx": 32080, "episode_idx": 187, "frame_idx": 108, "global_frame_idx": 32080, "task_index": 37}, {"db_idx": 32081, "episode_idx": 187, "frame_idx": 109, "global_frame_idx": 32081, "task_index": 37}, {"db_idx": 32082, "episode_idx": 187, "frame_idx": 110, "global_frame_idx": 32082, "task_index": 37}, {"db_idx": 32083, "episode_idx": 187, "frame_idx": 
111, "global_frame_idx": 32083, "task_index": 37}, {"db_idx": 32084, "episode_idx": 187, "frame_idx": 112, "global_frame_idx": 32084, "task_index": 37}, {"db_idx": 32085, "episode_idx": 187, "frame_idx": 113, "global_frame_idx": 32085, "task_index": 37}, {"db_idx": 32086, "episode_idx": 187, "frame_idx": 114, "global_frame_idx": 32086, "task_index": 37}, {"db_idx": 32087, "episode_idx": 187, "frame_idx": 115, "global_frame_idx": 32087, "task_index": 37}, {"db_idx": 32088, "episode_idx": 187, "frame_idx": 116, "global_frame_idx": 32088, "task_index": 37}, {"db_idx": 32089, "episode_idx": 188, "frame_idx": 0, "global_frame_idx": 32089, "task_index": 37}, {"db_idx": 32090, "episode_idx": 188, "frame_idx": 1, "global_frame_idx": 32090, "task_index": 37}, {"db_idx": 32091, "episode_idx": 188, "frame_idx": 2, "global_frame_idx": 32091, "task_index": 37}, {"db_idx": 32092, "episode_idx": 188, "frame_idx": 3, "global_frame_idx": 32092, "task_index": 37}, {"db_idx": 32093, "episode_idx": 188, "frame_idx": 4, "global_frame_idx": 32093, "task_index": 37}, {"db_idx": 32094, "episode_idx": 188, "frame_idx": 5, "global_frame_idx": 32094, "task_index": 37}, {"db_idx": 32095, "episode_idx": 188, "frame_idx": 6, "global_frame_idx": 32095, "task_index": 37}, {"db_idx": 32096, "episode_idx": 188, "frame_idx": 7, "global_frame_idx": 32096, "task_index": 37}, {"db_idx": 32097, "episode_idx": 188, "frame_idx": 8, "global_frame_idx": 32097, "task_index": 37}, {"db_idx": 32098, "episode_idx": 188, "frame_idx": 9, "global_frame_idx": 32098, "task_index": 37}, {"db_idx": 32099, "episode_idx": 188, "frame_idx": 10, "global_frame_idx": 32099, "task_index": 37}, {"db_idx": 32100, "episode_idx": 188, "frame_idx": 11, "global_frame_idx": 32100, "task_index": 37}, {"db_idx": 32101, "episode_idx": 188, "frame_idx": 12, "global_frame_idx": 32101, "task_index": 37}, {"db_idx": 32102, "episode_idx": 188, "frame_idx": 13, "global_frame_idx": 32102, "task_index": 37}, {"db_idx": 32103, "episode_idx": 
188, "frame_idx": 14, "global_frame_idx": 32103, "task_index": 37}, {"db_idx": 32104, "episode_idx": 188, "frame_idx": 15, "global_frame_idx": 32104, "task_index": 37}, {"db_idx": 32105, "episode_idx": 188, "frame_idx": 16, "global_frame_idx": 32105, "task_index": 37}, {"db_idx": 32106, "episode_idx": 188, "frame_idx": 17, "global_frame_idx": 32106, "task_index": 37}, {"db_idx": 32107, "episode_idx": 188, "frame_idx": 18, "global_frame_idx": 32107, "task_index": 37}, {"db_idx": 32108, "episode_idx": 188, "frame_idx": 19, "global_frame_idx": 32108, "task_index": 37}, {"db_idx": 32109, "episode_idx": 188, "frame_idx": 20, "global_frame_idx": 32109, "task_index": 37}, {"db_idx": 32110, "episode_idx": 188, "frame_idx": 21, "global_frame_idx": 32110, "task_index": 37}, {"db_idx": 32111, "episode_idx": 188, "frame_idx": 22, "global_frame_idx": 32111, "task_index": 37}, {"db_idx": 32112, "episode_idx": 188, "frame_idx": 23, "global_frame_idx": 32112, "task_index": 37}, {"db_idx": 32113, "episode_idx": 188, "frame_idx": 24, "global_frame_idx": 32113, "task_index": 37}, {"db_idx": 32114, "episode_idx": 188, "frame_idx": 25, "global_frame_idx": 32114, "task_index": 37}, {"db_idx": 32115, "episode_idx": 188, "frame_idx": 26, "global_frame_idx": 32115, "task_index": 37}, {"db_idx": 32116, "episode_idx": 188, "frame_idx": 27, "global_frame_idx": 32116, "task_index": 37}, {"db_idx": 32117, "episode_idx": 188, "frame_idx": 28, "global_frame_idx": 32117, "task_index": 37}, {"db_idx": 32118, "episode_idx": 188, "frame_idx": 29, "global_frame_idx": 32118, "task_index": 37}, {"db_idx": 32119, "episode_idx": 188, "frame_idx": 30, "global_frame_idx": 32119, "task_index": 37}, {"db_idx": 32120, "episode_idx": 188, "frame_idx": 31, "global_frame_idx": 32120, "task_index": 37}, {"db_idx": 32121, "episode_idx": 188, "frame_idx": 32, "global_frame_idx": 32121, "task_index": 37}, {"db_idx": 32122, "episode_idx": 188, "frame_idx": 33, "global_frame_idx": 32122, "task_index": 37}, {"db_idx": 
32123, "episode_idx": 188, "frame_idx": 34, "global_frame_idx": 32123, "task_index": 37}, {"db_idx": 32124, "episode_idx": 188, "frame_idx": 35, "global_frame_idx": 32124, "task_index": 37}, {"db_idx": 32125, "episode_idx": 188, "frame_idx": 36, "global_frame_idx": 32125, "task_index": 37}, {"db_idx": 32126, "episode_idx": 188, "frame_idx": 37, "global_frame_idx": 32126, "task_index": 37}, {"db_idx": 32127, "episode_idx": 188, "frame_idx": 38, "global_frame_idx": 32127, "task_index": 37}, {"db_idx": 32128, "episode_idx": 188, "frame_idx": 39, "global_frame_idx": 32128, "task_index": 37}, {"db_idx": 32129, "episode_idx": 188, "frame_idx": 40, "global_frame_idx": 32129, "task_index": 37}, {"db_idx": 32130, "episode_idx": 188, "frame_idx": 41, "global_frame_idx": 32130, "task_index": 37}, {"db_idx": 32131, "episode_idx": 188, "frame_idx": 42, "global_frame_idx": 32131, "task_index": 37}, {"db_idx": 32132, "episode_idx": 188, "frame_idx": 43, "global_frame_idx": 32132, "task_index": 37}, {"db_idx": 32133, "episode_idx": 188, "frame_idx": 44, "global_frame_idx": 32133, "task_index": 37}, {"db_idx": 32134, "episode_idx": 188, "frame_idx": 45, "global_frame_idx": 32134, "task_index": 37}, {"db_idx": 32135, "episode_idx": 188, "frame_idx": 46, "global_frame_idx": 32135, "task_index": 37}, {"db_idx": 32136, "episode_idx": 188, "frame_idx": 47, "global_frame_idx": 32136, "task_index": 37}, {"db_idx": 32137, "episode_idx": 188, "frame_idx": 48, "global_frame_idx": 32137, "task_index": 37}, {"db_idx": 32138, "episode_idx": 188, "frame_idx": 49, "global_frame_idx": 32138, "task_index": 37}, {"db_idx": 32139, "episode_idx": 188, "frame_idx": 50, "global_frame_idx": 32139, "task_index": 37}, {"db_idx": 32140, "episode_idx": 188, "frame_idx": 51, "global_frame_idx": 32140, "task_index": 37}, {"db_idx": 32141, "episode_idx": 188, "frame_idx": 52, "global_frame_idx": 32141, "task_index": 37}, {"db_idx": 32142, "episode_idx": 188, "frame_idx": 53, "global_frame_idx": 32142, 
"task_index": 37}, {"db_idx": 32143, "episode_idx": 188, "frame_idx": 54, "global_frame_idx": 32143, "task_index": 37}, {"db_idx": 32144, "episode_idx": 188, "frame_idx": 55, "global_frame_idx": 32144, "task_index": 37}, {"db_idx": 32145, "episode_idx": 188, "frame_idx": 56, "global_frame_idx": 32145, "task_index": 37}, {"db_idx": 32146, "episode_idx": 188, "frame_idx": 57, "global_frame_idx": 32146, "task_index": 37}, {"db_idx": 32147, "episode_idx": 188, "frame_idx": 58, "global_frame_idx": 32147, "task_index": 37}, {"db_idx": 32148, "episode_idx": 188, "frame_idx": 59, "global_frame_idx": 32148, "task_index": 37}, {"db_idx": 32149, "episode_idx": 188, "frame_idx": 60, "global_frame_idx": 32149, "task_index": 37}, {"db_idx": 32150, "episode_idx": 188, "frame_idx": 61, "global_frame_idx": 32150, "task_index": 37}, {"db_idx": 32151, "episode_idx": 188, "frame_idx": 62, "global_frame_idx": 32151, "task_index": 37}, {"db_idx": 32152, "episode_idx": 188, "frame_idx": 63, "global_frame_idx": 32152, "task_index": 37}, {"db_idx": 32153, "episode_idx": 188, "frame_idx": 64, "global_frame_idx": 32153, "task_index": 37}, {"db_idx": 32154, "episode_idx": 188, "frame_idx": 65, "global_frame_idx": 32154, "task_index": 37}, {"db_idx": 32155, "episode_idx": 188, "frame_idx": 66, "global_frame_idx": 32155, "task_index": 37}, {"db_idx": 32156, "episode_idx": 188, "frame_idx": 67, "global_frame_idx": 32156, "task_index": 37}, {"db_idx": 32157, "episode_idx": 188, "frame_idx": 68, "global_frame_idx": 32157, "task_index": 37}, {"db_idx": 32158, "episode_idx": 188, "frame_idx": 69, "global_frame_idx": 32158, "task_index": 37}, {"db_idx": 32159, "episode_idx": 188, "frame_idx": 70, "global_frame_idx": 32159, "task_index": 37}, {"db_idx": 32160, "episode_idx": 188, "frame_idx": 71, "global_frame_idx": 32160, "task_index": 37}, {"db_idx": 32161, "episode_idx": 188, "frame_idx": 72, "global_frame_idx": 32161, "task_index": 37}, {"db_idx": 32162, "episode_idx": 188, "frame_idx": 73, 
"global_frame_idx": 32162, "task_index": 37}, {"db_idx": 32163, "episode_idx": 188, "frame_idx": 74, "global_frame_idx": 32163, "task_index": 37}, {"db_idx": 32164, "episode_idx": 188, "frame_idx": 75, "global_frame_idx": 32164, "task_index": 37}, {"db_idx": 32165, "episode_idx": 188, "frame_idx": 76, "global_frame_idx": 32165, "task_index": 37}, {"db_idx": 32166, "episode_idx": 188, "frame_idx": 77, "global_frame_idx": 32166, "task_index": 37}, {"db_idx": 32167, "episode_idx": 188, "frame_idx": 78, "global_frame_idx": 32167, "task_index": 37}, {"db_idx": 32168, "episode_idx": 188, "frame_idx": 79, "global_frame_idx": 32168, "task_index": 37}, {"db_idx": 32169, "episode_idx": 188, "frame_idx": 80, "global_frame_idx": 32169, "task_index": 37}, {"db_idx": 32170, "episode_idx": 188, "frame_idx": 81, "global_frame_idx": 32170, "task_index": 37}, {"db_idx": 32171, "episode_idx": 188, "frame_idx": 82, "global_frame_idx": 32171, "task_index": 37}, {"db_idx": 32172, "episode_idx": 188, "frame_idx": 83, "global_frame_idx": 32172, "task_index": 37}, {"db_idx": 32173, "episode_idx": 188, "frame_idx": 84, "global_frame_idx": 32173, "task_index": 37}, {"db_idx": 32174, "episode_idx": 188, "frame_idx": 85, "global_frame_idx": 32174, "task_index": 37}, {"db_idx": 32175, "episode_idx": 188, "frame_idx": 86, "global_frame_idx": 32175, "task_index": 37}, {"db_idx": 32176, "episode_idx": 188, "frame_idx": 87, "global_frame_idx": 32176, "task_index": 37}, {"db_idx": 32177, "episode_idx": 188, "frame_idx": 88, "global_frame_idx": 32177, "task_index": 37}, {"db_idx": 32178, "episode_idx": 188, "frame_idx": 89, "global_frame_idx": 32178, "task_index": 37}, {"db_idx": 32179, "episode_idx": 188, "frame_idx": 90, "global_frame_idx": 32179, "task_index": 37}, {"db_idx": 32180, "episode_idx": 188, "frame_idx": 91, "global_frame_idx": 32180, "task_index": 37}, {"db_idx": 32181, "episode_idx": 188, "frame_idx": 92, "global_frame_idx": 32181, "task_index": 37}, {"db_idx": 32182, "episode_idx": 
188, "frame_idx": 93, "global_frame_idx": 32182, "task_index": 37}, {"db_idx": 32183, "episode_idx": 188, "frame_idx": 94, "global_frame_idx": 32183, "task_index": 37}, {"db_idx": 32184, "episode_idx": 188, "frame_idx": 95, "global_frame_idx": 32184, "task_index": 37}, {"db_idx": 32185, "episode_idx": 188, "frame_idx": 96, "global_frame_idx": 32185, "task_index": 37}, {"db_idx": 32186, "episode_idx": 188, "frame_idx": 97, "global_frame_idx": 32186, "task_index": 37}, {"db_idx": 32187, "episode_idx": 188, "frame_idx": 98, "global_frame_idx": 32187, "task_index": 37}, {"db_idx": 32188, "episode_idx": 188, "frame_idx": 99, "global_frame_idx": 32188, "task_index": 37}, {"db_idx": 32189, "episode_idx": 188, "frame_idx": 100, "global_frame_idx": 32189, "task_index": 37}, {"db_idx": 32190, "episode_idx": 188, "frame_idx": 101, "global_frame_idx": 32190, "task_index": 37}, {"db_idx": 32191, "episode_idx": 188, "frame_idx": 102, "global_frame_idx": 32191, "task_index": 37}, {"db_idx": 32192, "episode_idx": 188, "frame_idx": 103, "global_frame_idx": 32192, "task_index": 37}, {"db_idx": 32193, "episode_idx": 188, "frame_idx": 104, "global_frame_idx": 32193, "task_index": 37}, {"db_idx": 32194, "episode_idx": 188, "frame_idx": 105, "global_frame_idx": 32194, "task_index": 37}, {"db_idx": 32195, "episode_idx": 188, "frame_idx": 106, "global_frame_idx": 32195, "task_index": 37}, {"db_idx": 32196, "episode_idx": 188, "frame_idx": 107, "global_frame_idx": 32196, "task_index": 37}, {"db_idx": 32197, "episode_idx": 188, "frame_idx": 108, "global_frame_idx": 32197, "task_index": 37}, {"db_idx": 32198, "episode_idx": 188, "frame_idx": 109, "global_frame_idx": 32198, "task_index": 37}, {"db_idx": 32199, "episode_idx": 188, "frame_idx": 110, "global_frame_idx": 32199, "task_index": 37}, {"db_idx": 32200, "episode_idx": 188, "frame_idx": 111, "global_frame_idx": 32200, "task_index": 37}, {"db_idx": 32201, "episode_idx": 188, "frame_idx": 112, "global_frame_idx": 32201, "task_index": 37}, 
{"db_idx": 32202, "episode_idx": 188, "frame_idx": 113, "global_frame_idx": 32202, "task_index": 37}, {"db_idx": 32203, "episode_idx": 188, "frame_idx": 114, "global_frame_idx": 32203, "task_index": 37}, {"db_idx": 32204, "episode_idx": 189, "frame_idx": 0, "global_frame_idx": 32204, "task_index": 37}, {"db_idx": 32205, "episode_idx": 189, "frame_idx": 1, "global_frame_idx": 32205, "task_index": 37}, {"db_idx": 32206, "episode_idx": 189, "frame_idx": 2, "global_frame_idx": 32206, "task_index": 37}, {"db_idx": 32207, "episode_idx": 189, "frame_idx": 3, "global_frame_idx": 32207, "task_index": 37}, {"db_idx": 32208, "episode_idx": 189, "frame_idx": 4, "global_frame_idx": 32208, "task_index": 37}, {"db_idx": 32209, "episode_idx": 189, "frame_idx": 5, "global_frame_idx": 32209, "task_index": 37}, {"db_idx": 32210, "episode_idx": 189, "frame_idx": 6, "global_frame_idx": 32210, "task_index": 37}, {"db_idx": 32211, "episode_idx": 189, "frame_idx": 7, "global_frame_idx": 32211, "task_index": 37}, {"db_idx": 32212, "episode_idx": 189, "frame_idx": 8, "global_frame_idx": 32212, "task_index": 37}, {"db_idx": 32213, "episode_idx": 189, "frame_idx": 9, "global_frame_idx": 32213, "task_index": 37}, {"db_idx": 32214, "episode_idx": 189, "frame_idx": 10, "global_frame_idx": 32214, "task_index": 37}, {"db_idx": 32215, "episode_idx": 189, "frame_idx": 11, "global_frame_idx": 32215, "task_index": 37}, {"db_idx": 32216, "episode_idx": 189, "frame_idx": 12, "global_frame_idx": 32216, "task_index": 37}, {"db_idx": 32217, "episode_idx": 189, "frame_idx": 13, "global_frame_idx": 32217, "task_index": 37}, {"db_idx": 32218, "episode_idx": 189, "frame_idx": 14, "global_frame_idx": 32218, "task_index": 37}, {"db_idx": 32219, "episode_idx": 189, "frame_idx": 15, "global_frame_idx": 32219, "task_index": 37}, {"db_idx": 32220, "episode_idx": 189, "frame_idx": 16, "global_frame_idx": 32220, "task_index": 37}, {"db_idx": 32221, "episode_idx": 189, "frame_idx": 17, "global_frame_idx": 32221, 
"task_index": 37}, {"db_idx": 32222, "episode_idx": 189, "frame_idx": 18, "global_frame_idx": 32222, "task_index": 37}, {"db_idx": 32223, "episode_idx": 189, "frame_idx": 19, "global_frame_idx": 32223, "task_index": 37}, {"db_idx": 32224, "episode_idx": 189, "frame_idx": 20, "global_frame_idx": 32224, "task_index": 37}, {"db_idx": 32225, "episode_idx": 189, "frame_idx": 21, "global_frame_idx": 32225, "task_index": 37}, {"db_idx": 32226, "episode_idx": 189, "frame_idx": 22, "global_frame_idx": 32226, "task_index": 37}, {"db_idx": 32227, "episode_idx": 189, "frame_idx": 23, "global_frame_idx": 32227, "task_index": 37}, {"db_idx": 32228, "episode_idx": 189, "frame_idx": 24, "global_frame_idx": 32228, "task_index": 37}, {"db_idx": 32229, "episode_idx": 189, "frame_idx": 25, "global_frame_idx": 32229, "task_index": 37}, {"db_idx": 32230, "episode_idx": 189, "frame_idx": 26, "global_frame_idx": 32230, "task_index": 37}, {"db_idx": 32231, "episode_idx": 189, "frame_idx": 27, "global_frame_idx": 32231, "task_index": 37}, {"db_idx": 32232, "episode_idx": 189, "frame_idx": 28, "global_frame_idx": 32232, "task_index": 37}, {"db_idx": 32233, "episode_idx": 189, "frame_idx": 29, "global_frame_idx": 32233, "task_index": 37}, {"db_idx": 32234, "episode_idx": 189, "frame_idx": 30, "global_frame_idx": 32234, "task_index": 37}, {"db_idx": 32235, "episode_idx": 189, "frame_idx": 31, "global_frame_idx": 32235, "task_index": 37}, {"db_idx": 32236, "episode_idx": 189, "frame_idx": 32, "global_frame_idx": 32236, "task_index": 37}, {"db_idx": 32237, "episode_idx": 189, "frame_idx": 33, "global_frame_idx": 32237, "task_index": 37}, {"db_idx": 32238, "episode_idx": 189, "frame_idx": 34, "global_frame_idx": 32238, "task_index": 37}, {"db_idx": 32239, "episode_idx": 189, "frame_idx": 35, "global_frame_idx": 32239, "task_index": 37}, {"db_idx": 32240, "episode_idx": 189, "frame_idx": 36, "global_frame_idx": 32240, "task_index": 37}, {"db_idx": 32241, "episode_idx": 189, "frame_idx": 37, 
"global_frame_idx": 32241, "task_index": 37}, {"db_idx": 32242, "episode_idx": 189, "frame_idx": 38, "global_frame_idx": 32242, "task_index": 37}, {"db_idx": 32243, "episode_idx": 189, "frame_idx": 39, "global_frame_idx": 32243, "task_index": 37}, {"db_idx": 32244, "episode_idx": 189, "frame_idx": 40, "global_frame_idx": 32244, "task_index": 37}, {"db_idx": 32245, "episode_idx": 189, "frame_idx": 41, "global_frame_idx": 32245, "task_index": 37}, {"db_idx": 32246, "episode_idx": 189, "frame_idx": 42, "global_frame_idx": 32246, "task_index": 37}, {"db_idx": 32247, "episode_idx": 189, "frame_idx": 43, "global_frame_idx": 32247, "task_index": 37}, {"db_idx": 32248, "episode_idx": 189, "frame_idx": 44, "global_frame_idx": 32248, "task_index": 37}, {"db_idx": 32249, "episode_idx": 189, "frame_idx": 45, "global_frame_idx": 32249, "task_index": 37}, {"db_idx": 32250, "episode_idx": 189, "frame_idx": 46, "global_frame_idx": 32250, "task_index": 37}, {"db_idx": 32251, "episode_idx": 189, "frame_idx": 47, "global_frame_idx": 32251, "task_index": 37}, {"db_idx": 32252, "episode_idx": 189, "frame_idx": 48, "global_frame_idx": 32252, "task_index": 37}, {"db_idx": 32253, "episode_idx": 189, "frame_idx": 49, "global_frame_idx": 32253, "task_index": 37}, {"db_idx": 32254, "episode_idx": 189, "frame_idx": 50, "global_frame_idx": 32254, "task_index": 37}, {"db_idx": 32255, "episode_idx": 189, "frame_idx": 51, "global_frame_idx": 32255, "task_index": 37}, {"db_idx": 32256, "episode_idx": 189, "frame_idx": 52, "global_frame_idx": 32256, "task_index": 37}, {"db_idx": 32257, "episode_idx": 189, "frame_idx": 53, "global_frame_idx": 32257, "task_index": 37}, {"db_idx": 32258, "episode_idx": 189, "frame_idx": 54, "global_frame_idx": 32258, "task_index": 37}, {"db_idx": 32259, "episode_idx": 189, "frame_idx": 55, "global_frame_idx": 32259, "task_index": 37}, {"db_idx": 32260, "episode_idx": 189, "frame_idx": 56, "global_frame_idx": 32260, "task_index": 37}, {"db_idx": 32261, "episode_idx": 
189, "frame_idx": 57, "global_frame_idx": 32261, "task_index": 37}, {"db_idx": 32262, "episode_idx": 189, "frame_idx": 58, "global_frame_idx": 32262, "task_index": 37}, {"db_idx": 32263, "episode_idx": 189, "frame_idx": 59, "global_frame_idx": 32263, "task_index": 37}, {"db_idx": 32264, "episode_idx": 189, "frame_idx": 60, "global_frame_idx": 32264, "task_index": 37}, {"db_idx": 32265, "episode_idx": 189, "frame_idx": 61, "global_frame_idx": 32265, "task_index": 37}, {"db_idx": 32266, "episode_idx": 189, "frame_idx": 62, "global_frame_idx": 32266, "task_index": 37}, {"db_idx": 32267, "episode_idx": 189, "frame_idx": 63, "global_frame_idx": 32267, "task_index": 37}, {"db_idx": 32268, "episode_idx": 189, "frame_idx": 64, "global_frame_idx": 32268, "task_index": 37}, {"db_idx": 32269, "episode_idx": 189, "frame_idx": 65, "global_frame_idx": 32269, "task_index": 37}, {"db_idx": 32270, "episode_idx": 189, "frame_idx": 66, "global_frame_idx": 32270, "task_index": 37}, {"db_idx": 32271, "episode_idx": 189, "frame_idx": 67, "global_frame_idx": 32271, "task_index": 37}, {"db_idx": 32272, "episode_idx": 189, "frame_idx": 68, "global_frame_idx": 32272, "task_index": 37}, {"db_idx": 32273, "episode_idx": 189, "frame_idx": 69, "global_frame_idx": 32273, "task_index": 37}, {"db_idx": 32274, "episode_idx": 189, "frame_idx": 70, "global_frame_idx": 32274, "task_index": 37}, {"db_idx": 32275, "episode_idx": 189, "frame_idx": 71, "global_frame_idx": 32275, "task_index": 37}, {"db_idx": 32276, "episode_idx": 189, "frame_idx": 72, "global_frame_idx": 32276, "task_index": 37}, {"db_idx": 32277, "episode_idx": 189, "frame_idx": 73, "global_frame_idx": 32277, "task_index": 37}, {"db_idx": 32278, "episode_idx": 189, "frame_idx": 74, "global_frame_idx": 32278, "task_index": 37}, {"db_idx": 32279, "episode_idx": 189, "frame_idx": 75, "global_frame_idx": 32279, "task_index": 37}, {"db_idx": 32280, "episode_idx": 189, "frame_idx": 76, "global_frame_idx": 32280, "task_index": 37}, {"db_idx": 
32281, "episode_idx": 189, "frame_idx": 77, "global_frame_idx": 32281, "task_index": 37}, {"db_idx": 32282, "episode_idx": 189, "frame_idx": 78, "global_frame_idx": 32282, "task_index": 37}, {"db_idx": 32283, "episode_idx": 189, "frame_idx": 79, "global_frame_idx": 32283, "task_index": 37}, {"db_idx": 32284, "episode_idx": 189, "frame_idx": 80, "global_frame_idx": 32284, "task_index": 37}, {"db_idx": 32285, "episode_idx": 189, "frame_idx": 81, "global_frame_idx": 32285, "task_index": 37}, {"db_idx": 32286, "episode_idx": 189, "frame_idx": 82, "global_frame_idx": 32286, "task_index": 37}, {"db_idx": 32287, "episode_idx": 189, "frame_idx": 83, "global_frame_idx": 32287, "task_index": 37}, {"db_idx": 32288, "episode_idx": 189, "frame_idx": 84, "global_frame_idx": 32288, "task_index": 37}, {"db_idx": 32289, "episode_idx": 189, "frame_idx": 85, "global_frame_idx": 32289, "task_index": 37}, {"db_idx": 32290, "episode_idx": 189, "frame_idx": 86, "global_frame_idx": 32290, "task_index": 37}, {"db_idx": 32291, "episode_idx": 189, "frame_idx": 87, "global_frame_idx": 32291, "task_index": 37}, {"db_idx": 32292, "episode_idx": 189, "frame_idx": 88, "global_frame_idx": 32292, "task_index": 37}, {"db_idx": 32293, "episode_idx": 189, "frame_idx": 89, "global_frame_idx": 32293, "task_index": 37}, {"db_idx": 32294, "episode_idx": 189, "frame_idx": 90, "global_frame_idx": 32294, "task_index": 37}, {"db_idx": 32295, "episode_idx": 189, "frame_idx": 91, "global_frame_idx": 32295, "task_index": 37}, {"db_idx": 32296, "episode_idx": 189, "frame_idx": 92, "global_frame_idx": 32296, "task_index": 37}, {"db_idx": 32297, "episode_idx": 189, "frame_idx": 93, "global_frame_idx": 32297, "task_index": 37}, {"db_idx": 32298, "episode_idx": 189, "frame_idx": 94, "global_frame_idx": 32298, "task_index": 37}, {"db_idx": 32299, "episode_idx": 189, "frame_idx": 95, "global_frame_idx": 32299, "task_index": 37}, {"db_idx": 32300, "episode_idx": 189, "frame_idx": 96, "global_frame_idx": 32300, 
"task_index": 37}, {"db_idx": 32301, "episode_idx": 189, "frame_idx": 97, "global_frame_idx": 32301, "task_index": 37}, {"db_idx": 32302, "episode_idx": 189, "frame_idx": 98, "global_frame_idx": 32302, "task_index": 37}, {"db_idx": 32303, "episode_idx": 189, "frame_idx": 99, "global_frame_idx": 32303, "task_index": 37}, {"db_idx": 32304, "episode_idx": 189, "frame_idx": 100, "global_frame_idx": 32304, "task_index": 37}, {"db_idx": 32305, "episode_idx": 189, "frame_idx": 101, "global_frame_idx": 32305, "task_index": 37}, {"db_idx": 32306, "episode_idx": 189, "frame_idx": 102, "global_frame_idx": 32306, "task_index": 37}, {"db_idx": 32307, "episode_idx": 189, "frame_idx": 103, "global_frame_idx": 32307, "task_index": 37}, {"db_idx": 32308, "episode_idx": 189, "frame_idx": 104, "global_frame_idx": 32308, "task_index": 37}, {"db_idx": 32309, "episode_idx": 189, "frame_idx": 105, "global_frame_idx": 32309, "task_index": 37}, {"db_idx": 32310, "episode_idx": 189, "frame_idx": 106, "global_frame_idx": 32310, "task_index": 37}, {"db_idx": 32311, "episode_idx": 189, "frame_idx": 107, "global_frame_idx": 32311, "task_index": 37}, {"db_idx": 32312, "episode_idx": 189, "frame_idx": 108, "global_frame_idx": 32312, "task_index": 37}, {"db_idx": 32313, "episode_idx": 189, "frame_idx": 109, "global_frame_idx": 32313, "task_index": 37}, {"db_idx": 32314, "episode_idx": 189, "frame_idx": 110, "global_frame_idx": 32314, "task_index": 37}, {"db_idx": 32315, "episode_idx": 189, "frame_idx": 111, "global_frame_idx": 32315, "task_index": 37}, {"db_idx": 32316, "episode_idx": 189, "frame_idx": 112, "global_frame_idx": 32316, "task_index": 37}, {"db_idx": 32317, "episode_idx": 189, "frame_idx": 113, "global_frame_idx": 32317, "task_index": 37}, {"db_idx": 32318, "episode_idx": 189, "frame_idx": 114, "global_frame_idx": 32318, "task_index": 37}, {"db_idx": 32319, "episode_idx": 189, "frame_idx": 115, "global_frame_idx": 32319, "task_index": 37}, {"db_idx": 32320, "episode_idx": 189, 
"frame_idx": 116, "global_frame_idx": 32320, "task_index": 37}, {"db_idx": 32321, "episode_idx": 189, "frame_idx": 117, "global_frame_idx": 32321, "task_index": 37}, {"db_idx": 32322, "episode_idx": 189, "frame_idx": 118, "global_frame_idx": 32322, "task_index": 37}, {"db_idx": 32323, "episode_idx": 189, "frame_idx": 119, "global_frame_idx": 32323, "task_index": 37}, {"db_idx": 32324, "episode_idx": 189, "frame_idx": 120, "global_frame_idx": 32324, "task_index": 37}, {"db_idx": 32325, "episode_idx": 189, "frame_idx": 121, "global_frame_idx": 32325, "task_index": 37}, {"db_idx": 32326, "episode_idx": 189, "frame_idx": 122, "global_frame_idx": 32326, "task_index": 37}, {"db_idx": 32327, "episode_idx": 189, "frame_idx": 123, "global_frame_idx": 32327, "task_index": 37}, {"db_idx": 32328, "episode_idx": 189, "frame_idx": 124, "global_frame_idx": 32328, "task_index": 37}, {"db_idx": 32329, "episode_idx": 189, "frame_idx": 125, "global_frame_idx": 32329, "task_index": 37}, {"db_idx": 32330, "episode_idx": 189, "frame_idx": 126, "global_frame_idx": 32330, "task_index": 37}, {"db_idx": 32331, "episode_idx": 189, "frame_idx": 127, "global_frame_idx": 32331, "task_index": 37}, {"db_idx": 32332, "episode_idx": 189, "frame_idx": 128, "global_frame_idx": 32332, "task_index": 37}, {"db_idx": 32333, "episode_idx": 189, "frame_idx": 129, "global_frame_idx": 32333, "task_index": 37}, {"db_idx": 32334, "episode_idx": 189, "frame_idx": 130, "global_frame_idx": 32334, "task_index": 37}, {"db_idx": 32335, "episode_idx": 189, "frame_idx": 131, "global_frame_idx": 32335, "task_index": 37}, {"db_idx": 32336, "episode_idx": 189, "frame_idx": 132, "global_frame_idx": 32336, "task_index": 37}, {"db_idx": 32337, "episode_idx": 189, "frame_idx": 133, "global_frame_idx": 32337, "task_index": 37}, {"db_idx": 32338, "episode_idx": 189, "frame_idx": 134, "global_frame_idx": 32338, "task_index": 37}, {"db_idx": 32339, "episode_idx": 189, "frame_idx": 135, "global_frame_idx": 32339, "task_index": 
37}, {"db_idx": 32340, "episode_idx": 189, "frame_idx": 136, "global_frame_idx": 32340, "task_index": 37}, {"db_idx": 32341, "episode_idx": 189, "frame_idx": 137, "global_frame_idx": 32341, "task_index": 37}, {"db_idx": 32342, "episode_idx": 189, "frame_idx": 138, "global_frame_idx": 32342, "task_index": 37}, {"db_idx": 32343, "episode_idx": 189, "frame_idx": 139, "global_frame_idx": 32343, "task_index": 37}, {"db_idx": 32344, "episode_idx": 189, "frame_idx": 140, "global_frame_idx": 32344, "task_index": 37}, {"db_idx": 32345, "episode_idx": 189, "frame_idx": 141, "global_frame_idx": 32345, "task_index": 37}, {"db_idx": 32346, "episode_idx": 189, "frame_idx": 142, "global_frame_idx": 32346, "task_index": 37}, {"db_idx": 32347, "episode_idx": 189, "frame_idx": 143, "global_frame_idx": 32347, "task_index": 37}, {"db_idx": 32348, "episode_idx": 189, "frame_idx": 144, "global_frame_idx": 32348, "task_index": 37}, {"db_idx": 32349, "episode_idx": 189, "frame_idx": 145, "global_frame_idx": 32349, "task_index": 37}, {"db_idx": 32350, "episode_idx": 189, "frame_idx": 146, "global_frame_idx": 32350, "task_index": 37}, {"db_idx": 32351, "episode_idx": 189, "frame_idx": 147, "global_frame_idx": 32351, "task_index": 37}, {"db_idx": 32352, "episode_idx": 189, "frame_idx": 148, "global_frame_idx": 32352, "task_index": 37}, {"db_idx": 32353, "episode_idx": 189, "frame_idx": 149, "global_frame_idx": 32353, "task_index": 37}, {"db_idx": 32354, "episode_idx": 189, "frame_idx": 150, "global_frame_idx": 32354, "task_index": 37}, {"db_idx": 32355, "episode_idx": 189, "frame_idx": 151, "global_frame_idx": 32355, "task_index": 37}, {"db_idx": 32356, "episode_idx": 189, "frame_idx": 152, "global_frame_idx": 32356, "task_index": 37}, {"db_idx": 32357, "episode_idx": 189, "frame_idx": 153, "global_frame_idx": 32357, "task_index": 37}, {"db_idx": 32358, "episode_idx": 189, "frame_idx": 154, "global_frame_idx": 32358, "task_index": 37}, {"db_idx": 32359, "episode_idx": 189, "frame_idx": 155, 
"global_frame_idx": 32359, "task_index": 37}, {"db_idx": 32360, "episode_idx": 189, "frame_idx": 156, "global_frame_idx": 32360, "task_index": 37}, {"db_idx": 32361, "episode_idx": 189, "frame_idx": 157, "global_frame_idx": 32361, "task_index": 37}, {"db_idx": 32362, "episode_idx": 189, "frame_idx": 158, "global_frame_idx": 32362, "task_index": 37}, {"db_idx": 32363, "episode_idx": 189, "frame_idx": 159, "global_frame_idx": 32363, "task_index": 37}, {"db_idx": 32364, "episode_idx": 189, "frame_idx": 160, "global_frame_idx": 32364, "task_index": 37}, {"db_idx": 32365, "episode_idx": 189, "frame_idx": 161, "global_frame_idx": 32365, "task_index": 37}, {"db_idx": 32366, "episode_idx": 189, "frame_idx": 162, "global_frame_idx": 32366, "task_index": 37}, {"db_idx": 32367, "episode_idx": 189, "frame_idx": 163, "global_frame_idx": 32367, "task_index": 37}, {"db_idx": 32368, "episode_idx": 189, "frame_idx": 164, "global_frame_idx": 32368, "task_index": 37}, {"db_idx": 32369, "episode_idx": 189, "frame_idx": 165, "global_frame_idx": 32369, "task_index": 37}, {"db_idx": 32370, "episode_idx": 189, "frame_idx": 166, "global_frame_idx": 32370, "task_index": 37}, {"db_idx": 32371, "episode_idx": 189, "frame_idx": 167, "global_frame_idx": 32371, "task_index": 37}, {"db_idx": 32372, "episode_idx": 189, "frame_idx": 168, "global_frame_idx": 32372, "task_index": 37}, {"db_idx": 32373, "episode_idx": 189, "frame_idx": 169, "global_frame_idx": 32373, "task_index": 37}, {"db_idx": 32374, "episode_idx": 189, "frame_idx": 170, "global_frame_idx": 32374, "task_index": 37}, {"db_idx": 32375, "episode_idx": 189, "frame_idx": 171, "global_frame_idx": 32375, "task_index": 37}, {"db_idx": 32376, "episode_idx": 189, "frame_idx": 172, "global_frame_idx": 32376, "task_index": 37}, {"db_idx": 32377, "episode_idx": 190, "frame_idx": 0, "global_frame_idx": 32377, "task_index": 38}, {"db_idx": 32378, "episode_idx": 190, "frame_idx": 1, "global_frame_idx": 32378, "task_index": 38}, {"db_idx": 32379, 
"episode_idx": 190, "frame_idx": 2, "global_frame_idx": 32379, "task_index": 38}, {"db_idx": 32380, "episode_idx": 190, "frame_idx": 3, "global_frame_idx": 32380, "task_index": 38}, {"db_idx": 32381, "episode_idx": 190, "frame_idx": 4, "global_frame_idx": 32381, "task_index": 38}, {"db_idx": 32382, "episode_idx": 190, "frame_idx": 5, "global_frame_idx": 32382, "task_index": 38}, {"db_idx": 32383, "episode_idx": 190, "frame_idx": 6, "global_frame_idx": 32383, "task_index": 38}, {"db_idx": 32384, "episode_idx": 190, "frame_idx": 7, "global_frame_idx": 32384, "task_index": 38}, {"db_idx": 32385, "episode_idx": 190, "frame_idx": 8, "global_frame_idx": 32385, "task_index": 38}, {"db_idx": 32386, "episode_idx": 190, "frame_idx": 9, "global_frame_idx": 32386, "task_index": 38}, {"db_idx": 32387, "episode_idx": 190, "frame_idx": 10, "global_frame_idx": 32387, "task_index": 38}, {"db_idx": 32388, "episode_idx": 190, "frame_idx": 11, "global_frame_idx": 32388, "task_index": 38}, {"db_idx": 32389, "episode_idx": 190, "frame_idx": 12, "global_frame_idx": 32389, "task_index": 38}, {"db_idx": 32390, "episode_idx": 190, "frame_idx": 13, "global_frame_idx": 32390, "task_index": 38}, {"db_idx": 32391, "episode_idx": 190, "frame_idx": 14, "global_frame_idx": 32391, "task_index": 38}, {"db_idx": 32392, "episode_idx": 190, "frame_idx": 15, "global_frame_idx": 32392, "task_index": 38}, {"db_idx": 32393, "episode_idx": 190, "frame_idx": 16, "global_frame_idx": 32393, "task_index": 38}, {"db_idx": 32394, "episode_idx": 190, "frame_idx": 17, "global_frame_idx": 32394, "task_index": 38}, {"db_idx": 32395, "episode_idx": 190, "frame_idx": 18, "global_frame_idx": 32395, "task_index": 38}, {"db_idx": 32396, "episode_idx": 190, "frame_idx": 19, "global_frame_idx": 32396, "task_index": 38}, {"db_idx": 32397, "episode_idx": 190, "frame_idx": 20, "global_frame_idx": 32397, "task_index": 38}, {"db_idx": 32398, "episode_idx": 190, "frame_idx": 21, "global_frame_idx": 32398, "task_index": 38}, 
{"db_idx": 32399, "episode_idx": 190, "frame_idx": 22, "global_frame_idx": 32399, "task_index": 38}, {"db_idx": 32400, "episode_idx": 190, "frame_idx": 23, "global_frame_idx": 32400, "task_index": 38}, {"db_idx": 32401, "episode_idx": 190, "frame_idx": 24, "global_frame_idx": 32401, "task_index": 38}, {"db_idx": 32402, "episode_idx": 190, "frame_idx": 25, "global_frame_idx": 32402, "task_index": 38}, {"db_idx": 32403, "episode_idx": 190, "frame_idx": 26, "global_frame_idx": 32403, "task_index": 38}, {"db_idx": 32404, "episode_idx": 190, "frame_idx": 27, "global_frame_idx": 32404, "task_index": 38}, {"db_idx": 32405, "episode_idx": 190, "frame_idx": 28, "global_frame_idx": 32405, "task_index": 38}, {"db_idx": 32406, "episode_idx": 190, "frame_idx": 29, "global_frame_idx": 32406, "task_index": 38}, {"db_idx": 32407, "episode_idx": 190, "frame_idx": 30, "global_frame_idx": 32407, "task_index": 38}, {"db_idx": 32408, "episode_idx": 190, "frame_idx": 31, "global_frame_idx": 32408, "task_index": 38}, {"db_idx": 32409, "episode_idx": 190, "frame_idx": 32, "global_frame_idx": 32409, "task_index": 38}, {"db_idx": 32410, "episode_idx": 190, "frame_idx": 33, "global_frame_idx": 32410, "task_index": 38}, {"db_idx": 32411, "episode_idx": 190, "frame_idx": 34, "global_frame_idx": 32411, "task_index": 38}, {"db_idx": 32412, "episode_idx": 190, "frame_idx": 35, "global_frame_idx": 32412, "task_index": 38}, {"db_idx": 32413, "episode_idx": 190, "frame_idx": 36, "global_frame_idx": 32413, "task_index": 38}, {"db_idx": 32414, "episode_idx": 190, "frame_idx": 37, "global_frame_idx": 32414, "task_index": 38}, {"db_idx": 32415, "episode_idx": 190, "frame_idx": 38, "global_frame_idx": 32415, "task_index": 38}, {"db_idx": 32416, "episode_idx": 190, "frame_idx": 39, "global_frame_idx": 32416, "task_index": 38}, {"db_idx": 32417, "episode_idx": 190, "frame_idx": 40, "global_frame_idx": 32417, "task_index": 38}, {"db_idx": 32418, "episode_idx": 190, "frame_idx": 41, "global_frame_idx": 
32418, "task_index": 38}, {"db_idx": 32419, "episode_idx": 190, "frame_idx": 42, "global_frame_idx": 32419, "task_index": 38}, {"db_idx": 32420, "episode_idx": 190, "frame_idx": 43, "global_frame_idx": 32420, "task_index": 38}, {"db_idx": 32421, "episode_idx": 190, "frame_idx": 44, "global_frame_idx": 32421, "task_index": 38}, {"db_idx": 32422, "episode_idx": 190, "frame_idx": 45, "global_frame_idx": 32422, "task_index": 38}, {"db_idx": 32423, "episode_idx": 190, "frame_idx": 46, "global_frame_idx": 32423, "task_index": 38}, {"db_idx": 32424, "episode_idx": 190, "frame_idx": 47, "global_frame_idx": 32424, "task_index": 38}, {"db_idx": 32425, "episode_idx": 190, "frame_idx": 48, "global_frame_idx": 32425, "task_index": 38}, {"db_idx": 32426, "episode_idx": 190, "frame_idx": 49, "global_frame_idx": 32426, "task_index": 38}, {"db_idx": 32427, "episode_idx": 190, "frame_idx": 50, "global_frame_idx": 32427, "task_index": 38}, {"db_idx": 32428, "episode_idx": 190, "frame_idx": 51, "global_frame_idx": 32428, "task_index": 38}, {"db_idx": 32429, "episode_idx": 190, "frame_idx": 52, "global_frame_idx": 32429, "task_index": 38}, {"db_idx": 32430, "episode_idx": 190, "frame_idx": 53, "global_frame_idx": 32430, "task_index": 38}, {"db_idx": 32431, "episode_idx": 190, "frame_idx": 54, "global_frame_idx": 32431, "task_index": 38}, {"db_idx": 32432, "episode_idx": 190, "frame_idx": 55, "global_frame_idx": 32432, "task_index": 38}, {"db_idx": 32433, "episode_idx": 190, "frame_idx": 56, "global_frame_idx": 32433, "task_index": 38}, {"db_idx": 32434, "episode_idx": 190, "frame_idx": 57, "global_frame_idx": 32434, "task_index": 38}, {"db_idx": 32435, "episode_idx": 190, "frame_idx": 58, "global_frame_idx": 32435, "task_index": 38}, {"db_idx": 32436, "episode_idx": 190, "frame_idx": 59, "global_frame_idx": 32436, "task_index": 38}, {"db_idx": 32437, "episode_idx": 190, "frame_idx": 60, "global_frame_idx": 32437, "task_index": 38}, {"db_idx": 32438, "episode_idx": 190, "frame_idx": 61, 
"global_frame_idx": 32438, "task_index": 38}, {"db_idx": 32439, "episode_idx": 190, "frame_idx": 62, "global_frame_idx": 32439, "task_index": 38}, {"db_idx": 32440, "episode_idx": 190, "frame_idx": 63, "global_frame_idx": 32440, "task_index": 38}, {"db_idx": 32441, "episode_idx": 190, "frame_idx": 64, "global_frame_idx": 32441, "task_index": 38}, {"db_idx": 32442, "episode_idx": 190, "frame_idx": 65, "global_frame_idx": 32442, "task_index": 38}, {"db_idx": 32443, "episode_idx": 190, "frame_idx": 66, "global_frame_idx": 32443, "task_index": 38}, {"db_idx": 32444, "episode_idx": 190, "frame_idx": 67, "global_frame_idx": 32444, "task_index": 38}, {"db_idx": 32445, "episode_idx": 190, "frame_idx": 68, "global_frame_idx": 32445, "task_index": 38}, {"db_idx": 32446, "episode_idx": 190, "frame_idx": 69, "global_frame_idx": 32446, "task_index": 38}, {"db_idx": 32447, "episode_idx": 190, "frame_idx": 70, "global_frame_idx": 32447, "task_index": 38}, {"db_idx": 32448, "episode_idx": 190, "frame_idx": 71, "global_frame_idx": 32448, "task_index": 38}, {"db_idx": 32449, "episode_idx": 190, "frame_idx": 72, "global_frame_idx": 32449, "task_index": 38}, {"db_idx": 32450, "episode_idx": 190, "frame_idx": 73, "global_frame_idx": 32450, "task_index": 38}, {"db_idx": 32451, "episode_idx": 190, "frame_idx": 74, "global_frame_idx": 32451, "task_index": 38}, {"db_idx": 32452, "episode_idx": 190, "frame_idx": 75, "global_frame_idx": 32452, "task_index": 38}, {"db_idx": 32453, "episode_idx": 190, "frame_idx": 76, "global_frame_idx": 32453, "task_index": 38}, {"db_idx": 32454, "episode_idx": 190, "frame_idx": 77, "global_frame_idx": 32454, "task_index": 38}, {"db_idx": 32455, "episode_idx": 190, "frame_idx": 78, "global_frame_idx": 32455, "task_index": 38}, {"db_idx": 32456, "episode_idx": 190, "frame_idx": 79, "global_frame_idx": 32456, "task_index": 38}, {"db_idx": 32457, "episode_idx": 190, "frame_idx": 80, "global_frame_idx": 32457, "task_index": 38}, {"db_idx": 32458, "episode_idx": 
190, "frame_idx": 81, "global_frame_idx": 32458, "task_index": 38}, {"db_idx": 32459, "episode_idx": 190, "frame_idx": 82, "global_frame_idx": 32459, "task_index": 38}, {"db_idx": 32460, "episode_idx": 190, "frame_idx": 83, "global_frame_idx": 32460, "task_index": 38}, {"db_idx": 32461, "episode_idx": 190, "frame_idx": 84, "global_frame_idx": 32461, "task_index": 38}, {"db_idx": 32462, "episode_idx": 190, "frame_idx": 85, "global_frame_idx": 32462, "task_index": 38}, {"db_idx": 32463, "episode_idx": 190, "frame_idx": 86, "global_frame_idx": 32463, "task_index": 38}, {"db_idx": 32464, "episode_idx": 190, "frame_idx": 87, "global_frame_idx": 32464, "task_index": 38}, {"db_idx": 32465, "episode_idx": 190, "frame_idx": 88, "global_frame_idx": 32465, "task_index": 38}, {"db_idx": 32466, "episode_idx": 190, "frame_idx": 89, "global_frame_idx": 32466, "task_index": 38}, {"db_idx": 32467, "episode_idx": 190, "frame_idx": 90, "global_frame_idx": 32467, "task_index": 38}, {"db_idx": 32468, "episode_idx": 190, "frame_idx": 91, "global_frame_idx": 32468, "task_index": 38}, {"db_idx": 32469, "episode_idx": 190, "frame_idx": 92, "global_frame_idx": 32469, "task_index": 38}, {"db_idx": 32470, "episode_idx": 190, "frame_idx": 93, "global_frame_idx": 32470, "task_index": 38}, {"db_idx": 32471, "episode_idx": 190, "frame_idx": 94, "global_frame_idx": 32471, "task_index": 38}, {"db_idx": 32472, "episode_idx": 190, "frame_idx": 95, "global_frame_idx": 32472, "task_index": 38}, {"db_idx": 32473, "episode_idx": 190, "frame_idx": 96, "global_frame_idx": 32473, "task_index": 38}, {"db_idx": 32474, "episode_idx": 190, "frame_idx": 97, "global_frame_idx": 32474, "task_index": 38}, {"db_idx": 32475, "episode_idx": 190, "frame_idx": 98, "global_frame_idx": 32475, "task_index": 38}, {"db_idx": 32476, "episode_idx": 190, "frame_idx": 99, "global_frame_idx": 32476, "task_index": 38}, {"db_idx": 32477, "episode_idx": 190, "frame_idx": 100, "global_frame_idx": 32477, "task_index": 38}, {"db_idx": 
32478, "episode_idx": 190, "frame_idx": 101, "global_frame_idx": 32478, "task_index": 38}, {"db_idx": 32479, "episode_idx": 190, "frame_idx": 102, "global_frame_idx": 32479, "task_index": 38}, {"db_idx": 32480, "episode_idx": 190, "frame_idx": 103, "global_frame_idx": 32480, "task_index": 38}, {"db_idx": 32481, "episode_idx": 190, "frame_idx": 104, "global_frame_idx": 32481, "task_index": 38}, {"db_idx": 32482, "episode_idx": 190, "frame_idx": 105, "global_frame_idx": 32482, "task_index": 38}, {"db_idx": 32483, "episode_idx": 190, "frame_idx": 106, "global_frame_idx": 32483, "task_index": 38}, {"db_idx": 32484, "episode_idx": 190, "frame_idx": 107, "global_frame_idx": 32484, "task_index": 38}, {"db_idx": 32485, "episode_idx": 190, "frame_idx": 108, "global_frame_idx": 32485, "task_index": 38}, {"db_idx": 32486, "episode_idx": 190, "frame_idx": 109, "global_frame_idx": 32486, "task_index": 38}, {"db_idx": 32487, "episode_idx": 190, "frame_idx": 110, "global_frame_idx": 32487, "task_index": 38}, {"db_idx": 32488, "episode_idx": 190, "frame_idx": 111, "global_frame_idx": 32488, "task_index": 38}, {"db_idx": 32489, "episode_idx": 190, "frame_idx": 112, "global_frame_idx": 32489, "task_index": 38}, {"db_idx": 32490, "episode_idx": 190, "frame_idx": 113, "global_frame_idx": 32490, "task_index": 38}, {"db_idx": 32491, "episode_idx": 190, "frame_idx": 114, "global_frame_idx": 32491, "task_index": 38}, {"db_idx": 32492, "episode_idx": 190, "frame_idx": 115, "global_frame_idx": 32492, "task_index": 38}, {"db_idx": 32493, "episode_idx": 190, "frame_idx": 116, "global_frame_idx": 32493, "task_index": 38}, {"db_idx": 32494, "episode_idx": 190, "frame_idx": 117, "global_frame_idx": 32494, "task_index": 38}, {"db_idx": 32495, "episode_idx": 190, "frame_idx": 118, "global_frame_idx": 32495, "task_index": 38}, {"db_idx": 32496, "episode_idx": 190, "frame_idx": 119, "global_frame_idx": 32496, "task_index": 38}, {"db_idx": 32497, "episode_idx": 190, "frame_idx": 120, 
"global_frame_idx": 32497, "task_index": 38}, {"db_idx": 32498, "episode_idx": 190, "frame_idx": 121, "global_frame_idx": 32498, "task_index": 38}, {"db_idx": 32499, "episode_idx": 190, "frame_idx": 122, "global_frame_idx": 32499, "task_index": 38}, {"db_idx": 32500, "episode_idx": 190, "frame_idx": 123, "global_frame_idx": 32500, "task_index": 38}, {"db_idx": 32501, "episode_idx": 190, "frame_idx": 124, "global_frame_idx": 32501, "task_index": 38}, {"db_idx": 32502, "episode_idx": 190, "frame_idx": 125, "global_frame_idx": 32502, "task_index": 38}, {"db_idx": 32503, "episode_idx": 190, "frame_idx": 126, "global_frame_idx": 32503, "task_index": 38}, {"db_idx": 32504, "episode_idx": 190, "frame_idx": 127, "global_frame_idx": 32504, "task_index": 38}, {"db_idx": 32505, "episode_idx": 190, "frame_idx": 128, "global_frame_idx": 32505, "task_index": 38}, {"db_idx": 32506, "episode_idx": 190, "frame_idx": 129, "global_frame_idx": 32506, "task_index": 38}, {"db_idx": 32507, "episode_idx": 190, "frame_idx": 130, "global_frame_idx": 32507, "task_index": 38}, {"db_idx": 32508, "episode_idx": 190, "frame_idx": 131, "global_frame_idx": 32508, "task_index": 38}, {"db_idx": 32509, "episode_idx": 190, "frame_idx": 132, "global_frame_idx": 32509, "task_index": 38}, {"db_idx": 32510, "episode_idx": 190, "frame_idx": 133, "global_frame_idx": 32510, "task_index": 38}, {"db_idx": 32511, "episode_idx": 190, "frame_idx": 134, "global_frame_idx": 32511, "task_index": 38}, {"db_idx": 32512, "episode_idx": 190, "frame_idx": 135, "global_frame_idx": 32512, "task_index": 38}, {"db_idx": 32513, "episode_idx": 190, "frame_idx": 136, "global_frame_idx": 32513, "task_index": 38}, {"db_idx": 32514, "episode_idx": 190, "frame_idx": 137, "global_frame_idx": 32514, "task_index": 38}, {"db_idx": 32515, "episode_idx": 190, "frame_idx": 138, "global_frame_idx": 32515, "task_index": 38}, {"db_idx": 32516, "episode_idx": 190, "frame_idx": 139, "global_frame_idx": 32516, "task_index": 38}, {"db_idx": 
32517, "episode_idx": 190, "frame_idx": 140, "global_frame_idx": 32517, "task_index": 38}, {"db_idx": 32518, "episode_idx": 191, "frame_idx": 0, "global_frame_idx": 32518, "task_index": 38}, {"db_idx": 32519, "episode_idx": 191, "frame_idx": 1, "global_frame_idx": 32519, "task_index": 38}, {"db_idx": 32520, "episode_idx": 191, "frame_idx": 2, "global_frame_idx": 32520, "task_index": 38}, {"db_idx": 32521, "episode_idx": 191, "frame_idx": 3, "global_frame_idx": 32521, "task_index": 38}, {"db_idx": 32522, "episode_idx": 191, "frame_idx": 4, "global_frame_idx": 32522, "task_index": 38}, {"db_idx": 32523, "episode_idx": 191, "frame_idx": 5, "global_frame_idx": 32523, "task_index": 38}, {"db_idx": 32524, "episode_idx": 191, "frame_idx": 6, "global_frame_idx": 32524, "task_index": 38}, {"db_idx": 32525, "episode_idx": 191, "frame_idx": 7, "global_frame_idx": 32525, "task_index": 38}, {"db_idx": 32526, "episode_idx": 191, "frame_idx": 8, "global_frame_idx": 32526, "task_index": 38}, {"db_idx": 32527, "episode_idx": 191, "frame_idx": 9, "global_frame_idx": 32527, "task_index": 38}, {"db_idx": 32528, "episode_idx": 191, "frame_idx": 10, "global_frame_idx": 32528, "task_index": 38}, {"db_idx": 32529, "episode_idx": 191, "frame_idx": 11, "global_frame_idx": 32529, "task_index": 38}, {"db_idx": 32530, "episode_idx": 191, "frame_idx": 12, "global_frame_idx": 32530, "task_index": 38}, {"db_idx": 32531, "episode_idx": 191, "frame_idx": 13, "global_frame_idx": 32531, "task_index": 38}, {"db_idx": 32532, "episode_idx": 191, "frame_idx": 14, "global_frame_idx": 32532, "task_index": 38}, {"db_idx": 32533, "episode_idx": 191, "frame_idx": 15, "global_frame_idx": 32533, "task_index": 38}, {"db_idx": 32534, "episode_idx": 191, "frame_idx": 16, "global_frame_idx": 32534, "task_index": 38}, {"db_idx": 32535, "episode_idx": 191, "frame_idx": 17, "global_frame_idx": 32535, "task_index": 38}, {"db_idx": 32536, "episode_idx": 191, "frame_idx": 18, "global_frame_idx": 32536, "task_index": 38}, 
{"db_idx": 32537, "episode_idx": 191, "frame_idx": 19, "global_frame_idx": 32537, "task_index": 38}, {"db_idx": 32538, "episode_idx": 191, "frame_idx": 20, "global_frame_idx": 32538, "task_index": 38}, {"db_idx": 32539, "episode_idx": 191, "frame_idx": 21, "global_frame_idx": 32539, "task_index": 38}, {"db_idx": 32540, "episode_idx": 191, "frame_idx": 22, "global_frame_idx": 32540, "task_index": 38}, {"db_idx": 32541, "episode_idx": 191, "frame_idx": 23, "global_frame_idx": 32541, "task_index": 38}, {"db_idx": 32542, "episode_idx": 191, "frame_idx": 24, "global_frame_idx": 32542, "task_index": 38}, {"db_idx": 32543, "episode_idx": 191, "frame_idx": 25, "global_frame_idx": 32543, "task_index": 38}, {"db_idx": 32544, "episode_idx": 191, "frame_idx": 26, "global_frame_idx": 32544, "task_index": 38}, {"db_idx": 32545, "episode_idx": 191, "frame_idx": 27, "global_frame_idx": 32545, "task_index": 38}, {"db_idx": 32546, "episode_idx": 191, "frame_idx": 28, "global_frame_idx": 32546, "task_index": 38}, {"db_idx": 32547, "episode_idx": 191, "frame_idx": 29, "global_frame_idx": 32547, "task_index": 38}, {"db_idx": 32548, "episode_idx": 191, "frame_idx": 30, "global_frame_idx": 32548, "task_index": 38}, {"db_idx": 32549, "episode_idx": 191, "frame_idx": 31, "global_frame_idx": 32549, "task_index": 38}, {"db_idx": 32550, "episode_idx": 191, "frame_idx": 32, "global_frame_idx": 32550, "task_index": 38}, {"db_idx": 32551, "episode_idx": 191, "frame_idx": 33, "global_frame_idx": 32551, "task_index": 38}, {"db_idx": 32552, "episode_idx": 191, "frame_idx": 34, "global_frame_idx": 32552, "task_index": 38}, {"db_idx": 32553, "episode_idx": 191, "frame_idx": 35, "global_frame_idx": 32553, "task_index": 38}, {"db_idx": 32554, "episode_idx": 191, "frame_idx": 36, "global_frame_idx": 32554, "task_index": 38}, {"db_idx": 32555, "episode_idx": 191, "frame_idx": 37, "global_frame_idx": 32555, "task_index": 38}, {"db_idx": 32556, "episode_idx": 191, "frame_idx": 38, "global_frame_idx": 
32556, "task_index": 38}, {"db_idx": 32557, "episode_idx": 191, "frame_idx": 39, "global_frame_idx": 32557, "task_index": 38}, {"db_idx": 32558, "episode_idx": 191, "frame_idx": 40, "global_frame_idx": 32558, "task_index": 38}, {"db_idx": 32559, "episode_idx": 191, "frame_idx": 41, "global_frame_idx": 32559, "task_index": 38}, {"db_idx": 32560, "episode_idx": 191, "frame_idx": 42, "global_frame_idx": 32560, "task_index": 38}, {"db_idx": 32561, "episode_idx": 191, "frame_idx": 43, "global_frame_idx": 32561, "task_index": 38}, {"db_idx": 32562, "episode_idx": 191, "frame_idx": 44, "global_frame_idx": 32562, "task_index": 38}, {"db_idx": 32563, "episode_idx": 191, "frame_idx": 45, "global_frame_idx": 32563, "task_index": 38}, {"db_idx": 32564, "episode_idx": 191, "frame_idx": 46, "global_frame_idx": 32564, "task_index": 38}, {"db_idx": 32565, "episode_idx": 191, "frame_idx": 47, "global_frame_idx": 32565, "task_index": 38}, {"db_idx": 32566, "episode_idx": 191, "frame_idx": 48, "global_frame_idx": 32566, "task_index": 38}, {"db_idx": 32567, "episode_idx": 191, "frame_idx": 49, "global_frame_idx": 32567, "task_index": 38}, {"db_idx": 32568, "episode_idx": 191, "frame_idx": 50, "global_frame_idx": 32568, "task_index": 38}, {"db_idx": 32569, "episode_idx": 191, "frame_idx": 51, "global_frame_idx": 32569, "task_index": 38}, {"db_idx": 32570, "episode_idx": 191, "frame_idx": 52, "global_frame_idx": 32570, "task_index": 38}, {"db_idx": 32571, "episode_idx": 191, "frame_idx": 53, "global_frame_idx": 32571, "task_index": 38}, {"db_idx": 32572, "episode_idx": 191, "frame_idx": 54, "global_frame_idx": 32572, "task_index": 38}, {"db_idx": 32573, "episode_idx": 191, "frame_idx": 55, "global_frame_idx": 32573, "task_index": 38}, {"db_idx": 32574, "episode_idx": 191, "frame_idx": 56, "global_frame_idx": 32574, "task_index": 38}, {"db_idx": 32575, "episode_idx": 191, "frame_idx": 57, "global_frame_idx": 32575, "task_index": 38}, {"db_idx": 32576, "episode_idx": 191, "frame_idx": 58, 
"global_frame_idx": 32576, "task_index": 38}, {"db_idx": 32577, "episode_idx": 191, "frame_idx": 59, "global_frame_idx": 32577, "task_index": 38}, {"db_idx": 32578, "episode_idx": 191, "frame_idx": 60, "global_frame_idx": 32578, "task_index": 38}, {"db_idx": 32579, "episode_idx": 191, "frame_idx": 61, "global_frame_idx": 32579, "task_index": 38}, {"db_idx": 32580, "episode_idx": 191, "frame_idx": 62, "global_frame_idx": 32580, "task_index": 38}, {"db_idx": 32581, "episode_idx": 191, "frame_idx": 63, "global_frame_idx": 32581, "task_index": 38}, {"db_idx": 32582, "episode_idx": 191, "frame_idx": 64, "global_frame_idx": 32582, "task_index": 38}, {"db_idx": 32583, "episode_idx": 191, "frame_idx": 65, "global_frame_idx": 32583, "task_index": 38}, {"db_idx": 32584, "episode_idx": 191, "frame_idx": 66, "global_frame_idx": 32584, "task_index": 38}, {"db_idx": 32585, "episode_idx": 191, "frame_idx": 67, "global_frame_idx": 32585, "task_index": 38}, {"db_idx": 32586, "episode_idx": 191, "frame_idx": 68, "global_frame_idx": 32586, "task_index": 38}, {"db_idx": 32587, "episode_idx": 191, "frame_idx": 69, "global_frame_idx": 32587, "task_index": 38}, {"db_idx": 32588, "episode_idx": 191, "frame_idx": 70, "global_frame_idx": 32588, "task_index": 38}, {"db_idx": 32589, "episode_idx": 191, "frame_idx": 71, "global_frame_idx": 32589, "task_index": 38}, {"db_idx": 32590, "episode_idx": 191, "frame_idx": 72, "global_frame_idx": 32590, "task_index": 38}, {"db_idx": 32591, "episode_idx": 191, "frame_idx": 73, "global_frame_idx": 32591, "task_index": 38}, {"db_idx": 32592, "episode_idx": 191, "frame_idx": 74, "global_frame_idx": 32592, "task_index": 38}, {"db_idx": 32593, "episode_idx": 191, "frame_idx": 75, "global_frame_idx": 32593, "task_index": 38}, {"db_idx": 32594, "episode_idx": 191, "frame_idx": 76, "global_frame_idx": 32594, "task_index": 38}, {"db_idx": 32595, "episode_idx": 191, "frame_idx": 77, "global_frame_idx": 32595, "task_index": 38}, {"db_idx": 32596, "episode_idx": 
191, "frame_idx": 78, "global_frame_idx": 32596, "task_index": 38}, {"db_idx": 32597, "episode_idx": 191, "frame_idx": 79, "global_frame_idx": 32597, "task_index": 38}, {"db_idx": 32598, "episode_idx": 191, "frame_idx": 80, "global_frame_idx": 32598, "task_index": 38}, {"db_idx": 32599, "episode_idx": 191, "frame_idx": 81, "global_frame_idx": 32599, "task_index": 38}, {"db_idx": 32600, "episode_idx": 191, "frame_idx": 82, "global_frame_idx": 32600, "task_index": 38}, {"db_idx": 32601, "episode_idx": 191, "frame_idx": 83, "global_frame_idx": 32601, "task_index": 38}, {"db_idx": 32602, "episode_idx": 191, "frame_idx": 84, "global_frame_idx": 32602, "task_index": 38}, {"db_idx": 32603, "episode_idx": 191, "frame_idx": 85, "global_frame_idx": 32603, "task_index": 38}, {"db_idx": 32604, "episode_idx": 191, "frame_idx": 86, "global_frame_idx": 32604, "task_index": 38}, {"db_idx": 32605, "episode_idx": 191, "frame_idx": 87, "global_frame_idx": 32605, "task_index": 38}, {"db_idx": 32606, "episode_idx": 191, "frame_idx": 88, "global_frame_idx": 32606, "task_index": 38}, {"db_idx": 32607, "episode_idx": 191, "frame_idx": 89, "global_frame_idx": 32607, "task_index": 38}, {"db_idx": 32608, "episode_idx": 191, "frame_idx": 90, "global_frame_idx": 32608, "task_index": 38}, {"db_idx": 32609, "episode_idx": 191, "frame_idx": 91, "global_frame_idx": 32609, "task_index": 38}, {"db_idx": 32610, "episode_idx": 191, "frame_idx": 92, "global_frame_idx": 32610, "task_index": 38}, {"db_idx": 32611, "episode_idx": 191, "frame_idx": 93, "global_frame_idx": 32611, "task_index": 38}, {"db_idx": 32612, "episode_idx": 191, "frame_idx": 94, "global_frame_idx": 32612, "task_index": 38}, {"db_idx": 32613, "episode_idx": 191, "frame_idx": 95, "global_frame_idx": 32613, "task_index": 38}, {"db_idx": 32614, "episode_idx": 191, "frame_idx": 96, "global_frame_idx": 32614, "task_index": 38}, {"db_idx": 32615, "episode_idx": 191, "frame_idx": 97, "global_frame_idx": 32615, "task_index": 38}, {"db_idx": 
32616, "episode_idx": 191, "frame_idx": 98, "global_frame_idx": 32616, "task_index": 38}, {"db_idx": 32617, "episode_idx": 191, "frame_idx": 99, "global_frame_idx": 32617, "task_index": 38}, {"db_idx": 32618, "episode_idx": 191, "frame_idx": 100, "global_frame_idx": 32618, "task_index": 38}, {"db_idx": 32619, "episode_idx": 191, "frame_idx": 101, "global_frame_idx": 32619, "task_index": 38}, {"db_idx": 32620, "episode_idx": 191, "frame_idx": 102, "global_frame_idx": 32620, "task_index": 38}, {"db_idx": 32621, "episode_idx": 191, "frame_idx": 103, "global_frame_idx": 32621, "task_index": 38}, {"db_idx": 32622, "episode_idx": 191, "frame_idx": 104, "global_frame_idx": 32622, "task_index": 38}, {"db_idx": 32623, "episode_idx": 191, "frame_idx": 105, "global_frame_idx": 32623, "task_index": 38}, {"db_idx": 32624, "episode_idx": 191, "frame_idx": 106, "global_frame_idx": 32624, "task_index": 38}, {"db_idx": 32625, "episode_idx": 191, "frame_idx": 107, "global_frame_idx": 32625, "task_index": 38}, {"db_idx": 32626, "episode_idx": 191, "frame_idx": 108, "global_frame_idx": 32626, "task_index": 38}, {"db_idx": 32627, "episode_idx": 191, "frame_idx": 109, "global_frame_idx": 32627, "task_index": 38}, {"db_idx": 32628, "episode_idx": 191, "frame_idx": 110, "global_frame_idx": 32628, "task_index": 38}, {"db_idx": 32629, "episode_idx": 191, "frame_idx": 111, "global_frame_idx": 32629, "task_index": 38}, {"db_idx": 32630, "episode_idx": 191, "frame_idx": 112, "global_frame_idx": 32630, "task_index": 38}, {"db_idx": 32631, "episode_idx": 191, "frame_idx": 113, "global_frame_idx": 32631, "task_index": 38}, {"db_idx": 32632, "episode_idx": 191, "frame_idx": 114, "global_frame_idx": 32632, "task_index": 38}, {"db_idx": 32633, "episode_idx": 191, "frame_idx": 115, "global_frame_idx": 32633, "task_index": 38}, {"db_idx": 32634, "episode_idx": 191, "frame_idx": 116, "global_frame_idx": 32634, "task_index": 38}, {"db_idx": 32635, "episode_idx": 191, "frame_idx": 117, 
"global_frame_idx": 32635, "task_index": 38}, {"db_idx": 32636, "episode_idx": 191, "frame_idx": 118, "global_frame_idx": 32636, "task_index": 38}, {"db_idx": 32637, "episode_idx": 191, "frame_idx": 119, "global_frame_idx": 32637, "task_index": 38}, {"db_idx": 32638, "episode_idx": 191, "frame_idx": 120, "global_frame_idx": 32638, "task_index": 38}, {"db_idx": 32639, "episode_idx": 191, "frame_idx": 121, "global_frame_idx": 32639, "task_index": 38}, {"db_idx": 32640, "episode_idx": 191, "frame_idx": 122, "global_frame_idx": 32640, "task_index": 38}, {"db_idx": 32641, "episode_idx": 191, "frame_idx": 123, "global_frame_idx": 32641, "task_index": 38}, {"db_idx": 32642, "episode_idx": 191, "frame_idx": 124, "global_frame_idx": 32642, "task_index": 38}, {"db_idx": 32643, "episode_idx": 191, "frame_idx": 125, "global_frame_idx": 32643, "task_index": 38}, {"db_idx": 32644, "episode_idx": 191, "frame_idx": 126, "global_frame_idx": 32644, "task_index": 38}, {"db_idx": 32645, "episode_idx": 191, "frame_idx": 127, "global_frame_idx": 32645, "task_index": 38}, {"db_idx": 32646, "episode_idx": 191, "frame_idx": 128, "global_frame_idx": 32646, "task_index": 38}, {"db_idx": 32647, "episode_idx": 191, "frame_idx": 129, "global_frame_idx": 32647, "task_index": 38}, {"db_idx": 32648, "episode_idx": 191, "frame_idx": 130, "global_frame_idx": 32648, "task_index": 38}, {"db_idx": 32649, "episode_idx": 191, "frame_idx": 131, "global_frame_idx": 32649, "task_index": 38}, {"db_idx": 32650, "episode_idx": 191, "frame_idx": 132, "global_frame_idx": 32650, "task_index": 38}, {"db_idx": 32651, "episode_idx": 191, "frame_idx": 133, "global_frame_idx": 32651, "task_index": 38}, {"db_idx": 32652, "episode_idx": 191, "frame_idx": 134, "global_frame_idx": 32652, "task_index": 38}, {"db_idx": 32653, "episode_idx": 191, "frame_idx": 135, "global_frame_idx": 32653, "task_index": 38}, {"db_idx": 32654, "episode_idx": 191, "frame_idx": 136, "global_frame_idx": 32654, "task_index": 38}, {"db_idx": 
32655, "episode_idx": 192, "frame_idx": 0, "global_frame_idx": 32655, "task_index": 38}, {"db_idx": 32656, "episode_idx": 192, "frame_idx": 1, "global_frame_idx": 32656, "task_index": 38}, {"db_idx": 32657, "episode_idx": 192, "frame_idx": 2, "global_frame_idx": 32657, "task_index": 38}, {"db_idx": 32658, "episode_idx": 192, "frame_idx": 3, "global_frame_idx": 32658, "task_index": 38}, {"db_idx": 32659, "episode_idx": 192, "frame_idx": 4, "global_frame_idx": 32659, "task_index": 38}, {"db_idx": 32660, "episode_idx": 192, "frame_idx": 5, "global_frame_idx": 32660, "task_index": 38}, {"db_idx": 32661, "episode_idx": 192, "frame_idx": 6, "global_frame_idx": 32661, "task_index": 38}, {"db_idx": 32662, "episode_idx": 192, "frame_idx": 7, "global_frame_idx": 32662, "task_index": 38}, {"db_idx": 32663, "episode_idx": 192, "frame_idx": 8, "global_frame_idx": 32663, "task_index": 38}, {"db_idx": 32664, "episode_idx": 192, "frame_idx": 9, "global_frame_idx": 32664, "task_index": 38}, {"db_idx": 32665, "episode_idx": 192, "frame_idx": 10, "global_frame_idx": 32665, "task_index": 38}, {"db_idx": 32666, "episode_idx": 192, "frame_idx": 11, "global_frame_idx": 32666, "task_index": 38}, {"db_idx": 32667, "episode_idx": 192, "frame_idx": 12, "global_frame_idx": 32667, "task_index": 38}, {"db_idx": 32668, "episode_idx": 192, "frame_idx": 13, "global_frame_idx": 32668, "task_index": 38}, {"db_idx": 32669, "episode_idx": 192, "frame_idx": 14, "global_frame_idx": 32669, "task_index": 38}, {"db_idx": 32670, "episode_idx": 192, "frame_idx": 15, "global_frame_idx": 32670, "task_index": 38}, {"db_idx": 32671, "episode_idx": 192, "frame_idx": 16, "global_frame_idx": 32671, "task_index": 38}, {"db_idx": 32672, "episode_idx": 192, "frame_idx": 17, "global_frame_idx": 32672, "task_index": 38}, {"db_idx": 32673, "episode_idx": 192, "frame_idx": 18, "global_frame_idx": 32673, "task_index": 38}, {"db_idx": 32674, "episode_idx": 192, "frame_idx": 19, "global_frame_idx": 32674, "task_index": 38}, 
{"db_idx": 32675, "episode_idx": 192, "frame_idx": 20, "global_frame_idx": 32675, "task_index": 38}, {"db_idx": 32676, "episode_idx": 192, "frame_idx": 21, "global_frame_idx": 32676, "task_index": 38}, {"db_idx": 32677, "episode_idx": 192, "frame_idx": 22, "global_frame_idx": 32677, "task_index": 38}, {"db_idx": 32678, "episode_idx": 192, "frame_idx": 23, "global_frame_idx": 32678, "task_index": 38}, {"db_idx": 32679, "episode_idx": 192, "frame_idx": 24, "global_frame_idx": 32679, "task_index": 38}, {"db_idx": 32680, "episode_idx": 192, "frame_idx": 25, "global_frame_idx": 32680, "task_index": 38}, {"db_idx": 32681, "episode_idx": 192, "frame_idx": 26, "global_frame_idx": 32681, "task_index": 38}, {"db_idx": 32682, "episode_idx": 192, "frame_idx": 27, "global_frame_idx": 32682, "task_index": 38}, {"db_idx": 32683, "episode_idx": 192, "frame_idx": 28, "global_frame_idx": 32683, "task_index": 38}, {"db_idx": 32684, "episode_idx": 192, "frame_idx": 29, "global_frame_idx": 32684, "task_index": 38}, {"db_idx": 32685, "episode_idx": 192, "frame_idx": 30, "global_frame_idx": 32685, "task_index": 38}, {"db_idx": 32686, "episode_idx": 192, "frame_idx": 31, "global_frame_idx": 32686, "task_index": 38}, {"db_idx": 32687, "episode_idx": 192, "frame_idx": 32, "global_frame_idx": 32687, "task_index": 38}, {"db_idx": 32688, "episode_idx": 192, "frame_idx": 33, "global_frame_idx": 32688, "task_index": 38}, {"db_idx": 32689, "episode_idx": 192, "frame_idx": 34, "global_frame_idx": 32689, "task_index": 38}, {"db_idx": 32690, "episode_idx": 192, "frame_idx": 35, "global_frame_idx": 32690, "task_index": 38}, {"db_idx": 32691, "episode_idx": 192, "frame_idx": 36, "global_frame_idx": 32691, "task_index": 38}, {"db_idx": 32692, "episode_idx": 192, "frame_idx": 37, "global_frame_idx": 32692, "task_index": 38}, {"db_idx": 32693, "episode_idx": 192, "frame_idx": 38, "global_frame_idx": 32693, "task_index": 38}, {"db_idx": 32694, "episode_idx": 192, "frame_idx": 39, "global_frame_idx": 
32694, "task_index": 38}, {"db_idx": 32695, "episode_idx": 192, "frame_idx": 40, "global_frame_idx": 32695, "task_index": 38}, {"db_idx": 32696, "episode_idx": 192, "frame_idx": 41, "global_frame_idx": 32696, "task_index": 38}, {"db_idx": 32697, "episode_idx": 192, "frame_idx": 42, "global_frame_idx": 32697, "task_index": 38}, {"db_idx": 32698, "episode_idx": 192, "frame_idx": 43, "global_frame_idx": 32698, "task_index": 38}, {"db_idx": 32699, "episode_idx": 192, "frame_idx": 44, "global_frame_idx": 32699, "task_index": 38}, {"db_idx": 32700, "episode_idx": 192, "frame_idx": 45, "global_frame_idx": 32700, "task_index": 38}, {"db_idx": 32701, "episode_idx": 192, "frame_idx": 46, "global_frame_idx": 32701, "task_index": 38}, {"db_idx": 32702, "episode_idx": 192, "frame_idx": 47, "global_frame_idx": 32702, "task_index": 38}, {"db_idx": 32703, "episode_idx": 192, "frame_idx": 48, "global_frame_idx": 32703, "task_index": 38}, {"db_idx": 32704, "episode_idx": 192, "frame_idx": 49, "global_frame_idx": 32704, "task_index": 38}, {"db_idx": 32705, "episode_idx": 192, "frame_idx": 50, "global_frame_idx": 32705, "task_index": 38}, {"db_idx": 32706, "episode_idx": 192, "frame_idx": 51, "global_frame_idx": 32706, "task_index": 38}, {"db_idx": 32707, "episode_idx": 192, "frame_idx": 52, "global_frame_idx": 32707, "task_index": 38}, {"db_idx": 32708, "episode_idx": 192, "frame_idx": 53, "global_frame_idx": 32708, "task_index": 38}, {"db_idx": 32709, "episode_idx": 192, "frame_idx": 54, "global_frame_idx": 32709, "task_index": 38}, {"db_idx": 32710, "episode_idx": 192, "frame_idx": 55, "global_frame_idx": 32710, "task_index": 38}, {"db_idx": 32711, "episode_idx": 192, "frame_idx": 56, "global_frame_idx": 32711, "task_index": 38}, {"db_idx": 32712, "episode_idx": 192, "frame_idx": 57, "global_frame_idx": 32712, "task_index": 38}, {"db_idx": 32713, "episode_idx": 192, "frame_idx": 58, "global_frame_idx": 32713, "task_index": 38}, {"db_idx": 32714, "episode_idx": 192, "frame_idx": 59, 
"global_frame_idx": 32714, "task_index": 38}, {"db_idx": 32715, "episode_idx": 192, "frame_idx": 60, "global_frame_idx": 32715, "task_index": 38}, {"db_idx": 32716, "episode_idx": 192, "frame_idx": 61, "global_frame_idx": 32716, "task_index": 38}, {"db_idx": 32717, "episode_idx": 192, "frame_idx": 62, "global_frame_idx": 32717, "task_index": 38}, {"db_idx": 32718, "episode_idx": 192, "frame_idx": 63, "global_frame_idx": 32718, "task_index": 38}, {"db_idx": 32719, "episode_idx": 192, "frame_idx": 64, "global_frame_idx": 32719, "task_index": 38}, {"db_idx": 32720, "episode_idx": 192, "frame_idx": 65, "global_frame_idx": 32720, "task_index": 38}, {"db_idx": 32721, "episode_idx": 192, "frame_idx": 66, "global_frame_idx": 32721, "task_index": 38}, {"db_idx": 32722, "episode_idx": 192, "frame_idx": 67, "global_frame_idx": 32722, "task_index": 38}, {"db_idx": 32723, "episode_idx": 192, "frame_idx": 68, "global_frame_idx": 32723, "task_index": 38}, {"db_idx": 32724, "episode_idx": 192, "frame_idx": 69, "global_frame_idx": 32724, "task_index": 38}, {"db_idx": 32725, "episode_idx": 192, "frame_idx": 70, "global_frame_idx": 32725, "task_index": 38}, {"db_idx": 32726, "episode_idx": 192, "frame_idx": 71, "global_frame_idx": 32726, "task_index": 38}, {"db_idx": 32727, "episode_idx": 192, "frame_idx": 72, "global_frame_idx": 32727, "task_index": 38}, {"db_idx": 32728, "episode_idx": 192, "frame_idx": 73, "global_frame_idx": 32728, "task_index": 38}, {"db_idx": 32729, "episode_idx": 192, "frame_idx": 74, "global_frame_idx": 32729, "task_index": 38}, {"db_idx": 32730, "episode_idx": 192, "frame_idx": 75, "global_frame_idx": 32730, "task_index": 38}, {"db_idx": 32731, "episode_idx": 192, "frame_idx": 76, "global_frame_idx": 32731, "task_index": 38}, {"db_idx": 32732, "episode_idx": 192, "frame_idx": 77, "global_frame_idx": 32732, "task_index": 38}, {"db_idx": 32733, "episode_idx": 192, "frame_idx": 78, "global_frame_idx": 32733, "task_index": 38}, {"db_idx": 32734, "episode_idx": 
192, "frame_idx": 79, "global_frame_idx": 32734, "task_index": 38}, {"db_idx": 32735, "episode_idx": 192, "frame_idx": 80, "global_frame_idx": 32735, "task_index": 38}, {"db_idx": 32736, "episode_idx": 192, "frame_idx": 81, "global_frame_idx": 32736, "task_index": 38}, {"db_idx": 32737, "episode_idx": 192, "frame_idx": 82, "global_frame_idx": 32737, "task_index": 38}, {"db_idx": 32738, "episode_idx": 192, "frame_idx": 83, "global_frame_idx": 32738, "task_index": 38}, {"db_idx": 32739, "episode_idx": 192, "frame_idx": 84, "global_frame_idx": 32739, "task_index": 38}, {"db_idx": 32740, "episode_idx": 192, "frame_idx": 85, "global_frame_idx": 32740, "task_index": 38}, {"db_idx": 32741, "episode_idx": 192, "frame_idx": 86, "global_frame_idx": 32741, "task_index": 38}, {"db_idx": 32742, "episode_idx": 192, "frame_idx": 87, "global_frame_idx": 32742, "task_index": 38}, {"db_idx": 32743, "episode_idx": 192, "frame_idx": 88, "global_frame_idx": 32743, "task_index": 38}, {"db_idx": 32744, "episode_idx": 192, "frame_idx": 89, "global_frame_idx": 32744, "task_index": 38}, {"db_idx": 32745, "episode_idx": 192, "frame_idx": 90, "global_frame_idx": 32745, "task_index": 38}, {"db_idx": 32746, "episode_idx": 192, "frame_idx": 91, "global_frame_idx": 32746, "task_index": 38}, {"db_idx": 32747, "episode_idx": 192, "frame_idx": 92, "global_frame_idx": 32747, "task_index": 38}, {"db_idx": 32748, "episode_idx": 192, "frame_idx": 93, "global_frame_idx": 32748, "task_index": 38}, {"db_idx": 32749, "episode_idx": 192, "frame_idx": 94, "global_frame_idx": 32749, "task_index": 38}, {"db_idx": 32750, "episode_idx": 192, "frame_idx": 95, "global_frame_idx": 32750, "task_index": 38}, {"db_idx": 32751, "episode_idx": 192, "frame_idx": 96, "global_frame_idx": 32751, "task_index": 38}, {"db_idx": 32752, "episode_idx": 192, "frame_idx": 97, "global_frame_idx": 32752, "task_index": 38}, {"db_idx": 32753, "episode_idx": 192, "frame_idx": 98, "global_frame_idx": 32753, "task_index": 38}, {"db_idx": 
32754, "episode_idx": 192, "frame_idx": 99, "global_frame_idx": 32754, "task_index": 38}, {"db_idx": 32755, "episode_idx": 192, "frame_idx": 100, "global_frame_idx": 32755, "task_index": 38}, {"db_idx": 32756, "episode_idx": 192, "frame_idx": 101, "global_frame_idx": 32756, "task_index": 38}, {"db_idx": 32757, "episode_idx": 192, "frame_idx": 102, "global_frame_idx": 32757, "task_index": 38}, {"db_idx": 32758, "episode_idx": 192, "frame_idx": 103, "global_frame_idx": 32758, "task_index": 38}, {"db_idx": 32759, "episode_idx": 192, "frame_idx": 104, "global_frame_idx": 32759, "task_index": 38}, {"db_idx": 32760, "episode_idx": 192, "frame_idx": 105, "global_frame_idx": 32760, "task_index": 38}, {"db_idx": 32761, "episode_idx": 192, "frame_idx": 106, "global_frame_idx": 32761, "task_index": 38}, {"db_idx": 32762, "episode_idx": 192, "frame_idx": 107, "global_frame_idx": 32762, "task_index": 38}, {"db_idx": 32763, "episode_idx": 192, "frame_idx": 108, "global_frame_idx": 32763, "task_index": 38}, {"db_idx": 32764, "episode_idx": 192, "frame_idx": 109, "global_frame_idx": 32764, "task_index": 38}, {"db_idx": 32765, "episode_idx": 192, "frame_idx": 110, "global_frame_idx": 32765, "task_index": 38}, {"db_idx": 32766, "episode_idx": 192, "frame_idx": 111, "global_frame_idx": 32766, "task_index": 38}, {"db_idx": 32767, "episode_idx": 192, "frame_idx": 112, "global_frame_idx": 32767, "task_index": 38}, {"db_idx": 32768, "episode_idx": 192, "frame_idx": 113, "global_frame_idx": 32768, "task_index": 38}, {"db_idx": 32769, "episode_idx": 192, "frame_idx": 114, "global_frame_idx": 32769, "task_index": 38}, {"db_idx": 32770, "episode_idx": 192, "frame_idx": 115, "global_frame_idx": 32770, "task_index": 38}, {"db_idx": 32771, "episode_idx": 192, "frame_idx": 116, "global_frame_idx": 32771, "task_index": 38}, {"db_idx": 32772, "episode_idx": 192, "frame_idx": 117, "global_frame_idx": 32772, "task_index": 38}, {"db_idx": 32773, "episode_idx": 192, "frame_idx": 118, 
"global_frame_idx": 32773, "task_index": 38}, {"db_idx": 32774, "episode_idx": 192, "frame_idx": 119, "global_frame_idx": 32774, "task_index": 38}, {"db_idx": 32775, "episode_idx": 192, "frame_idx": 120, "global_frame_idx": 32775, "task_index": 38}, {"db_idx": 32776, "episode_idx": 192, "frame_idx": 121, "global_frame_idx": 32776, "task_index": 38}, {"db_idx": 32777, "episode_idx": 192, "frame_idx": 122, "global_frame_idx": 32777, "task_index": 38}, {"db_idx": 32778, "episode_idx": 192, "frame_idx": 123, "global_frame_idx": 32778, "task_index": 38}, {"db_idx": 32779, "episode_idx": 192, "frame_idx": 124, "global_frame_idx": 32779, "task_index": 38}, {"db_idx": 32780, "episode_idx": 192, "frame_idx": 125, "global_frame_idx": 32780, "task_index": 38}, {"db_idx": 32781, "episode_idx": 192, "frame_idx": 126, "global_frame_idx": 32781, "task_index": 38}, {"db_idx": 32782, "episode_idx": 192, "frame_idx": 127, "global_frame_idx": 32782, "task_index": 38}, {"db_idx": 32783, "episode_idx": 192, "frame_idx": 128, "global_frame_idx": 32783, "task_index": 38}, {"db_idx": 32784, "episode_idx": 192, "frame_idx": 129, "global_frame_idx": 32784, "task_index": 38}, {"db_idx": 32785, "episode_idx": 192, "frame_idx": 130, "global_frame_idx": 32785, "task_index": 38}, {"db_idx": 32786, "episode_idx": 192, "frame_idx": 131, "global_frame_idx": 32786, "task_index": 38}, {"db_idx": 32787, "episode_idx": 192, "frame_idx": 132, "global_frame_idx": 32787, "task_index": 38}, {"db_idx": 32788, "episode_idx": 192, "frame_idx": 133, "global_frame_idx": 32788, "task_index": 38}, {"db_idx": 32789, "episode_idx": 192, "frame_idx": 134, "global_frame_idx": 32789, "task_index": 38}, {"db_idx": 32790, "episode_idx": 192, "frame_idx": 135, "global_frame_idx": 32790, "task_index": 38}, {"db_idx": 32791, "episode_idx": 192, "frame_idx": 136, "global_frame_idx": 32791, "task_index": 38}, {"db_idx": 32792, "episode_idx": 192, "frame_idx": 137, "global_frame_idx": 32792, "task_index": 38}, {"db_idx": 
32793, "episode_idx": 192, "frame_idx": 138, "global_frame_idx": 32793, "task_index": 38}, {"db_idx": 32794, "episode_idx": 192, "frame_idx": 139, "global_frame_idx": 32794, "task_index": 38}, {"db_idx": 32795, "episode_idx": 192, "frame_idx": 140, "global_frame_idx": 32795, "task_index": 38}, {"db_idx": 32796, "episode_idx": 192, "frame_idx": 141, "global_frame_idx": 32796, "task_index": 38}, {"db_idx": 32797, "episode_idx": 192, "frame_idx": 142, "global_frame_idx": 32797, "task_index": 38}, {"db_idx": 32798, "episode_idx": 192, "frame_idx": 143, "global_frame_idx": 32798, "task_index": 38}, {"db_idx": 32799, "episode_idx": 192, "frame_idx": 144, "global_frame_idx": 32799, "task_index": 38}, {"db_idx": 32800, "episode_idx": 192, "frame_idx": 145, "global_frame_idx": 32800, "task_index": 38}, {"db_idx": 32801, "episode_idx": 192, "frame_idx": 146, "global_frame_idx": 32801, "task_index": 38}, {"db_idx": 32802, "episode_idx": 192, "frame_idx": 147, "global_frame_idx": 32802, "task_index": 38}, {"db_idx": 32803, "episode_idx": 192, "frame_idx": 148, "global_frame_idx": 32803, "task_index": 38}, {"db_idx": 32804, "episode_idx": 192, "frame_idx": 149, "global_frame_idx": 32804, "task_index": 38}, {"db_idx": 32805, "episode_idx": 193, "frame_idx": 0, "global_frame_idx": 32805, "task_index": 38}, {"db_idx": 32806, "episode_idx": 193, "frame_idx": 1, "global_frame_idx": 32806, "task_index": 38}, {"db_idx": 32807, "episode_idx": 193, "frame_idx": 2, "global_frame_idx": 32807, "task_index": 38}, {"db_idx": 32808, "episode_idx": 193, "frame_idx": 3, "global_frame_idx": 32808, "task_index": 38}, {"db_idx": 32809, "episode_idx": 193, "frame_idx": 4, "global_frame_idx": 32809, "task_index": 38}, {"db_idx": 32810, "episode_idx": 193, "frame_idx": 5, "global_frame_idx": 32810, "task_index": 38}, {"db_idx": 32811, "episode_idx": 193, "frame_idx": 6, "global_frame_idx": 32811, "task_index": 38}, {"db_idx": 32812, "episode_idx": 193, "frame_idx": 7, "global_frame_idx": 32812, 
"task_index": 38}, {"db_idx": 32813, "episode_idx": 193, "frame_idx": 8, "global_frame_idx": 32813, "task_index": 38}, {"db_idx": 32814, "episode_idx": 193, "frame_idx": 9, "global_frame_idx": 32814, "task_index": 38}, {"db_idx": 32815, "episode_idx": 193, "frame_idx": 10, "global_frame_idx": 32815, "task_index": 38}, {"db_idx": 32816, "episode_idx": 193, "frame_idx": 11, "global_frame_idx": 32816, "task_index": 38}, {"db_idx": 32817, "episode_idx": 193, "frame_idx": 12, "global_frame_idx": 32817, "task_index": 38}, {"db_idx": 32818, "episode_idx": 193, "frame_idx": 13, "global_frame_idx": 32818, "task_index": 38}, {"db_idx": 32819, "episode_idx": 193, "frame_idx": 14, "global_frame_idx": 32819, "task_index": 38}, {"db_idx": 32820, "episode_idx": 193, "frame_idx": 15, "global_frame_idx": 32820, "task_index": 38}, {"db_idx": 32821, "episode_idx": 193, "frame_idx": 16, "global_frame_idx": 32821, "task_index": 38}, {"db_idx": 32822, "episode_idx": 193, "frame_idx": 17, "global_frame_idx": 32822, "task_index": 38}, {"db_idx": 32823, "episode_idx": 193, "frame_idx": 18, "global_frame_idx": 32823, "task_index": 38}, {"db_idx": 32824, "episode_idx": 193, "frame_idx": 19, "global_frame_idx": 32824, "task_index": 38}, {"db_idx": 32825, "episode_idx": 193, "frame_idx": 20, "global_frame_idx": 32825, "task_index": 38}, {"db_idx": 32826, "episode_idx": 193, "frame_idx": 21, "global_frame_idx": 32826, "task_index": 38}, {"db_idx": 32827, "episode_idx": 193, "frame_idx": 22, "global_frame_idx": 32827, "task_index": 38}, {"db_idx": 32828, "episode_idx": 193, "frame_idx": 23, "global_frame_idx": 32828, "task_index": 38}, {"db_idx": 32829, "episode_idx": 193, "frame_idx": 24, "global_frame_idx": 32829, "task_index": 38}, {"db_idx": 32830, "episode_idx": 193, "frame_idx": 25, "global_frame_idx": 32830, "task_index": 38}, {"db_idx": 32831, "episode_idx": 193, "frame_idx": 26, "global_frame_idx": 32831, "task_index": 38}, {"db_idx": 32832, "episode_idx": 193, "frame_idx": 27, 
"global_frame_idx": 32832, "task_index": 38}, {"db_idx": 32833, "episode_idx": 193, "frame_idx": 28, "global_frame_idx": 32833, "task_index": 38}, {"db_idx": 32834, "episode_idx": 193, "frame_idx": 29, "global_frame_idx": 32834, "task_index": 38}, {"db_idx": 32835, "episode_idx": 193, "frame_idx": 30, "global_frame_idx": 32835, "task_index": 38}, {"db_idx": 32836, "episode_idx": 193, "frame_idx": 31, "global_frame_idx": 32836, "task_index": 38}, {"db_idx": 32837, "episode_idx": 193, "frame_idx": 32, "global_frame_idx": 32837, "task_index": 38}, {"db_idx": 32838, "episode_idx": 193, "frame_idx": 33, "global_frame_idx": 32838, "task_index": 38}, {"db_idx": 32839, "episode_idx": 193, "frame_idx": 34, "global_frame_idx": 32839, "task_index": 38}, {"db_idx": 32840, "episode_idx": 193, "frame_idx": 35, "global_frame_idx": 32840, "task_index": 38}, {"db_idx": 32841, "episode_idx": 193, "frame_idx": 36, "global_frame_idx": 32841, "task_index": 38}, {"db_idx": 32842, "episode_idx": 193, "frame_idx": 37, "global_frame_idx": 32842, "task_index": 38}, {"db_idx": 32843, "episode_idx": 193, "frame_idx": 38, "global_frame_idx": 32843, "task_index": 38}, {"db_idx": 32844, "episode_idx": 193, "frame_idx": 39, "global_frame_idx": 32844, "task_index": 38}, {"db_idx": 32845, "episode_idx": 193, "frame_idx": 40, "global_frame_idx": 32845, "task_index": 38}, {"db_idx": 32846, "episode_idx": 193, "frame_idx": 41, "global_frame_idx": 32846, "task_index": 38}, {"db_idx": 32847, "episode_idx": 193, "frame_idx": 42, "global_frame_idx": 32847, "task_index": 38}, {"db_idx": 32848, "episode_idx": 193, "frame_idx": 43, "global_frame_idx": 32848, "task_index": 38}, {"db_idx": 32849, "episode_idx": 193, "frame_idx": 44, "global_frame_idx": 32849, "task_index": 38}, {"db_idx": 32850, "episode_idx": 193, "frame_idx": 45, "global_frame_idx": 32850, "task_index": 38}, {"db_idx": 32851, "episode_idx": 193, "frame_idx": 46, "global_frame_idx": 32851, "task_index": 38}, {"db_idx": 32852, "episode_idx": 
193, "frame_idx": 47, "global_frame_idx": 32852, "task_index": 38}, {"db_idx": 32853, "episode_idx": 193, "frame_idx": 48, "global_frame_idx": 32853, "task_index": 38}, {"db_idx": 32854, "episode_idx": 193, "frame_idx": 49, "global_frame_idx": 32854, "task_index": 38}, {"db_idx": 32855, "episode_idx": 193, "frame_idx": 50, "global_frame_idx": 32855, "task_index": 38}, {"db_idx": 32856, "episode_idx": 193, "frame_idx": 51, "global_frame_idx": 32856, "task_index": 38}, {"db_idx": 32857, "episode_idx": 193, "frame_idx": 52, "global_frame_idx": 32857, "task_index": 38}, {"db_idx": 32858, "episode_idx": 193, "frame_idx": 53, "global_frame_idx": 32858, "task_index": 38}, {"db_idx": 32859, "episode_idx": 193, "frame_idx": 54, "global_frame_idx": 32859, "task_index": 38}, {"db_idx": 32860, "episode_idx": 193, "frame_idx": 55, "global_frame_idx": 32860, "task_index": 38}, {"db_idx": 32861, "episode_idx": 193, "frame_idx": 56, "global_frame_idx": 32861, "task_index": 38}, {"db_idx": 32862, "episode_idx": 193, "frame_idx": 57, "global_frame_idx": 32862, "task_index": 38}, {"db_idx": 32863, "episode_idx": 193, "frame_idx": 58, "global_frame_idx": 32863, "task_index": 38}, {"db_idx": 32864, "episode_idx": 193, "frame_idx": 59, "global_frame_idx": 32864, "task_index": 38}, {"db_idx": 32865, "episode_idx": 193, "frame_idx": 60, "global_frame_idx": 32865, "task_index": 38}, {"db_idx": 32866, "episode_idx": 193, "frame_idx": 61, "global_frame_idx": 32866, "task_index": 38}, {"db_idx": 32867, "episode_idx": 193, "frame_idx": 62, "global_frame_idx": 32867, "task_index": 38}, {"db_idx": 32868, "episode_idx": 193, "frame_idx": 63, "global_frame_idx": 32868, "task_index": 38}, {"db_idx": 32869, "episode_idx": 193, "frame_idx": 64, "global_frame_idx": 32869, "task_index": 38}, {"db_idx": 32870, "episode_idx": 193, "frame_idx": 65, "global_frame_idx": 32870, "task_index": 38}, {"db_idx": 32871, "episode_idx": 193, "frame_idx": 66, "global_frame_idx": 32871, "task_index": 38}, {"db_idx": 
32872, "episode_idx": 193, "frame_idx": 67, "global_frame_idx": 32872, "task_index": 38}, {"db_idx": 32873, "episode_idx": 193, "frame_idx": 68, "global_frame_idx": 32873, "task_index": 38}, {"db_idx": 32874, "episode_idx": 193, "frame_idx": 69, "global_frame_idx": 32874, "task_index": 38}, {"db_idx": 32875, "episode_idx": 193, "frame_idx": 70, "global_frame_idx": 32875, "task_index": 38}, {"db_idx": 32876, "episode_idx": 193, "frame_idx": 71, "global_frame_idx": 32876, "task_index": 38}, {"db_idx": 32877, "episode_idx": 193, "frame_idx": 72, "global_frame_idx": 32877, "task_index": 38}, {"db_idx": 32878, "episode_idx": 193, "frame_idx": 73, "global_frame_idx": 32878, "task_index": 38}, {"db_idx": 32879, "episode_idx": 193, "frame_idx": 74, "global_frame_idx": 32879, "task_index": 38}, {"db_idx": 32880, "episode_idx": 193, "frame_idx": 75, "global_frame_idx": 32880, "task_index": 38}, {"db_idx": 32881, "episode_idx": 193, "frame_idx": 76, "global_frame_idx": 32881, "task_index": 38}, {"db_idx": 32882, "episode_idx": 193, "frame_idx": 77, "global_frame_idx": 32882, "task_index": 38}, {"db_idx": 32883, "episode_idx": 193, "frame_idx": 78, "global_frame_idx": 32883, "task_index": 38}, {"db_idx": 32884, "episode_idx": 193, "frame_idx": 79, "global_frame_idx": 32884, "task_index": 38}, {"db_idx": 32885, "episode_idx": 193, "frame_idx": 80, "global_frame_idx": 32885, "task_index": 38}, {"db_idx": 32886, "episode_idx": 193, "frame_idx": 81, "global_frame_idx": 32886, "task_index": 38}, {"db_idx": 32887, "episode_idx": 193, "frame_idx": 82, "global_frame_idx": 32887, "task_index": 38}, {"db_idx": 32888, "episode_idx": 193, "frame_idx": 83, "global_frame_idx": 32888, "task_index": 38}, {"db_idx": 32889, "episode_idx": 193, "frame_idx": 84, "global_frame_idx": 32889, "task_index": 38}, {"db_idx": 32890, "episode_idx": 193, "frame_idx": 85, "global_frame_idx": 32890, "task_index": 38}, {"db_idx": 32891, "episode_idx": 193, "frame_idx": 86, "global_frame_idx": 32891, 
"task_index": 38}, {"db_idx": 32892, "episode_idx": 193, "frame_idx": 87, "global_frame_idx": 32892, "task_index": 38}, {"db_idx": 32893, "episode_idx": 193, "frame_idx": 88, "global_frame_idx": 32893, "task_index": 38}, {"db_idx": 32894, "episode_idx": 193, "frame_idx": 89, "global_frame_idx": 32894, "task_index": 38}, {"db_idx": 32895, "episode_idx": 193, "frame_idx": 90, "global_frame_idx": 32895, "task_index": 38}, {"db_idx": 32896, "episode_idx": 193, "frame_idx": 91, "global_frame_idx": 32896, "task_index": 38}, {"db_idx": 32897, "episode_idx": 193, "frame_idx": 92, "global_frame_idx": 32897, "task_index": 38}, {"db_idx": 32898, "episode_idx": 193, "frame_idx": 93, "global_frame_idx": 32898, "task_index": 38}, {"db_idx": 32899, "episode_idx": 193, "frame_idx": 94, "global_frame_idx": 32899, "task_index": 38}, {"db_idx": 32900, "episode_idx": 193, "frame_idx": 95, "global_frame_idx": 32900, "task_index": 38}, {"db_idx": 32901, "episode_idx": 193, "frame_idx": 96, "global_frame_idx": 32901, "task_index": 38}, {"db_idx": 32902, "episode_idx": 193, "frame_idx": 97, "global_frame_idx": 32902, "task_index": 38}, {"db_idx": 32903, "episode_idx": 193, "frame_idx": 98, "global_frame_idx": 32903, "task_index": 38}, {"db_idx": 32904, "episode_idx": 193, "frame_idx": 99, "global_frame_idx": 32904, "task_index": 38}, {"db_idx": 32905, "episode_idx": 193, "frame_idx": 100, "global_frame_idx": 32905, "task_index": 38}, {"db_idx": 32906, "episode_idx": 193, "frame_idx": 101, "global_frame_idx": 32906, "task_index": 38}, {"db_idx": 32907, "episode_idx": 193, "frame_idx": 102, "global_frame_idx": 32907, "task_index": 38}, {"db_idx": 32908, "episode_idx": 193, "frame_idx": 103, "global_frame_idx": 32908, "task_index": 38}, {"db_idx": 32909, "episode_idx": 193, "frame_idx": 104, "global_frame_idx": 32909, "task_index": 38}, {"db_idx": 32910, "episode_idx": 193, "frame_idx": 105, "global_frame_idx": 32910, "task_index": 38}, {"db_idx": 32911, "episode_idx": 193, "frame_idx": 106, 
"global_frame_idx": 32911, "task_index": 38}, {"db_idx": 32912, "episode_idx": 193, "frame_idx": 107, "global_frame_idx": 32912, "task_index": 38}, {"db_idx": 32913, "episode_idx": 193, "frame_idx": 108, "global_frame_idx": 32913, "task_index": 38}, {"db_idx": 32914, "episode_idx": 193, "frame_idx": 109, "global_frame_idx": 32914, "task_index": 38}, {"db_idx": 32915, "episode_idx": 193, "frame_idx": 110, "global_frame_idx": 32915, "task_index": 38}, {"db_idx": 32916, "episode_idx": 193, "frame_idx": 111, "global_frame_idx": 32916, "task_index": 38}, {"db_idx": 32917, "episode_idx": 193, "frame_idx": 112, "global_frame_idx": 32917, "task_index": 38}, {"db_idx": 32918, "episode_idx": 193, "frame_idx": 113, "global_frame_idx": 32918, "task_index": 38}, {"db_idx": 32919, "episode_idx": 194, "frame_idx": 0, "global_frame_idx": 32919, "task_index": 38}, {"db_idx": 32920, "episode_idx": 194, "frame_idx": 1, "global_frame_idx": 32920, "task_index": 38}, {"db_idx": 32921, "episode_idx": 194, "frame_idx": 2, "global_frame_idx": 32921, "task_index": 38}, {"db_idx": 32922, "episode_idx": 194, "frame_idx": 3, "global_frame_idx": 32922, "task_index": 38}, {"db_idx": 32923, "episode_idx": 194, "frame_idx": 4, "global_frame_idx": 32923, "task_index": 38}, {"db_idx": 32924, "episode_idx": 194, "frame_idx": 5, "global_frame_idx": 32924, "task_index": 38}, {"db_idx": 32925, "episode_idx": 194, "frame_idx": 6, "global_frame_idx": 32925, "task_index": 38}, {"db_idx": 32926, "episode_idx": 194, "frame_idx": 7, "global_frame_idx": 32926, "task_index": 38}, {"db_idx": 32927, "episode_idx": 194, "frame_idx": 8, "global_frame_idx": 32927, "task_index": 38}, {"db_idx": 32928, "episode_idx": 194, "frame_idx": 9, "global_frame_idx": 32928, "task_index": 38}, {"db_idx": 32929, "episode_idx": 194, "frame_idx": 10, "global_frame_idx": 32929, "task_index": 38}, {"db_idx": 32930, "episode_idx": 194, "frame_idx": 11, "global_frame_idx": 32930, "task_index": 38}, {"db_idx": 32931, "episode_idx": 194, 
"frame_idx": 12, "global_frame_idx": 32931, "task_index": 38}, {"db_idx": 32932, "episode_idx": 194, "frame_idx": 13, "global_frame_idx": 32932, "task_index": 38}, {"db_idx": 32933, "episode_idx": 194, "frame_idx": 14, "global_frame_idx": 32933, "task_index": 38}, {"db_idx": 32934, "episode_idx": 194, "frame_idx": 15, "global_frame_idx": 32934, "task_index": 38}, {"db_idx": 32935, "episode_idx": 194, "frame_idx": 16, "global_frame_idx": 32935, "task_index": 38}, {"db_idx": 32936, "episode_idx": 194, "frame_idx": 17, "global_frame_idx": 32936, "task_index": 38}, {"db_idx": 32937, "episode_idx": 194, "frame_idx": 18, "global_frame_idx": 32937, "task_index": 38}, {"db_idx": 32938, "episode_idx": 194, "frame_idx": 19, "global_frame_idx": 32938, "task_index": 38}, {"db_idx": 32939, "episode_idx": 194, "frame_idx": 20, "global_frame_idx": 32939, "task_index": 38}, {"db_idx": 32940, "episode_idx": 194, "frame_idx": 21, "global_frame_idx": 32940, "task_index": 38}, {"db_idx": 32941, "episode_idx": 194, "frame_idx": 22, "global_frame_idx": 32941, "task_index": 38}, {"db_idx": 32942, "episode_idx": 194, "frame_idx": 23, "global_frame_idx": 32942, "task_index": 38}, {"db_idx": 32943, "episode_idx": 194, "frame_idx": 24, "global_frame_idx": 32943, "task_index": 38}, {"db_idx": 32944, "episode_idx": 194, "frame_idx": 25, "global_frame_idx": 32944, "task_index": 38}, {"db_idx": 32945, "episode_idx": 194, "frame_idx": 26, "global_frame_idx": 32945, "task_index": 38}, {"db_idx": 32946, "episode_idx": 194, "frame_idx": 27, "global_frame_idx": 32946, "task_index": 38}, {"db_idx": 32947, "episode_idx": 194, "frame_idx": 28, "global_frame_idx": 32947, "task_index": 38}, {"db_idx": 32948, "episode_idx": 194, "frame_idx": 29, "global_frame_idx": 32948, "task_index": 38}, {"db_idx": 32949, "episode_idx": 194, "frame_idx": 30, "global_frame_idx": 32949, "task_index": 38}, {"db_idx": 32950, "episode_idx": 194, "frame_idx": 31, "global_frame_idx": 32950, "task_index": 38}, {"db_idx": 32951, 
"episode_idx": 194, "frame_idx": 32, "global_frame_idx": 32951, "task_index": 38}, {"db_idx": 32952, "episode_idx": 194, "frame_idx": 33, "global_frame_idx": 32952, "task_index": 38}, {"db_idx": 32953, "episode_idx": 194, "frame_idx": 34, "global_frame_idx": 32953, "task_index": 38}, {"db_idx": 32954, "episode_idx": 194, "frame_idx": 35, "global_frame_idx": 32954, "task_index": 38}, {"db_idx": 32955, "episode_idx": 194, "frame_idx": 36, "global_frame_idx": 32955, "task_index": 38}, {"db_idx": 32956, "episode_idx": 194, "frame_idx": 37, "global_frame_idx": 32956, "task_index": 38}, {"db_idx": 32957, "episode_idx": 194, "frame_idx": 38, "global_frame_idx": 32957, "task_index": 38}, {"db_idx": 32958, "episode_idx": 194, "frame_idx": 39, "global_frame_idx": 32958, "task_index": 38}, {"db_idx": 32959, "episode_idx": 194, "frame_idx": 40, "global_frame_idx": 32959, "task_index": 38}, {"db_idx": 32960, "episode_idx": 194, "frame_idx": 41, "global_frame_idx": 32960, "task_index": 38}, {"db_idx": 32961, "episode_idx": 194, "frame_idx": 42, "global_frame_idx": 32961, "task_index": 38}, {"db_idx": 32962, "episode_idx": 194, "frame_idx": 43, "global_frame_idx": 32962, "task_index": 38}, {"db_idx": 32963, "episode_idx": 194, "frame_idx": 44, "global_frame_idx": 32963, "task_index": 38}, {"db_idx": 32964, "episode_idx": 194, "frame_idx": 45, "global_frame_idx": 32964, "task_index": 38}, {"db_idx": 32965, "episode_idx": 194, "frame_idx": 46, "global_frame_idx": 32965, "task_index": 38}, {"db_idx": 32966, "episode_idx": 194, "frame_idx": 47, "global_frame_idx": 32966, "task_index": 38}, {"db_idx": 32967, "episode_idx": 194, "frame_idx": 48, "global_frame_idx": 32967, "task_index": 38}, {"db_idx": 32968, "episode_idx": 194, "frame_idx": 49, "global_frame_idx": 32968, "task_index": 38}, {"db_idx": 32969, "episode_idx": 194, "frame_idx": 50, "global_frame_idx": 32969, "task_index": 38}, {"db_idx": 32970, "episode_idx": 194, "frame_idx": 51, "global_frame_idx": 32970, "task_index": 
38}, {"db_idx": 32971, "episode_idx": 194, "frame_idx": 52, "global_frame_idx": 32971, "task_index": 38}, {"db_idx": 32972, "episode_idx": 194, "frame_idx": 53, "global_frame_idx": 32972, "task_index": 38}, {"db_idx": 32973, "episode_idx": 194, "frame_idx": 54, "global_frame_idx": 32973, "task_index": 38}, {"db_idx": 32974, "episode_idx": 194, "frame_idx": 55, "global_frame_idx": 32974, "task_index": 38}, {"db_idx": 32975, "episode_idx": 194, "frame_idx": 56, "global_frame_idx": 32975, "task_index": 38}, {"db_idx": 32976, "episode_idx": 194, "frame_idx": 57, "global_frame_idx": 32976, "task_index": 38}, {"db_idx": 32977, "episode_idx": 194, "frame_idx": 58, "global_frame_idx": 32977, "task_index": 38}, {"db_idx": 32978, "episode_idx": 194, "frame_idx": 59, "global_frame_idx": 32978, "task_index": 38}, {"db_idx": 32979, "episode_idx": 194, "frame_idx": 60, "global_frame_idx": 32979, "task_index": 38}, {"db_idx": 32980, "episode_idx": 194, "frame_idx": 61, "global_frame_idx": 32980, "task_index": 38}, {"db_idx": 32981, "episode_idx": 194, "frame_idx": 62, "global_frame_idx": 32981, "task_index": 38}, {"db_idx": 32982, "episode_idx": 194, "frame_idx": 63, "global_frame_idx": 32982, "task_index": 38}, {"db_idx": 32983, "episode_idx": 194, "frame_idx": 64, "global_frame_idx": 32983, "task_index": 38}, {"db_idx": 32984, "episode_idx": 194, "frame_idx": 65, "global_frame_idx": 32984, "task_index": 38}, {"db_idx": 32985, "episode_idx": 194, "frame_idx": 66, "global_frame_idx": 32985, "task_index": 38}, {"db_idx": 32986, "episode_idx": 194, "frame_idx": 67, "global_frame_idx": 32986, "task_index": 38}, {"db_idx": 32987, "episode_idx": 194, "frame_idx": 68, "global_frame_idx": 32987, "task_index": 38}, {"db_idx": 32988, "episode_idx": 194, "frame_idx": 69, "global_frame_idx": 32988, "task_index": 38}, {"db_idx": 32989, "episode_idx": 194, "frame_idx": 70, "global_frame_idx": 32989, "task_index": 38}, {"db_idx": 32990, "episode_idx": 194, "frame_idx": 71, "global_frame_idx": 
32990, "task_index": 38}, {"db_idx": 32991, "episode_idx": 194, "frame_idx": 72, "global_frame_idx": 32991, "task_index": 38}, {"db_idx": 32992, "episode_idx": 194, "frame_idx": 73, "global_frame_idx": 32992, "task_index": 38}, {"db_idx": 32993, "episode_idx": 194, "frame_idx": 74, "global_frame_idx": 32993, "task_index": 38}, {"db_idx": 32994, "episode_idx": 194, "frame_idx": 75, "global_frame_idx": 32994, "task_index": 38}, {"db_idx": 32995, "episode_idx": 194, "frame_idx": 76, "global_frame_idx": 32995, "task_index": 38}, {"db_idx": 32996, "episode_idx": 194, "frame_idx": 77, "global_frame_idx": 32996, "task_index": 38}, {"db_idx": 32997, "episode_idx": 194, "frame_idx": 78, "global_frame_idx": 32997, "task_index": 38}, {"db_idx": 32998, "episode_idx": 194, "frame_idx": 79, "global_frame_idx": 32998, "task_index": 38}, {"db_idx": 32999, "episode_idx": 194, "frame_idx": 80, "global_frame_idx": 32999, "task_index": 38}, {"db_idx": 33000, "episode_idx": 194, "frame_idx": 81, "global_frame_idx": 33000, "task_index": 38}, {"db_idx": 33001, "episode_idx": 194, "frame_idx": 82, "global_frame_idx": 33001, "task_index": 38}, {"db_idx": 33002, "episode_idx": 194, "frame_idx": 83, "global_frame_idx": 33002, "task_index": 38}, {"db_idx": 33003, "episode_idx": 194, "frame_idx": 84, "global_frame_idx": 33003, "task_index": 38}, {"db_idx": 33004, "episode_idx": 194, "frame_idx": 85, "global_frame_idx": 33004, "task_index": 38}, {"db_idx": 33005, "episode_idx": 194, "frame_idx": 86, "global_frame_idx": 33005, "task_index": 38}, {"db_idx": 33006, "episode_idx": 194, "frame_idx": 87, "global_frame_idx": 33006, "task_index": 38}, {"db_idx": 33007, "episode_idx": 194, "frame_idx": 88, "global_frame_idx": 33007, "task_index": 38}, {"db_idx": 33008, "episode_idx": 194, "frame_idx": 89, "global_frame_idx": 33008, "task_index": 38}, {"db_idx": 33009, "episode_idx": 194, "frame_idx": 90, "global_frame_idx": 33009, "task_index": 38}, {"db_idx": 33010, "episode_idx": 194, "frame_idx": 91, 
"global_frame_idx": 33010, "task_index": 38}, {"db_idx": 33011, "episode_idx": 194, "frame_idx": 92, "global_frame_idx": 33011, "task_index": 38}, {"db_idx": 33012, "episode_idx": 194, "frame_idx": 93, "global_frame_idx": 33012, "task_index": 38}, {"db_idx": 33013, "episode_idx": 194, "frame_idx": 94, "global_frame_idx": 33013, "task_index": 38}, {"db_idx": 33014, "episode_idx": 194, "frame_idx": 95, "global_frame_idx": 33014, "task_index": 38}, {"db_idx": 33015, "episode_idx": 194, "frame_idx": 96, "global_frame_idx": 33015, "task_index": 38}, {"db_idx": 33016, "episode_idx": 194, "frame_idx": 97, "global_frame_idx": 33016, "task_index": 38}, {"db_idx": 33017, "episode_idx": 194, "frame_idx": 98, "global_frame_idx": 33017, "task_index": 38}, {"db_idx": 33018, "episode_idx": 194, "frame_idx": 99, "global_frame_idx": 33018, "task_index": 38}, {"db_idx": 33019, "episode_idx": 194, "frame_idx": 100, "global_frame_idx": 33019, "task_index": 38}, {"db_idx": 33020, "episode_idx": 194, "frame_idx": 101, "global_frame_idx": 33020, "task_index": 38}, {"db_idx": 33021, "episode_idx": 194, "frame_idx": 102, "global_frame_idx": 33021, "task_index": 38}, {"db_idx": 33022, "episode_idx": 194, "frame_idx": 103, "global_frame_idx": 33022, "task_index": 38}, {"db_idx": 33023, "episode_idx": 194, "frame_idx": 104, "global_frame_idx": 33023, "task_index": 38}, {"db_idx": 33024, "episode_idx": 194, "frame_idx": 105, "global_frame_idx": 33024, "task_index": 38}, {"db_idx": 33025, "episode_idx": 194, "frame_idx": 106, "global_frame_idx": 33025, "task_index": 38}, {"db_idx": 33026, "episode_idx": 194, "frame_idx": 107, "global_frame_idx": 33026, "task_index": 38}, {"db_idx": 33027, "episode_idx": 194, "frame_idx": 108, "global_frame_idx": 33027, "task_index": 38}, {"db_idx": 33028, "episode_idx": 194, "frame_idx": 109, "global_frame_idx": 33028, "task_index": 38}, {"db_idx": 33029, "episode_idx": 194, "frame_idx": 110, "global_frame_idx": 33029, "task_index": 38}, {"db_idx": 33030, 
"episode_idx": 194, "frame_idx": 111, "global_frame_idx": 33030, "task_index": 38}, {"db_idx": 33031, "episode_idx": 195, "frame_idx": 0, "global_frame_idx": 33031, "task_index": 39}, {"db_idx": 33032, "episode_idx": 195, "frame_idx": 1, "global_frame_idx": 33032, "task_index": 39}, {"db_idx": 33033, "episode_idx": 195, "frame_idx": 2, "global_frame_idx": 33033, "task_index": 39}, {"db_idx": 33034, "episode_idx": 195, "frame_idx": 3, "global_frame_idx": 33034, "task_index": 39}, {"db_idx": 33035, "episode_idx": 195, "frame_idx": 4, "global_frame_idx": 33035, "task_index": 39}, {"db_idx": 33036, "episode_idx": 195, "frame_idx": 5, "global_frame_idx": 33036, "task_index": 39}, {"db_idx": 33037, "episode_idx": 195, "frame_idx": 6, "global_frame_idx": 33037, "task_index": 39}, {"db_idx": 33038, "episode_idx": 195, "frame_idx": 7, "global_frame_idx": 33038, "task_index": 39}, {"db_idx": 33039, "episode_idx": 195, "frame_idx": 8, "global_frame_idx": 33039, "task_index": 39}, {"db_idx": 33040, "episode_idx": 195, "frame_idx": 9, "global_frame_idx": 33040, "task_index": 39}, {"db_idx": 33041, "episode_idx": 195, "frame_idx": 10, "global_frame_idx": 33041, "task_index": 39}, {"db_idx": 33042, "episode_idx": 195, "frame_idx": 11, "global_frame_idx": 33042, "task_index": 39}, {"db_idx": 33043, "episode_idx": 195, "frame_idx": 12, "global_frame_idx": 33043, "task_index": 39}, {"db_idx": 33044, "episode_idx": 195, "frame_idx": 13, "global_frame_idx": 33044, "task_index": 39}, {"db_idx": 33045, "episode_idx": 195, "frame_idx": 14, "global_frame_idx": 33045, "task_index": 39}, {"db_idx": 33046, "episode_idx": 195, "frame_idx": 15, "global_frame_idx": 33046, "task_index": 39}, {"db_idx": 33047, "episode_idx": 195, "frame_idx": 16, "global_frame_idx": 33047, "task_index": 39}, {"db_idx": 33048, "episode_idx": 195, "frame_idx": 17, "global_frame_idx": 33048, "task_index": 39}, {"db_idx": 33049, "episode_idx": 195, "frame_idx": 18, "global_frame_idx": 33049, "task_index": 39}, 
{"db_idx": 33050, "episode_idx": 195, "frame_idx": 19, "global_frame_idx": 33050, "task_index": 39}, {"db_idx": 33051, "episode_idx": 195, "frame_idx": 20, "global_frame_idx": 33051, "task_index": 39}, {"db_idx": 33052, "episode_idx": 195, "frame_idx": 21, "global_frame_idx": 33052, "task_index": 39}, {"db_idx": 33053, "episode_idx": 195, "frame_idx": 22, "global_frame_idx": 33053, "task_index": 39}, {"db_idx": 33054, "episode_idx": 195, "frame_idx": 23, "global_frame_idx": 33054, "task_index": 39}, {"db_idx": 33055, "episode_idx": 195, "frame_idx": 24, "global_frame_idx": 33055, "task_index": 39}, {"db_idx": 33056, "episode_idx": 195, "frame_idx": 25, "global_frame_idx": 33056, "task_index": 39}, {"db_idx": 33057, "episode_idx": 195, "frame_idx": 26, "global_frame_idx": 33057, "task_index": 39}, {"db_idx": 33058, "episode_idx": 195, "frame_idx": 27, "global_frame_idx": 33058, "task_index": 39}, {"db_idx": 33059, "episode_idx": 195, "frame_idx": 28, "global_frame_idx": 33059, "task_index": 39}, {"db_idx": 33060, "episode_idx": 195, "frame_idx": 29, "global_frame_idx": 33060, "task_index": 39}, {"db_idx": 33061, "episode_idx": 195, "frame_idx": 30, "global_frame_idx": 33061, "task_index": 39}, {"db_idx": 33062, "episode_idx": 195, "frame_idx": 31, "global_frame_idx": 33062, "task_index": 39}, {"db_idx": 33063, "episode_idx": 195, "frame_idx": 32, "global_frame_idx": 33063, "task_index": 39}, {"db_idx": 33064, "episode_idx": 195, "frame_idx": 33, "global_frame_idx": 33064, "task_index": 39}, {"db_idx": 33065, "episode_idx": 195, "frame_idx": 34, "global_frame_idx": 33065, "task_index": 39}, {"db_idx": 33066, "episode_idx": 195, "frame_idx": 35, "global_frame_idx": 33066, "task_index": 39}, {"db_idx": 33067, "episode_idx": 195, "frame_idx": 36, "global_frame_idx": 33067, "task_index": 39}, {"db_idx": 33068, "episode_idx": 195, "frame_idx": 37, "global_frame_idx": 33068, "task_index": 39}, {"db_idx": 33069, "episode_idx": 195, "frame_idx": 38, "global_frame_idx": 
33069, "task_index": 39}, {"db_idx": 33070, "episode_idx": 195, "frame_idx": 39, "global_frame_idx": 33070, "task_index": 39}, {"db_idx": 33071, "episode_idx": 195, "frame_idx": 40, "global_frame_idx": 33071, "task_index": 39}, {"db_idx": 33072, "episode_idx": 195, "frame_idx": 41, "global_frame_idx": 33072, "task_index": 39}, {"db_idx": 33073, "episode_idx": 195, "frame_idx": 42, "global_frame_idx": 33073, "task_index": 39}, {"db_idx": 33074, "episode_idx": 195, "frame_idx": 43, "global_frame_idx": 33074, "task_index": 39}, {"db_idx": 33075, "episode_idx": 195, "frame_idx": 44, "global_frame_idx": 33075, "task_index": 39}, {"db_idx": 33076, "episode_idx": 195, "frame_idx": 45, "global_frame_idx": 33076, "task_index": 39}, {"db_idx": 33077, "episode_idx": 195, "frame_idx": 46, "global_frame_idx": 33077, "task_index": 39}, {"db_idx": 33078, "episode_idx": 195, "frame_idx": 47, "global_frame_idx": 33078, "task_index": 39}, {"db_idx": 33079, "episode_idx": 195, "frame_idx": 48, "global_frame_idx": 33079, "task_index": 39}, {"db_idx": 33080, "episode_idx": 195, "frame_idx": 49, "global_frame_idx": 33080, "task_index": 39}, {"db_idx": 33081, "episode_idx": 195, "frame_idx": 50, "global_frame_idx": 33081, "task_index": 39}, {"db_idx": 33082, "episode_idx": 195, "frame_idx": 51, "global_frame_idx": 33082, "task_index": 39}, {"db_idx": 33083, "episode_idx": 195, "frame_idx": 52, "global_frame_idx": 33083, "task_index": 39}, {"db_idx": 33084, "episode_idx": 195, "frame_idx": 53, "global_frame_idx": 33084, "task_index": 39}, {"db_idx": 33085, "episode_idx": 195, "frame_idx": 54, "global_frame_idx": 33085, "task_index": 39}, {"db_idx": 33086, "episode_idx": 195, "frame_idx": 55, "global_frame_idx": 33086, "task_index": 39}, {"db_idx": 33087, "episode_idx": 195, "frame_idx": 56, "global_frame_idx": 33087, "task_index": 39}, {"db_idx": 33088, "episode_idx": 195, "frame_idx": 57, "global_frame_idx": 33088, "task_index": 39}, {"db_idx": 33089, "episode_idx": 195, "frame_idx": 58, 
"global_frame_idx": 33089, "task_index": 39}, {"db_idx": 33090, "episode_idx": 195, "frame_idx": 59, "global_frame_idx": 33090, "task_index": 39}, {"db_idx": 33091, "episode_idx": 195, "frame_idx": 60, "global_frame_idx": 33091, "task_index": 39}, {"db_idx": 33092, "episode_idx": 195, "frame_idx": 61, "global_frame_idx": 33092, "task_index": 39}, {"db_idx": 33093, "episode_idx": 195, "frame_idx": 62, "global_frame_idx": 33093, "task_index": 39}, {"db_idx": 33094, "episode_idx": 195, "frame_idx": 63, "global_frame_idx": 33094, "task_index": 39}, {"db_idx": 33095, "episode_idx": 195, "frame_idx": 64, "global_frame_idx": 33095, "task_index": 39}, {"db_idx": 33096, "episode_idx": 195, "frame_idx": 65, "global_frame_idx": 33096, "task_index": 39}, {"db_idx": 33097, "episode_idx": 195, "frame_idx": 66, "global_frame_idx": 33097, "task_index": 39}, {"db_idx": 33098, "episode_idx": 195, "frame_idx": 67, "global_frame_idx": 33098, "task_index": 39}, {"db_idx": 33099, "episode_idx": 195, "frame_idx": 68, "global_frame_idx": 33099, "task_index": 39}, {"db_idx": 33100, "episode_idx": 195, "frame_idx": 69, "global_frame_idx": 33100, "task_index": 39}, {"db_idx": 33101, "episode_idx": 195, "frame_idx": 70, "global_frame_idx": 33101, "task_index": 39}, {"db_idx": 33102, "episode_idx": 195, "frame_idx": 71, "global_frame_idx": 33102, "task_index": 39}, {"db_idx": 33103, "episode_idx": 195, "frame_idx": 72, "global_frame_idx": 33103, "task_index": 39}, {"db_idx": 33104, "episode_idx": 195, "frame_idx": 73, "global_frame_idx": 33104, "task_index": 39}, {"db_idx": 33105, "episode_idx": 195, "frame_idx": 74, "global_frame_idx": 33105, "task_index": 39}, {"db_idx": 33106, "episode_idx": 195, "frame_idx": 75, "global_frame_idx": 33106, "task_index": 39}, {"db_idx": 33107, "episode_idx": 195, "frame_idx": 76, "global_frame_idx": 33107, "task_index": 39}, {"db_idx": 33108, "episode_idx": 195, "frame_idx": 77, "global_frame_idx": 33108, "task_index": 39}, {"db_idx": 33109, "episode_idx": 
195, "frame_idx": 78, "global_frame_idx": 33109, "task_index": 39}, {"db_idx": 33110, "episode_idx": 195, "frame_idx": 79, "global_frame_idx": 33110, "task_index": 39}, {"db_idx": 33111, "episode_idx": 195, "frame_idx": 80, "global_frame_idx": 33111, "task_index": 39}, {"db_idx": 33112, "episode_idx": 195, "frame_idx": 81, "global_frame_idx": 33112, "task_index": 39}, {"db_idx": 33113, "episode_idx": 195, "frame_idx": 82, "global_frame_idx": 33113, "task_index": 39}, {"db_idx": 33114, "episode_idx": 195, "frame_idx": 83, "global_frame_idx": 33114, "task_index": 39}, {"db_idx": 33115, "episode_idx": 195, "frame_idx": 84, "global_frame_idx": 33115, "task_index": 39}, {"db_idx": 33116, "episode_idx": 195, "frame_idx": 85, "global_frame_idx": 33116, "task_index": 39}, {"db_idx": 33117, "episode_idx": 195, "frame_idx": 86, "global_frame_idx": 33117, "task_index": 39}, {"db_idx": 33118, "episode_idx": 195, "frame_idx": 87, "global_frame_idx": 33118, "task_index": 39}, {"db_idx": 33119, "episode_idx": 195, "frame_idx": 88, "global_frame_idx": 33119, "task_index": 39}, {"db_idx": 33120, "episode_idx": 195, "frame_idx": 89, "global_frame_idx": 33120, "task_index": 39}, {"db_idx": 33121, "episode_idx": 195, "frame_idx": 90, "global_frame_idx": 33121, "task_index": 39}, {"db_idx": 33122, "episode_idx": 195, "frame_idx": 91, "global_frame_idx": 33122, "task_index": 39}, {"db_idx": 33123, "episode_idx": 195, "frame_idx": 92, "global_frame_idx": 33123, "task_index": 39}, {"db_idx": 33124, "episode_idx": 195, "frame_idx": 93, "global_frame_idx": 33124, "task_index": 39}, {"db_idx": 33125, "episode_idx": 195, "frame_idx": 94, "global_frame_idx": 33125, "task_index": 39}, {"db_idx": 33126, "episode_idx": 195, "frame_idx": 95, "global_frame_idx": 33126, "task_index": 39}, {"db_idx": 33127, "episode_idx": 195, "frame_idx": 96, "global_frame_idx": 33127, "task_index": 39}, {"db_idx": 33128, "episode_idx": 195, "frame_idx": 97, "global_frame_idx": 33128, "task_index": 39}, {"db_idx": 
33129, "episode_idx": 196, "frame_idx": 0, "global_frame_idx": 33129, "task_index": 39}, {"db_idx": 33130, "episode_idx": 196, "frame_idx": 1, "global_frame_idx": 33130, "task_index": 39}, {"db_idx": 33131, "episode_idx": 196, "frame_idx": 2, "global_frame_idx": 33131, "task_index": 39}, {"db_idx": 33132, "episode_idx": 196, "frame_idx": 3, "global_frame_idx": 33132, "task_index": 39}, {"db_idx": 33133, "episode_idx": 196, "frame_idx": 4, "global_frame_idx": 33133, "task_index": 39}, {"db_idx": 33134, "episode_idx": 196, "frame_idx": 5, "global_frame_idx": 33134, "task_index": 39}, {"db_idx": 33135, "episode_idx": 196, "frame_idx": 6, "global_frame_idx": 33135, "task_index": 39}, {"db_idx": 33136, "episode_idx": 196, "frame_idx": 7, "global_frame_idx": 33136, "task_index": 39}, {"db_idx": 33137, "episode_idx": 196, "frame_idx": 8, "global_frame_idx": 33137, "task_index": 39}, {"db_idx": 33138, "episode_idx": 196, "frame_idx": 9, "global_frame_idx": 33138, "task_index": 39}, {"db_idx": 33139, "episode_idx": 196, "frame_idx": 10, "global_frame_idx": 33139, "task_index": 39}, {"db_idx": 33140, "episode_idx": 196, "frame_idx": 11, "global_frame_idx": 33140, "task_index": 39}, {"db_idx": 33141, "episode_idx": 196, "frame_idx": 12, "global_frame_idx": 33141, "task_index": 39}, {"db_idx": 33142, "episode_idx": 196, "frame_idx": 13, "global_frame_idx": 33142, "task_index": 39}, {"db_idx": 33143, "episode_idx": 196, "frame_idx": 14, "global_frame_idx": 33143, "task_index": 39}, {"db_idx": 33144, "episode_idx": 196, "frame_idx": 15, "global_frame_idx": 33144, "task_index": 39}, {"db_idx": 33145, "episode_idx": 196, "frame_idx": 16, "global_frame_idx": 33145, "task_index": 39}, {"db_idx": 33146, "episode_idx": 196, "frame_idx": 17, "global_frame_idx": 33146, "task_index": 39}, {"db_idx": 33147, "episode_idx": 196, "frame_idx": 18, "global_frame_idx": 33147, "task_index": 39}, {"db_idx": 33148, "episode_idx": 196, "frame_idx": 19, "global_frame_idx": 33148, "task_index": 39}, 
{"db_idx": 33149, "episode_idx": 196, "frame_idx": 20, "global_frame_idx": 33149, "task_index": 39}, {"db_idx": 33150, "episode_idx": 196, "frame_idx": 21, "global_frame_idx": 33150, "task_index": 39}, {"db_idx": 33151, "episode_idx": 196, "frame_idx": 22, "global_frame_idx": 33151, "task_index": 39}, {"db_idx": 33152, "episode_idx": 196, "frame_idx": 23, "global_frame_idx": 33152, "task_index": 39}, {"db_idx": 33153, "episode_idx": 196, "frame_idx": 24, "global_frame_idx": 33153, "task_index": 39}, {"db_idx": 33154, "episode_idx": 196, "frame_idx": 25, "global_frame_idx": 33154, "task_index": 39}, {"db_idx": 33155, "episode_idx": 196, "frame_idx": 26, "global_frame_idx": 33155, "task_index": 39}, {"db_idx": 33156, "episode_idx": 196, "frame_idx": 27, "global_frame_idx": 33156, "task_index": 39}, {"db_idx": 33157, "episode_idx": 196, "frame_idx": 28, "global_frame_idx": 33157, "task_index": 39}, {"db_idx": 33158, "episode_idx": 196, "frame_idx": 29, "global_frame_idx": 33158, "task_index": 39}, {"db_idx": 33159, "episode_idx": 196, "frame_idx": 30, "global_frame_idx": 33159, "task_index": 39}, {"db_idx": 33160, "episode_idx": 196, "frame_idx": 31, "global_frame_idx": 33160, "task_index": 39}, {"db_idx": 33161, "episode_idx": 196, "frame_idx": 32, "global_frame_idx": 33161, "task_index": 39}, {"db_idx": 33162, "episode_idx": 196, "frame_idx": 33, "global_frame_idx": 33162, "task_index": 39}, {"db_idx": 33163, "episode_idx": 196, "frame_idx": 34, "global_frame_idx": 33163, "task_index": 39}, {"db_idx": 33164, "episode_idx": 196, "frame_idx": 35, "global_frame_idx": 33164, "task_index": 39}, {"db_idx": 33165, "episode_idx": 196, "frame_idx": 36, "global_frame_idx": 33165, "task_index": 39}, {"db_idx": 33166, "episode_idx": 196, "frame_idx": 37, "global_frame_idx": 33166, "task_index": 39}, {"db_idx": 33167, "episode_idx": 196, "frame_idx": 38, "global_frame_idx": 33167, "task_index": 39}, {"db_idx": 33168, "episode_idx": 196, "frame_idx": 39, "global_frame_idx": 
33168, "task_index": 39}, {"db_idx": 33169, "episode_idx": 196, "frame_idx": 40, "global_frame_idx": 33169, "task_index": 39}, {"db_idx": 33170, "episode_idx": 196, "frame_idx": 41, "global_frame_idx": 33170, "task_index": 39}, {"db_idx": 33171, "episode_idx": 196, "frame_idx": 42, "global_frame_idx": 33171, "task_index": 39}, {"db_idx": 33172, "episode_idx": 196, "frame_idx": 43, "global_frame_idx": 33172, "task_index": 39}, {"db_idx": 33173, "episode_idx": 196, "frame_idx": 44, "global_frame_idx": 33173, "task_index": 39}, {"db_idx": 33174, "episode_idx": 196, "frame_idx": 45, "global_frame_idx": 33174, "task_index": 39}, {"db_idx": 33175, "episode_idx": 196, "frame_idx": 46, "global_frame_idx": 33175, "task_index": 39}, {"db_idx": 33176, "episode_idx": 196, "frame_idx": 47, "global_frame_idx": 33176, "task_index": 39}, {"db_idx": 33177, "episode_idx": 196, "frame_idx": 48, "global_frame_idx": 33177, "task_index": 39}, {"db_idx": 33178, "episode_idx": 196, "frame_idx": 49, "global_frame_idx": 33178, "task_index": 39}, {"db_idx": 33179, "episode_idx": 196, "frame_idx": 50, "global_frame_idx": 33179, "task_index": 39}, {"db_idx": 33180, "episode_idx": 196, "frame_idx": 51, "global_frame_idx": 33180, "task_index": 39}, {"db_idx": 33181, "episode_idx": 196, "frame_idx": 52, "global_frame_idx": 33181, "task_index": 39}, {"db_idx": 33182, "episode_idx": 196, "frame_idx": 53, "global_frame_idx": 33182, "task_index": 39}, {"db_idx": 33183, "episode_idx": 196, "frame_idx": 54, "global_frame_idx": 33183, "task_index": 39}, {"db_idx": 33184, "episode_idx": 196, "frame_idx": 55, "global_frame_idx": 33184, "task_index": 39}, {"db_idx": 33185, "episode_idx": 196, "frame_idx": 56, "global_frame_idx": 33185, "task_index": 39}, {"db_idx": 33186, "episode_idx": 196, "frame_idx": 57, "global_frame_idx": 33186, "task_index": 39}, {"db_idx": 33187, "episode_idx": 196, "frame_idx": 58, "global_frame_idx": 33187, "task_index": 39}, {"db_idx": 33188, "episode_idx": 196, "frame_idx": 59, 
"global_frame_idx": 33188, "task_index": 39}, {"db_idx": 33189, "episode_idx": 196, "frame_idx": 60, "global_frame_idx": 33189, "task_index": 39}, {"db_idx": 33190, "episode_idx": 196, "frame_idx": 61, "global_frame_idx": 33190, "task_index": 39}, {"db_idx": 33191, "episode_idx": 196, "frame_idx": 62, "global_frame_idx": 33191, "task_index": 39}, {"db_idx": 33192, "episode_idx": 196, "frame_idx": 63, "global_frame_idx": 33192, "task_index": 39}, {"db_idx": 33193, "episode_idx": 196, "frame_idx": 64, "global_frame_idx": 33193, "task_index": 39}, {"db_idx": 33194, "episode_idx": 196, "frame_idx": 65, "global_frame_idx": 33194, "task_index": 39}, {"db_idx": 33195, "episode_idx": 196, "frame_idx": 66, "global_frame_idx": 33195, "task_index": 39}, {"db_idx": 33196, "episode_idx": 196, "frame_idx": 67, "global_frame_idx": 33196, "task_index": 39}, {"db_idx": 33197, "episode_idx": 196, "frame_idx": 68, "global_frame_idx": 33197, "task_index": 39}, {"db_idx": 33198, "episode_idx": 196, "frame_idx": 69, "global_frame_idx": 33198, "task_index": 39}, {"db_idx": 33199, "episode_idx": 196, "frame_idx": 70, "global_frame_idx": 33199, "task_index": 39}, {"db_idx": 33200, "episode_idx": 196, "frame_idx": 71, "global_frame_idx": 33200, "task_index": 39}, {"db_idx": 33201, "episode_idx": 196, "frame_idx": 72, "global_frame_idx": 33201, "task_index": 39}, {"db_idx": 33202, "episode_idx": 196, "frame_idx": 73, "global_frame_idx": 33202, "task_index": 39}, {"db_idx": 33203, "episode_idx": 196, "frame_idx": 74, "global_frame_idx": 33203, "task_index": 39}, {"db_idx": 33204, "episode_idx": 196, "frame_idx": 75, "global_frame_idx": 33204, "task_index": 39}, {"db_idx": 33205, "episode_idx": 196, "frame_idx": 76, "global_frame_idx": 33205, "task_index": 39}, {"db_idx": 33206, "episode_idx": 196, "frame_idx": 77, "global_frame_idx": 33206, "task_index": 39}, {"db_idx": 33207, "episode_idx": 196, "frame_idx": 78, "global_frame_idx": 33207, "task_index": 39}, {"db_idx": 33208, "episode_idx": 
196, "frame_idx": 79, "global_frame_idx": 33208, "task_index": 39}, {"db_idx": 33209, "episode_idx": 196, "frame_idx": 80, "global_frame_idx": 33209, "task_index": 39}, {"db_idx": 33210, "episode_idx": 196, "frame_idx": 81, "global_frame_idx": 33210, "task_index": 39}, {"db_idx": 33211, "episode_idx": 196, "frame_idx": 82, "global_frame_idx": 33211, "task_index": 39}, {"db_idx": 33212, "episode_idx": 196, "frame_idx": 83, "global_frame_idx": 33212, "task_index": 39}, {"db_idx": 33213, "episode_idx": 196, "frame_idx": 84, "global_frame_idx": 33213, "task_index": 39}, {"db_idx": 33214, "episode_idx": 196, "frame_idx": 85, "global_frame_idx": 33214, "task_index": 39}, {"db_idx": 33215, "episode_idx": 196, "frame_idx": 86, "global_frame_idx": 33215, "task_index": 39}, {"db_idx": 33216, "episode_idx": 196, "frame_idx": 87, "global_frame_idx": 33216, "task_index": 39}, {"db_idx": 33217, "episode_idx": 196, "frame_idx": 88, "global_frame_idx": 33217, "task_index": 39}, {"db_idx": 33218, "episode_idx": 196, "frame_idx": 89, "global_frame_idx": 33218, "task_index": 39}, {"db_idx": 33219, "episode_idx": 196, "frame_idx": 90, "global_frame_idx": 33219, "task_index": 39}, {"db_idx": 33220, "episode_idx": 196, "frame_idx": 91, "global_frame_idx": 33220, "task_index": 39}, {"db_idx": 33221, "episode_idx": 196, "frame_idx": 92, "global_frame_idx": 33221, "task_index": 39}, {"db_idx": 33222, "episode_idx": 196, "frame_idx": 93, "global_frame_idx": 33222, "task_index": 39}, {"db_idx": 33223, "episode_idx": 196, "frame_idx": 94, "global_frame_idx": 33223, "task_index": 39}, {"db_idx": 33224, "episode_idx": 196, "frame_idx": 95, "global_frame_idx": 33224, "task_index": 39}, {"db_idx": 33225, "episode_idx": 196, "frame_idx": 96, "global_frame_idx": 33225, "task_index": 39}, {"db_idx": 33226, "episode_idx": 196, "frame_idx": 97, "global_frame_idx": 33226, "task_index": 39}, {"db_idx": 33227, "episode_idx": 196, "frame_idx": 98, "global_frame_idx": 33227, "task_index": 39}, {"db_idx": 
33228, "episode_idx": 196, "frame_idx": 99, "global_frame_idx": 33228, "task_index": 39}, {"db_idx": 33229, "episode_idx": 196, "frame_idx": 100, "global_frame_idx": 33229, "task_index": 39}, {"db_idx": 33230, "episode_idx": 196, "frame_idx": 101, "global_frame_idx": 33230, "task_index": 39}, {"db_idx": 33231, "episode_idx": 196, "frame_idx": 102, "global_frame_idx": 33231, "task_index": 39}, {"db_idx": 33232, "episode_idx": 196, "frame_idx": 103, "global_frame_idx": 33232, "task_index": 39}, {"db_idx": 33233, "episode_idx": 196, "frame_idx": 104, "global_frame_idx": 33233, "task_index": 39}, {"db_idx": 33234, "episode_idx": 196, "frame_idx": 105, "global_frame_idx": 33234, "task_index": 39}, {"db_idx": 33235, "episode_idx": 196, "frame_idx": 106, "global_frame_idx": 33235, "task_index": 39}, {"db_idx": 33236, "episode_idx": 196, "frame_idx": 107, "global_frame_idx": 33236, "task_index": 39}, {"db_idx": 33237, "episode_idx": 196, "frame_idx": 108, "global_frame_idx": 33237, "task_index": 39}, {"db_idx": 33238, "episode_idx": 196, "frame_idx": 109, "global_frame_idx": 33238, "task_index": 39}, {"db_idx": 33239, "episode_idx": 196, "frame_idx": 110, "global_frame_idx": 33239, "task_index": 39}, {"db_idx": 33240, "episode_idx": 196, "frame_idx": 111, "global_frame_idx": 33240, "task_index": 39}, {"db_idx": 33241, "episode_idx": 196, "frame_idx": 112, "global_frame_idx": 33241, "task_index": 39}, {"db_idx": 33242, "episode_idx": 196, "frame_idx": 113, "global_frame_idx": 33242, "task_index": 39}, {"db_idx": 33243, "episode_idx": 196, "frame_idx": 114, "global_frame_idx": 33243, "task_index": 39}, {"db_idx": 33244, "episode_idx": 196, "frame_idx": 115, "global_frame_idx": 33244, "task_index": 39}, {"db_idx": 33245, "episode_idx": 196, "frame_idx": 116, "global_frame_idx": 33245, "task_index": 39}, {"db_idx": 33246, "episode_idx": 196, "frame_idx": 117, "global_frame_idx": 33246, "task_index": 39}, {"db_idx": 33247, "episode_idx": 196, "frame_idx": 118, 
"global_frame_idx": 33247, "task_index": 39}, {"db_idx": 33248, "episode_idx": 196, "frame_idx": 119, "global_frame_idx": 33248, "task_index": 39}, {"db_idx": 33249, "episode_idx": 197, "frame_idx": 0, "global_frame_idx": 33249, "task_index": 39}, {"db_idx": 33250, "episode_idx": 197, "frame_idx": 1, "global_frame_idx": 33250, "task_index": 39}, {"db_idx": 33251, "episode_idx": 197, "frame_idx": 2, "global_frame_idx": 33251, "task_index": 39}, {"db_idx": 33252, "episode_idx": 197, "frame_idx": 3, "global_frame_idx": 33252, "task_index": 39}, {"db_idx": 33253, "episode_idx": 197, "frame_idx": 4, "global_frame_idx": 33253, "task_index": 39}, {"db_idx": 33254, "episode_idx": 197, "frame_idx": 5, "global_frame_idx": 33254, "task_index": 39}, {"db_idx": 33255, "episode_idx": 197, "frame_idx": 6, "global_frame_idx": 33255, "task_index": 39}, {"db_idx": 33256, "episode_idx": 197, "frame_idx": 7, "global_frame_idx": 33256, "task_index": 39}, {"db_idx": 33257, "episode_idx": 197, "frame_idx": 8, "global_frame_idx": 33257, "task_index": 39}, {"db_idx": 33258, "episode_idx": 197, "frame_idx": 9, "global_frame_idx": 33258, "task_index": 39}, {"db_idx": 33259, "episode_idx": 197, "frame_idx": 10, "global_frame_idx": 33259, "task_index": 39}, {"db_idx": 33260, "episode_idx": 197, "frame_idx": 11, "global_frame_idx": 33260, "task_index": 39}, {"db_idx": 33261, "episode_idx": 197, "frame_idx": 12, "global_frame_idx": 33261, "task_index": 39}, {"db_idx": 33262, "episode_idx": 197, "frame_idx": 13, "global_frame_idx": 33262, "task_index": 39}, {"db_idx": 33263, "episode_idx": 197, "frame_idx": 14, "global_frame_idx": 33263, "task_index": 39}, {"db_idx": 33264, "episode_idx": 197, "frame_idx": 15, "global_frame_idx": 33264, "task_index": 39}, {"db_idx": 33265, "episode_idx": 197, "frame_idx": 16, "global_frame_idx": 33265, "task_index": 39}, {"db_idx": 33266, "episode_idx": 197, "frame_idx": 17, "global_frame_idx": 33266, "task_index": 39}, {"db_idx": 33267, "episode_idx": 197, 
"frame_idx": 18, "global_frame_idx": 33267, "task_index": 39}, {"db_idx": 33268, "episode_idx": 197, "frame_idx": 19, "global_frame_idx": 33268, "task_index": 39}, {"db_idx": 33269, "episode_idx": 197, "frame_idx": 20, "global_frame_idx": 33269, "task_index": 39}, {"db_idx": 33270, "episode_idx": 197, "frame_idx": 21, "global_frame_idx": 33270, "task_index": 39}, {"db_idx": 33271, "episode_idx": 197, "frame_idx": 22, "global_frame_idx": 33271, "task_index": 39}, {"db_idx": 33272, "episode_idx": 197, "frame_idx": 23, "global_frame_idx": 33272, "task_index": 39}, {"db_idx": 33273, "episode_idx": 197, "frame_idx": 24, "global_frame_idx": 33273, "task_index": 39}, {"db_idx": 33274, "episode_idx": 197, "frame_idx": 25, "global_frame_idx": 33274, "task_index": 39}, {"db_idx": 33275, "episode_idx": 197, "frame_idx": 26, "global_frame_idx": 33275, "task_index": 39}, {"db_idx": 33276, "episode_idx": 197, "frame_idx": 27, "global_frame_idx": 33276, "task_index": 39}, {"db_idx": 33277, "episode_idx": 197, "frame_idx": 28, "global_frame_idx": 33277, "task_index": 39}, {"db_idx": 33278, "episode_idx": 197, "frame_idx": 29, "global_frame_idx": 33278, "task_index": 39}, {"db_idx": 33279, "episode_idx": 197, "frame_idx": 30, "global_frame_idx": 33279, "task_index": 39}, {"db_idx": 33280, "episode_idx": 197, "frame_idx": 31, "global_frame_idx": 33280, "task_index": 39}, {"db_idx": 33281, "episode_idx": 197, "frame_idx": 32, "global_frame_idx": 33281, "task_index": 39}, {"db_idx": 33282, "episode_idx": 197, "frame_idx": 33, "global_frame_idx": 33282, "task_index": 39}, {"db_idx": 33283, "episode_idx": 197, "frame_idx": 34, "global_frame_idx": 33283, "task_index": 39}, {"db_idx": 33284, "episode_idx": 197, "frame_idx": 35, "global_frame_idx": 33284, "task_index": 39}, {"db_idx": 33285, "episode_idx": 197, "frame_idx": 36, "global_frame_idx": 33285, "task_index": 39}, {"db_idx": 33286, "episode_idx": 197, "frame_idx": 37, "global_frame_idx": 33286, "task_index": 39}, {"db_idx": 33287, 
"episode_idx": 197, "frame_idx": 38, "global_frame_idx": 33287, "task_index": 39}, {"db_idx": 33288, "episode_idx": 197, "frame_idx": 39, "global_frame_idx": 33288, "task_index": 39}, {"db_idx": 33289, "episode_idx": 197, "frame_idx": 40, "global_frame_idx": 33289, "task_index": 39}, {"db_idx": 33290, "episode_idx": 197, "frame_idx": 41, "global_frame_idx": 33290, "task_index": 39}, {"db_idx": 33291, "episode_idx": 197, "frame_idx": 42, "global_frame_idx": 33291, "task_index": 39}, {"db_idx": 33292, "episode_idx": 197, "frame_idx": 43, "global_frame_idx": 33292, "task_index": 39}, {"db_idx": 33293, "episode_idx": 197, "frame_idx": 44, "global_frame_idx": 33293, "task_index": 39}, {"db_idx": 33294, "episode_idx": 197, "frame_idx": 45, "global_frame_idx": 33294, "task_index": 39}, {"db_idx": 33295, "episode_idx": 197, "frame_idx": 46, "global_frame_idx": 33295, "task_index": 39}, {"db_idx": 33296, "episode_idx": 197, "frame_idx": 47, "global_frame_idx": 33296, "task_index": 39}, {"db_idx": 33297, "episode_idx": 197, "frame_idx": 48, "global_frame_idx": 33297, "task_index": 39}, {"db_idx": 33298, "episode_idx": 197, "frame_idx": 49, "global_frame_idx": 33298, "task_index": 39}, {"db_idx": 33299, "episode_idx": 197, "frame_idx": 50, "global_frame_idx": 33299, "task_index": 39}, {"db_idx": 33300, "episode_idx": 197, "frame_idx": 51, "global_frame_idx": 33300, "task_index": 39}, {"db_idx": 33301, "episode_idx": 197, "frame_idx": 52, "global_frame_idx": 33301, "task_index": 39}, {"db_idx": 33302, "episode_idx": 197, "frame_idx": 53, "global_frame_idx": 33302, "task_index": 39}, {"db_idx": 33303, "episode_idx": 197, "frame_idx": 54, "global_frame_idx": 33303, "task_index": 39}, {"db_idx": 33304, "episode_idx": 197, "frame_idx": 55, "global_frame_idx": 33304, "task_index": 39}, {"db_idx": 33305, "episode_idx": 197, "frame_idx": 56, "global_frame_idx": 33305, "task_index": 39}, {"db_idx": 33306, "episode_idx": 197, "frame_idx": 57, "global_frame_idx": 33306, "task_index": 
39}, {"db_idx": 33307, "episode_idx": 197, "frame_idx": 58, "global_frame_idx": 33307, "task_index": 39}, {"db_idx": 33308, "episode_idx": 197, "frame_idx": 59, "global_frame_idx": 33308, "task_index": 39}, {"db_idx": 33309, "episode_idx": 197, "frame_idx": 60, "global_frame_idx": 33309, "task_index": 39}, {"db_idx": 33310, "episode_idx": 197, "frame_idx": 61, "global_frame_idx": 33310, "task_index": 39}, {"db_idx": 33311, "episode_idx": 197, "frame_idx": 62, "global_frame_idx": 33311, "task_index": 39}, {"db_idx": 33312, "episode_idx": 197, "frame_idx": 63, "global_frame_idx": 33312, "task_index": 39}, {"db_idx": 33313, "episode_idx": 197, "frame_idx": 64, "global_frame_idx": 33313, "task_index": 39}, {"db_idx": 33314, "episode_idx": 197, "frame_idx": 65, "global_frame_idx": 33314, "task_index": 39}, {"db_idx": 33315, "episode_idx": 197, "frame_idx": 66, "global_frame_idx": 33315, "task_index": 39}, {"db_idx": 33316, "episode_idx": 197, "frame_idx": 67, "global_frame_idx": 33316, "task_index": 39}, {"db_idx": 33317, "episode_idx": 197, "frame_idx": 68, "global_frame_idx": 33317, "task_index": 39}, {"db_idx": 33318, "episode_idx": 197, "frame_idx": 69, "global_frame_idx": 33318, "task_index": 39}, {"db_idx": 33319, "episode_idx": 197, "frame_idx": 70, "global_frame_idx": 33319, "task_index": 39}, {"db_idx": 33320, "episode_idx": 197, "frame_idx": 71, "global_frame_idx": 33320, "task_index": 39}, {"db_idx": 33321, "episode_idx": 197, "frame_idx": 72, "global_frame_idx": 33321, "task_index": 39}, {"db_idx": 33322, "episode_idx": 197, "frame_idx": 73, "global_frame_idx": 33322, "task_index": 39}, {"db_idx": 33323, "episode_idx": 197, "frame_idx": 74, "global_frame_idx": 33323, "task_index": 39}, {"db_idx": 33324, "episode_idx": 197, "frame_idx": 75, "global_frame_idx": 33324, "task_index": 39}, {"db_idx": 33325, "episode_idx": 197, "frame_idx": 76, "global_frame_idx": 33325, "task_index": 39}, {"db_idx": 33326, "episode_idx": 197, "frame_idx": 77, "global_frame_idx": 
33326, "task_index": 39}, {"db_idx": 33327, "episode_idx": 197, "frame_idx": 78, "global_frame_idx": 33327, "task_index": 39}, {"db_idx": 33328, "episode_idx": 197, "frame_idx": 79, "global_frame_idx": 33328, "task_index": 39}, {"db_idx": 33329, "episode_idx": 197, "frame_idx": 80, "global_frame_idx": 33329, "task_index": 39}, {"db_idx": 33330, "episode_idx": 197, "frame_idx": 81, "global_frame_idx": 33330, "task_index": 39}, {"db_idx": 33331, "episode_idx": 197, "frame_idx": 82, "global_frame_idx": 33331, "task_index": 39}, {"db_idx": 33332, "episode_idx": 197, "frame_idx": 83, "global_frame_idx": 33332, "task_index": 39}, {"db_idx": 33333, "episode_idx": 197, "frame_idx": 84, "global_frame_idx": 33333, "task_index": 39}, {"db_idx": 33334, "episode_idx": 197, "frame_idx": 85, "global_frame_idx": 33334, "task_index": 39}, {"db_idx": 33335, "episode_idx": 197, "frame_idx": 86, "global_frame_idx": 33335, "task_index": 39}, {"db_idx": 33336, "episode_idx": 197, "frame_idx": 87, "global_frame_idx": 33336, "task_index": 39}, {"db_idx": 33337, "episode_idx": 197, "frame_idx": 88, "global_frame_idx": 33337, "task_index": 39}, {"db_idx": 33338, "episode_idx": 197, "frame_idx": 89, "global_frame_idx": 33338, "task_index": 39}, {"db_idx": 33339, "episode_idx": 197, "frame_idx": 90, "global_frame_idx": 33339, "task_index": 39}, {"db_idx": 33340, "episode_idx": 197, "frame_idx": 91, "global_frame_idx": 33340, "task_index": 39}, {"db_idx": 33341, "episode_idx": 197, "frame_idx": 92, "global_frame_idx": 33341, "task_index": 39}, {"db_idx": 33342, "episode_idx": 197, "frame_idx": 93, "global_frame_idx": 33342, "task_index": 39}, {"db_idx": 33343, "episode_idx": 197, "frame_idx": 94, "global_frame_idx": 33343, "task_index": 39}, {"db_idx": 33344, "episode_idx": 197, "frame_idx": 95, "global_frame_idx": 33344, "task_index": 39}, {"db_idx": 33345, "episode_idx": 197, "frame_idx": 96, "global_frame_idx": 33345, "task_index": 39}, {"db_idx": 33346, "episode_idx": 197, "frame_idx": 97, 
"global_frame_idx": 33346, "task_index": 39}, {"db_idx": 33347, "episode_idx": 197, "frame_idx": 98, "global_frame_idx": 33347, "task_index": 39}, {"db_idx": 33348, "episode_idx": 197, "frame_idx": 99, "global_frame_idx": 33348, "task_index": 39}, {"db_idx": 33349, "episode_idx": 197, "frame_idx": 100, "global_frame_idx": 33349, "task_index": 39}, {"db_idx": 33350, "episode_idx": 197, "frame_idx": 101, "global_frame_idx": 33350, "task_index": 39}, {"db_idx": 33351, "episode_idx": 197, "frame_idx": 102, "global_frame_idx": 33351, "task_index": 39}, {"db_idx": 33352, "episode_idx": 197, "frame_idx": 103, "global_frame_idx": 33352, "task_index": 39}, {"db_idx": 33353, "episode_idx": 197, "frame_idx": 104, "global_frame_idx": 33353, "task_index": 39}, {"db_idx": 33354, "episode_idx": 197, "frame_idx": 105, "global_frame_idx": 33354, "task_index": 39}, {"db_idx": 33355, "episode_idx": 197, "frame_idx": 106, "global_frame_idx": 33355, "task_index": 39}, {"db_idx": 33356, "episode_idx": 197, "frame_idx": 107, "global_frame_idx": 33356, "task_index": 39}, {"db_idx": 33357, "episode_idx": 197, "frame_idx": 108, "global_frame_idx": 33357, "task_index": 39}, {"db_idx": 33358, "episode_idx": 197, "frame_idx": 109, "global_frame_idx": 33358, "task_index": 39}, {"db_idx": 33359, "episode_idx": 197, "frame_idx": 110, "global_frame_idx": 33359, "task_index": 39}, {"db_idx": 33360, "episode_idx": 197, "frame_idx": 111, "global_frame_idx": 33360, "task_index": 39}, {"db_idx": 33361, "episode_idx": 197, "frame_idx": 112, "global_frame_idx": 33361, "task_index": 39}, {"db_idx": 33362, "episode_idx": 197, "frame_idx": 113, "global_frame_idx": 33362, "task_index": 39}, {"db_idx": 33363, "episode_idx": 197, "frame_idx": 114, "global_frame_idx": 33363, "task_index": 39}, {"db_idx": 33364, "episode_idx": 197, "frame_idx": 115, "global_frame_idx": 33364, "task_index": 39}, {"db_idx": 33365, "episode_idx": 197, "frame_idx": 116, "global_frame_idx": 33365, "task_index": 39}, {"db_idx": 33366, 
"episode_idx": 198, "frame_idx": 0, "global_frame_idx": 33366, "task_index": 39}, {"db_idx": 33367, "episode_idx": 198, "frame_idx": 1, "global_frame_idx": 33367, "task_index": 39}, {"db_idx": 33368, "episode_idx": 198, "frame_idx": 2, "global_frame_idx": 33368, "task_index": 39}, {"db_idx": 33369, "episode_idx": 198, "frame_idx": 3, "global_frame_idx": 33369, "task_index": 39}, {"db_idx": 33370, "episode_idx": 198, "frame_idx": 4, "global_frame_idx": 33370, "task_index": 39}, {"db_idx": 33371, "episode_idx": 198, "frame_idx": 5, "global_frame_idx": 33371, "task_index": 39}, {"db_idx": 33372, "episode_idx": 198, "frame_idx": 6, "global_frame_idx": 33372, "task_index": 39}, {"db_idx": 33373, "episode_idx": 198, "frame_idx": 7, "global_frame_idx": 33373, "task_index": 39}, {"db_idx": 33374, "episode_idx": 198, "frame_idx": 8, "global_frame_idx": 33374, "task_index": 39}, {"db_idx": 33375, "episode_idx": 198, "frame_idx": 9, "global_frame_idx": 33375, "task_index": 39}, {"db_idx": 33376, "episode_idx": 198, "frame_idx": 10, "global_frame_idx": 33376, "task_index": 39}, {"db_idx": 33377, "episode_idx": 198, "frame_idx": 11, "global_frame_idx": 33377, "task_index": 39}, {"db_idx": 33378, "episode_idx": 198, "frame_idx": 12, "global_frame_idx": 33378, "task_index": 39}, {"db_idx": 33379, "episode_idx": 198, "frame_idx": 13, "global_frame_idx": 33379, "task_index": 39}, {"db_idx": 33380, "episode_idx": 198, "frame_idx": 14, "global_frame_idx": 33380, "task_index": 39}, {"db_idx": 33381, "episode_idx": 198, "frame_idx": 15, "global_frame_idx": 33381, "task_index": 39}, {"db_idx": 33382, "episode_idx": 198, "frame_idx": 16, "global_frame_idx": 33382, "task_index": 39}, {"db_idx": 33383, "episode_idx": 198, "frame_idx": 17, "global_frame_idx": 33383, "task_index": 39}, {"db_idx": 33384, "episode_idx": 198, "frame_idx": 18, "global_frame_idx": 33384, "task_index": 39}, {"db_idx": 33385, "episode_idx": 198, "frame_idx": 19, "global_frame_idx": 33385, "task_index": 39}, 
{"db_idx": 33386, "episode_idx": 198, "frame_idx": 20, "global_frame_idx": 33386, "task_index": 39}, {"db_idx": 33387, "episode_idx": 198, "frame_idx": 21, "global_frame_idx": 33387, "task_index": 39}, {"db_idx": 33388, "episode_idx": 198, "frame_idx": 22, "global_frame_idx": 33388, "task_index": 39}, {"db_idx": 33389, "episode_idx": 198, "frame_idx": 23, "global_frame_idx": 33389, "task_index": 39}, {"db_idx": 33390, "episode_idx": 198, "frame_idx": 24, "global_frame_idx": 33390, "task_index": 39}, {"db_idx": 33391, "episode_idx": 198, "frame_idx": 25, "global_frame_idx": 33391, "task_index": 39}, {"db_idx": 33392, "episode_idx": 198, "frame_idx": 26, "global_frame_idx": 33392, "task_index": 39}, {"db_idx": 33393, "episode_idx": 198, "frame_idx": 27, "global_frame_idx": 33393, "task_index": 39}, {"db_idx": 33394, "episode_idx": 198, "frame_idx": 28, "global_frame_idx": 33394, "task_index": 39}, {"db_idx": 33395, "episode_idx": 198, "frame_idx": 29, "global_frame_idx": 33395, "task_index": 39}, {"db_idx": 33396, "episode_idx": 198, "frame_idx": 30, "global_frame_idx": 33396, "task_index": 39}, {"db_idx": 33397, "episode_idx": 198, "frame_idx": 31, "global_frame_idx": 33397, "task_index": 39}, {"db_idx": 33398, "episode_idx": 198, "frame_idx": 32, "global_frame_idx": 33398, "task_index": 39}, {"db_idx": 33399, "episode_idx": 198, "frame_idx": 33, "global_frame_idx": 33399, "task_index": 39}, {"db_idx": 33400, "episode_idx": 198, "frame_idx": 34, "global_frame_idx": 33400, "task_index": 39}, {"db_idx": 33401, "episode_idx": 198, "frame_idx": 35, "global_frame_idx": 33401, "task_index": 39}, {"db_idx": 33402, "episode_idx": 198, "frame_idx": 36, "global_frame_idx": 33402, "task_index": 39}, {"db_idx": 33403, "episode_idx": 198, "frame_idx": 37, "global_frame_idx": 33403, "task_index": 39}, {"db_idx": 33404, "episode_idx": 198, "frame_idx": 38, "global_frame_idx": 33404, "task_index": 39}, {"db_idx": 33405, "episode_idx": 198, "frame_idx": 39, "global_frame_idx": 
33405, "task_index": 39}, {"db_idx": 33406, "episode_idx": 198, "frame_idx": 40, "global_frame_idx": 33406, "task_index": 39}, {"db_idx": 33407, "episode_idx": 198, "frame_idx": 41, "global_frame_idx": 33407, "task_index": 39}, {"db_idx": 33408, "episode_idx": 198, "frame_idx": 42, "global_frame_idx": 33408, "task_index": 39}, {"db_idx": 33409, "episode_idx": 198, "frame_idx": 43, "global_frame_idx": 33409, "task_index": 39}, {"db_idx": 33410, "episode_idx": 198, "frame_idx": 44, "global_frame_idx": 33410, "task_index": 39}, {"db_idx": 33411, "episode_idx": 198, "frame_idx": 45, "global_frame_idx": 33411, "task_index": 39}, {"db_idx": 33412, "episode_idx": 198, "frame_idx": 46, "global_frame_idx": 33412, "task_index": 39}, {"db_idx": 33413, "episode_idx": 198, "frame_idx": 47, "global_frame_idx": 33413, "task_index": 39}, {"db_idx": 33414, "episode_idx": 198, "frame_idx": 48, "global_frame_idx": 33414, "task_index": 39}, {"db_idx": 33415, "episode_idx": 198, "frame_idx": 49, "global_frame_idx": 33415, "task_index": 39}, {"db_idx": 33416, "episode_idx": 198, "frame_idx": 50, "global_frame_idx": 33416, "task_index": 39}, {"db_idx": 33417, "episode_idx": 198, "frame_idx": 51, "global_frame_idx": 33417, "task_index": 39}, {"db_idx": 33418, "episode_idx": 198, "frame_idx": 52, "global_frame_idx": 33418, "task_index": 39}, {"db_idx": 33419, "episode_idx": 198, "frame_idx": 53, "global_frame_idx": 33419, "task_index": 39}, {"db_idx": 33420, "episode_idx": 198, "frame_idx": 54, "global_frame_idx": 33420, "task_index": 39}, {"db_idx": 33421, "episode_idx": 198, "frame_idx": 55, "global_frame_idx": 33421, "task_index": 39}, {"db_idx": 33422, "episode_idx": 198, "frame_idx": 56, "global_frame_idx": 33422, "task_index": 39}, {"db_idx": 33423, "episode_idx": 198, "frame_idx": 57, "global_frame_idx": 33423, "task_index": 39}, {"db_idx": 33424, "episode_idx": 198, "frame_idx": 58, "global_frame_idx": 33424, "task_index": 39}, {"db_idx": 33425, "episode_idx": 198, "frame_idx": 59, 
"global_frame_idx": 33425, "task_index": 39}, {"db_idx": 33426, "episode_idx": 198, "frame_idx": 60, "global_frame_idx": 33426, "task_index": 39}, {"db_idx": 33427, "episode_idx": 198, "frame_idx": 61, "global_frame_idx": 33427, "task_index": 39}, {"db_idx": 33428, "episode_idx": 198, "frame_idx": 62, "global_frame_idx": 33428, "task_index": 39}, {"db_idx": 33429, "episode_idx": 198, "frame_idx": 63, "global_frame_idx": 33429, "task_index": 39}, {"db_idx": 33430, "episode_idx": 198, "frame_idx": 64, "global_frame_idx": 33430, "task_index": 39}, {"db_idx": 33431, "episode_idx": 198, "frame_idx": 65, "global_frame_idx": 33431, "task_index": 39}, {"db_idx": 33432, "episode_idx": 198, "frame_idx": 66, "global_frame_idx": 33432, "task_index": 39}, {"db_idx": 33433, "episode_idx": 198, "frame_idx": 67, "global_frame_idx": 33433, "task_index": 39}, {"db_idx": 33434, "episode_idx": 198, "frame_idx": 68, "global_frame_idx": 33434, "task_index": 39}, {"db_idx": 33435, "episode_idx": 198, "frame_idx": 69, "global_frame_idx": 33435, "task_index": 39}, {"db_idx": 33436, "episode_idx": 198, "frame_idx": 70, "global_frame_idx": 33436, "task_index": 39}, {"db_idx": 33437, "episode_idx": 198, "frame_idx": 71, "global_frame_idx": 33437, "task_index": 39}, {"db_idx": 33438, "episode_idx": 198, "frame_idx": 72, "global_frame_idx": 33438, "task_index": 39}, {"db_idx": 33439, "episode_idx": 198, "frame_idx": 73, "global_frame_idx": 33439, "task_index": 39}, {"db_idx": 33440, "episode_idx": 198, "frame_idx": 74, "global_frame_idx": 33440, "task_index": 39}, {"db_idx": 33441, "episode_idx": 198, "frame_idx": 75, "global_frame_idx": 33441, "task_index": 39}, {"db_idx": 33442, "episode_idx": 198, "frame_idx": 76, "global_frame_idx": 33442, "task_index": 39}, {"db_idx": 33443, "episode_idx": 198, "frame_idx": 77, "global_frame_idx": 33443, "task_index": 39}, {"db_idx": 33444, "episode_idx": 198, "frame_idx": 78, "global_frame_idx": 33444, "task_index": 39}, {"db_idx": 33445, "episode_idx": 
198, "frame_idx": 79, "global_frame_idx": 33445, "task_index": 39}, {"db_idx": 33446, "episode_idx": 198, "frame_idx": 80, "global_frame_idx": 33446, "task_index": 39}, {"db_idx": 33447, "episode_idx": 198, "frame_idx": 81, "global_frame_idx": 33447, "task_index": 39}, {"db_idx": 33448, "episode_idx": 198, "frame_idx": 82, "global_frame_idx": 33448, "task_index": 39}, {"db_idx": 33449, "episode_idx": 198, "frame_idx": 83, "global_frame_idx": 33449, "task_index": 39}, {"db_idx": 33450, "episode_idx": 198, "frame_idx": 84, "global_frame_idx": 33450, "task_index": 39}, {"db_idx": 33451, "episode_idx": 198, "frame_idx": 85, "global_frame_idx": 33451, "task_index": 39}, {"db_idx": 33452, "episode_idx": 198, "frame_idx": 86, "global_frame_idx": 33452, "task_index": 39}, {"db_idx": 33453, "episode_idx": 198, "frame_idx": 87, "global_frame_idx": 33453, "task_index": 39}, {"db_idx": 33454, "episode_idx": 198, "frame_idx": 88, "global_frame_idx": 33454, "task_index": 39}, {"db_idx": 33455, "episode_idx": 198, "frame_idx": 89, "global_frame_idx": 33455, "task_index": 39}, {"db_idx": 33456, "episode_idx": 198, "frame_idx": 90, "global_frame_idx": 33456, "task_index": 39}, {"db_idx": 33457, "episode_idx": 198, "frame_idx": 91, "global_frame_idx": 33457, "task_index": 39}, {"db_idx": 33458, "episode_idx": 198, "frame_idx": 92, "global_frame_idx": 33458, "task_index": 39}, {"db_idx": 33459, "episode_idx": 198, "frame_idx": 93, "global_frame_idx": 33459, "task_index": 39}, {"db_idx": 33460, "episode_idx": 198, "frame_idx": 94, "global_frame_idx": 33460, "task_index": 39}, {"db_idx": 33461, "episode_idx": 198, "frame_idx": 95, "global_frame_idx": 33461, "task_index": 39}, {"db_idx": 33462, "episode_idx": 198, "frame_idx": 96, "global_frame_idx": 33462, "task_index": 39}, {"db_idx": 33463, "episode_idx": 198, "frame_idx": 97, "global_frame_idx": 33463, "task_index": 39}, {"db_idx": 33464, "episode_idx": 198, "frame_idx": 98, "global_frame_idx": 33464, "task_index": 39}, {"db_idx": 
33465, "episode_idx": 198, "frame_idx": 99, "global_frame_idx": 33465, "task_index": 39}, {"db_idx": 33466, "episode_idx": 198, "frame_idx": 100, "global_frame_idx": 33466, "task_index": 39}, {"db_idx": 33467, "episode_idx": 198, "frame_idx": 101, "global_frame_idx": 33467, "task_index": 39}, {"db_idx": 33468, "episode_idx": 199, "frame_idx": 0, "global_frame_idx": 33468, "task_index": 39}, {"db_idx": 33469, "episode_idx": 199, "frame_idx": 1, "global_frame_idx": 33469, "task_index": 39}, {"db_idx": 33470, "episode_idx": 199, "frame_idx": 2, "global_frame_idx": 33470, "task_index": 39}, {"db_idx": 33471, "episode_idx": 199, "frame_idx": 3, "global_frame_idx": 33471, "task_index": 39}, {"db_idx": 33472, "episode_idx": 199, "frame_idx": 4, "global_frame_idx": 33472, "task_index": 39}, {"db_idx": 33473, "episode_idx": 199, "frame_idx": 5, "global_frame_idx": 33473, "task_index": 39}, {"db_idx": 33474, "episode_idx": 199, "frame_idx": 6, "global_frame_idx": 33474, "task_index": 39}, {"db_idx": 33475, "episode_idx": 199, "frame_idx": 7, "global_frame_idx": 33475, "task_index": 39}, {"db_idx": 33476, "episode_idx": 199, "frame_idx": 8, "global_frame_idx": 33476, "task_index": 39}, {"db_idx": 33477, "episode_idx": 199, "frame_idx": 9, "global_frame_idx": 33477, "task_index": 39}, {"db_idx": 33478, "episode_idx": 199, "frame_idx": 10, "global_frame_idx": 33478, "task_index": 39}, {"db_idx": 33479, "episode_idx": 199, "frame_idx": 11, "global_frame_idx": 33479, "task_index": 39}, {"db_idx": 33480, "episode_idx": 199, "frame_idx": 12, "global_frame_idx": 33480, "task_index": 39}, {"db_idx": 33481, "episode_idx": 199, "frame_idx": 13, "global_frame_idx": 33481, "task_index": 39}, {"db_idx": 33482, "episode_idx": 199, "frame_idx": 14, "global_frame_idx": 33482, "task_index": 39}, {"db_idx": 33483, "episode_idx": 199, "frame_idx": 15, "global_frame_idx": 33483, "task_index": 39}, {"db_idx": 33484, "episode_idx": 199, "frame_idx": 16, "global_frame_idx": 33484, "task_index": 
39}, {"db_idx": 33485, "episode_idx": 199, "frame_idx": 17, "global_frame_idx": 33485, "task_index": 39}, {"db_idx": 33486, "episode_idx": 199, "frame_idx": 18, "global_frame_idx": 33486, "task_index": 39}, {"db_idx": 33487, "episode_idx": 199, "frame_idx": 19, "global_frame_idx": 33487, "task_index": 39}, {"db_idx": 33488, "episode_idx": 199, "frame_idx": 20, "global_frame_idx": 33488, "task_index": 39}, {"db_idx": 33489, "episode_idx": 199, "frame_idx": 21, "global_frame_idx": 33489, "task_index": 39}, {"db_idx": 33490, "episode_idx": 199, "frame_idx": 22, "global_frame_idx": 33490, "task_index": 39}, {"db_idx": 33491, "episode_idx": 199, "frame_idx": 23, "global_frame_idx": 33491, "task_index": 39}, {"db_idx": 33492, "episode_idx": 199, "frame_idx": 24, "global_frame_idx": 33492, "task_index": 39}, {"db_idx": 33493, "episode_idx": 199, "frame_idx": 25, "global_frame_idx": 33493, "task_index": 39}, {"db_idx": 33494, "episode_idx": 199, "frame_idx": 26, "global_frame_idx": 33494, "task_index": 39}, {"db_idx": 33495, "episode_idx": 199, "frame_idx": 27, "global_frame_idx": 33495, "task_index": 39}, {"db_idx": 33496, "episode_idx": 199, "frame_idx": 28, "global_frame_idx": 33496, "task_index": 39}, {"db_idx": 33497, "episode_idx": 199, "frame_idx": 29, "global_frame_idx": 33497, "task_index": 39}, {"db_idx": 33498, "episode_idx": 199, "frame_idx": 30, "global_frame_idx": 33498, "task_index": 39}, {"db_idx": 33499, "episode_idx": 199, "frame_idx": 31, "global_frame_idx": 33499, "task_index": 39}, {"db_idx": 33500, "episode_idx": 199, "frame_idx": 32, "global_frame_idx": 33500, "task_index": 39}, {"db_idx": 33501, "episode_idx": 199, "frame_idx": 33, "global_frame_idx": 33501, "task_index": 39}, {"db_idx": 33502, "episode_idx": 199, "frame_idx": 34, "global_frame_idx": 33502, "task_index": 39}, {"db_idx": 33503, "episode_idx": 199, "frame_idx": 35, "global_frame_idx": 33503, "task_index": 39}, {"db_idx": 33504, "episode_idx": 199, "frame_idx": 36, "global_frame_idx": 
33504, "task_index": 39}, {"db_idx": 33505, "episode_idx": 199, "frame_idx": 37, "global_frame_idx": 33505, "task_index": 39}, {"db_idx": 33506, "episode_idx": 199, "frame_idx": 38, "global_frame_idx": 33506, "task_index": 39}, {"db_idx": 33507, "episode_idx": 199, "frame_idx": 39, "global_frame_idx": 33507, "task_index": 39}, {"db_idx": 33508, "episode_idx": 199, "frame_idx": 40, "global_frame_idx": 33508, "task_index": 39}, {"db_idx": 33509, "episode_idx": 199, "frame_idx": 41, "global_frame_idx": 33509, "task_index": 39}, {"db_idx": 33510, "episode_idx": 199, "frame_idx": 42, "global_frame_idx": 33510, "task_index": 39}, {"db_idx": 33511, "episode_idx": 199, "frame_idx": 43, "global_frame_idx": 33511, "task_index": 39}, {"db_idx": 33512, "episode_idx": 199, "frame_idx": 44, "global_frame_idx": 33512, "task_index": 39}, {"db_idx": 33513, "episode_idx": 199, "frame_idx": 45, "global_frame_idx": 33513, "task_index": 39}, {"db_idx": 33514, "episode_idx": 199, "frame_idx": 46, "global_frame_idx": 33514, "task_index": 39}, {"db_idx": 33515, "episode_idx": 199, "frame_idx": 47, "global_frame_idx": 33515, "task_index": 39}, {"db_idx": 33516, "episode_idx": 199, "frame_idx": 48, "global_frame_idx": 33516, "task_index": 39}, {"db_idx": 33517, "episode_idx": 199, "frame_idx": 49, "global_frame_idx": 33517, "task_index": 39}, {"db_idx": 33518, "episode_idx": 199, "frame_idx": 50, "global_frame_idx": 33518, "task_index": 39}, {"db_idx": 33519, "episode_idx": 199, "frame_idx": 51, "global_frame_idx": 33519, "task_index": 39}, {"db_idx": 33520, "episode_idx": 199, "frame_idx": 52, "global_frame_idx": 33520, "task_index": 39}, {"db_idx": 33521, "episode_idx": 199, "frame_idx": 53, "global_frame_idx": 33521, "task_index": 39}, {"db_idx": 33522, "episode_idx": 199, "frame_idx": 54, "global_frame_idx": 33522, "task_index": 39}, {"db_idx": 33523, "episode_idx": 199, "frame_idx": 55, "global_frame_idx": 33523, "task_index": 39}, {"db_idx": 33524, "episode_idx": 199, "frame_idx": 56, 
"global_frame_idx": 33524, "task_index": 39}, {"db_idx": 33525, "episode_idx": 199, "frame_idx": 57, "global_frame_idx": 33525, "task_index": 39}, {"db_idx": 33526, "episode_idx": 199, "frame_idx": 58, "global_frame_idx": 33526, "task_index": 39}, {"db_idx": 33527, "episode_idx": 199, "frame_idx": 59, "global_frame_idx": 33527, "task_index": 39}, {"db_idx": 33528, "episode_idx": 199, "frame_idx": 60, "global_frame_idx": 33528, "task_index": 39}, {"db_idx": 33529, "episode_idx": 199, "frame_idx": 61, "global_frame_idx": 33529, "task_index": 39}, {"db_idx": 33530, "episode_idx": 199, "frame_idx": 62, "global_frame_idx": 33530, "task_index": 39}, {"db_idx": 33531, "episode_idx": 199, "frame_idx": 63, "global_frame_idx": 33531, "task_index": 39}, {"db_idx": 33532, "episode_idx": 199, "frame_idx": 64, "global_frame_idx": 33532, "task_index": 39}, {"db_idx": 33533, "episode_idx": 199, "frame_idx": 65, "global_frame_idx": 33533, "task_index": 39}, {"db_idx": 33534, "episode_idx": 199, "frame_idx": 66, "global_frame_idx": 33534, "task_index": 39}, {"db_idx": 33535, "episode_idx": 199, "frame_idx": 67, "global_frame_idx": 33535, "task_index": 39}, {"db_idx": 33536, "episode_idx": 199, "frame_idx": 68, "global_frame_idx": 33536, "task_index": 39}, {"db_idx": 33537, "episode_idx": 199, "frame_idx": 69, "global_frame_idx": 33537, "task_index": 39}, {"db_idx": 33538, "episode_idx": 199, "frame_idx": 70, "global_frame_idx": 33538, "task_index": 39}, {"db_idx": 33539, "episode_idx": 199, "frame_idx": 71, "global_frame_idx": 33539, "task_index": 39}, {"db_idx": 33540, "episode_idx": 199, "frame_idx": 72, "global_frame_idx": 33540, "task_index": 39}, {"db_idx": 33541, "episode_idx": 199, "frame_idx": 73, "global_frame_idx": 33541, "task_index": 39}, {"db_idx": 33542, "episode_idx": 199, "frame_idx": 74, "global_frame_idx": 33542, "task_index": 39}, {"db_idx": 33543, "episode_idx": 199, "frame_idx": 75, "global_frame_idx": 33543, "task_index": 39}, {"db_idx": 33544, "episode_idx": 
199, "frame_idx": 76, "global_frame_idx": 33544, "task_index": 39}, {"db_idx": 33545, "episode_idx": 199, "frame_idx": 77, "global_frame_idx": 33545, "task_index": 39}, {"db_idx": 33546, "episode_idx": 199, "frame_idx": 78, "global_frame_idx": 33546, "task_index": 39}, {"db_idx": 33547, "episode_idx": 199, "frame_idx": 79, "global_frame_idx": 33547, "task_index": 39}, {"db_idx": 33548, "episode_idx": 199, "frame_idx": 80, "global_frame_idx": 33548, "task_index": 39}, {"db_idx": 33549, "episode_idx": 199, "frame_idx": 81, "global_frame_idx": 33549, "task_index": 39}, {"db_idx": 33550, "episode_idx": 199, "frame_idx": 82, "global_frame_idx": 33550, "task_index": 39}, {"db_idx": 33551, "episode_idx": 199, "frame_idx": 83, "global_frame_idx": 33551, "task_index": 39}, {"db_idx": 33552, "episode_idx": 199, "frame_idx": 84, "global_frame_idx": 33552, "task_index": 39}, {"db_idx": 33553, "episode_idx": 199, "frame_idx": 85, "global_frame_idx": 33553, "task_index": 39}, {"db_idx": 33554, "episode_idx": 199, "frame_idx": 86, "global_frame_idx": 33554, "task_index": 39}, {"db_idx": 33555, "episode_idx": 199, "frame_idx": 87, "global_frame_idx": 33555, "task_index": 39}, {"db_idx": 33556, "episode_idx": 199, "frame_idx": 88, "global_frame_idx": 33556, "task_index": 39}, {"db_idx": 33557, "episode_idx": 199, "frame_idx": 89, "global_frame_idx": 33557, "task_index": 39}, {"db_idx": 33558, "episode_idx": 199, "frame_idx": 90, "global_frame_idx": 33558, "task_index": 39}, {"db_idx": 33559, "episode_idx": 199, "frame_idx": 91, "global_frame_idx": 33559, "task_index": 39}, {"db_idx": 33560, "episode_idx": 199, "frame_idx": 92, "global_frame_idx": 33560, "task_index": 39}, {"db_idx": 33561, "episode_idx": 199, "frame_idx": 93, "global_frame_idx": 33561, "task_index": 39}, {"db_idx": 33562, "episode_idx": 199, "frame_idx": 94, "global_frame_idx": 33562, "task_index": 39}, {"db_idx": 33563, "episode_idx": 199, "frame_idx": 95, "global_frame_idx": 33563, "task_index": 39}, {"db_idx": 
33564, "episode_idx": 199, "frame_idx": 96, "global_frame_idx": 33564, "task_index": 39}, {"db_idx": 33565, "episode_idx": 199, "frame_idx": 97, "global_frame_idx": 33565, "task_index": 39}, {"db_idx": 33566, "episode_idx": 199, "frame_idx": 98, "global_frame_idx": 33566, "task_index": 39}, {"db_idx": 33567, "episode_idx": 199, "frame_idx": 99, "global_frame_idx": 33567, "task_index": 39}, {"db_idx": 33568, "episode_idx": 199, "frame_idx": 100, "global_frame_idx": 33568, "task_index": 39}, {"db_idx": 33569, "episode_idx": 199, "frame_idx": 101, "global_frame_idx": 33569, "task_index": 39}, {"db_idx": 33570, "episode_idx": 199, "frame_idx": 102, "global_frame_idx": 33570, "task_index": 39}, {"db_idx": 33571, "episode_idx": 199, "frame_idx": 103, "global_frame_idx": 33571, "task_index": 39}, {"db_idx": 33572, "episode_idx": 199, "frame_idx": 104, "global_frame_idx": 33572, "task_index": 39}, {"db_idx": 33573, "episode_idx": 199, "frame_idx": 105, "global_frame_idx": 33573, "task_index": 39}, {"db_idx": 33574, "episode_idx": 199, "frame_idx": 106, "global_frame_idx": 33574, "task_index": 39}, {"db_idx": 33575, "episode_idx": 199, "frame_idx": 107, "global_frame_idx": 33575, "task_index": 39}, {"db_idx": 33576, "episode_idx": 199, "frame_idx": 108, "global_frame_idx": 33576, "task_index": 39}, {"db_idx": 33577, "episode_idx": 199, "frame_idx": 109, "global_frame_idx": 33577, "task_index": 39}, {"db_idx": 33578, "episode_idx": 199, "frame_idx": 110, "global_frame_idx": 33578, "task_index": 39}, {"db_idx": 33579, "episode_idx": 199, "frame_idx": 111, "global_frame_idx": 33579, "task_index": 39}, {"db_idx": 33580, "episode_idx": 199, "frame_idx": 112, "global_frame_idx": 33580, "task_index": 39}, {"db_idx": 33581, "episode_idx": 199, "frame_idx": 113, "global_frame_idx": 33581, "task_index": 39}, {"db_idx": 33582, "episode_idx": 199, "frame_idx": 114, "global_frame_idx": 33582, "task_index": 39}, {"db_idx": 33583, "episode_idx": 199, "frame_idx": 115, "global_frame_idx": 
33583, "task_index": 39}, {"db_idx": 33584, "episode_idx": 199, "frame_idx": 116, "global_frame_idx": 33584, "task_index": 39}, {"db_idx": 33585, "episode_idx": 199, "frame_idx": 117, "global_frame_idx": 33585, "task_index": 39}, {"db_idx": 33586, "episode_idx": 199, "frame_idx": 118, "global_frame_idx": 33586, "task_index": 39}]
\ No newline at end of file
diff --git a/scripts/docker/compose.yml b/scripts/docker/compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..0caf87849e70e67af3cb9fc44277ab754ecc38ff
--- /dev/null
+++ b/scripts/docker/compose.yml
@@ -0,0 +1,29 @@
+# Run with:
+# docker compose -f scripts/docker/compose.yml up --build
+services:
+ openpi_server:
+ image: openpi_server
+ build:
+ context: ..
+ dockerfile: scripts/docker/serve_policy.Dockerfile
+ init: true
+ tty: true
+ network_mode: host
+ # Populate configured openpi data home to /openpi_assets inside the container.
+ # Populate aws credential inside the container.
+ volumes:
+ - $PWD:/app
+ - ${OPENPI_DATA_HOME:-~/.cache/openpi}:/openpi_assets
+ environment:
+ - SERVER_ARGS
+ - OPENPI_DATA_HOME=/openpi_assets
+ - IS_DOCKER=true
+
+ # Comment out this block if not running on a machine with GPUs.
+ deploy:
+ resources:
+ reservations:
+ devices:
+ - driver: nvidia
+ count: 1
+ capabilities: [gpu]
diff --git a/scripts/docker/install_docker_ubuntu22.sh b/scripts/docker/install_docker_ubuntu22.sh
new file mode 100644
index 0000000000000000000000000000000000000000..38873b3e379ee40e6f80fe86a88be7dae494e05b
--- /dev/null
+++ b/scripts/docker/install_docker_ubuntu22.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+# Add Docker's official GPG key:
+sudo apt-get update
+sudo apt-get install -y ca-certificates curl
+sudo install -m 0755 -d /etc/apt/keyrings
+sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
+sudo chmod a+r /etc/apt/keyrings/docker.asc
+
+# Add the repository to Apt sources:
+echo \
+ "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
+ $(. /etc/os-release && echo "$VERSION_CODENAME") stable" |
+ sudo tee /etc/apt/sources.list.d/docker.list >/dev/null
+sudo apt-get update
+
+sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
+
+# Add current user to the 'docker' group, which allows them to use docker commands (docker build, docker run, etc).
+# See https://docs.docker.com/engine/install/linux-postinstall/
+username=$(whoami)
+sudo usermod -aG docker "$username"
+
+# Configure docker to start automatically on system boot.
+sudo systemctl enable docker.service
+sudo systemctl enable containerd.service
+
+# https://forums.docker.com/t/docker-credential-desktop-exe-executable-file-not-found-in-path-using-wsl2/100225/5
+if [ -f ~/.docker/config.json ]; then
+ sed -i 's/credsStore/credStore/g' ~/.docker/config.json
+fi
+
+echo ""
+echo "********************************************************************"
+echo "**** Restart to allow Docker permission changes to take effect. ****"
+echo "********************************************************************"
+echo ""
diff --git a/scripts/docker/install_nvidia_container_toolkit.sh b/scripts/docker/install_nvidia_container_toolkit.sh
new file mode 100644
index 0000000000000000000000000000000000000000..a4c67f1d5bcc6655f7ae2084a8866037b819b4f0
--- /dev/null
+++ b/scripts/docker/install_nvidia_container_toolkit.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+# Installs the NVIDIA Container Toolkit, which allows Docker containers to access NVIDIA GPUs.
+# NVIDIA's official documentation: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
+
+curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg &&
+ curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list |
+ sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' |
+ sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list
+
+# Uncomment the 'experimental' repo lines. NVIDIA's documentation omits 'sudo' in the following command, but it is required.
+sudo sed -i -e '/experimental/ s/^#//g' /etc/apt/sources.list.d/nvidia-container-toolkit.list
+sudo apt-get update
+sudo apt-get install -y nvidia-container-toolkit
+
+sudo nvidia-ctk runtime configure --runtime=docker
+sudo systemctl restart docker
diff --git a/scripts/docker/serve_policy.Dockerfile b/scripts/docker/serve_policy.Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..f96b660ce76b16d8567de4a43d1874644d1713e1
--- /dev/null
+++ b/scripts/docker/serve_policy.Dockerfile
@@ -0,0 +1,34 @@
+# Dockerfile for serving a PI policy.
+# Based on UV's instructions: https://docs.astral.sh/uv/guides/integration/docker/#developing-in-a-container
+
+# Build the container:
+# docker build . -t openpi_server -f scripts/docker/serve_policy.Dockerfile
+
+# Run the container:
+# docker run --rm -it --network=host -v .:/app --gpus=all openpi_server /bin/bash
+
+FROM nvidia/cuda:12.2.2-cudnn8-runtime-ubuntu22.04@sha256:2d913b09e6be8387e1a10976933642c73c840c0b735f0bf3c28d97fc9bc422e0
+COPY --from=ghcr.io/astral-sh/uv:0.5.1 /uv /uvx /bin/
+
+WORKDIR /app
+
+# Needed because LeRobot uses git-lfs. Skip recommends and clean apt lists in the same layer to keep the image small.
+RUN apt-get update && apt-get install -y --no-install-recommends git git-lfs && rm -rf /var/lib/apt/lists/*
+
+# Copy from the cache instead of linking since it's a mounted volume
+ENV UV_LINK_MODE=copy
+
+# Write the virtual environment outside of the project directory so it doesn't
+# leak out of the container when we mount the application code.
+ENV UV_PROJECT_ENVIRONMENT=/.venv
+
+# Install the project's dependencies using the lockfile and settings
+RUN uv venv --python 3.11.9 $UV_PROJECT_ENVIRONMENT
+RUN --mount=type=cache,target=/root/.cache/uv \
+ --mount=type=bind,source=uv.lock,target=uv.lock \
+ --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+ --mount=type=bind,source=packages/openpi-client/pyproject.toml,target=packages/openpi-client/pyproject.toml \
+ --mount=type=bind,source=packages/openpi-client/src,target=packages/openpi-client/src \
+ GIT_LFS_SKIP_SMUDGE=1 uv sync --frozen --no-install-project --no-dev
+# Exec-form CMD (with 'exec' in the shell) makes the server PID 1 so it receives SIGTERM on 'docker stop'.
+CMD ["/bin/bash", "-c", "exec uv run scripts/serve_policy.py $SERVER_ARGS"]
diff --git a/scripts/logs/eval_pi0fast_libero.out b/scripts/logs/eval_pi0fast_libero.out
new file mode 100644
index 0000000000000000000000000000000000000000..e58b6df85ac2ef1f886559a2e18c26549997d664
--- /dev/null
+++ b/scripts/logs/eval_pi0fast_libero.out
@@ -0,0 +1,1243 @@
+================================================================================
+JobID = 12563538
+User = u24877, Account = kisski-spath
+Partition = kisski, Nodelist = ggpu170
+================================================================================
+===========================================
+Experiment: eval_pi0fast_libero
+Codebase: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid
+Checkpoint: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/outputs/train/2026-02-08/07-52-25_pi0fast_baseline_libero_30k/checkpoints/100000/pretrained_model
+Video Output: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/results/eval_pi0fast_libero/videos
+===========================================
+[robosuite WARNING] No private macro file found! (__init__.py:7)
+[robosuite WARNING] It is recommended to use a private macro file (__init__.py:8)
+[robosuite WARNING] To setup, run: python /user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/robosuite/scripts/setup_macros.py (__init__.py:9)
+Gym has been unmaintained since 2022 and does not support NumPy 2.0 amongst other critical functionality.
+Please upgrade to Gymnasium, the maintained drop-in replacement of Gym, or contact the authors of your software and request that they upgrade.
+Users of this version of Gym should be able to simply replace 'import gym' with 'import gymnasium as gym' in the vast majority of cases.
+See the migration guide at https://gymnasium.farama.org/introduction/migration_guide/ for additional information.
+INFO:root:Task suite: libero_10
+:241: DeprecationWarning: builtin type SwigPyPacked has no __module__ attribute
+:241: DeprecationWarning: builtin type SwigPyObject has no __module__ attribute
+Some kwargs in processor config are unused and will not have any effect: scale, action_dim, time_horizon, min_token, vocab_size.
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/transformers/models/paligemma/configuration_paligemma.py:134: FutureWarning: The `ignore_index` attribute is deprecated and will be removed in v4.47.
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/libero_config/libero_config.yaml
+[info] using task orders [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
+Loading weights from local directory
+[INFO] Missing keys (will be randomly initialized): ['model.pi0_paligemma.language_model.model.embed_tokens.weight']
+
0%| | 0/10 [00:00, ?it/s][Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+[Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+
+
0%| | 0/10 [00:00, ?it/s][AINFO:root:
+Task: put both the alphabet soup and the tomato sauce in the basket
+INFO:root:Starting episode 1...
+You're using a GemmaTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 1
+INFO:root:# successes: 0 (0.0%)
+
+
10%|█ | 1/10 [00:40<06:08, 40.97s/it][AINFO:root:
+Task: put both the alphabet soup and the tomato sauce in the basket
+INFO:root:Starting episode 2...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 2
+INFO:root:# successes: 0 (0.0%)
+
+
20%|██ | 2/10 [01:17<05:07, 38.42s/it][AINFO:root:
+Task: put both the alphabet soup and the tomato sauce in the basket
+INFO:root:Starting episode 3...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 3
+INFO:root:# successes: 0 (0.0%)
+
+
30%|███ | 3/10 [01:54<04:24, 37.83s/it][AINFO:root:
+Task: put both the alphabet soup and the tomato sauce in the basket
+INFO:root:Starting episode 4...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 4
+INFO:root:# successes: 0 (0.0%)
+
+
40%|████ | 4/10 [02:33<03:48, 38.11s/it][AINFO:root:
+Task: put both the alphabet soup and the tomato sauce in the basket
+INFO:root:Starting episode 5...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 5
+INFO:root:# successes: 0 (0.0%)
+
+
50%|█████ | 5/10 [03:11<03:11, 38.32s/it][AINFO:root:
+Task: put both the alphabet soup and the tomato sauce in the basket
+INFO:root:Starting episode 6...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 6
+INFO:root:# successes: 0 (0.0%)
+
+
60%|██████ | 6/10 [03:50<02:34, 38.56s/it][AINFO:root:
+Task: put both the alphabet soup and the tomato sauce in the basket
+INFO:root:Starting episode 7...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 7
+INFO:root:# successes: 0 (0.0%)
+
+
70%|███████ | 7/10 [04:28<01:54, 38.17s/it][AINFO:root:
+Task: put both the alphabet soup and the tomato sauce in the basket
+INFO:root:Starting episode 8...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 8
+INFO:root:# successes: 0 (0.0%)
+
+
80%|████████ | 8/10 [05:09<01:18, 39.02s/it][AINFO:root:
+Task: put both the alphabet soup and the tomato sauce in the basket
+INFO:root:Starting episode 9...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 9
+INFO:root:# successes: 0 (0.0%)
+
+
90%|█████████ | 9/10 [05:49<00:39, 39.29s/it][AINFO:root:
+Task: put both the alphabet soup and the tomato sauce in the basket
+INFO:root:Starting episode 10...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 10
+INFO:root:# successes: 0 (0.0%)
+
+
100%|██████████| 10/10 [06:27<00:00, 39.06s/it][A
100%|██████████| 10/10 [06:27<00:00, 38.76s/it]
+INFO:root:Current task success rate: 0.0
+INFO:root:Current total success rate: 0.0
+
10%|█ | 1/10 [06:31<58:44, 391.59s/it][Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+[Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+
+
0%| | 0/10 [00:00, ?it/s][AINFO:root:
+Task: put both the cream cheese box and the butter in the basket
+INFO:root:Starting episode 1...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 11
+INFO:root:# successes: 0 (0.0%)
+
+
10%|█ | 1/10 [00:38<05:44, 38.24s/it][AINFO:root:
+Task: put both the cream cheese box and the butter in the basket
+INFO:root:Starting episode 2...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 12
+INFO:root:# successes: 0 (0.0%)
+
+
20%|██ | 2/10 [01:16<05:04, 38.11s/it][AINFO:root:
+Task: put both the cream cheese box and the butter in the basket
+INFO:root:Starting episode 3...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 13
+INFO:root:# successes: 0 (0.0%)
+
+
30%|███ | 3/10 [01:55<04:29, 38.45s/it][AINFO:root:
+Task: put both the cream cheese box and the butter in the basket
+INFO:root:Starting episode 4...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 14
+INFO:root:# successes: 0 (0.0%)
+
+
40%|████ | 4/10 [02:33<03:50, 38.38s/it][AINFO:root:
+Task: put both the cream cheese box and the butter in the basket
+INFO:root:Starting episode 5...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 15
+INFO:root:# successes: 0 (0.0%)
+
+
50%|█████ | 5/10 [03:11<03:11, 38.37s/it][AINFO:root:
+Task: put both the cream cheese box and the butter in the basket
+INFO:root:Starting episode 6...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 16
+INFO:root:# successes: 0 (0.0%)
+
+
60%|██████ | 6/10 [03:50<02:34, 38.58s/it][AINFO:root:
+Task: put both the cream cheese box and the butter in the basket
+INFO:root:Starting episode 7...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 17
+INFO:root:# successes: 0 (0.0%)
+
+
70%|███████ | 7/10 [04:29<01:55, 38.50s/it][AINFO:root:
+Task: put both the cream cheese box and the butter in the basket
+INFO:root:Starting episode 8...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 18
+INFO:root:# successes: 0 (0.0%)
+
+
80%|████████ | 8/10 [05:07<01:16, 38.43s/it][AINFO:root:
+Task: put both the cream cheese box and the butter in the basket
+INFO:root:Starting episode 9...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 19
+INFO:root:# successes: 0 (0.0%)
+
+
90%|█████████ | 9/10 [05:46<00:38, 38.71s/it][AINFO:root:
+Task: put both the cream cheese box and the butter in the basket
+INFO:root:Starting episode 10...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 20
+INFO:root:# successes: 0 (0.0%)
+
+
100%|██████████| 10/10 [06:25<00:00, 38.72s/it][A
100%|██████████| 10/10 [06:25<00:00, 38.54s/it]
+INFO:root:Current task success rate: 0.0
+INFO:root:Current total success rate: 0.0
+
20%|██ | 2/10 [12:57<51:47, 388.42s/it][Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+[Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+
+
0%| | 0/10 [00:00, ?it/s][AINFO:root:
+Task: turn on the stove and put the moka pot on it
+INFO:root:Starting episode 1...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 21
+INFO:root:# successes: 0 (0.0%)
+
+
10%|█ | 1/10 [00:31<04:47, 31.93s/it][AINFO:root:
+Task: turn on the stove and put the moka pot on it
+INFO:root:Starting episode 2...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 22
+INFO:root:# successes: 0 (0.0%)
+
+
20%|██ | 2/10 [01:03<04:12, 31.57s/it][AINFO:root:
+Task: turn on the stove and put the moka pot on it
+INFO:root:Starting episode 3...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 23
+INFO:root:# successes: 0 (0.0%)
+
+
30%|███ | 3/10 [01:34<03:39, 31.35s/it][AINFO:root:
+Task: turn on the stove and put the moka pot on it
+INFO:root:Starting episode 4...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 24
+INFO:root:# successes: 0 (0.0%)
+
+
40%|████ | 4/10 [02:02<03:00, 30.07s/it][AINFO:root:
+Task: turn on the stove and put the moka pot on it
+INFO:root:Starting episode 5...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 25
+INFO:root:# successes: 0 (0.0%)
+
+
50%|█████ | 5/10 [02:38<02:40, 32.18s/it][AINFO:root:
+Task: turn on the stove and put the moka pot on it
+INFO:root:Starting episode 6...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 26
+INFO:root:# successes: 0 (0.0%)
+
+
60%|██████ | 6/10 [03:12<02:11, 32.96s/it][AINFO:root:
+Task: turn on the stove and put the moka pot on it
+INFO:root:Starting episode 7...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 27
+INFO:root:# successes: 0 (0.0%)
+
+
70%|███████ | 7/10 [03:46<01:40, 33.34s/it][AINFO:root:
+Task: turn on the stove and put the moka pot on it
+INFO:root:Starting episode 8...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 28
+INFO:root:# successes: 0 (0.0%)
+
+
80%|████████ | 8/10 [04:21<01:07, 33.60s/it][AINFO:root:
+Task: turn on the stove and put the moka pot on it
+INFO:root:Starting episode 9...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 29
+INFO:root:# successes: 0 (0.0%)
+
+
90%|█████████ | 9/10 [04:51<00:32, 32.60s/it][AINFO:root:
+Task: turn on the stove and put the moka pot on it
+INFO:root:Starting episode 10...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 30
+INFO:root:# successes: 0 (0.0%)
+
+
100%|██████████| 10/10 [05:20<00:00, 31.36s/it][A
100%|██████████| 10/10 [05:20<00:00, 32.01s/it]
+INFO:root:Current task success rate: 0.0
+INFO:root:Current total success rate: 0.0
+
30%|███ | 3/10 [18:19<41:45, 357.93s/it][Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+[Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+
+
0%| | 0/10 [00:00, ?it/s][AINFO:root:
+Task: put the black bowl in the bottom drawer of the cabinet and close it
+INFO:root:Starting episode 1...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 31
+INFO:root:# successes: 0 (0.0%)
+
+
10%|█ | 1/10 [00:31<04:46, 31.84s/it][AINFO:root:
+Task: put the black bowl in the bottom drawer of the cabinet and close it
+INFO:root:Starting episode 2...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 32
+INFO:root:# successes: 0 (0.0%)
+
+
20%|██ | 2/10 [01:02<04:10, 31.26s/it][AINFO:root:
+Task: put the black bowl in the bottom drawer of the cabinet and close it
+INFO:root:Starting episode 3...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 33
+INFO:root:# successes: 0 (0.0%)
+
+
30%|███ | 3/10 [01:34<03:39, 31.32s/it][AINFO:root:
+Task: put the black bowl in the bottom drawer of the cabinet and close it
+INFO:root:Starting episode 4...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 34
+INFO:root:# successes: 0 (0.0%)
+
+
40%|████ | 4/10 [02:03<03:04, 30.71s/it][AINFO:root:
+Task: put the black bowl in the bottom drawer of the cabinet and close it
+INFO:root:Starting episode 5...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 35
+INFO:root:# successes: 0 (0.0%)
+
+
50%|█████ | 5/10 [02:36<02:37, 31.57s/it][AINFO:root:
+Task: put the black bowl in the bottom drawer of the cabinet and close it
+INFO:root:Starting episode 6...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 36
+INFO:root:# successes: 0 (0.0%)
+
+
60%|██████ | 6/10 [03:07<02:05, 31.33s/it][AINFO:root:
+Task: put the black bowl in the bottom drawer of the cabinet and close it
+INFO:root:Starting episode 7...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 37
+INFO:root:# successes: 0 (0.0%)
+
+
70%|███████ | 7/10 [03:39<01:34, 31.34s/it][AINFO:root:
+Task: put the black bowl in the bottom drawer of the cabinet and close it
+INFO:root:Starting episode 8...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 38
+INFO:root:# successes: 0 (0.0%)
+
+
80%|████████ | 8/10 [04:11<01:03, 31.67s/it][AINFO:root:
+Task: put the black bowl in the bottom drawer of the cabinet and close it
+INFO:root:Starting episode 9...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 39
+INFO:root:# successes: 0 (0.0%)
+
+
90%|█████████ | 9/10 [04:43<00:31, 31.76s/it][AINFO:root:
+Task: put the black bowl in the bottom drawer of the cabinet and close it
+INFO:root:Starting episode 10...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 40
+INFO:root:# successes: 0 (0.0%)
+
+
100%|██████████| 10/10 [05:13<00:00, 31.13s/it][A
100%|██████████| 10/10 [05:13<00:00, 31.32s/it]
+INFO:root:Current task success rate: 0.0
+INFO:root:Current total success rate: 0.0
+
40%|████ | 4/10 [23:33<34:04, 340.77s/it][Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+[Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+
+
0%| | 0/10 [00:00, ?it/s][AINFO:root:
+Task: put the white mug on the left plate and put the yellow and white mug on the right plate
+INFO:root:Starting episode 1...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 41
+INFO:root:# successes: 0 (0.0%)
+
+
10%|█ | 1/10 [00:35<05:22, 35.86s/it][AINFO:root:
+Task: put the white mug on the left plate and put the yellow and white mug on the right plate
+INFO:root:Starting episode 2...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 42
+INFO:root:# successes: 0 (0.0%)
+
+
20%|██ | 2/10 [01:15<05:05, 38.18s/it][AINFO:root:
+Task: put the white mug on the left plate and put the yellow and white mug on the right plate
+INFO:root:Starting episode 3...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 43
+INFO:root:# successes: 0 (0.0%)
+
+
30%|███ | 3/10 [01:55<04:32, 38.95s/it][AINFO:root:
+Task: put the white mug on the left plate and put the yellow and white mug on the right plate
+INFO:root:Starting episode 4...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 44
+INFO:root:# successes: 0 (0.0%)
+
+
40%|████ | 4/10 [02:31<03:46, 37.70s/it][AINFO:root:
+Task: put the white mug on the left plate and put the yellow and white mug on the right plate
+INFO:root:Starting episode 5...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 45
+INFO:root:# successes: 0 (0.0%)
+
+
50%|█████ | 5/10 [03:09<03:09, 37.82s/it][AINFO:root:
+Task: put the white mug on the left plate and put the yellow and white mug on the right plate
+INFO:root:Starting episode 6...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 46
+INFO:root:# successes: 0 (0.0%)
+
+
60%|██████ | 6/10 [03:47<02:32, 38.03s/it][AINFO:root:
+Task: put the white mug on the left plate and put the yellow and white mug on the right plate
+INFO:root:Starting episode 7...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 47
+INFO:root:# successes: 0 (0.0%)
+
+
70%|███████ | 7/10 [04:28<01:56, 38.90s/it][AINFO:root:
+Task: put the white mug on the left plate and put the yellow and white mug on the right plate
+INFO:root:Starting episode 8...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 48
+INFO:root:# successes: 0 (0.0%)
+
+
80%|████████ | 8/10 [05:07<01:17, 38.94s/it][AINFO:root:
+Task: put the white mug on the left plate and put the yellow and white mug on the right plate
+INFO:root:Starting episode 9...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 49
+INFO:root:# successes: 0 (0.0%)
+
+
90%|█████████ | 9/10 [05:46<00:38, 38.99s/it][AINFO:root:
+Task: put the white mug on the left plate and put the yellow and white mug on the right plate
+INFO:root:Starting episode 10...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 50
+INFO:root:# successes: 0 (0.0%)
+
+
100%|██████████| 10/10 [06:20<00:00, 37.54s/it][A
100%|██████████| 10/10 [06:20<00:00, 38.09s/it]
+INFO:root:Current task success rate: 0.0
+INFO:root:Current total success rate: 0.0
+
50%|█████ | 5/10 [29:57<29:40, 356.10s/it][Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+[Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+
+
0%| | 0/10 [00:00, ?it/s][AINFO:root:
+Task: pick up the book and place it in the back compartment of the caddy
+INFO:root:Starting episode 1...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 51
+INFO:root:# successes: 0 (0.0%)
+
+
10%|█ | 1/10 [00:33<05:02, 33.65s/it][AINFO:root:
+Task: pick up the book and place it in the back compartment of the caddy
+INFO:root:Starting episode 2...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 52
+INFO:root:# successes: 0 (0.0%)
+
+
20%|██ | 2/10 [01:05<04:21, 32.66s/it][AINFO:root:
+Task: pick up the book and place it in the back compartment of the caddy
+INFO:root:Starting episode 3...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 53
+INFO:root:# successes: 0 (0.0%)
+
+
30%|███ | 3/10 [01:39<03:51, 33.06s/it][AINFO:root:
+Task: pick up the book and place it in the back compartment of the caddy
+INFO:root:Starting episode 4...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 54
+INFO:root:# successes: 0 (0.0%)
+
+
40%|████ | 4/10 [02:11<03:16, 32.69s/it][AINFO:root:
+Task: pick up the book and place it in the back compartment of the caddy
+INFO:root:Starting episode 5...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 55
+INFO:root:# successes: 0 (0.0%)
+
+
50%|█████ | 5/10 [02:42<02:41, 32.23s/it][AINFO:root:
+Task: pick up the book and place it in the back compartment of the caddy
+INFO:root:Starting episode 6...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 56
+INFO:root:# successes: 0 (0.0%)
+
+
60%|██████ | 6/10 [03:15<02:09, 32.46s/it][AINFO:root:
+Task: pick up the book and place it in the back compartment of the caddy
+INFO:root:Starting episode 7...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 57
+INFO:root:# successes: 0 (0.0%)
+
+
70%|███████ | 7/10 [03:49<01:38, 32.82s/it][AINFO:root:
+Task: pick up the book and place it in the back compartment of the caddy
+INFO:root:Starting episode 8...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 58
+INFO:root:# successes: 0 (0.0%)
+
+
80%|████████ | 8/10 [04:21<01:05, 32.52s/it][AINFO:root:
+Task: pick up the book and place it in the back compartment of the caddy
+INFO:root:Starting episode 9...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 59
+INFO:root:# successes: 0 (0.0%)
+
+
90%|█████████ | 9/10 [04:54<00:32, 32.74s/it][AINFO:root:
+Task: pick up the book and place it in the back compartment of the caddy
+INFO:root:Starting episode 10...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 60
+INFO:root:# successes: 0 (0.0%)
+
+
100%|██████████| 10/10 [05:26<00:00, 32.72s/it][A
100%|██████████| 10/10 [05:26<00:00, 32.69s/it]
+INFO:root:Current task success rate: 0.0
+INFO:root:Current total success rate: 0.0
+
60%|██████ | 6/10 [35:25<23:06, 346.70s/it][Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+[Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+
+
0%| | 0/10 [00:00, ?it/s][AINFO:root:
+Task: put the white mug on the plate and put the chocolate pudding to the right of the plate
+INFO:root:Starting episode 1...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 61
+INFO:root:# successes: 0 (0.0%)
+
+
10%|█ | 1/10 [00:34<05:09, 34.37s/it][AINFO:root:
+Task: put the white mug on the plate and put the chocolate pudding to the right of the plate
+INFO:root:Starting episode 2...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 62
+INFO:root:# successes: 0 (0.0%)
+
+
20%|██ | 2/10 [01:08<04:35, 34.44s/it][AINFO:root:
+Task: put the white mug on the plate and put the chocolate pudding to the right of the plate
+INFO:root:Starting episode 3...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 63
+INFO:root:# successes: 0 (0.0%)
+
+
30%|███ | 3/10 [01:42<03:57, 33.94s/it][AINFO:root:
+Task: put the white mug on the plate and put the chocolate pudding to the right of the plate
+INFO:root:Starting episode 4...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 64
+INFO:root:# successes: 0 (0.0%)
+
+
40%|████ | 4/10 [02:16<03:24, 34.01s/it][AINFO:root:
+Task: put the white mug on the plate and put the chocolate pudding to the right of the plate
+INFO:root:Starting episode 5...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 65
+INFO:root:# successes: 0 (0.0%)
+
+
50%|█████ | 5/10 [02:53<02:55, 35.07s/it][AINFO:root:
+Task: put the white mug on the plate and put the chocolate pudding to the right of the plate
+INFO:root:Starting episode 6...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 66
+INFO:root:# successes: 0 (0.0%)
+
+
60%|██████ | 6/10 [03:27<02:19, 34.87s/it][AINFO:root:
+Task: put the white mug on the plate and put the chocolate pudding to the right of the plate
+INFO:root:Starting episode 7...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 67
+INFO:root:# successes: 0 (0.0%)
+
+
70%|███████ | 7/10 [04:05<01:47, 35.78s/it][AINFO:root:
+Task: put the white mug on the plate and put the chocolate pudding to the right of the plate
+INFO:root:Starting episode 8...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 68
+INFO:root:# successes: 0 (0.0%)
+
+
80%|████████ | 8/10 [04:39<01:10, 35.26s/it][AINFO:root:
+Task: put the white mug on the plate and put the chocolate pudding to the right of the plate
+INFO:root:Starting episode 9...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 69
+INFO:root:# successes: 0 (0.0%)
+
+
90%|█████████ | 9/10 [05:15<00:35, 35.41s/it][AINFO:root:
+Task: put the white mug on the plate and put the chocolate pudding to the right of the plate
+INFO:root:Starting episode 10...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 70
+INFO:root:# successes: 0 (0.0%)
+
+
100%|██████████| 10/10 [05:49<00:00, 35.07s/it][A
100%|██████████| 10/10 [05:49<00:00, 34.96s/it]
+INFO:root:Current task success rate: 0.0
+INFO:root:Current total success rate: 0.0
+
70%|███████ | 7/10 [41:16<17:24, 348.18s/it][Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+[Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+
+
0%| | 0/10 [00:00, ?it/s][AINFO:root:
+Task: put both the alphabet soup and the cream cheese box in the basket
+INFO:root:Starting episode 1...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 71
+INFO:root:# successes: 0 (0.0%)
+
+
10%|█ | 1/10 [00:37<05:38, 37.63s/it][AINFO:root:
+Task: put both the alphabet soup and the cream cheese box in the basket
+INFO:root:Starting episode 2...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 72
+INFO:root:# successes: 0 (0.0%)
+
+
20%|██ | 2/10 [01:14<04:55, 36.90s/it][AINFO:root:
+Task: put both the alphabet soup and the cream cheese box in the basket
+INFO:root:Starting episode 3...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 73
+INFO:root:# successes: 0 (0.0%)
+
+
30%|███ | 3/10 [01:50<04:18, 36.92s/it][AINFO:root:
+Task: put both the alphabet soup and the cream cheese box in the basket
+INFO:root:Starting episode 4...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 74
+INFO:root:# successes: 0 (0.0%)
+
+
40%|████ | 4/10 [02:28<03:42, 37.15s/it][AINFO:root:
+Task: put both the alphabet soup and the cream cheese box in the basket
+INFO:root:Starting episode 5...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 75
+INFO:root:# successes: 0 (0.0%)
+
+
50%|█████ | 5/10 [03:06<03:07, 37.55s/it][AINFO:root:
+Task: put both the alphabet soup and the cream cheese box in the basket
+INFO:root:Starting episode 6...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 76
+INFO:root:# successes: 0 (0.0%)
+
+
60%|██████ | 6/10 [03:43<02:29, 37.33s/it][AINFO:root:
+Task: put both the alphabet soup and the cream cheese box in the basket
+INFO:root:Starting episode 7...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 77
+INFO:root:# successes: 0 (0.0%)
+
+
70%|███████ | 7/10 [04:21<01:52, 37.54s/it][AINFO:root:
+Task: put both the alphabet soup and the cream cheese box in the basket
+INFO:root:Starting episode 8...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 78
+INFO:root:# successes: 0 (0.0%)
+
+
80%|████████ | 8/10 [04:57<01:14, 37.14s/it][AINFO:root:
+Task: put both the alphabet soup and the cream cheese box in the basket
+INFO:root:Starting episode 9...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 79
+INFO:root:# successes: 0 (0.0%)
+
+
90%|█████████ | 9/10 [05:33<00:36, 36.74s/it][AINFO:root:
+Task: put both the alphabet soup and the cream cheese box in the basket
+INFO:root:Starting episode 10...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 80
+INFO:root:# successes: 0 (0.0%)
+
+
100%|██████████| 10/10 [06:11<00:00, 37.17s/it][A
100%|██████████| 10/10 [06:11<00:00, 37.19s/it]
+INFO:root:Current task success rate: 0.0
+INFO:root:Current total success rate: 0.0
+
80%|████████ | 8/10 [47:29<11:51, 355.92s/it][Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+[Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+
+
0%| | 0/10 [00:00, ?it/s][AINFO:root:
+Task: put both moka pots on the stove
+INFO:root:Starting episode 1...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 81
+INFO:root:# successes: 0 (0.0%)
+
+
10%|█ | 1/10 [00:27<04:03, 27.01s/it][AINFO:root:
+Task: put both moka pots on the stove
+INFO:root:Starting episode 2...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 82
+INFO:root:# successes: 0 (0.0%)
+
+
20%|██ | 2/10 [00:54<03:36, 27.05s/it][AINFO:root:
+Task: put both moka pots on the stove
+INFO:root:Starting episode 3...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 83
+INFO:root:# successes: 0 (0.0%)
+
+
30%|███ | 3/10 [01:21<03:09, 27.08s/it][AINFO:root:
+Task: put both moka pots on the stove
+INFO:root:Starting episode 4...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 84
+INFO:root:# successes: 0 (0.0%)
+
+
40%|████ | 4/10 [01:48<02:41, 26.97s/it][AINFO:root:
+Task: put both moka pots on the stove
+INFO:root:Starting episode 5...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 85
+INFO:root:# successes: 0 (0.0%)
+
+
50%|█████ | 5/10 [02:14<02:14, 26.97s/it][AINFO:root:
+Task: put both moka pots on the stove
+INFO:root:Starting episode 6...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 86
+INFO:root:# successes: 0 (0.0%)
+
+
60%|██████ | 6/10 [02:43<01:49, 27.40s/it][AINFO:root:
+Task: put both moka pots on the stove
+INFO:root:Starting episode 7...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 87
+INFO:root:# successes: 0 (0.0%)
+
+
70%|███████ | 7/10 [03:09<01:21, 27.19s/it][AINFO:root:
+Task: put both moka pots on the stove
+INFO:root:Starting episode 8...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 88
+INFO:root:# successes: 0 (0.0%)
+
+
80%|████████ | 8/10 [03:37<00:54, 27.34s/it][AINFO:root:
+Task: put both moka pots on the stove
+INFO:root:Starting episode 9...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 89
+INFO:root:# successes: 0 (0.0%)
+
+
90%|█████████ | 9/10 [04:04<00:27, 27.32s/it][AINFO:root:
+Task: put both moka pots on the stove
+INFO:root:Starting episode 10...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 90
+INFO:root:# successes: 0 (0.0%)
+
+
100%|██████████| 10/10 [04:32<00:00, 27.33s/it][A
100%|██████████| 10/10 [04:32<00:00, 27.23s/it]
+INFO:root:Current task success rate: 0.0
+INFO:root:Current total success rate: 0.0
+
90%|█████████ | 9/10 [52:02<05:29, 329.92s/it][Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+[Warning]: datasets path /mnt/vast-kisski/projects/kisski-spath/VLA_objectcentric/VLA-Humanoid/libero/libero/../datasets does not exist!
+
+
0%| | 0/10 [00:00, ?it/s][AINFO:root:
+Task: put the yellow and white mug in the microwave and close it
+INFO:root:Starting episode 1...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 91
+INFO:root:# successes: 0 (0.0%)
+
+
10%|█ | 1/10 [00:29<04:25, 29.54s/it][AINFO:root:
+Task: put the yellow and white mug in the microwave and close it
+INFO:root:Starting episode 2...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 92
+INFO:root:# successes: 0 (0.0%)
+
+
20%|██ | 2/10 [00:59<03:58, 29.84s/it][AINFO:root:
+Task: put the yellow and white mug in the microwave and close it
+INFO:root:Starting episode 3...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 93
+INFO:root:# successes: 0 (0.0%)
+
+
30%|███ | 3/10 [01:29<03:30, 30.07s/it][AINFO:root:
+Task: put the yellow and white mug in the microwave and close it
+INFO:root:Starting episode 4...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 94
+INFO:root:# successes: 0 (0.0%)
+
+
40%|████ | 4/10 [01:59<02:59, 29.99s/it][AINFO:root:
+Task: put the yellow and white mug in the microwave and close it
+INFO:root:Starting episode 5...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 95
+INFO:root:# successes: 0 (0.0%)
+
+
50%|█████ | 5/10 [02:30<02:31, 30.31s/it][AINFO:root:
+Task: put the yellow and white mug in the microwave and close it
+INFO:root:Starting episode 6...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 96
+INFO:root:# successes: 0 (0.0%)
+
+
60%|██████ | 6/10 [03:01<02:02, 30.60s/it][AINFO:root:
+Task: put the yellow and white mug in the microwave and close it
+INFO:root:Starting episode 7...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 97
+INFO:root:# successes: 0 (0.0%)
+
+
70%|███████ | 7/10 [03:32<01:31, 30.64s/it][AINFO:root:
+Task: put the yellow and white mug in the microwave and close it
+INFO:root:Starting episode 8...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 98
+INFO:root:# successes: 0 (0.0%)
+
+
80%|████████ | 8/10 [04:02<01:00, 30.34s/it][AINFO:root:
+Task: put the yellow and white mug in the microwave and close it
+INFO:root:Starting episode 9...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 99
+INFO:root:# successes: 0 (0.0%)
+
+
90%|█████████ | 9/10 [04:31<00:30, 30.02s/it][AINFO:root:
+Task: put the yellow and white mug in the microwave and close it
+INFO:root:Starting episode 10...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+INFO:root:Success: False
+INFO:root:# episodes completed so far: 100
+INFO:root:# successes: 0 (0.0%)
+
+
100%|██████████| 10/10 [05:01<00:00, 29.94s/it][A
100%|██████████| 10/10 [05:01<00:00, 30.13s/it]
+INFO:root:Current task success rate: 0.0
+INFO:root:Current total success rate: 0.0
+
100%|██████████| 10/10 [57:04<00:00, 321.44s/it]
100%|██████████| 10/10 [57:04<00:00, 342.46s/it]
+INFO:root:Total success rate: 0.0
+INFO:root:Total episodes: 100
+Exception ignored in:
+Traceback (most recent call last):
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/robosuite/utils/binding_utils.py", line 199, in __del__
+ self.gl_ctx.free()
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/robosuite/renderers/context/egl_context.py", line 149, in free
+ EGL.eglMakeCurrent(EGL_DISPLAY, EGL.EGL_NO_SURFACE, EGL.EGL_NO_SURFACE, EGL.EGL_NO_CONTEXT)
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/OpenGL/error.py", line 230, in glCheckError
+ raise self._errorClass(
+OpenGL.raw.EGL._errors.EGLError: EGLError(
+ err = EGL_NOT_INITIALIZED,
+ baseOperation = eglMakeCurrent,
+ cArguments = (
+ ,
+ ,
+ ,
+ ,
+ ),
+ result = 0
+)
+Exception ignored in:
+Traceback (most recent call last):
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/robosuite/renderers/context/egl_context.py", line 155, in __del__
+ self.free()
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/robosuite/renderers/context/egl_context.py", line 149, in free
+ EGL.eglMakeCurrent(EGL_DISPLAY, EGL.EGL_NO_SURFACE, EGL.EGL_NO_SURFACE, EGL.EGL_NO_CONTEXT)
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/OpenGL/error.py", line 230, in glCheckError
+ raise self._errorClass(
+OpenGL.raw.EGL._errors.EGLError: EGLError(
+ err = EGL_NOT_INITIALIZED,
+ baseOperation = eglMakeCurrent,
+ cArguments = (
+ ,
+ ,
+ ,
+ ,
+ ),
+ result = 0
+)
+sys:1: DeprecationWarning: builtin type swigvarlink has no __module__ attribute
+===========================================
+Evaluation completed!
+Videos saved to: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/results/eval_pi0fast_libero/videos
+===========================================
+============ Job Information ===================================================
+Submitted: 2026-02-10T09:23:42
+Started: 2026-02-10T09:24:05
+Ended: 2026-02-10T10:22:43
+Elapsed: 59 min, Limit: 240 min, Difference: 181 min
+CPUs: 16, Nodes: 1
+Estimated Consumption: 590.00 core-hours
+================================================================================
diff --git a/scripts/logs/pi0fast_baseline_libero_h100.err b/scripts/logs/pi0fast_baseline_libero_h100.err
new file mode 100644
index 0000000000000000000000000000000000000000..48debda9d62e59f63228c834d71424131c4c081b
--- /dev/null
+++ b/scripts/logs/pi0fast_baseline_libero_h100.err
@@ -0,0 +1,10163 @@
+The following values were not passed to `accelerate launch` and had defaults used instead:
+ More than one GPU was found, enabling multi-GPU training.
+ If this was unintended please pass in `--num_processes=1`.
+ `--num_machines` was set to a value of `1`
+ `--mixed_precision` was set to a value of `'no'`
+ `--dynamo_backend` was set to a value of `'no'`
+To avoid this warning pass in values for each of the problematic parameters or run `accelerate config`.
+INFO 2026-02-08 07:53:17 ils/utils.py:46 Cuda backend detected, using cuda.
+WARNING 2026-02-08 07:53:17 /policies.py:68 Device 'None' is not available. Switching to 'cuda'.
+WARNING 2026-02-08 07:53:18 ls/other.py:513 Detected kernel version 4.18.0, which is below the recommended minimum of 5.5.0; this can cause the process to hang. It is recommended to upgrade the kernel to the minimum version or higher.
+INFO 2026-02-08 07:53:18 celerate.py:160 Creating dataset
+INFO 2026-02-08 07:53:18 ils/utils.py:46 Cuda backend detected, using cuda.
+WARNING 2026-02-08 07:53:18 /policies.py:68 Device 'None' is not available. Switching to 'cuda'.
+INFO 2026-02-08 07:53:18 ils/utils.py:46 Cuda backend detected, using cuda.
+WARNING 2026-02-08 07:53:18 /policies.py:68 Device 'None' is not available. Switching to 'cuda'.
+INFO 2026-02-08 07:53:18 ils/utils.py:46 Cuda backend detected, using cuda.
+WARNING 2026-02-08 07:53:18 /policies.py:68 Device 'None' is not available. Switching to 'cuda'.
+INFO 2026-02-08 07:53:20 celerate.py:171 Creating policy
+Some kwargs in processor config are unused and will not have any effect: time_horizon, min_token, scale, action_dim, vocab_size.
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/transformers/models/paligemma/configuration_paligemma.py:134: FutureWarning: The `ignore_index` attribute is deprecated and will be removed in v4.47.
+ warnings.warn(
+INFO 2026-02-08 07:54:06 celerate.py:182 Creating optimizer and scheduler
+Some kwargs in processor config are unused and will not have any effect: min_token, scale, time_horizon, action_dim, vocab_size.
+Some kwargs in processor config are unused and will not have any effect: time_horizon, scale, action_dim, min_token, vocab_size.
+Some kwargs in processor config are unused and will not have any effect: scale, min_token, action_dim, vocab_size, time_horizon.
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/transformers/models/paligemma/configuration_paligemma.py:134: FutureWarning: The `ignore_index` attribute is deprecated and will be removed in v4.47.
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/transformers/models/paligemma/configuration_paligemma.py:134: FutureWarning: The `ignore_index` attribute is deprecated and will be removed in v4.47.
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/transformers/models/paligemma/configuration_paligemma.py:134: FutureWarning: The `ignore_index` attribute is deprecated and will be removed in v4.47.
+ warnings.warn(
+INFO 2026-02-08 07:54:47 celerate.py:222 Output dir: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/outputs/train/2026-02-08/07-52-25_pi0fast_baseline_libero_30k
+INFO 2026-02-08 07:54:47 celerate.py:225 cfg.steps=100000 (100K)
+INFO 2026-02-08 07:54:47 celerate.py:226 dataset.num_frames=33587 (34K)
+INFO 2026-02-08 07:54:47 celerate.py:227 dataset.num_episodes=200
+INFO 2026-02-08 07:54:47 celerate.py:228 num_learnable_params=1984245760 (2B)
+INFO 2026-02-08 07:54:47 celerate.py:229 num_total_params=2923335482 (3B)
+INFO 2026-02-08 07:54:47 celerate.py:230 Number of processes: 4
+INFO 2026-02-08 07:54:47 celerate.py:231 Device: cuda:0
+INFO 2026-02-08 07:54:47 celerate.py:232 Mixed precision: no
+INFO 2026-02-08 07:54:47 celerate.py:254 Start offline training on a fixed dataset
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+You're using a GemmaTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.
+You're using a GemmaTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.
+You're using a GemmaTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.
+You're using a GemmaTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.
+You're using a GemmaTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.
+You're using a GemmaTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.
+You're using a GemmaTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.
+You're using a GemmaTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.
+[rank1]:[W208 07:54:51.375678710 reducer.cpp:1430] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())
+[rank2]:[W208 07:54:51.375903289 reducer.cpp:1430] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())
+[rank0]:[W208 07:54:51.376435899 reducer.cpp:1430] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())
+[rank3]:[W208 07:54:51.376471209 reducer.cpp:1430] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())
+INFO 2026-02-08 07:56:19 celerate.py:304 step:100 smpl:2K ep:10 epch:0.05 loss:10.002 grdn:3.131 lr:2.0e-05 updt_s:0.846 data_s:0.023
+INFO 2026-02-08 07:57:47 celerate.py:304 step:200 smpl:3K ep:19 epch:0.10 loss:7.873 grdn:1.012 lr:6.0e-05 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 07:59:11 celerate.py:304 step:300 smpl:5K ep:29 epch:0.14 loss:7.187 grdn:0.624 lr:9.5e-05 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 08:00:35 celerate.py:304 step:400 smpl:6K ep:38 epch:0.19 loss:6.906 grdn:0.462 lr:9.9e-05 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 08:01:59 celerate.py:304 step:500 smpl:8K ep:48 epch:0.24 loss:6.712 grdn:0.410 lr:9.9e-05 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 08:03:23 celerate.py:304 step:600 smpl:10K ep:57 epch:0.29 loss:6.597 grdn:0.418 lr:9.9e-05 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 08:04:47 celerate.py:304 step:700 smpl:11K ep:67 epch:0.33 loss:6.414 grdn:0.435 lr:9.8e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:06:11 celerate.py:304 step:800 smpl:13K ep:76 epch:0.38 loss:6.328 grdn:0.413 lr:9.8e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:07:35 celerate.py:304 step:900 smpl:14K ep:86 epch:0.43 loss:6.237 grdn:0.408 lr:9.7e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:08:59 celerate.py:304 step:1K smpl:16K ep:95 epch:0.48 loss:6.188 grdn:0.410 lr:9.6e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:10:23 celerate.py:304 step:1K smpl:18K ep:105 epch:0.52 loss:6.128 grdn:0.406 lr:9.5e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:11:47 celerate.py:304 step:1K smpl:19K ep:114 epch:0.57 loss:6.039 grdn:0.411 lr:9.4e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 08:13:11 celerate.py:304 step:1K smpl:21K ep:124 epch:0.62 loss:6.005 grdn:0.407 lr:9.3e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:14:35 celerate.py:304 step:1K smpl:22K ep:133 epch:0.67 loss:5.985 grdn:0.440 lr:9.2e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:15:59 celerate.py:304 step:2K smpl:24K ep:143 epch:0.71 loss:5.925 grdn:0.435 lr:9.1e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:17:23 celerate.py:304 step:2K smpl:26K ep:152 epch:0.76 loss:5.847 grdn:0.435 lr:9.0e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:18:47 celerate.py:304 step:2K smpl:27K ep:162 epch:0.81 loss:5.853 grdn:0.427 lr:8.9e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:20:11 celerate.py:304 step:2K smpl:29K ep:171 epch:0.86 loss:5.845 grdn:0.434 lr:8.7e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:21:35 celerate.py:304 step:2K smpl:30K ep:181 epch:0.91 loss:5.760 grdn:0.461 lr:8.6e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:22:59 celerate.py:304 step:2K smpl:32K ep:191 epch:0.95 loss:5.714 grdn:0.443 lr:8.5e-05 updt_s:0.825 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISM=(true | false)
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISM=(true | false)
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 08:24:25 celerate.py:304 step:2K smpl:34K ep:200 epch:1.00 loss:5.685 grdn:0.464 lr:8.3e-05 updt_s:0.826 data_s:0.022
+INFO 2026-02-08 08:25:49 celerate.py:304 step:2K smpl:35K ep:210 epch:1.05 loss:5.556 grdn:0.488 lr:8.2e-05 updt_s:0.827 data_s:0.006
+INFO 2026-02-08 08:27:13 celerate.py:304 step:2K smpl:37K ep:219 epch:1.10 loss:5.545 grdn:0.492 lr:8.0e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:28:37 celerate.py:304 step:2K smpl:38K ep:229 epch:1.14 loss:5.508 grdn:0.505 lr:7.8e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:30:01 celerate.py:304 step:2K smpl:40K ep:238 epch:1.19 loss:5.468 grdn:0.497 lr:7.6e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:31:25 celerate.py:304 step:3K smpl:42K ep:248 epch:1.24 loss:5.441 grdn:0.519 lr:7.5e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:32:49 celerate.py:304 step:3K smpl:43K ep:257 epch:1.29 loss:5.429 grdn:0.532 lr:7.3e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 08:34:13 celerate.py:304 step:3K smpl:45K ep:267 epch:1.33 loss:5.447 grdn:0.530 lr:7.1e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:35:37 celerate.py:304 step:3K smpl:46K ep:276 epch:1.38 loss:5.390 grdn:0.537 lr:6.9e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:37:01 celerate.py:304 step:3K smpl:48K ep:286 epch:1.43 loss:5.371 grdn:0.545 lr:6.7e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:38:25 celerate.py:304 step:3K smpl:50K ep:295 epch:1.48 loss:5.337 grdn:0.583 lr:6.5e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:39:49 celerate.py:304 step:3K smpl:51K ep:305 epch:1.52 loss:5.272 grdn:0.580 lr:6.3e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:41:13 celerate.py:304 step:3K smpl:53K ep:314 epch:1.57 loss:5.316 grdn:0.571 lr:6.1e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 08:42:37 celerate.py:304 step:3K smpl:54K ep:324 epch:1.62 loss:5.221 grdn:0.636 lr:5.9e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:44:01 celerate.py:304 step:4K smpl:56K ep:333 epch:1.67 loss:5.212 grdn:0.599 lr:5.7e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:45:25 celerate.py:304 step:4K smpl:58K ep:343 epch:1.71 loss:5.190 grdn:0.624 lr:5.5e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:46:49 celerate.py:304 step:4K smpl:59K ep:353 epch:1.76 loss:5.157 grdn:0.630 lr:5.3e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 08:48:13 celerate.py:304 step:4K smpl:61K ep:362 epch:1.81 loss:5.173 grdn:0.643 lr:5.1e-05 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 08:49:37 celerate.py:304 step:4K smpl:62K ep:372 epch:1.86 loss:5.121 grdn:0.659 lr:4.9e-05 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 08:51:01 celerate.py:304 step:4K smpl:64K ep:381 epch:1.91 loss:5.098 grdn:0.675 lr:4.7e-05 updt_s:0.829 data_s:0.007
+INFO 2026-02-08 08:52:25 celerate.py:304 step:4K smpl:66K ep:391 epch:1.95 loss:5.096 grdn:0.669 lr:4.5e-05 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 08:53:51 celerate.py:304 step:4K smpl:67K ep:400 epch:2.00 loss:5.086 grdn:0.684 lr:4.3e-05 updt_s:0.830 data_s:0.021
+INFO 2026-02-08 08:55:15 celerate.py:304 step:4K smpl:69K ep:410 epch:2.05 loss:4.840 grdn:0.710 lr:4.1e-05 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 08:56:40 celerate.py:304 step:4K smpl:70K ep:419 epch:2.10 loss:4.820 grdn:0.758 lr:3.9e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 08:58:03 celerate.py:304 step:4K smpl:72K ep:429 epch:2.14 loss:4.808 grdn:0.791 lr:3.7e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 08:59:27 celerate.py:304 step:5K smpl:74K ep:438 epch:2.19 loss:4.793 grdn:0.812 lr:3.5e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:00:51 celerate.py:304 step:5K smpl:75K ep:448 epch:2.24 loss:4.800 grdn:0.814 lr:3.3e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:02:15 celerate.py:304 step:5K smpl:77K ep:457 epch:2.29 loss:4.758 grdn:0.848 lr:3.1e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:03:39 celerate.py:304 step:5K smpl:78K ep:467 epch:2.33 loss:4.770 grdn:0.831 lr:3.0e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:05:03 celerate.py:304 step:5K smpl:80K ep:476 epch:2.38 loss:4.728 grdn:0.836 lr:2.8e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:05:03 celerate.py:329 Checkpoint policy after step 5000
+INFO 2026-02-08 09:06:46 celerate.py:304 step:5K smpl:82K ep:486 epch:2.43 loss:4.755 grdn:0.847 lr:2.6e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:08:10 celerate.py:304 step:5K smpl:83K ep:495 epch:2.48 loss:4.727 grdn:0.844 lr:2.4e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:09:34 celerate.py:304 step:5K smpl:85K ep:505 epch:2.52 loss:4.679 grdn:0.878 lr:2.3e-05 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:10:58 celerate.py:304 step:5K smpl:86K ep:514 epch:2.57 loss:4.671 grdn:0.900 lr:2.1e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:12:22 celerate.py:304 step:6K smpl:88K ep:524 epch:2.62 loss:4.718 grdn:0.884 lr:1.9e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:13:46 celerate.py:304 step:6K smpl:90K ep:534 epch:2.67 loss:4.679 grdn:0.944 lr:1.8e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:15:10 celerate.py:304 step:6K smpl:91K ep:543 epch:2.72 loss:4.691 grdn:0.890 lr:1.6e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:16:34 celerate.py:304 step:6K smpl:93K ep:553 epch:2.76 loss:4.692 grdn:0.918 lr:1.5e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:17:58 celerate.py:304 step:6K smpl:94K ep:562 epch:2.81 loss:4.645 grdn:0.915 lr:1.4e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:19:22 celerate.py:304 step:6K smpl:96K ep:572 epch:2.86 loss:4.662 grdn:0.944 lr:1.2e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:20:46 celerate.py:304 step:6K smpl:98K ep:581 epch:2.91 loss:4.683 grdn:0.922 lr:1.1e-05 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:22:10 celerate.py:304 step:6K smpl:99K ep:591 epch:2.95 loss:4.712 grdn:0.937 lr:1.0e-05 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 09:23:36 celerate.py:304 step:6K smpl:101K ep:600 epch:3.00 loss:4.651 grdn:0.929 lr:9.0e-06 updt_s:0.824 data_s:0.026
+INFO 2026-02-08 09:25:00 celerate.py:304 step:6K smpl:102K ep:610 epch:3.05 loss:4.501 grdn:0.811 lr:8.0e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:26:24 celerate.py:304 step:6K smpl:104K ep:619 epch:3.10 loss:4.522 grdn:0.839 lr:7.1e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:27:48 celerate.py:304 step:7K smpl:106K ep:629 epch:3.14 loss:4.535 grdn:0.864 lr:6.3e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:29:12 celerate.py:304 step:7K smpl:107K ep:638 epch:3.19 loss:4.518 grdn:0.851 lr:5.6e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:30:36 celerate.py:304 step:7K smpl:109K ep:648 epch:3.24 loss:4.483 grdn:0.842 lr:4.9e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:32:00 celerate.py:304 step:7K smpl:110K ep:657 epch:3.29 loss:4.527 grdn:0.861 lr:4.3e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:33:24 celerate.py:304 step:7K smpl:112K ep:667 epch:3.33 loss:4.475 grdn:0.865 lr:3.8e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:34:48 celerate.py:304 step:7K smpl:114K ep:676 epch:3.38 loss:4.558 grdn:0.877 lr:3.4e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:36:12 celerate.py:304 step:7K smpl:115K ep:686 epch:3.43 loss:4.467 grdn:0.883 lr:3.0e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:37:36 celerate.py:304 step:7K smpl:117K ep:696 epch:3.48 loss:4.512 grdn:0.877 lr:2.8e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:38:59 celerate.py:304 step:7K smpl:118K ep:705 epch:3.53 loss:4.529 grdn:0.865 lr:2.6e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:40:23 celerate.py:304 step:8K smpl:120K ep:715 epch:3.57 loss:4.549 grdn:0.927 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:41:47 celerate.py:304 step:8K smpl:122K ep:724 epch:3.62 loss:4.522 grdn:0.876 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:43:11 celerate.py:304 step:8K smpl:123K ep:734 epch:3.67 loss:4.510 grdn:0.874 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:44:35 celerate.py:304 step:8K smpl:125K ep:743 epch:3.72 loss:4.548 grdn:0.863 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:45:59 celerate.py:304 step:8K smpl:126K ep:753 epch:3.76 loss:4.499 grdn:0.859 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:47:23 celerate.py:304 step:8K smpl:128K ep:762 epch:3.81 loss:4.485 grdn:0.880 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:48:47 celerate.py:304 step:8K smpl:130K ep:772 epch:3.86 loss:4.499 grdn:0.876 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:50:11 celerate.py:304 step:8K smpl:131K ep:781 epch:3.91 loss:4.500 grdn:0.871 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:51:36 celerate.py:304 step:8K smpl:133K ep:791 epch:3.95 loss:4.530 grdn:0.878 lr:2.5e-06 updt_s:0.825 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 09:53:01 celerate.py:304 step:8K smpl:134K ep:800 epch:4.00 loss:4.510 grdn:0.886 lr:2.5e-06 updt_s:0.826 data_s:0.023
+INFO 2026-02-08 09:54:25 celerate.py:304 step:8K smpl:136K ep:810 epch:4.05 loss:4.509 grdn:0.872 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:55:49 celerate.py:304 step:9K smpl:138K ep:819 epch:4.10 loss:4.495 grdn:0.879 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 09:57:13 celerate.py:304 step:9K smpl:139K ep:829 epch:4.14 loss:4.512 grdn:0.875 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 09:58:37 celerate.py:304 step:9K smpl:141K ep:838 epch:4.19 loss:4.486 grdn:0.856 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:00:01 celerate.py:304 step:9K smpl:142K ep:848 epch:4.24 loss:4.512 grdn:0.869 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:01:25 celerate.py:304 step:9K smpl:144K ep:857 epch:4.29 loss:4.524 grdn:0.878 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:02:49 celerate.py:304 step:9K smpl:146K ep:867 epch:4.34 loss:4.479 grdn:0.881 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:04:13 celerate.py:304 step:9K smpl:147K ep:877 epch:4.38 loss:4.523 grdn:0.860 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:05:37 celerate.py:304 step:9K smpl:149K ep:886 epch:4.43 loss:4.504 grdn:0.886 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:07:01 celerate.py:304 step:9K smpl:150K ep:896 epch:4.48 loss:4.516 grdn:0.885 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:08:25 celerate.py:304 step:10K smpl:152K ep:905 epch:4.53 loss:4.486 grdn:0.889 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:09:49 celerate.py:304 step:10K smpl:154K ep:915 epch:4.57 loss:4.518 grdn:0.871 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:11:13 celerate.py:304 step:10K smpl:155K ep:924 epch:4.62 loss:4.496 grdn:0.873 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:12:37 celerate.py:304 step:10K smpl:157K ep:934 epch:4.67 loss:4.515 grdn:0.877 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:14:01 celerate.py:304 step:10K smpl:158K ep:943 epch:4.72 loss:4.519 grdn:0.875 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:15:25 celerate.py:304 step:10K smpl:160K ep:953 epch:4.76 loss:4.508 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:15:25 celerate.py:329 Checkpoint policy after step 10000
+INFO 2026-02-08 10:17:06 celerate.py:304 step:10K smpl:162K ep:962 epch:4.81 loss:4.509 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:18:30 celerate.py:304 step:10K smpl:163K ep:972 epch:4.86 loss:4.511 grdn:0.887 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:19:54 celerate.py:304 step:10K smpl:165K ep:981 epch:4.91 loss:4.468 grdn:0.871 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:21:19 celerate.py:304 step:10K smpl:166K ep:991 epch:4.95 loss:4.469 grdn:0.887 lr:2.5e-06 updt_s:0.825 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 10:22:44 celerate.py:304 step:10K smpl:168K ep:1K epch:5.00 loss:4.512 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.025
+INFO 2026-02-08 10:24:08 celerate.py:304 step:11K smpl:170K ep:1K epch:5.05 loss:4.514 grdn:0.885 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:25:32 celerate.py:304 step:11K smpl:171K ep:1K epch:5.10 loss:4.480 grdn:0.878 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:26:56 celerate.py:304 step:11K smpl:173K ep:1K epch:5.14 loss:4.486 grdn:0.891 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:28:20 celerate.py:304 step:11K smpl:174K ep:1K epch:5.19 loss:4.552 grdn:0.887 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:29:44 celerate.py:304 step:11K smpl:176K ep:1K epch:5.24 loss:4.485 grdn:0.864 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:31:08 celerate.py:304 step:11K smpl:178K ep:1K epch:5.29 loss:4.474 grdn:0.883 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:32:32 celerate.py:304 step:11K smpl:179K ep:1K epch:5.34 loss:4.502 grdn:0.885 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:33:56 celerate.py:304 step:11K smpl:181K ep:1K epch:5.38 loss:4.480 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:35:20 celerate.py:304 step:11K smpl:182K ep:1K epch:5.43 loss:4.499 grdn:0.890 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:36:45 celerate.py:304 step:12K smpl:184K ep:1K epch:5.48 loss:4.520 grdn:0.865 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:38:09 celerate.py:304 step:12K smpl:186K ep:1K epch:5.53 loss:4.516 grdn:0.902 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:39:33 celerate.py:304 step:12K smpl:187K ep:1K epch:5.57 loss:4.480 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:40:57 celerate.py:304 step:12K smpl:189K ep:1K epch:5.62 loss:4.464 grdn:0.887 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:42:21 celerate.py:304 step:12K smpl:190K ep:1K epch:5.67 loss:4.479 grdn:0.884 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:43:45 celerate.py:304 step:12K smpl:192K ep:1K epch:5.72 loss:4.499 grdn:0.885 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:45:09 celerate.py:304 step:12K smpl:194K ep:1K epch:5.76 loss:4.486 grdn:0.878 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:46:33 celerate.py:304 step:12K smpl:195K ep:1K epch:5.81 loss:4.499 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:47:57 celerate.py:304 step:12K smpl:197K ep:1K epch:5.86 loss:4.502 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:49:21 celerate.py:304 step:12K smpl:198K ep:1K epch:5.91 loss:4.546 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 10:50:45 celerate.py:304 step:12K smpl:200K ep:1K epch:5.95 loss:4.493 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMhuggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISM=(true | false)
+To disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 10:52:10 celerate.py:304 step:13K smpl:202K ep:1K epch:6.00 loss:4.508 grdn:0.880 lr:2.5e-06 updt_s:0.825 data_s:0.024
+INFO 2026-02-08 10:53:34 celerate.py:304 step:13K smpl:203K ep:1K epch:6.05 loss:4.478 grdn:0.885 lr:2.5e-06 updt_s:0.825 data_s:0.006
+INFO 2026-02-08 10:54:58 celerate.py:304 step:13K smpl:205K ep:1K epch:6.10 loss:4.487 grdn:0.894 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:56:23 celerate.py:304 step:13K smpl:206K ep:1K epch:6.15 loss:4.518 grdn:0.875 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:57:47 celerate.py:304 step:13K smpl:208K ep:1K epch:6.19 loss:4.501 grdn:0.904 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 10:59:11 celerate.py:304 step:13K smpl:210K ep:1K epch:6.24 loss:4.494 grdn:0.888 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 11:00:35 celerate.py:304 step:13K smpl:211K ep:1K epch:6.29 loss:4.478 grdn:0.879 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:01:59 celerate.py:304 step:13K smpl:213K ep:1K epch:6.34 loss:4.528 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:03:23 celerate.py:304 step:13K smpl:214K ep:1K epch:6.38 loss:4.509 grdn:0.894 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:04:47 celerate.py:304 step:14K smpl:216K ep:1K epch:6.43 loss:4.496 grdn:0.893 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:06:11 celerate.py:304 step:14K smpl:218K ep:1K epch:6.48 loss:4.483 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:07:35 celerate.py:304 step:14K smpl:219K ep:1K epch:6.53 loss:4.511 grdn:0.886 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:08:59 celerate.py:304 step:14K smpl:221K ep:1K epch:6.57 loss:4.530 grdn:0.896 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:10:23 celerate.py:304 step:14K smpl:222K ep:1K epch:6.62 loss:4.514 grdn:0.891 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:11:47 celerate.py:304 step:14K smpl:224K ep:1K epch:6.67 loss:4.469 grdn:0.893 lr:2.5e-06 updt_s:0.828 data_s:0.007
+INFO 2026-02-08 11:13:11 celerate.py:304 step:14K smpl:226K ep:1K epch:6.72 loss:4.521 grdn:0.896 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:14:36 celerate.py:304 step:14K smpl:227K ep:1K epch:6.76 loss:4.498 grdn:0.887 lr:2.5e-06 updt_s:0.828 data_s:0.007
+INFO 2026-02-08 11:16:00 celerate.py:304 step:14K smpl:229K ep:1K epch:6.81 loss:4.510 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:17:24 celerate.py:304 step:14K smpl:230K ep:1K epch:6.86 loss:4.502 grdn:0.905 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 11:18:48 celerate.py:304 step:14K smpl:232K ep:1K epch:6.91 loss:4.457 grdn:0.895 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:20:12 celerate.py:304 step:15K smpl:234K ep:1K epch:6.96 loss:4.470 grdn:0.885 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 11:21:38 celerate.py:304 step:15K smpl:235K ep:1K epch:7.00 loss:4.483 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.024
+INFO 2026-02-08 11:23:01 celerate.py:304 step:15K smpl:237K ep:1K epch:7.05 loss:4.535 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:24:25 celerate.py:304 step:15K smpl:238K ep:1K epch:7.10 loss:4.495 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:25:50 celerate.py:304 step:15K smpl:240K ep:1K epch:7.15 loss:4.458 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:25:50 celerate.py:329 Checkpoint policy after step 15000
+INFO 2026-02-08 11:27:32 celerate.py:304 step:15K smpl:242K ep:1K epch:7.19 loss:4.505 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:28:56 celerate.py:304 step:15K smpl:243K ep:1K epch:7.24 loss:4.516 grdn:0.876 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:30:20 celerate.py:304 step:15K smpl:245K ep:1K epch:7.29 loss:4.515 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:31:44 celerate.py:304 step:15K smpl:246K ep:1K epch:7.34 loss:4.516 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:33:08 celerate.py:304 step:16K smpl:248K ep:1K epch:7.38 loss:4.497 grdn:0.879 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:34:32 celerate.py:304 step:16K smpl:250K ep:1K epch:7.43 loss:4.485 grdn:0.881 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:35:56 celerate.py:304 step:16K smpl:251K ep:1K epch:7.48 loss:4.526 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:37:20 celerate.py:304 step:16K smpl:253K ep:2K epch:7.53 loss:4.468 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:38:43 celerate.py:304 step:16K smpl:254K ep:2K epch:7.57 loss:4.484 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:40:07 celerate.py:304 step:16K smpl:256K ep:2K epch:7.62 loss:4.500 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:41:31 celerate.py:304 step:16K smpl:258K ep:2K epch:7.67 loss:4.506 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:42:55 celerate.py:304 step:16K smpl:259K ep:2K epch:7.72 loss:4.478 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:44:19 celerate.py:304 step:16K smpl:261K ep:2K epch:7.76 loss:4.495 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:45:43 celerate.py:304 step:16K smpl:262K ep:2K epch:7.81 loss:4.504 grdn:0.906 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:47:07 celerate.py:304 step:16K smpl:264K ep:2K epch:7.86 loss:4.442 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:48:31 celerate.py:304 step:17K smpl:266K ep:2K epch:7.91 loss:4.485 grdn:0.931 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:49:55 celerate.py:304 step:17K smpl:267K ep:2K epch:7.96 loss:4.473 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMhuggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISM=(true | false)
+To disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 11:51:21 celerate.py:304 step:17K smpl:269K ep:2K epch:8.00 loss:4.516 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.025
+INFO 2026-02-08 11:52:44 celerate.py:304 step:17K smpl:270K ep:2K epch:8.05 loss:4.491 grdn:0.890 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 11:54:09 celerate.py:304 step:17K smpl:272K ep:2K epch:8.10 loss:4.493 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:55:32 celerate.py:304 step:17K smpl:274K ep:2K epch:8.15 loss:4.448 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:56:57 celerate.py:304 step:17K smpl:275K ep:2K epch:8.19 loss:4.517 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:58:21 celerate.py:304 step:17K smpl:277K ep:2K epch:8.24 loss:4.474 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 11:59:44 celerate.py:304 step:17K smpl:278K ep:2K epch:8.29 loss:4.484 grdn:0.900 lr:2.5e-06 updt_s:0.823 data_s:0.007
+INFO 2026-02-08 12:01:08 celerate.py:304 step:18K smpl:280K ep:2K epch:8.34 loss:4.504 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:02:32 celerate.py:304 step:18K smpl:282K ep:2K epch:8.38 loss:4.467 grdn:0.895 lr:2.5e-06 updt_s:0.823 data_s:0.007
+INFO 2026-02-08 12:03:56 celerate.py:304 step:18K smpl:283K ep:2K epch:8.43 loss:4.496 grdn:0.881 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:05:20 celerate.py:304 step:18K smpl:285K ep:2K epch:8.48 loss:4.490 grdn:0.907 lr:2.5e-06 updt_s:0.823 data_s:0.007
+INFO 2026-02-08 12:06:44 celerate.py:304 step:18K smpl:286K ep:2K epch:8.53 loss:4.497 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:08:08 celerate.py:304 step:18K smpl:288K ep:2K epch:8.57 loss:4.501 grdn:0.882 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:09:31 celerate.py:304 step:18K smpl:290K ep:2K epch:8.62 loss:4.531 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:10:55 celerate.py:304 step:18K smpl:291K ep:2K epch:8.67 loss:4.536 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:12:19 celerate.py:304 step:18K smpl:293K ep:2K epch:8.72 loss:4.481 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:13:43 celerate.py:304 step:18K smpl:294K ep:2K epch:8.77 loss:4.487 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:15:07 celerate.py:304 step:18K smpl:296K ep:2K epch:8.81 loss:4.513 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:16:31 celerate.py:304 step:19K smpl:298K ep:2K epch:8.86 loss:4.518 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:17:55 celerate.py:304 step:19K smpl:299K ep:2K epch:8.91 loss:4.482 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:19:18 celerate.py:304 step:19K smpl:301K ep:2K epch:8.96 loss:4.465 grdn:0.897 lr:2.5e-06 updt_s:0.823 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMTOKENIZERS_PARALLELISM=(true | false)
+=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 12:20:44 celerate.py:304 step:19K smpl:302K ep:2K epch:9.00 loss:4.473 grdn:0.895 lr:2.5e-06 updt_s:0.823 data_s:0.024
+INFO 2026-02-08 12:22:08 celerate.py:304 step:19K smpl:304K ep:2K epch:9.05 loss:4.496 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:23:32 celerate.py:304 step:19K smpl:306K ep:2K epch:9.10 loss:4.517 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:24:56 celerate.py:304 step:19K smpl:307K ep:2K epch:9.15 loss:4.435 grdn:0.938 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:26:20 celerate.py:304 step:19K smpl:309K ep:2K epch:9.19 loss:4.510 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:27:44 celerate.py:304 step:19K smpl:310K ep:2K epch:9.24 loss:4.519 grdn:0.916 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 12:29:08 celerate.py:304 step:20K smpl:312K ep:2K epch:9.29 loss:4.504 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:30:32 celerate.py:304 step:20K smpl:314K ep:2K epch:9.34 loss:4.475 grdn:0.892 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 12:31:56 celerate.py:304 step:20K smpl:315K ep:2K epch:9.38 loss:4.488 grdn:0.884 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:33:20 celerate.py:304 step:20K smpl:317K ep:2K epch:9.43 loss:4.500 grdn:0.897 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 12:34:44 celerate.py:304 step:20K smpl:318K ep:2K epch:9.48 loss:4.492 grdn:0.879 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:36:08 celerate.py:304 step:20K smpl:320K ep:2K epch:9.53 loss:4.497 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:36:08 celerate.py:329 Checkpoint policy after step 20000
+INFO 2026-02-08 12:37:51 celerate.py:304 step:20K smpl:322K ep:2K epch:9.58 loss:4.516 grdn:0.919 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:39:15 celerate.py:304 step:20K smpl:323K ep:2K epch:9.62 loss:4.508 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:40:39 celerate.py:304 step:20K smpl:325K ep:2K epch:9.67 loss:4.503 grdn:0.883 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 12:42:03 celerate.py:304 step:20K smpl:326K ep:2K epch:9.72 loss:4.442 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:43:27 celerate.py:304 step:20K smpl:328K ep:2K epch:9.77 loss:4.535 grdn:0.884 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:44:51 celerate.py:304 step:21K smpl:330K ep:2K epch:9.81 loss:4.492 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:46:15 celerate.py:304 step:21K smpl:331K ep:2K epch:9.86 loss:4.485 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:47:39 celerate.py:304 step:21K smpl:333K ep:2K epch:9.91 loss:4.504 grdn:0.897 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 12:49:03 celerate.py:304 step:21K smpl:334K ep:2K epch:9.96 loss:4.463 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISM=(true | false)
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 12:50:28 celerate.py:304 step:21K smpl:336K ep:2K epch:10.00 loss:4.464 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.023
+INFO 2026-02-08 12:51:52 celerate.py:304 step:21K smpl:338K ep:2K epch:10.05 loss:4.464 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:53:16 celerate.py:304 step:21K smpl:339K ep:2K epch:10.10 loss:4.509 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:54:40 celerate.py:304 step:21K smpl:341K ep:2K epch:10.15 loss:4.472 grdn:0.924 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 12:56:04 celerate.py:304 step:21K smpl:342K ep:2K epch:10.19 loss:4.462 grdn:0.887 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 12:57:28 celerate.py:304 step:22K smpl:344K ep:2K epch:10.24 loss:4.481 grdn:0.896 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 12:58:52 celerate.py:304 step:22K smpl:346K ep:2K epch:10.29 loss:4.552 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:00:16 celerate.py:304 step:22K smpl:347K ep:2K epch:10.34 loss:4.504 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:01:40 celerate.py:304 step:22K smpl:349K ep:2K epch:10.38 loss:4.514 grdn:0.902 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 13:03:04 celerate.py:304 step:22K smpl:350K ep:2K epch:10.43 loss:4.505 grdn:0.887 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:04:28 celerate.py:304 step:22K smpl:352K ep:2K epch:10.48 loss:4.529 grdn:0.885 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:05:52 celerate.py:304 step:22K smpl:354K ep:2K epch:10.53 loss:4.468 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:07:16 celerate.py:304 step:22K smpl:355K ep:2K epch:10.58 loss:4.464 grdn:0.916 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:08:39 celerate.py:304 step:22K smpl:357K ep:2K epch:10.62 loss:4.500 grdn:0.887 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:10:04 celerate.py:304 step:22K smpl:358K ep:2K epch:10.67 loss:4.512 grdn:0.887 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:11:27 celerate.py:304 step:22K smpl:360K ep:2K epch:10.72 loss:4.530 grdn:0.879 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:12:51 celerate.py:304 step:23K smpl:362K ep:2K epch:10.77 loss:4.506 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:14:15 celerate.py:304 step:23K smpl:363K ep:2K epch:10.81 loss:4.513 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:15:40 celerate.py:304 step:23K smpl:365K ep:2K epch:10.86 loss:4.485 grdn:0.901 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 13:17:04 celerate.py:304 step:23K smpl:366K ep:2K epch:10.91 loss:4.457 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:18:27 celerate.py:304 step:23K smpl:368K ep:2K epch:10.96 loss:4.447 grdn:0.928 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+To disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMTOKENIZERS_PARALLELISM=(true | false)
+=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 13:19:53 celerate.py:304 step:23K smpl:370K ep:2K epch:11.00 loss:4.508 grdn:0.902 lr:2.5e-06 updt_s:0.825 data_s:0.022
+INFO 2026-02-08 13:21:17 celerate.py:304 step:23K smpl:371K ep:2K epch:11.05 loss:4.517 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:22:41 celerate.py:304 step:23K smpl:373K ep:2K epch:11.10 loss:4.484 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:24:05 celerate.py:304 step:23K smpl:374K ep:2K epch:11.15 loss:4.464 grdn:0.890 lr:2.5e-06 updt_s:0.823 data_s:0.007
+INFO 2026-02-08 13:25:29 celerate.py:304 step:24K smpl:376K ep:2K epch:11.19 loss:4.501 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:26:52 celerate.py:304 step:24K smpl:378K ep:2K epch:11.24 loss:4.507 grdn:0.929 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:28:16 celerate.py:304 step:24K smpl:379K ep:2K epch:11.29 loss:4.458 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:29:40 celerate.py:304 step:24K smpl:381K ep:2K epch:11.34 loss:4.497 grdn:0.935 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:31:04 celerate.py:304 step:24K smpl:382K ep:2K epch:11.39 loss:4.518 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:32:29 celerate.py:304 step:24K smpl:384K ep:2K epch:11.43 loss:4.475 grdn:0.898 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 13:33:53 celerate.py:304 step:24K smpl:386K ep:2K epch:11.48 loss:4.518 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:35:17 celerate.py:304 step:24K smpl:387K ep:2K epch:11.53 loss:4.490 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:36:41 celerate.py:304 step:24K smpl:389K ep:2K epch:11.58 loss:4.507 grdn:0.918 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 13:38:04 celerate.py:304 step:24K smpl:390K ep:2K epch:11.62 loss:4.517 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:39:29 celerate.py:304 step:24K smpl:392K ep:2K epch:11.67 loss:4.484 grdn:0.894 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 13:40:53 celerate.py:304 step:25K smpl:394K ep:2K epch:11.72 loss:4.520 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:42:16 celerate.py:304 step:25K smpl:395K ep:2K epch:11.77 loss:4.479 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:43:40 celerate.py:304 step:25K smpl:397K ep:2K epch:11.81 loss:4.486 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:45:04 celerate.py:304 step:25K smpl:398K ep:2K epch:11.86 loss:4.462 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:46:28 celerate.py:304 step:25K smpl:400K ep:2K epch:11.91 loss:4.478 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:46:28 celerate.py:329 Checkpoint policy after step 25000
+INFO 2026-02-08 13:48:11 celerate.py:304 step:25K smpl:402K ep:2K epch:11.96 loss:4.481 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMhuggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISM=(true | false)
+To disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 13:49:37 celerate.py:304 step:25K smpl:403K ep:2K epch:12.00 loss:4.503 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.025
+INFO 2026-02-08 13:51:01 celerate.py:304 step:25K smpl:405K ep:2K epch:12.05 loss:4.478 grdn:0.893 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 13:52:25 celerate.py:304 step:25K smpl:406K ep:2K epch:12.10 loss:4.486 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:53:48 celerate.py:304 step:26K smpl:408K ep:2K epch:12.15 loss:4.517 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:55:12 celerate.py:304 step:26K smpl:410K ep:2K epch:12.20 loss:4.472 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:56:36 celerate.py:304 step:26K smpl:411K ep:2K epch:12.24 loss:4.494 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 13:58:00 celerate.py:304 step:26K smpl:413K ep:2K epch:12.29 loss:4.514 grdn:0.889 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 13:59:24 celerate.py:304 step:26K smpl:414K ep:2K epch:12.34 loss:4.484 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:00:48 celerate.py:304 step:26K smpl:416K ep:2K epch:12.39 loss:4.470 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:02:12 celerate.py:304 step:26K smpl:418K ep:2K epch:12.43 loss:4.497 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:03:36 celerate.py:304 step:26K smpl:419K ep:2K epch:12.48 loss:4.442 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:05:00 celerate.py:304 step:26K smpl:421K ep:3K epch:12.53 loss:4.479 grdn:0.903 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 14:06:24 celerate.py:304 step:26K smpl:422K ep:3K epch:12.58 loss:4.494 grdn:0.880 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:07:48 celerate.py:304 step:26K smpl:424K ep:3K epch:12.62 loss:4.485 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:09:12 celerate.py:304 step:27K smpl:426K ep:3K epch:12.67 loss:4.488 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:10:36 celerate.py:304 step:27K smpl:427K ep:3K epch:12.72 loss:4.497 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:12:00 celerate.py:304 step:27K smpl:429K ep:3K epch:12.77 loss:4.532 grdn:0.906 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 14:13:24 celerate.py:304 step:27K smpl:430K ep:3K epch:12.81 loss:4.504 grdn:0.896 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 14:14:48 celerate.py:304 step:27K smpl:432K ep:3K epch:12.86 loss:4.485 grdn:0.885 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:16:12 celerate.py:304 step:27K smpl:434K ep:3K epch:12.91 loss:4.528 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:17:35 celerate.py:304 step:27K smpl:435K ep:3K epch:12.96 loss:4.480 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 14:19:01 celerate.py:304 step:27K smpl:437K ep:3K epch:13.01 loss:4.529 grdn:0.908 lr:2.5e-06 updt_s:0.826 data_s:0.022
+INFO 2026-02-08 14:20:25 celerate.py:304 step:27K smpl:438K ep:3K epch:13.05 loss:4.498 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:21:49 celerate.py:304 step:28K smpl:440K ep:3K epch:13.10 loss:4.507 grdn:0.879 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:23:13 celerate.py:304 step:28K smpl:442K ep:3K epch:13.15 loss:4.488 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:24:37 celerate.py:304 step:28K smpl:443K ep:3K epch:13.20 loss:4.498 grdn:0.916 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:26:01 celerate.py:304 step:28K smpl:445K ep:3K epch:13.24 loss:4.476 grdn:0.903 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 14:27:25 celerate.py:304 step:28K smpl:446K ep:3K epch:13.29 loss:4.504 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:28:49 celerate.py:304 step:28K smpl:448K ep:3K epch:13.34 loss:4.526 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:30:13 celerate.py:304 step:28K smpl:450K ep:3K epch:13.39 loss:4.494 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:31:37 celerate.py:304 step:28K smpl:451K ep:3K epch:13.43 loss:4.494 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:33:01 celerate.py:304 step:28K smpl:453K ep:3K epch:13.48 loss:4.482 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:34:25 celerate.py:304 step:28K smpl:454K ep:3K epch:13.53 loss:4.483 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:35:49 celerate.py:304 step:28K smpl:456K ep:3K epch:13.58 loss:4.471 grdn:0.895 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 14:37:13 celerate.py:304 step:29K smpl:458K ep:3K epch:13.62 loss:4.468 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:38:37 celerate.py:304 step:29K smpl:459K ep:3K epch:13.67 loss:4.478 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:40:00 celerate.py:304 step:29K smpl:461K ep:3K epch:13.72 loss:4.498 grdn:0.895 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 14:41:24 celerate.py:304 step:29K smpl:462K ep:3K epch:13.77 loss:4.494 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:42:48 celerate.py:304 step:29K smpl:464K ep:3K epch:13.81 loss:4.529 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:44:12 celerate.py:304 step:29K smpl:466K ep:3K epch:13.86 loss:4.496 grdn:0.929 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:45:36 celerate.py:304 step:29K smpl:467K ep:3K epch:13.91 loss:4.467 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:47:00 celerate.py:304 step:29K smpl:469K ep:3K epch:13.96 loss:4.486 grdn:0.906 lr:2.5e-06 updt_s:0.825 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMhuggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+=(true | false)
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 14:48:26 celerate.py:304 step:29K smpl:470K ep:3K epch:14.01 loss:4.496 grdn:0.922 lr:2.5e-06 updt_s:0.824 data_s:0.024
+INFO 2026-02-08 14:49:50 celerate.py:304 step:30K smpl:472K ep:3K epch:14.05 loss:4.531 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:51:14 celerate.py:304 step:30K smpl:474K ep:3K epch:14.10 loss:4.543 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:52:38 celerate.py:304 step:30K smpl:475K ep:3K epch:14.15 loss:4.472 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:54:02 celerate.py:304 step:30K smpl:477K ep:3K epch:14.20 loss:4.503 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:55:26 celerate.py:304 step:30K smpl:478K ep:3K epch:14.24 loss:4.490 grdn:0.952 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:56:50 celerate.py:304 step:30K smpl:480K ep:3K epch:14.29 loss:4.503 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 14:56:50 celerate.py:329 Checkpoint policy after step 30000
+INFO 2026-02-08 14:58:32 celerate.py:304 step:30K smpl:482K ep:3K epch:14.34 loss:4.501 grdn:0.905 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 14:59:56 celerate.py:304 step:30K smpl:483K ep:3K epch:14.39 loss:4.470 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:01:20 celerate.py:304 step:30K smpl:485K ep:3K epch:14.43 loss:4.524 grdn:0.890 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 15:02:44 celerate.py:304 step:30K smpl:486K ep:3K epch:14.48 loss:4.468 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:04:08 celerate.py:304 step:30K smpl:488K ep:3K epch:14.53 loss:4.516 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:05:32 celerate.py:304 step:31K smpl:490K ep:3K epch:14.58 loss:4.493 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:06:56 celerate.py:304 step:31K smpl:491K ep:3K epch:14.62 loss:4.529 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:08:20 celerate.py:304 step:31K smpl:493K ep:3K epch:14.67 loss:4.475 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:09:44 celerate.py:304 step:31K smpl:494K ep:3K epch:14.72 loss:4.473 grdn:0.894 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 15:11:08 celerate.py:304 step:31K smpl:496K ep:3K epch:14.77 loss:4.452 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:12:32 celerate.py:304 step:31K smpl:498K ep:3K epch:14.82 loss:4.474 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:13:55 celerate.py:304 step:31K smpl:499K ep:3K epch:14.86 loss:4.510 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:15:19 celerate.py:304 step:31K smpl:501K ep:3K epch:14.91 loss:4.497 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:16:43 celerate.py:304 step:31K smpl:502K ep:3K epch:14.96 loss:4.471 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 15:18:09 celerate.py:304 step:32K smpl:504K ep:3K epch:15.01 loss:4.435 grdn:0.917 lr:2.5e-06 updt_s:0.826 data_s:0.021
+INFO 2026-02-08 15:19:33 celerate.py:304 step:32K smpl:506K ep:3K epch:15.05 loss:4.469 grdn:0.886 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:20:57 celerate.py:304 step:32K smpl:507K ep:3K epch:15.10 loss:4.537 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:22:21 celerate.py:304 step:32K smpl:509K ep:3K epch:15.15 loss:4.488 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:23:45 celerate.py:304 step:32K smpl:510K ep:3K epch:15.20 loss:4.468 grdn:0.895 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 15:25:09 celerate.py:304 step:32K smpl:512K ep:3K epch:15.24 loss:4.509 grdn:0.922 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:26:33 celerate.py:304 step:32K smpl:514K ep:3K epch:15.29 loss:4.518 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:27:57 celerate.py:304 step:32K smpl:515K ep:3K epch:15.34 loss:4.501 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:29:21 celerate.py:304 step:32K smpl:517K ep:3K epch:15.39 loss:4.531 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:30:45 celerate.py:304 step:32K smpl:518K ep:3K epch:15.43 loss:4.444 grdn:0.890 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 15:32:09 celerate.py:304 step:32K smpl:520K ep:3K epch:15.48 loss:4.533 grdn:0.917 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:33:33 celerate.py:304 step:33K smpl:522K ep:3K epch:15.53 loss:4.484 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:34:57 celerate.py:304 step:33K smpl:523K ep:3K epch:15.58 loss:4.501 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:36:20 celerate.py:304 step:33K smpl:525K ep:3K epch:15.63 loss:4.445 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:37:45 celerate.py:304 step:33K smpl:526K ep:3K epch:15.67 loss:4.476 grdn:0.899 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 15:39:09 celerate.py:304 step:33K smpl:528K ep:3K epch:15.72 loss:4.500 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:40:33 celerate.py:304 step:33K smpl:530K ep:3K epch:15.77 loss:4.450 grdn:0.887 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:41:57 celerate.py:304 step:33K smpl:531K ep:3K epch:15.82 loss:4.492 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:43:21 celerate.py:304 step:33K smpl:533K ep:3K epch:15.86 loss:4.517 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:44:44 celerate.py:304 step:33K smpl:534K ep:3K epch:15.91 loss:4.511 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:46:08 celerate.py:304 step:34K smpl:536K ep:3K epch:15.96 loss:4.484 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMhuggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+=(true | false)
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 15:47:34 celerate.py:304 step:34K smpl:538K ep:3K epch:16.01 loss:4.496 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.026
+INFO 2026-02-08 15:48:58 celerate.py:304 step:34K smpl:539K ep:3K epch:16.05 loss:4.454 grdn:0.889 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 15:50:22 celerate.py:304 step:34K smpl:541K ep:3K epch:16.10 loss:4.519 grdn:0.915 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 15:51:46 celerate.py:304 step:34K smpl:542K ep:3K epch:16.15 loss:4.463 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:53:10 celerate.py:304 step:34K smpl:544K ep:3K epch:16.20 loss:4.500 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:54:35 celerate.py:304 step:34K smpl:546K ep:3K epch:16.24 loss:4.485 grdn:0.903 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 15:55:59 celerate.py:304 step:34K smpl:547K ep:3K epch:16.29 loss:4.492 grdn:0.879 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:57:23 celerate.py:304 step:34K smpl:549K ep:3K epch:16.34 loss:4.519 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 15:58:47 celerate.py:304 step:34K smpl:550K ep:3K epch:16.39 loss:4.493 grdn:0.890 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 16:00:11 celerate.py:304 step:34K smpl:552K ep:3K epch:16.43 loss:4.504 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:01:35 celerate.py:304 step:35K smpl:554K ep:3K epch:16.48 loss:4.489 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:02:59 celerate.py:304 step:35K smpl:555K ep:3K epch:16.53 loss:4.484 grdn:0.920 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:04:23 celerate.py:304 step:35K smpl:557K ep:3K epch:16.58 loss:4.501 grdn:0.886 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:05:47 celerate.py:304 step:35K smpl:558K ep:3K epch:16.63 loss:4.483 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:07:10 celerate.py:304 step:35K smpl:560K ep:3K epch:16.67 loss:4.499 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:07:10 celerate.py:329 Checkpoint policy after step 35000
+INFO 2026-02-08 16:08:54 celerate.py:304 step:35K smpl:562K ep:3K epch:16.72 loss:4.498 grdn:0.921 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:10:18 celerate.py:304 step:35K smpl:563K ep:3K epch:16.77 loss:4.460 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:11:42 celerate.py:304 step:35K smpl:565K ep:3K epch:16.82 loss:4.468 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:13:06 celerate.py:304 step:35K smpl:566K ep:3K epch:16.86 loss:4.502 grdn:0.903 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 16:14:30 celerate.py:304 step:36K smpl:568K ep:3K epch:16.91 loss:4.489 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:15:54 celerate.py:304 step:36K smpl:570K ep:3K epch:16.96 loss:4.500 grdn:0.919 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 16:17:20 celerate.py:304 step:36K smpl:571K ep:3K epch:17.01 loss:4.510 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.025
+INFO 2026-02-08 16:18:44 celerate.py:304 step:36K smpl:573K ep:3K epch:17.05 loss:4.497 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:20:08 celerate.py:304 step:36K smpl:574K ep:3K epch:17.10 loss:4.499 grdn:0.923 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:21:32 celerate.py:304 step:36K smpl:576K ep:3K epch:17.15 loss:4.503 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:22:56 celerate.py:304 step:36K smpl:578K ep:3K epch:17.20 loss:4.476 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:24:20 celerate.py:304 step:36K smpl:579K ep:3K epch:17.24 loss:4.510 grdn:0.881 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:25:44 celerate.py:304 step:36K smpl:581K ep:3K epch:17.29 loss:4.476 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:27:08 celerate.py:304 step:36K smpl:582K ep:3K epch:17.34 loss:4.512 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:28:32 celerate.py:304 step:36K smpl:584K ep:3K epch:17.39 loss:4.514 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:29:56 celerate.py:304 step:37K smpl:586K ep:3K epch:17.44 loss:4.470 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:31:20 celerate.py:304 step:37K smpl:587K ep:3K epch:17.48 loss:4.524 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:32:44 celerate.py:304 step:37K smpl:589K ep:4K epch:17.53 loss:4.460 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:34:07 celerate.py:304 step:37K smpl:590K ep:4K epch:17.58 loss:4.516 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:35:31 celerate.py:304 step:37K smpl:592K ep:4K epch:17.63 loss:4.482 grdn:0.884 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:36:55 celerate.py:304 step:37K smpl:594K ep:4K epch:17.67 loss:4.452 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:38:19 celerate.py:304 step:37K smpl:595K ep:4K epch:17.72 loss:4.456 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:39:43 celerate.py:304 step:37K smpl:597K ep:4K epch:17.77 loss:4.525 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:41:07 celerate.py:304 step:37K smpl:598K ep:4K epch:17.82 loss:4.540 grdn:0.892 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 16:42:31 celerate.py:304 step:38K smpl:600K ep:4K epch:17.86 loss:4.470 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:43:55 celerate.py:304 step:38K smpl:602K ep:4K epch:17.91 loss:4.488 grdn:0.966 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:45:19 celerate.py:304 step:38K smpl:603K ep:4K epch:17.96 loss:4.438 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMhuggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+=(true | false)
+TOKENIZERS_PARALLELISMhuggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+=(true | false)
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMTOKENIZERS_PARALLELISM=(true | false)
+=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 16:46:44 celerate.py:304 step:38K smpl:605K ep:4K epch:18.01 loss:4.518 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.023
+INFO 2026-02-08 16:48:08 celerate.py:304 step:38K smpl:606K ep:4K epch:18.05 loss:4.507 grdn:0.899 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 16:49:32 celerate.py:304 step:38K smpl:608K ep:4K epch:18.10 loss:4.492 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:50:56 celerate.py:304 step:38K smpl:610K ep:4K epch:18.15 loss:4.486 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:52:20 celerate.py:304 step:38K smpl:611K ep:4K epch:18.20 loss:4.503 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:53:45 celerate.py:304 step:38K smpl:613K ep:4K epch:18.25 loss:4.518 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:55:08 celerate.py:304 step:38K smpl:614K ep:4K epch:18.29 loss:4.470 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:56:32 celerate.py:304 step:38K smpl:616K ep:4K epch:18.34 loss:4.513 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:57:56 celerate.py:304 step:39K smpl:618K ep:4K epch:18.39 loss:4.456 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 16:59:20 celerate.py:304 step:39K smpl:619K ep:4K epch:18.44 loss:4.465 grdn:0.918 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:00:44 celerate.py:304 step:39K smpl:621K ep:4K epch:18.48 loss:4.482 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:02:08 celerate.py:304 step:39K smpl:622K ep:4K epch:18.53 loss:4.491 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:03:32 celerate.py:304 step:39K smpl:624K ep:4K epch:18.58 loss:4.495 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:04:56 celerate.py:304 step:39K smpl:626K ep:4K epch:18.63 loss:4.505 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:06:20 celerate.py:304 step:39K smpl:627K ep:4K epch:18.67 loss:4.505 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:07:44 celerate.py:304 step:39K smpl:629K ep:4K epch:18.72 loss:4.501 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:09:08 celerate.py:304 step:39K smpl:630K ep:4K epch:18.77 loss:4.455 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:10:32 celerate.py:304 step:40K smpl:632K ep:4K epch:18.82 loss:4.483 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:11:55 celerate.py:304 step:40K smpl:634K ep:4K epch:18.86 loss:4.456 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:13:19 celerate.py:304 step:40K smpl:635K ep:4K epch:18.91 loss:4.523 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:14:43 celerate.py:304 step:40K smpl:637K ep:4K epch:18.96 loss:4.521 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 17:16:09 celerate.py:304 step:40K smpl:638K ep:4K epch:19.01 loss:4.496 grdn:0.884 lr:2.5e-06 updt_s:0.824 data_s:0.023
+INFO 2026-02-08 17:17:33 celerate.py:304 step:40K smpl:640K ep:4K epch:19.05 loss:4.482 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:17:33 celerate.py:329 Checkpoint policy after step 40000
+INFO 2026-02-08 17:19:16 celerate.py:304 step:40K smpl:642K ep:4K epch:19.10 loss:4.490 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:20:40 celerate.py:304 step:40K smpl:643K ep:4K epch:19.15 loss:4.499 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:22:04 celerate.py:304 step:40K smpl:645K ep:4K epch:19.20 loss:4.494 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:23:28 celerate.py:304 step:40K smpl:646K ep:4K epch:19.25 loss:4.495 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:24:52 celerate.py:304 step:40K smpl:648K ep:4K epch:19.29 loss:4.483 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:26:16 celerate.py:304 step:41K smpl:650K ep:4K epch:19.34 loss:4.517 grdn:0.889 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 17:27:40 celerate.py:304 step:41K smpl:651K ep:4K epch:19.39 loss:4.463 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:29:04 celerate.py:304 step:41K smpl:653K ep:4K epch:19.44 loss:4.500 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:30:28 celerate.py:304 step:41K smpl:654K ep:4K epch:19.48 loss:4.482 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:31:52 celerate.py:304 step:41K smpl:656K ep:4K epch:19.53 loss:4.484 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:33:16 celerate.py:304 step:41K smpl:658K ep:4K epch:19.58 loss:4.478 grdn:0.938 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:34:40 celerate.py:304 step:41K smpl:659K ep:4K epch:19.63 loss:4.448 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:36:04 celerate.py:304 step:41K smpl:661K ep:4K epch:19.67 loss:4.564 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:37:28 celerate.py:304 step:41K smpl:662K ep:4K epch:19.72 loss:4.510 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:38:52 celerate.py:304 step:42K smpl:664K ep:4K epch:19.77 loss:4.468 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:40:15 celerate.py:304 step:42K smpl:666K ep:4K epch:19.82 loss:4.487 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:41:39 celerate.py:304 step:42K smpl:667K ep:4K epch:19.86 loss:4.464 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:43:03 celerate.py:304 step:42K smpl:669K ep:4K epch:19.91 loss:4.490 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:44:27 celerate.py:304 step:42K smpl:670K ep:4K epch:19.96 loss:4.519 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 17:45:53 celerate.py:304 step:42K smpl:672K ep:4K epch:20.01 loss:4.480 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.024
+INFO 2026-02-08 17:47:17 celerate.py:304 step:42K smpl:674K ep:4K epch:20.06 loss:4.466 grdn:0.891 lr:2.5e-06 updt_s:0.827 data_s:0.006
+INFO 2026-02-08 17:48:41 celerate.py:304 step:42K smpl:675K ep:4K epch:20.10 loss:4.487 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:50:05 celerate.py:304 step:42K smpl:677K ep:4K epch:20.15 loss:4.549 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:51:29 celerate.py:304 step:42K smpl:678K ep:4K epch:20.20 loss:4.499 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:52:53 celerate.py:304 step:42K smpl:680K ep:4K epch:20.25 loss:4.521 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:54:17 celerate.py:304 step:43K smpl:682K ep:4K epch:20.29 loss:4.464 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:55:41 celerate.py:304 step:43K smpl:683K ep:4K epch:20.34 loss:4.512 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:57:05 celerate.py:304 step:43K smpl:685K ep:4K epch:20.39 loss:4.512 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:58:29 celerate.py:304 step:43K smpl:686K ep:4K epch:20.44 loss:4.481 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 17:59:53 celerate.py:304 step:43K smpl:688K ep:4K epch:20.48 loss:4.519 grdn:0.879 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:01:17 celerate.py:304 step:43K smpl:690K ep:4K epch:20.53 loss:4.488 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:02:41 celerate.py:304 step:43K smpl:691K ep:4K epch:20.58 loss:4.501 grdn:0.897 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 18:04:05 celerate.py:304 step:43K smpl:693K ep:4K epch:20.63 loss:4.480 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:05:29 celerate.py:304 step:43K smpl:694K ep:4K epch:20.67 loss:4.491 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:06:53 celerate.py:304 step:44K smpl:696K ep:4K epch:20.72 loss:4.465 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:08:17 celerate.py:304 step:44K smpl:698K ep:4K epch:20.77 loss:4.538 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:09:41 celerate.py:304 step:44K smpl:699K ep:4K epch:20.82 loss:4.467 grdn:0.890 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 18:11:05 celerate.py:304 step:44K smpl:701K ep:4K epch:20.87 loss:4.489 grdn:0.911 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 18:12:29 celerate.py:304 step:44K smpl:702K ep:4K epch:20.91 loss:4.492 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:13:53 celerate.py:304 step:44K smpl:704K ep:4K epch:20.96 loss:4.438 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISM=(true | false)
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 18:15:19 celerate.py:304 step:44K smpl:706K ep:4K epch:21.01 loss:4.443 grdn:0.892 lr:2.5e-06 updt_s:0.826 data_s:0.024
+INFO 2026-02-08 18:16:43 celerate.py:304 step:44K smpl:707K ep:4K epch:21.06 loss:4.468 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:18:08 celerate.py:304 step:44K smpl:709K ep:4K epch:21.10 loss:4.490 grdn:0.891 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 18:19:32 celerate.py:304 step:44K smpl:710K ep:4K epch:21.15 loss:4.493 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:20:56 celerate.py:304 step:44K smpl:712K ep:4K epch:21.20 loss:4.496 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:22:19 celerate.py:304 step:45K smpl:714K ep:4K epch:21.25 loss:4.494 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:23:43 celerate.py:304 step:45K smpl:715K ep:4K epch:21.29 loss:4.529 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:25:07 celerate.py:304 step:45K smpl:717K ep:4K epch:21.34 loss:4.444 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:26:31 celerate.py:304 step:45K smpl:718K ep:4K epch:21.39 loss:4.511 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:27:55 celerate.py:304 step:45K smpl:720K ep:4K epch:21.44 loss:4.489 grdn:0.889 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 18:27:55 celerate.py:329 Checkpoint policy after step 45000
+INFO 2026-02-08 18:29:38 celerate.py:304 step:45K smpl:722K ep:4K epch:21.48 loss:4.498 grdn:0.883 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:31:02 celerate.py:304 step:45K smpl:723K ep:4K epch:21.53 loss:4.466 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:32:26 celerate.py:304 step:45K smpl:725K ep:4K epch:21.58 loss:4.501 grdn:0.914 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 18:33:50 celerate.py:304 step:45K smpl:726K ep:4K epch:21.63 loss:4.493 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:35:14 celerate.py:304 step:46K smpl:728K ep:4K epch:21.68 loss:4.486 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:36:38 celerate.py:304 step:46K smpl:730K ep:4K epch:21.72 loss:4.473 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:38:02 celerate.py:304 step:46K smpl:731K ep:4K epch:21.77 loss:4.502 grdn:0.916 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 18:39:26 celerate.py:304 step:46K smpl:733K ep:4K epch:21.82 loss:4.494 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:40:50 celerate.py:304 step:46K smpl:734K ep:4K epch:21.87 loss:4.470 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:42:14 celerate.py:304 step:46K smpl:736K ep:4K epch:21.91 loss:4.488 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:43:38 celerate.py:304 step:46K smpl:738K ep:4K epch:21.96 loss:4.503 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMTOKENIZERS_PARALLELISM=(true | false)
+=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 18:45:04 celerate.py:304 step:46K smpl:739K ep:4K epch:22.01 loss:4.485 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.026
+INFO 2026-02-08 18:46:28 celerate.py:304 step:46K smpl:741K ep:4K epch:22.06 loss:4.490 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:47:52 celerate.py:304 step:46K smpl:742K ep:4K epch:22.10 loss:4.514 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:49:16 celerate.py:304 step:46K smpl:744K ep:4K epch:22.15 loss:4.489 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:50:40 celerate.py:304 step:47K smpl:746K ep:4K epch:22.20 loss:4.479 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:52:03 celerate.py:304 step:47K smpl:747K ep:4K epch:22.25 loss:4.457 grdn:0.914 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 18:53:27 celerate.py:304 step:47K smpl:749K ep:4K epch:22.29 loss:4.506 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:54:51 celerate.py:304 step:47K smpl:750K ep:4K epch:22.34 loss:4.492 grdn:0.898 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 18:56:15 celerate.py:304 step:47K smpl:752K ep:4K epch:22.39 loss:4.441 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:57:39 celerate.py:304 step:47K smpl:754K ep:4K epch:22.44 loss:4.487 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 18:59:04 celerate.py:304 step:47K smpl:755K ep:4K epch:22.48 loss:4.501 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:00:28 celerate.py:304 step:47K smpl:757K ep:5K epch:22.53 loss:4.433 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:01:52 celerate.py:304 step:47K smpl:758K ep:5K epch:22.58 loss:4.464 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:03:15 celerate.py:304 step:48K smpl:760K ep:5K epch:22.63 loss:4.492 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:04:39 celerate.py:304 step:48K smpl:762K ep:5K epch:22.68 loss:4.529 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:06:03 celerate.py:304 step:48K smpl:763K ep:5K epch:22.72 loss:4.527 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:07:27 celerate.py:304 step:48K smpl:765K ep:5K epch:22.77 loss:4.500 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:08:51 celerate.py:304 step:48K smpl:766K ep:5K epch:22.82 loss:4.499 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:10:15 celerate.py:304 step:48K smpl:768K ep:5K epch:22.87 loss:4.502 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:11:39 celerate.py:304 step:48K smpl:770K ep:5K epch:22.91 loss:4.510 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:13:03 celerate.py:304 step:48K smpl:771K ep:5K epch:22.96 loss:4.505 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMTOKENIZERS_PARALLELISM=(true | false)
+=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 19:14:29 celerate.py:304 step:48K smpl:773K ep:5K epch:23.01 loss:4.472 grdn:0.889 lr:2.5e-06 updt_s:0.825 data_s:0.023
+INFO 2026-02-08 19:15:53 celerate.py:304 step:48K smpl:774K ep:5K epch:23.06 loss:4.471 grdn:0.896 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 19:17:17 celerate.py:304 step:48K smpl:776K ep:5K epch:23.10 loss:4.459 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:18:41 celerate.py:304 step:49K smpl:778K ep:5K epch:23.15 loss:4.465 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:20:04 celerate.py:304 step:49K smpl:779K ep:5K epch:23.20 loss:4.472 grdn:0.893 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 19:21:29 celerate.py:304 step:49K smpl:781K ep:5K epch:23.25 loss:4.501 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:22:52 celerate.py:304 step:49K smpl:782K ep:5K epch:23.29 loss:4.473 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:24:16 celerate.py:304 step:49K smpl:784K ep:5K epch:23.34 loss:4.494 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:25:40 celerate.py:304 step:49K smpl:786K ep:5K epch:23.39 loss:4.496 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:27:04 celerate.py:304 step:49K smpl:787K ep:5K epch:23.44 loss:4.515 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:28:28 celerate.py:304 step:49K smpl:789K ep:5K epch:23.49 loss:4.510 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:29:52 celerate.py:304 step:49K smpl:790K ep:5K epch:23.53 loss:4.458 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:31:16 celerate.py:304 step:50K smpl:792K ep:5K epch:23.58 loss:4.516 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:32:40 celerate.py:304 step:50K smpl:794K ep:5K epch:23.63 loss:4.480 grdn:0.899 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 19:34:04 celerate.py:304 step:50K smpl:795K ep:5K epch:23.68 loss:4.488 grdn:0.905 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 19:35:28 celerate.py:304 step:50K smpl:797K ep:5K epch:23.72 loss:4.478 grdn:0.924 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:36:52 celerate.py:304 step:50K smpl:798K ep:5K epch:23.77 loss:4.499 grdn:0.901 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 19:38:16 celerate.py:304 step:50K smpl:800K ep:5K epch:23.82 loss:4.498 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:38:16 celerate.py:329 Checkpoint policy after step 50000
+INFO 2026-02-08 19:40:01 celerate.py:304 step:50K smpl:802K ep:5K epch:23.87 loss:4.526 grdn:0.902 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 19:41:25 celerate.py:304 step:50K smpl:803K ep:5K epch:23.91 loss:4.506 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:42:49 celerate.py:304 step:50K smpl:805K ep:5K epch:23.96 loss:4.504 grdn:0.897 lr:2.5e-06 updt_s:0.825 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 19:44:14 celerate.py:304 step:50K smpl:806K ep:5K epch:24.01 loss:4.491 grdn:0.911 lr:2.5e-06 updt_s:0.825 data_s:0.024
+INFO 2026-02-08 19:45:38 celerate.py:304 step:50K smpl:808K ep:5K epch:24.06 loss:4.488 grdn:0.905 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 19:47:02 celerate.py:304 step:51K smpl:810K ep:5K epch:24.10 loss:4.448 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:48:26 celerate.py:304 step:51K smpl:811K ep:5K epch:24.15 loss:4.482 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:49:50 celerate.py:304 step:51K smpl:813K ep:5K epch:24.20 loss:4.500 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:51:14 celerate.py:304 step:51K smpl:814K ep:5K epch:24.25 loss:4.527 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:52:38 celerate.py:304 step:51K smpl:816K ep:5K epch:24.30 loss:4.482 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:54:02 celerate.py:304 step:51K smpl:818K ep:5K epch:24.34 loss:4.470 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:55:26 celerate.py:304 step:51K smpl:819K ep:5K epch:24.39 loss:4.490 grdn:0.905 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 19:56:50 celerate.py:304 step:51K smpl:821K ep:5K epch:24.44 loss:4.514 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:58:14 celerate.py:304 step:51K smpl:822K ep:5K epch:24.49 loss:4.487 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 19:59:38 celerate.py:304 step:52K smpl:824K ep:5K epch:24.53 loss:4.491 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:01:02 celerate.py:304 step:52K smpl:826K ep:5K epch:24.58 loss:4.520 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:02:26 celerate.py:304 step:52K smpl:827K ep:5K epch:24.63 loss:4.508 grdn:0.904 lr:2.5e-06 updt_s:0.828 data_s:0.007
+INFO 2026-02-08 20:03:50 celerate.py:304 step:52K smpl:829K ep:5K epch:24.68 loss:4.504 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:05:14 celerate.py:304 step:52K smpl:830K ep:5K epch:24.72 loss:4.514 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:06:38 celerate.py:304 step:52K smpl:832K ep:5K epch:24.77 loss:4.487 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:08:02 celerate.py:304 step:52K smpl:834K ep:5K epch:24.82 loss:4.503 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:09:26 celerate.py:304 step:52K smpl:835K ep:5K epch:24.87 loss:4.514 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:10:50 celerate.py:304 step:52K smpl:837K ep:5K epch:24.91 loss:4.469 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:12:13 celerate.py:304 step:52K smpl:838K ep:5K epch:24.96 loss:4.481 grdn:0.887 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 20:13:39 celerate.py:304 step:52K smpl:840K ep:5K epch:25.01 loss:4.480 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.026
+INFO 2026-02-08 20:15:03 celerate.py:304 step:53K smpl:842K ep:5K epch:25.06 loss:4.464 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:16:27 celerate.py:304 step:53K smpl:843K ep:5K epch:25.10 loss:4.479 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:17:51 celerate.py:304 step:53K smpl:845K ep:5K epch:25.15 loss:4.455 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:19:16 celerate.py:304 step:53K smpl:846K ep:5K epch:25.20 loss:4.509 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:20:40 celerate.py:304 step:53K smpl:848K ep:5K epch:25.25 loss:4.475 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:22:04 celerate.py:304 step:53K smpl:850K ep:5K epch:25.30 loss:4.505 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:23:28 celerate.py:304 step:53K smpl:851K ep:5K epch:25.34 loss:4.526 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:24:51 celerate.py:304 step:53K smpl:853K ep:5K epch:25.39 loss:4.482 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:26:15 celerate.py:304 step:53K smpl:854K ep:5K epch:25.44 loss:4.485 grdn:0.919 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:27:39 celerate.py:304 step:54K smpl:856K ep:5K epch:25.49 loss:4.501 grdn:0.918 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:29:03 celerate.py:304 step:54K smpl:858K ep:5K epch:25.53 loss:4.512 grdn:0.895 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 20:30:27 celerate.py:304 step:54K smpl:859K ep:5K epch:25.58 loss:4.479 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:31:51 celerate.py:304 step:54K smpl:861K ep:5K epch:25.63 loss:4.466 grdn:0.961 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 20:33:15 celerate.py:304 step:54K smpl:862K ep:5K epch:25.68 loss:4.495 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:34:39 celerate.py:304 step:54K smpl:864K ep:5K epch:25.72 loss:4.499 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:36:03 celerate.py:304 step:54K smpl:866K ep:5K epch:25.77 loss:4.498 grdn:0.909 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 20:37:27 celerate.py:304 step:54K smpl:867K ep:5K epch:25.82 loss:4.516 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:38:51 celerate.py:304 step:54K smpl:869K ep:5K epch:25.87 loss:4.513 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:40:16 celerate.py:304 step:54K smpl:870K ep:5K epch:25.91 loss:4.502 grdn:0.895 lr:2.5e-06 updt_s:0.828 data_s:0.007
+INFO 2026-02-08 20:41:40 celerate.py:304 step:54K smpl:872K ep:5K epch:25.96 loss:4.451 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 20:43:05 celerate.py:304 step:55K smpl:874K ep:5K epch:26.01 loss:4.485 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.025
+INFO 2026-02-08 20:44:29 celerate.py:304 step:55K smpl:875K ep:5K epch:26.06 loss:4.466 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:45:53 celerate.py:304 step:55K smpl:877K ep:5K epch:26.11 loss:4.517 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:47:17 celerate.py:304 step:55K smpl:878K ep:5K epch:26.15 loss:4.492 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:48:41 celerate.py:304 step:55K smpl:880K ep:5K epch:26.20 loss:4.481 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:48:41 celerate.py:329 Checkpoint policy after step 55000
+INFO 2026-02-08 20:50:26 celerate.py:304 step:55K smpl:882K ep:5K epch:26.25 loss:4.477 grdn:0.892 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 20:51:50 celerate.py:304 step:55K smpl:883K ep:5K epch:26.30 loss:4.538 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:53:14 celerate.py:304 step:55K smpl:885K ep:5K epch:26.34 loss:4.541 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:54:38 celerate.py:304 step:55K smpl:886K ep:5K epch:26.39 loss:4.483 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:56:02 celerate.py:304 step:56K smpl:888K ep:5K epch:26.44 loss:4.504 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:57:26 celerate.py:304 step:56K smpl:890K ep:5K epch:26.49 loss:4.492 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 20:58:50 celerate.py:304 step:56K smpl:891K ep:5K epch:26.53 loss:4.503 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:00:14 celerate.py:304 step:56K smpl:893K ep:5K epch:26.58 loss:4.486 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:01:38 celerate.py:304 step:56K smpl:894K ep:5K epch:26.63 loss:4.474 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:03:01 celerate.py:304 step:56K smpl:896K ep:5K epch:26.68 loss:4.473 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:04:25 celerate.py:304 step:56K smpl:898K ep:5K epch:26.72 loss:4.463 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:05:49 celerate.py:304 step:56K smpl:899K ep:5K epch:26.77 loss:4.464 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:07:13 celerate.py:304 step:56K smpl:901K ep:5K epch:26.82 loss:4.481 grdn:0.910 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 21:08:37 celerate.py:304 step:56K smpl:902K ep:5K epch:26.87 loss:4.524 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:10:01 celerate.py:304 step:56K smpl:904K ep:5K epch:26.92 loss:4.475 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:11:25 celerate.py:304 step:57K smpl:906K ep:5K epch:26.96 loss:4.475 grdn:0.920 lr:2.5e-06 updt_s:0.825 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 21:12:51 celerate.py:304 step:57K smpl:907K ep:5K epch:27.01 loss:4.537 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.025
+INFO 2026-02-08 21:14:15 celerate.py:304 step:57K smpl:909K ep:5K epch:27.06 loss:4.479 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:15:39 celerate.py:304 step:57K smpl:910K ep:5K epch:27.11 loss:4.524 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:17:03 celerate.py:304 step:57K smpl:912K ep:5K epch:27.15 loss:4.451 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:18:27 celerate.py:304 step:57K smpl:914K ep:5K epch:27.20 loss:4.507 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:19:50 celerate.py:304 step:57K smpl:915K ep:5K epch:27.25 loss:4.491 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:21:14 celerate.py:304 step:57K smpl:917K ep:5K epch:27.30 loss:4.482 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:22:38 celerate.py:304 step:57K smpl:918K ep:5K epch:27.34 loss:4.521 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:24:02 celerate.py:304 step:58K smpl:920K ep:5K epch:27.39 loss:4.470 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:25:26 celerate.py:304 step:58K smpl:922K ep:5K epch:27.44 loss:4.500 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:26:50 celerate.py:304 step:58K smpl:923K ep:5K epch:27.49 loss:4.481 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:28:14 celerate.py:304 step:58K smpl:925K ep:6K epch:27.53 loss:4.466 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:29:38 celerate.py:304 step:58K smpl:926K ep:6K epch:27.58 loss:4.515 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:31:02 celerate.py:304 step:58K smpl:928K ep:6K epch:27.63 loss:4.502 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:32:26 celerate.py:304 step:58K smpl:930K ep:6K epch:27.68 loss:4.496 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:33:50 celerate.py:304 step:58K smpl:931K ep:6K epch:27.73 loss:4.497 grdn:0.893 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 21:35:14 celerate.py:304 step:58K smpl:933K ep:6K epch:27.77 loss:4.501 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:36:38 celerate.py:304 step:58K smpl:934K ep:6K epch:27.82 loss:4.447 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:38:02 celerate.py:304 step:58K smpl:936K ep:6K epch:27.87 loss:4.498 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:39:26 celerate.py:304 step:59K smpl:938K ep:6K epch:27.92 loss:4.444 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:40:50 celerate.py:304 step:59K smpl:939K ep:6K epch:27.96 loss:4.506 grdn:0.924 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 21:42:16 celerate.py:304 step:59K smpl:941K ep:6K epch:28.01 loss:4.455 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.024
+INFO 2026-02-08 21:43:39 celerate.py:304 step:59K smpl:942K ep:6K epch:28.06 loss:4.487 grdn:0.886 lr:2.5e-06 updt_s:0.824 data_s:0.006
+INFO 2026-02-08 21:45:03 celerate.py:304 step:59K smpl:944K ep:6K epch:28.11 loss:4.457 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:46:27 celerate.py:304 step:59K smpl:946K ep:6K epch:28.15 loss:4.475 grdn:0.893 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 21:47:51 celerate.py:304 step:59K smpl:947K ep:6K epch:28.20 loss:4.513 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:49:15 celerate.py:304 step:59K smpl:949K ep:6K epch:28.25 loss:4.520 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:50:39 celerate.py:304 step:59K smpl:950K ep:6K epch:28.30 loss:4.498 grdn:0.911 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 21:52:03 celerate.py:304 step:60K smpl:952K ep:6K epch:28.34 loss:4.462 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:53:27 celerate.py:304 step:60K smpl:954K ep:6K epch:28.39 loss:4.500 grdn:0.920 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:54:51 celerate.py:304 step:60K smpl:955K ep:6K epch:28.44 loss:4.493 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:56:15 celerate.py:304 step:60K smpl:957K ep:6K epch:28.49 loss:4.503 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:57:39 celerate.py:304 step:60K smpl:958K ep:6K epch:28.53 loss:4.513 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:59:03 celerate.py:304 step:60K smpl:960K ep:6K epch:28.58 loss:4.480 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 21:59:03 celerate.py:329 Checkpoint policy after step 60000
+INFO 2026-02-08 22:00:46 celerate.py:304 step:60K smpl:962K ep:6K epch:28.63 loss:4.447 grdn:0.921 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:02:10 celerate.py:304 step:60K smpl:963K ep:6K epch:28.68 loss:4.491 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:03:34 celerate.py:304 step:60K smpl:965K ep:6K epch:28.73 loss:4.516 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:04:57 celerate.py:304 step:60K smpl:966K ep:6K epch:28.77 loss:4.505 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:06:21 celerate.py:304 step:60K smpl:968K ep:6K epch:28.82 loss:4.500 grdn:0.893 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 22:07:45 celerate.py:304 step:61K smpl:970K ep:6K epch:28.87 loss:4.479 grdn:0.923 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:09:09 celerate.py:304 step:61K smpl:971K ep:6K epch:28.92 loss:4.489 grdn:0.908 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 22:10:33 celerate.py:304 step:61K smpl:973K ep:6K epch:28.96 loss:4.480 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMTOKENIZERS_PARALLELISM=(true | false)
+=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 22:11:59 celerate.py:304 step:61K smpl:974K ep:6K epch:29.01 loss:4.522 grdn:0.893 lr:2.5e-06 updt_s:0.826 data_s:0.023
+INFO 2026-02-08 22:13:23 celerate.py:304 step:61K smpl:976K ep:6K epch:29.06 loss:4.486 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:14:47 celerate.py:304 step:61K smpl:978K ep:6K epch:29.11 loss:4.521 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:16:11 celerate.py:304 step:61K smpl:979K ep:6K epch:29.15 loss:4.457 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:17:35 celerate.py:304 step:61K smpl:981K ep:6K epch:29.20 loss:4.491 grdn:0.886 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:18:59 celerate.py:304 step:61K smpl:982K ep:6K epch:29.25 loss:4.502 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:20:23 celerate.py:304 step:62K smpl:984K ep:6K epch:29.30 loss:4.495 grdn:0.917 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:21:47 celerate.py:304 step:62K smpl:986K ep:6K epch:29.34 loss:4.518 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:23:11 celerate.py:304 step:62K smpl:987K ep:6K epch:29.39 loss:4.495 grdn:0.914 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 22:24:35 celerate.py:304 step:62K smpl:989K ep:6K epch:29.44 loss:4.501 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:25:58 celerate.py:304 step:62K smpl:990K ep:6K epch:29.49 loss:4.497 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:27:22 celerate.py:304 step:62K smpl:992K ep:6K epch:29.54 loss:4.498 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:28:46 celerate.py:304 step:62K smpl:994K ep:6K epch:29.58 loss:4.511 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:30:10 celerate.py:304 step:62K smpl:995K ep:6K epch:29.63 loss:4.507 grdn:0.931 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:31:34 celerate.py:304 step:62K smpl:997K ep:6K epch:29.68 loss:4.530 grdn:0.902 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 22:32:58 celerate.py:304 step:62K smpl:998K ep:6K epch:29.73 loss:4.482 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:34:22 celerate.py:304 step:62K smpl:1M ep:6K epch:29.77 loss:4.441 grdn:0.909 lr:2.5e-06 updt_s:0.823 data_s:0.007
+INFO 2026-02-08 22:35:46 celerate.py:304 step:63K smpl:1M ep:6K epch:29.82 loss:4.446 grdn:0.924 lr:2.5e-06 updt_s:0.823 data_s:0.007
+INFO 2026-02-08 22:37:10 celerate.py:304 step:63K smpl:1M ep:6K epch:29.87 loss:4.471 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:38:34 celerate.py:304 step:63K smpl:1M ep:6K epch:29.92 loss:4.478 grdn:0.906 lr:2.5e-06 updt_s:0.823 data_s:0.007
+INFO 2026-02-08 22:39:58 celerate.py:304 step:63K smpl:1M ep:6K epch:29.96 loss:4.486 grdn:0.896 lr:2.5e-06 updt_s:0.823 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 22:41:24 celerate.py:304 step:63K smpl:1M ep:6K epch:30.01 loss:4.457 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.024
+INFO 2026-02-08 22:42:48 celerate.py:304 step:63K smpl:1M ep:6K epch:30.06 loss:4.456 grdn:0.904 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 22:44:12 celerate.py:304 step:63K smpl:1M ep:6K epch:30.11 loss:4.482 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:45:36 celerate.py:304 step:63K smpl:1M ep:6K epch:30.15 loss:4.502 grdn:0.906 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 22:47:00 celerate.py:304 step:63K smpl:1M ep:6K epch:30.20 loss:4.478 grdn:0.895 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 22:48:24 celerate.py:304 step:64K smpl:1M ep:6K epch:30.25 loss:4.482 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:49:48 celerate.py:304 step:64K smpl:1M ep:6K epch:30.30 loss:4.512 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:51:12 celerate.py:304 step:64K smpl:1M ep:6K epch:30.35 loss:4.510 grdn:0.907 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 22:52:36 celerate.py:304 step:64K smpl:1M ep:6K epch:30.39 loss:4.488 grdn:0.912 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 22:54:00 celerate.py:304 step:64K smpl:1M ep:6K epch:30.44 loss:4.498 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:55:24 celerate.py:304 step:64K smpl:1M ep:6K epch:30.49 loss:4.475 grdn:0.886 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:56:48 celerate.py:304 step:64K smpl:1M ep:6K epch:30.54 loss:4.462 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:58:12 celerate.py:304 step:64K smpl:1M ep:6K epch:30.58 loss:4.507 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 22:59:36 celerate.py:304 step:64K smpl:1M ep:6K epch:30.63 loss:4.524 grdn:0.913 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-08 23:01:00 celerate.py:304 step:64K smpl:1M ep:6K epch:30.68 loss:4.484 grdn:0.886 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:02:24 celerate.py:304 step:64K smpl:1M ep:6K epch:30.73 loss:4.512 grdn:0.933 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:03:48 celerate.py:304 step:65K smpl:1M ep:6K epch:30.77 loss:4.476 grdn:0.904 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-08 23:05:12 celerate.py:304 step:65K smpl:1M ep:6K epch:30.82 loss:4.477 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:06:36 celerate.py:304 step:65K smpl:1M ep:6K epch:30.87 loss:4.488 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:08:00 celerate.py:304 step:65K smpl:1M ep:6K epch:30.92 loss:4.493 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:09:24 celerate.py:304 step:65K smpl:1M ep:6K epch:30.96 loss:4.505 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:09:24 celerate.py:329 Checkpoint policy after step 65000
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 23:11:07 celerate.py:304 step:65K smpl:1M ep:6K epch:31.01 loss:4.505 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.025
+INFO 2026-02-08 23:12:31 celerate.py:304 step:65K smpl:1M ep:6K epch:31.06 loss:4.485 grdn:0.919 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:13:55 celerate.py:304 step:65K smpl:1M ep:6K epch:31.11 loss:4.513 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:15:19 celerate.py:304 step:65K smpl:1M ep:6K epch:31.15 loss:4.518 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:16:43 celerate.py:304 step:66K smpl:1M ep:6K epch:31.20 loss:4.497 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:18:07 celerate.py:304 step:66K smpl:1M ep:6K epch:31.25 loss:4.497 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:19:31 celerate.py:304 step:66K smpl:1M ep:6K epch:31.30 loss:4.486 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:20:55 celerate.py:304 step:66K smpl:1M ep:6K epch:31.35 loss:4.481 grdn:0.917 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:22:19 celerate.py:304 step:66K smpl:1M ep:6K epch:31.39 loss:4.473 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:23:43 celerate.py:304 step:66K smpl:1M ep:6K epch:31.44 loss:4.485 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:25:07 celerate.py:304 step:66K smpl:1M ep:6K epch:31.49 loss:4.446 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:26:31 celerate.py:304 step:66K smpl:1M ep:6K epch:31.54 loss:4.468 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:27:55 celerate.py:304 step:66K smpl:1M ep:6K epch:31.58 loss:4.473 grdn:0.901 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:29:19 celerate.py:304 step:66K smpl:1M ep:6K epch:31.63 loss:4.504 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:30:43 celerate.py:304 step:66K smpl:1M ep:6K epch:31.68 loss:4.506 grdn:0.912 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:32:07 celerate.py:304 step:67K smpl:1M ep:6K epch:31.73 loss:4.474 grdn:0.905 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:33:31 celerate.py:304 step:67K smpl:1M ep:6K epch:31.77 loss:4.503 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:34:55 celerate.py:304 step:67K smpl:1M ep:6K epch:31.82 loss:4.530 grdn:0.915 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:36:19 celerate.py:304 step:67K smpl:1M ep:6K epch:31.87 loss:4.482 grdn:0.907 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:37:43 celerate.py:304 step:67K smpl:1M ep:6K epch:31.92 loss:4.508 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:39:06 celerate.py:304 step:67K smpl:1M ep:6K epch:31.96 loss:4.459 grdn:0.906 lr:2.5e-06 updt_s:0.825 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMTOKENIZERS_PARALLELISM=(true | false)
+=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-08 23:40:32 celerate.py:304 step:67K smpl:1M ep:6K epch:32.01 loss:4.533 grdn:0.903 lr:2.5e-06 updt_s:0.825 data_s:0.023
+INFO 2026-02-08 23:41:56 celerate.py:304 step:67K smpl:1M ep:6K epch:32.06 loss:4.494 grdn:0.897 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:43:20 celerate.py:304 step:67K smpl:1M ep:6K epch:32.11 loss:4.473 grdn:0.909 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:44:44 celerate.py:304 step:68K smpl:1M ep:6K epch:32.16 loss:4.462 grdn:0.901 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:46:08 celerate.py:304 step:68K smpl:1M ep:6K epch:32.20 loss:4.480 grdn:0.906 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:47:32 celerate.py:304 step:68K smpl:1M ep:6K epch:32.25 loss:4.504 grdn:0.901 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:48:56 celerate.py:304 step:68K smpl:1M ep:6K epch:32.30 loss:4.496 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:50:21 celerate.py:304 step:68K smpl:1M ep:6K epch:32.35 loss:4.504 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:51:45 celerate.py:304 step:68K smpl:1M ep:6K epch:32.39 loss:4.478 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:53:09 celerate.py:304 step:68K smpl:1M ep:6K epch:32.44 loss:4.447 grdn:0.909 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:54:33 celerate.py:304 step:68K smpl:1M ep:6K epch:32.49 loss:4.486 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-08 23:55:57 celerate.py:304 step:68K smpl:1M ep:7K epch:32.54 loss:4.480 grdn:0.905 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:57:21 celerate.py:304 step:68K smpl:1M ep:7K epch:32.58 loss:4.472 grdn:0.894 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-08 23:58:45 celerate.py:304 step:68K smpl:1M ep:7K epch:32.63 loss:4.453 grdn:0.920 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:00:09 celerate.py:304 step:69K smpl:1M ep:7K epch:32.68 loss:4.481 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:01:33 celerate.py:304 step:69K smpl:1M ep:7K epch:32.73 loss:4.517 grdn:0.894 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 00:02:57 celerate.py:304 step:69K smpl:1M ep:7K epch:32.77 loss:4.507 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:04:21 celerate.py:304 step:69K smpl:1M ep:7K epch:32.82 loss:4.480 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:05:45 celerate.py:304 step:69K smpl:1M ep:7K epch:32.87 loss:4.552 grdn:0.918 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:07:09 celerate.py:304 step:69K smpl:1M ep:7K epch:32.92 loss:4.499 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:08:33 celerate.py:304 step:69K smpl:1M ep:7K epch:32.97 loss:4.516 grdn:0.928 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 00:09:58 celerate.py:304 step:69K smpl:1M ep:7K epch:33.01 loss:4.503 grdn:0.901 lr:2.5e-06 updt_s:0.826 data_s:0.022
+INFO 2026-02-09 00:11:22 celerate.py:304 step:69K smpl:1M ep:7K epch:33.06 loss:4.477 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:12:46 celerate.py:304 step:70K smpl:1M ep:7K epch:33.11 loss:4.512 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:14:10 celerate.py:304 step:70K smpl:1M ep:7K epch:33.16 loss:4.522 grdn:0.898 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 00:15:34 celerate.py:304 step:70K smpl:1M ep:7K epch:33.20 loss:4.458 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:16:58 celerate.py:304 step:70K smpl:1M ep:7K epch:33.25 loss:4.497 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:18:22 celerate.py:304 step:70K smpl:1M ep:7K epch:33.30 loss:4.486 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:19:46 celerate.py:304 step:70K smpl:1M ep:7K epch:33.35 loss:4.496 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:19:46 celerate.py:329 Checkpoint policy after step 70000
+INFO 2026-02-09 00:21:30 celerate.py:304 step:70K smpl:1M ep:7K epch:33.39 loss:4.471 grdn:0.909 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 00:22:54 celerate.py:304 step:70K smpl:1M ep:7K epch:33.44 loss:4.469 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:24:18 celerate.py:304 step:70K smpl:1M ep:7K epch:33.49 loss:4.482 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:25:42 celerate.py:304 step:70K smpl:1M ep:7K epch:33.54 loss:4.499 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:27:05 celerate.py:304 step:70K smpl:1M ep:7K epch:33.58 loss:4.520 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:28:29 celerate.py:304 step:71K smpl:1M ep:7K epch:33.63 loss:4.487 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:29:53 celerate.py:304 step:71K smpl:1M ep:7K epch:33.68 loss:4.485 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:31:17 celerate.py:304 step:71K smpl:1M ep:7K epch:33.73 loss:4.479 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:32:41 celerate.py:304 step:71K smpl:1M ep:7K epch:33.77 loss:4.502 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:34:05 celerate.py:304 step:71K smpl:1M ep:7K epch:33.82 loss:4.460 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:35:29 celerate.py:304 step:71K smpl:1M ep:7K epch:33.87 loss:4.484 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:36:53 celerate.py:304 step:71K smpl:1M ep:7K epch:33.92 loss:4.505 grdn:0.918 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 00:38:17 celerate.py:304 step:71K smpl:1M ep:7K epch:33.97 loss:4.463 grdn:0.890 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISMTOKENIZERS_PARALLELISM=(true | false)
+=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 00:39:43 celerate.py:304 step:71K smpl:1M ep:7K epch:34.01 loss:4.513 grdn:0.906 lr:2.5e-06 updt_s:0.825 data_s:0.023
+INFO 2026-02-09 00:41:07 celerate.py:304 step:72K smpl:1M ep:7K epch:34.06 loss:4.470 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:42:31 celerate.py:304 step:72K smpl:1M ep:7K epch:34.11 loss:4.445 grdn:0.931 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-09 00:43:55 celerate.py:304 step:72K smpl:1M ep:7K epch:34.16 loss:4.500 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:45:19 celerate.py:304 step:72K smpl:1M ep:7K epch:34.20 loss:4.484 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:46:43 celerate.py:304 step:72K smpl:1M ep:7K epch:34.25 loss:4.505 grdn:0.914 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 00:48:07 celerate.py:304 step:72K smpl:1M ep:7K epch:34.30 loss:4.515 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:49:31 celerate.py:304 step:72K smpl:1M ep:7K epch:34.35 loss:4.495 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:50:55 celerate.py:304 step:72K smpl:1M ep:7K epch:34.39 loss:4.491 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:52:19 celerate.py:304 step:72K smpl:1M ep:7K epch:34.44 loss:4.504 grdn:0.895 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 00:53:43 celerate.py:304 step:72K smpl:1M ep:7K epch:34.49 loss:4.549 grdn:0.898 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 00:55:07 celerate.py:304 step:72K smpl:1M ep:7K epch:34.54 loss:4.554 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:56:31 celerate.py:304 step:73K smpl:1M ep:7K epch:34.58 loss:4.514 grdn:0.903 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-09 00:57:55 celerate.py:304 step:73K smpl:1M ep:7K epch:34.63 loss:4.480 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 00:59:19 celerate.py:304 step:73K smpl:1M ep:7K epch:34.68 loss:4.459 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:00:43 celerate.py:304 step:73K smpl:1M ep:7K epch:34.73 loss:4.491 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:02:07 celerate.py:304 step:73K smpl:1M ep:7K epch:34.78 loss:4.449 grdn:0.916 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:03:31 celerate.py:304 step:73K smpl:1M ep:7K epch:34.82 loss:4.459 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:04:55 celerate.py:304 step:73K smpl:1M ep:7K epch:34.87 loss:4.477 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:06:19 celerate.py:304 step:73K smpl:1M ep:7K epch:34.92 loss:4.466 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:07:42 celerate.py:304 step:73K smpl:1M ep:7K epch:34.97 loss:4.473 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 01:09:08 celerate.py:304 step:74K smpl:1M ep:7K epch:35.01 loss:4.479 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.022
+INFO 2026-02-09 01:10:32 celerate.py:304 step:74K smpl:1M ep:7K epch:35.06 loss:4.517 grdn:0.921 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-09 01:11:56 celerate.py:304 step:74K smpl:1M ep:7K epch:35.11 loss:4.490 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:13:20 celerate.py:304 step:74K smpl:1M ep:7K epch:35.16 loss:4.492 grdn:0.887 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:14:44 celerate.py:304 step:74K smpl:1M ep:7K epch:35.20 loss:4.513 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:16:08 celerate.py:304 step:74K smpl:1M ep:7K epch:35.25 loss:4.478 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:17:32 celerate.py:304 step:74K smpl:1M ep:7K epch:35.30 loss:4.508 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:18:56 celerate.py:304 step:74K smpl:1M ep:7K epch:35.35 loss:4.494 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:20:20 celerate.py:304 step:74K smpl:1M ep:7K epch:35.39 loss:4.455 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:21:44 celerate.py:304 step:74K smpl:1M ep:7K epch:35.44 loss:4.518 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:23:08 celerate.py:304 step:74K smpl:1M ep:7K epch:35.49 loss:4.500 grdn:0.908 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-09 01:24:32 celerate.py:304 step:75K smpl:1M ep:7K epch:35.54 loss:4.457 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:25:56 celerate.py:304 step:75K smpl:1M ep:7K epch:35.59 loss:4.480 grdn:0.897 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-09 01:27:20 celerate.py:304 step:75K smpl:1M ep:7K epch:35.63 loss:4.476 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:28:44 celerate.py:304 step:75K smpl:1M ep:7K epch:35.68 loss:4.510 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:30:08 celerate.py:304 step:75K smpl:1M ep:7K epch:35.73 loss:4.490 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:30:08 celerate.py:329 Checkpoint policy after step 75000
+INFO 2026-02-09 01:31:54 celerate.py:304 step:75K smpl:1M ep:7K epch:35.78 loss:4.494 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:33:18 celerate.py:304 step:75K smpl:1M ep:7K epch:35.82 loss:4.519 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:34:42 celerate.py:304 step:75K smpl:1M ep:7K epch:35.87 loss:4.486 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:36:06 celerate.py:304 step:75K smpl:1M ep:7K epch:35.92 loss:4.458 grdn:0.899 lr:2.5e-06 updt_s:0.823 data_s:0.007
+INFO 2026-02-09 01:37:30 celerate.py:304 step:76K smpl:1M ep:7K epch:35.97 loss:4.471 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 01:38:55 celerate.py:304 step:76K smpl:1M ep:7K epch:36.01 loss:4.520 grdn:0.903 lr:2.5e-06 updt_s:0.825 data_s:0.024
+INFO 2026-02-09 01:40:19 celerate.py:304 step:76K smpl:1M ep:7K epch:36.06 loss:4.495 grdn:0.932 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:41:43 celerate.py:304 step:76K smpl:1M ep:7K epch:36.11 loss:4.474 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:43:07 celerate.py:304 step:76K smpl:1M ep:7K epch:36.16 loss:4.482 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:44:31 celerate.py:304 step:76K smpl:1M ep:7K epch:36.20 loss:4.490 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:45:55 celerate.py:304 step:76K smpl:1M ep:7K epch:36.25 loss:4.525 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:47:19 celerate.py:304 step:76K smpl:1M ep:7K epch:36.30 loss:4.470 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:48:43 celerate.py:304 step:76K smpl:1M ep:7K epch:36.35 loss:4.463 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:50:07 celerate.py:304 step:76K smpl:1M ep:7K epch:36.40 loss:4.497 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:51:31 celerate.py:304 step:76K smpl:1M ep:7K epch:36.44 loss:4.520 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:52:55 celerate.py:304 step:77K smpl:1M ep:7K epch:36.49 loss:4.517 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:54:19 celerate.py:304 step:77K smpl:1M ep:7K epch:36.54 loss:4.506 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:55:43 celerate.py:304 step:77K smpl:1M ep:7K epch:36.59 loss:4.520 grdn:0.925 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:57:07 celerate.py:304 step:77K smpl:1M ep:7K epch:36.63 loss:4.463 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:58:31 celerate.py:304 step:77K smpl:1M ep:7K epch:36.68 loss:4.465 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 01:59:55 celerate.py:304 step:77K smpl:1M ep:7K epch:36.73 loss:4.459 grdn:0.916 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:01:19 celerate.py:304 step:77K smpl:1M ep:7K epch:36.78 loss:4.477 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:02:43 celerate.py:304 step:77K smpl:1M ep:7K epch:36.82 loss:4.483 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:04:07 celerate.py:304 step:77K smpl:1M ep:7K epch:36.87 loss:4.445 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:05:31 celerate.py:304 step:78K smpl:1M ep:7K epch:36.92 loss:4.506 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:06:55 celerate.py:304 step:78K smpl:1M ep:7K epch:36.97 loss:4.518 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 02:08:20 celerate.py:304 step:78K smpl:1M ep:7K epch:37.01 loss:4.474 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.022
+INFO 2026-02-09 02:09:45 celerate.py:304 step:78K smpl:1M ep:7K epch:37.06 loss:4.508 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.006
+INFO 2026-02-09 02:11:09 celerate.py:304 step:78K smpl:1M ep:7K epch:37.11 loss:4.484 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:12:33 celerate.py:304 step:78K smpl:1M ep:7K epch:37.16 loss:4.503 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:13:56 celerate.py:304 step:78K smpl:1M ep:7K epch:37.20 loss:4.489 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:15:21 celerate.py:304 step:78K smpl:1M ep:7K epch:37.25 loss:4.459 grdn:0.894 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:16:45 celerate.py:304 step:78K smpl:1M ep:7K epch:37.30 loss:4.481 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:18:09 celerate.py:304 step:78K smpl:1M ep:7K epch:37.35 loss:4.513 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:19:32 celerate.py:304 step:78K smpl:1M ep:7K epch:37.40 loss:4.502 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:20:56 celerate.py:304 step:79K smpl:1M ep:7K epch:37.44 loss:4.452 grdn:0.900 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:22:21 celerate.py:304 step:79K smpl:1M ep:7K epch:37.49 loss:4.505 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:23:45 celerate.py:304 step:79K smpl:1M ep:8K epch:37.54 loss:4.491 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:25:08 celerate.py:304 step:79K smpl:1M ep:8K epch:37.59 loss:4.484 grdn:0.888 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:26:32 celerate.py:304 step:79K smpl:1M ep:8K epch:37.63 loss:4.468 grdn:0.918 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:27:56 celerate.py:304 step:79K smpl:1M ep:8K epch:37.68 loss:4.502 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:29:20 celerate.py:304 step:79K smpl:1M ep:8K epch:37.73 loss:4.481 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:30:44 celerate.py:304 step:79K smpl:1M ep:8K epch:37.78 loss:4.497 grdn:0.893 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:32:08 celerate.py:304 step:79K smpl:1M ep:8K epch:37.82 loss:4.494 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:33:32 celerate.py:304 step:80K smpl:1M ep:8K epch:37.87 loss:4.445 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:34:56 celerate.py:304 step:80K smpl:1M ep:8K epch:37.92 loss:4.493 grdn:0.931 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:36:20 celerate.py:304 step:80K smpl:1M ep:8K epch:37.97 loss:4.503 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 02:37:46 celerate.py:304 step:80K smpl:1M ep:8K epch:38.01 loss:4.515 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.025
+INFO 2026-02-09 02:39:10 celerate.py:304 step:80K smpl:1M ep:8K epch:38.06 loss:4.461 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:40:34 celerate.py:304 step:80K smpl:1M ep:8K epch:38.11 loss:4.463 grdn:0.891 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:40:34 celerate.py:329 Checkpoint policy after step 80000
+INFO 2026-02-09 02:42:17 celerate.py:304 step:80K smpl:1M ep:8K epch:38.16 loss:4.508 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:43:41 celerate.py:304 step:80K smpl:1M ep:8K epch:38.21 loss:4.507 grdn:0.893 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:45:05 celerate.py:304 step:80K smpl:1M ep:8K epch:38.25 loss:4.468 grdn:0.914 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:46:29 celerate.py:304 step:80K smpl:1M ep:8K epch:38.30 loss:4.494 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:47:53 celerate.py:304 step:80K smpl:1M ep:8K epch:38.35 loss:4.508 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:49:18 celerate.py:304 step:81K smpl:1M ep:8K epch:38.40 loss:4.496 grdn:0.906 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:50:42 celerate.py:304 step:81K smpl:1M ep:8K epch:38.44 loss:4.519 grdn:0.906 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:52:06 celerate.py:304 step:81K smpl:1M ep:8K epch:38.49 loss:4.469 grdn:0.905 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:53:30 celerate.py:304 step:81K smpl:1M ep:8K epch:38.54 loss:4.495 grdn:0.903 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-09 02:54:54 celerate.py:304 step:81K smpl:1M ep:8K epch:38.59 loss:4.476 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:56:18 celerate.py:304 step:81K smpl:1M ep:8K epch:38.63 loss:4.518 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 02:57:42 celerate.py:304 step:81K smpl:1M ep:8K epch:38.68 loss:4.467 grdn:0.916 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 02:59:06 celerate.py:304 step:81K smpl:1M ep:8K epch:38.73 loss:4.513 grdn:0.890 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:00:30 celerate.py:304 step:81K smpl:1M ep:8K epch:38.78 loss:4.489 grdn:0.905 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:01:54 celerate.py:304 step:82K smpl:1M ep:8K epch:38.82 loss:4.499 grdn:0.908 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:03:18 celerate.py:304 step:82K smpl:1M ep:8K epch:38.87 loss:4.521 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:04:42 celerate.py:304 step:82K smpl:1M ep:8K epch:38.92 loss:4.504 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:06:06 celerate.py:304 step:82K smpl:1M ep:8K epch:38.97 loss:4.471 grdn:0.898 lr:2.5e-06 updt_s:0.825 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 03:07:31 celerate.py:304 step:82K smpl:1M ep:8K epch:39.02 loss:4.476 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.023
+INFO 2026-02-09 03:08:55 celerate.py:304 step:82K smpl:1M ep:8K epch:39.06 loss:4.514 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:10:19 celerate.py:304 step:82K smpl:1M ep:8K epch:39.11 loss:4.469 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:11:43 celerate.py:304 step:82K smpl:1M ep:8K epch:39.16 loss:4.503 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:13:07 celerate.py:304 step:82K smpl:1M ep:8K epch:39.21 loss:4.475 grdn:0.899 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:14:31 celerate.py:304 step:82K smpl:1M ep:8K epch:39.25 loss:4.503 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:15:55 celerate.py:304 step:82K smpl:1M ep:8K epch:39.30 loss:4.497 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:17:19 celerate.py:304 step:83K smpl:1M ep:8K epch:39.35 loss:4.531 grdn:0.900 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:18:43 celerate.py:304 step:83K smpl:1M ep:8K epch:39.40 loss:4.467 grdn:0.899 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:20:07 celerate.py:304 step:83K smpl:1M ep:8K epch:39.44 loss:4.490 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:21:31 celerate.py:304 step:83K smpl:1M ep:8K epch:39.49 loss:4.505 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:22:55 celerate.py:304 step:83K smpl:1M ep:8K epch:39.54 loss:4.500 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:24:19 celerate.py:304 step:83K smpl:1M ep:8K epch:39.59 loss:4.526 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:25:43 celerate.py:304 step:83K smpl:1M ep:8K epch:39.63 loss:4.511 grdn:0.909 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:27:07 celerate.py:304 step:83K smpl:1M ep:8K epch:39.68 loss:4.444 grdn:0.910 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:28:31 celerate.py:304 step:83K smpl:1M ep:8K epch:39.73 loss:4.458 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:29:54 celerate.py:304 step:84K smpl:1M ep:8K epch:39.78 loss:4.487 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:31:18 celerate.py:304 step:84K smpl:1M ep:8K epch:39.82 loss:4.464 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:32:43 celerate.py:304 step:84K smpl:1M ep:8K epch:39.87 loss:4.499 grdn:0.901 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-09 03:34:07 celerate.py:304 step:84K smpl:1M ep:8K epch:39.92 loss:4.470 grdn:0.952 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:35:30 celerate.py:304 step:84K smpl:1M ep:8K epch:39.97 loss:4.464 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 03:36:56 celerate.py:304 step:84K smpl:1M ep:8K epch:40.02 loss:4.473 grdn:0.895 lr:2.5e-06 updt_s:0.825 data_s:0.023
+INFO 2026-02-09 03:38:20 celerate.py:304 step:84K smpl:1M ep:8K epch:40.06 loss:4.471 grdn:0.909 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-09 03:39:44 celerate.py:304 step:84K smpl:1M ep:8K epch:40.11 loss:4.479 grdn:0.902 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:41:08 celerate.py:304 step:84K smpl:1M ep:8K epch:40.16 loss:4.486 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:42:33 celerate.py:304 step:84K smpl:1M ep:8K epch:40.21 loss:4.486 grdn:0.914 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-09 03:43:56 celerate.py:304 step:84K smpl:1M ep:8K epch:40.25 loss:4.486 grdn:0.904 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:45:21 celerate.py:304 step:85K smpl:1M ep:8K epch:40.30 loss:4.496 grdn:0.909 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:46:45 celerate.py:304 step:85K smpl:1M ep:8K epch:40.35 loss:4.516 grdn:0.897 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:48:09 celerate.py:304 step:85K smpl:1M ep:8K epch:40.40 loss:4.481 grdn:0.921 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:49:33 celerate.py:304 step:85K smpl:1M ep:8K epch:40.44 loss:4.478 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:50:56 celerate.py:304 step:85K smpl:1M ep:8K epch:40.49 loss:4.461 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:50:56 celerate.py:329 Checkpoint policy after step 85000
+INFO 2026-02-09 03:52:39 celerate.py:304 step:85K smpl:1M ep:8K epch:40.54 loss:4.545 grdn:0.898 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:54:03 celerate.py:304 step:85K smpl:1M ep:8K epch:40.59 loss:4.518 grdn:0.903 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 03:55:27 celerate.py:304 step:85K smpl:1M ep:8K epch:40.63 loss:4.487 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:56:51 celerate.py:304 step:85K smpl:1M ep:8K epch:40.68 loss:4.537 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:58:15 celerate.py:304 step:86K smpl:1M ep:8K epch:40.73 loss:4.479 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 03:59:39 celerate.py:304 step:86K smpl:1M ep:8K epch:40.78 loss:4.481 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:01:03 celerate.py:304 step:86K smpl:1M ep:8K epch:40.83 loss:4.505 grdn:0.983 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 04:02:27 celerate.py:304 step:86K smpl:1M ep:8K epch:40.87 loss:4.456 grdn:0.898 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 04:03:51 celerate.py:304 step:86K smpl:1M ep:8K epch:40.92 loss:4.484 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:05:15 celerate.py:304 step:86K smpl:1M ep:8K epch:40.97 loss:4.466 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 04:06:41 celerate.py:304 step:86K smpl:1M ep:8K epch:41.02 loss:4.478 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.025
+INFO 2026-02-09 04:08:05 celerate.py:304 step:86K smpl:1M ep:8K epch:41.06 loss:4.459 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:09:29 celerate.py:304 step:86K smpl:1M ep:8K epch:41.11 loss:4.493 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:10:53 celerate.py:304 step:86K smpl:1M ep:8K epch:41.16 loss:4.507 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:12:17 celerate.py:304 step:86K smpl:1M ep:8K epch:41.21 loss:4.546 grdn:0.917 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:13:41 celerate.py:304 step:87K smpl:1M ep:8K epch:41.25 loss:4.453 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:15:05 celerate.py:304 step:87K smpl:1M ep:8K epch:41.30 loss:4.478 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:16:28 celerate.py:304 step:87K smpl:1M ep:8K epch:41.35 loss:4.489 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:17:52 celerate.py:304 step:87K smpl:1M ep:8K epch:41.40 loss:4.498 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:19:16 celerate.py:304 step:87K smpl:1M ep:8K epch:41.44 loss:4.478 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:20:40 celerate.py:304 step:87K smpl:1M ep:8K epch:41.49 loss:4.521 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:22:04 celerate.py:304 step:87K smpl:1M ep:8K epch:41.54 loss:4.474 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:23:28 celerate.py:304 step:87K smpl:1M ep:8K epch:41.59 loss:4.481 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:24:52 celerate.py:304 step:87K smpl:1M ep:8K epch:41.64 loss:4.476 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:26:16 celerate.py:304 step:88K smpl:1M ep:8K epch:41.68 loss:4.466 grdn:0.948 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:27:40 celerate.py:304 step:88K smpl:1M ep:8K epch:41.73 loss:4.486 grdn:0.924 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:29:04 celerate.py:304 step:88K smpl:1M ep:8K epch:41.78 loss:4.560 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:30:28 celerate.py:304 step:88K smpl:1M ep:8K epch:41.83 loss:4.478 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:31:52 celerate.py:304 step:88K smpl:1M ep:8K epch:41.87 loss:4.499 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:33:16 celerate.py:304 step:88K smpl:1M ep:8K epch:41.92 loss:4.482 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:34:40 celerate.py:304 step:88K smpl:1M ep:8K epch:41.97 loss:4.478 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 04:36:05 celerate.py:304 step:88K smpl:1M ep:8K epch:42.02 loss:4.488 grdn:0.907 lr:2.5e-06 updt_s:0.826 data_s:0.021
+INFO 2026-02-09 04:37:29 celerate.py:304 step:88K smpl:1M ep:8K epch:42.06 loss:4.488 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:38:53 celerate.py:304 step:88K smpl:1M ep:8K epch:42.11 loss:4.497 grdn:0.907 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 04:40:17 celerate.py:304 step:88K smpl:1M ep:8K epch:42.16 loss:4.480 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:41:41 celerate.py:304 step:89K smpl:1M ep:8K epch:42.21 loss:4.492 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:43:05 celerate.py:304 step:89K smpl:1M ep:8K epch:42.25 loss:4.516 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:44:29 celerate.py:304 step:89K smpl:1M ep:8K epch:42.30 loss:4.490 grdn:0.918 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:45:53 celerate.py:304 step:89K smpl:1M ep:8K epch:42.35 loss:4.510 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:47:17 celerate.py:304 step:89K smpl:1M ep:8K epch:42.40 loss:4.482 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:48:41 celerate.py:304 step:89K smpl:1M ep:8K epch:42.44 loss:4.473 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:50:05 celerate.py:304 step:89K smpl:1M ep:8K epch:42.49 loss:4.491 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:51:29 celerate.py:304 step:89K smpl:1M ep:9K epch:42.54 loss:4.538 grdn:0.917 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:52:53 celerate.py:304 step:89K smpl:1M ep:9K epch:42.59 loss:4.475 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:54:17 celerate.py:304 step:90K smpl:1M ep:9K epch:42.64 loss:4.506 grdn:0.921 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:55:41 celerate.py:304 step:90K smpl:1M ep:9K epch:42.68 loss:4.493 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:57:05 celerate.py:304 step:90K smpl:1M ep:9K epch:42.73 loss:4.461 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:58:29 celerate.py:304 step:90K smpl:1M ep:9K epch:42.78 loss:4.501 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 04:59:53 celerate.py:304 step:90K smpl:1M ep:9K epch:42.83 loss:4.462 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:01:17 celerate.py:304 step:90K smpl:1M ep:9K epch:42.87 loss:4.510 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:01:17 celerate.py:329 Checkpoint policy after step 90000
+INFO 2026-02-09 05:03:03 celerate.py:304 step:90K smpl:1M ep:9K epch:42.92 loss:4.485 grdn:0.904 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 05:04:26 celerate.py:304 step:90K smpl:1M ep:9K epch:42.97 loss:4.460 grdn:0.922 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 05:05:52 celerate.py:304 step:90K smpl:1M ep:9K epch:43.02 loss:4.462 grdn:0.905 lr:2.5e-06 updt_s:0.825 data_s:0.024
+INFO 2026-02-09 05:07:16 celerate.py:304 step:90K smpl:1M ep:9K epch:43.06 loss:4.501 grdn:0.906 lr:2.5e-06 updt_s:0.826 data_s:0.007
+INFO 2026-02-09 05:08:40 celerate.py:304 step:90K smpl:1M ep:9K epch:43.11 loss:4.497 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:10:04 celerate.py:304 step:91K smpl:1M ep:9K epch:43.16 loss:4.501 grdn:0.899 lr:2.5e-06 updt_s:0.823 data_s:0.007
+INFO 2026-02-09 05:11:28 celerate.py:304 step:91K smpl:1M ep:9K epch:43.21 loss:4.476 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:12:52 celerate.py:304 step:91K smpl:1M ep:9K epch:43.25 loss:4.467 grdn:0.913 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:14:16 celerate.py:304 step:91K smpl:1M ep:9K epch:43.30 loss:4.505 grdn:0.925 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 05:15:40 celerate.py:304 step:91K smpl:1M ep:9K epch:43.35 loss:4.497 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:17:04 celerate.py:304 step:91K smpl:1M ep:9K epch:43.40 loss:4.516 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:18:28 celerate.py:304 step:91K smpl:1M ep:9K epch:43.45 loss:4.456 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:19:52 celerate.py:304 step:91K smpl:1M ep:9K epch:43.49 loss:4.518 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:21:16 celerate.py:304 step:91K smpl:1M ep:9K epch:43.54 loss:4.500 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:22:40 celerate.py:304 step:92K smpl:1M ep:9K epch:43.59 loss:4.489 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:24:03 celerate.py:304 step:92K smpl:1M ep:9K epch:43.64 loss:4.476 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:25:27 celerate.py:304 step:92K smpl:1M ep:9K epch:43.68 loss:4.483 grdn:0.917 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:26:51 celerate.py:304 step:92K smpl:1M ep:9K epch:43.73 loss:4.497 grdn:0.903 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:28:15 celerate.py:304 step:92K smpl:1M ep:9K epch:43.78 loss:4.470 grdn:0.919 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:29:39 celerate.py:304 step:92K smpl:1M ep:9K epch:43.83 loss:4.494 grdn:0.914 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:31:03 celerate.py:304 step:92K smpl:1M ep:9K epch:43.87 loss:4.521 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:32:27 celerate.py:304 step:92K smpl:1M ep:9K epch:43.92 loss:4.464 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:33:51 celerate.py:304 step:92K smpl:1M ep:9K epch:43.97 loss:4.438 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+TOKENIZERS_PARALLELISMTo disable this warning, you can either:
+=(true | false)
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 05:35:16 celerate.py:304 step:92K smpl:1M ep:9K epch:44.02 loss:4.496 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.024
+INFO 2026-02-09 05:36:40 celerate.py:304 step:92K smpl:1M ep:9K epch:44.06 loss:4.484 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:38:04 celerate.py:304 step:93K smpl:1M ep:9K epch:44.11 loss:4.448 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:39:28 celerate.py:304 step:93K smpl:1M ep:9K epch:44.16 loss:4.520 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:40:52 celerate.py:304 step:93K smpl:1M ep:9K epch:44.21 loss:4.519 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:42:16 celerate.py:304 step:93K smpl:1M ep:9K epch:44.26 loss:4.503 grdn:0.905 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:43:40 celerate.py:304 step:93K smpl:1M ep:9K epch:44.30 loss:4.420 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:45:04 celerate.py:304 step:93K smpl:1M ep:9K epch:44.35 loss:4.523 grdn:0.889 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 05:46:28 celerate.py:304 step:93K smpl:1M ep:9K epch:44.40 loss:4.475 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:47:52 celerate.py:304 step:93K smpl:1M ep:9K epch:44.45 loss:4.505 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:49:16 celerate.py:304 step:93K smpl:1M ep:9K epch:44.49 loss:4.497 grdn:0.912 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 05:50:40 celerate.py:304 step:94K smpl:1M ep:9K epch:44.54 loss:4.484 grdn:0.918 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:52:04 celerate.py:304 step:94K smpl:1M ep:9K epch:44.59 loss:4.498 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:53:28 celerate.py:304 step:94K smpl:1M ep:9K epch:44.64 loss:4.496 grdn:0.904 lr:2.5e-06 updt_s:0.827 data_s:0.007
+INFO 2026-02-09 05:54:52 celerate.py:304 step:94K smpl:2M ep:9K epch:44.68 loss:4.506 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:56:16 celerate.py:304 step:94K smpl:2M ep:9K epch:44.73 loss:4.470 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:57:40 celerate.py:304 step:94K smpl:2M ep:9K epch:44.78 loss:4.507 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 05:59:04 celerate.py:304 step:94K smpl:2M ep:9K epch:44.83 loss:4.433 grdn:0.899 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:00:28 celerate.py:304 step:94K smpl:2M ep:9K epch:44.87 loss:4.507 grdn:0.891 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:01:52 celerate.py:304 step:94K smpl:2M ep:9K epch:44.92 loss:4.454 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:03:16 celerate.py:304 step:94K smpl:2M ep:9K epch:44.97 loss:4.536 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 06:04:42 celerate.py:304 step:94K smpl:2M ep:9K epch:45.02 loss:4.535 grdn:0.908 lr:2.5e-06 updt_s:0.831 data_s:0.025
+INFO 2026-02-09 06:06:06 celerate.py:304 step:95K smpl:2M ep:9K epch:45.07 loss:4.503 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:07:30 celerate.py:304 step:95K smpl:2M ep:9K epch:45.11 loss:4.482 grdn:0.892 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 06:08:54 celerate.py:304 step:95K smpl:2M ep:9K epch:45.16 loss:4.517 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:10:18 celerate.py:304 step:95K smpl:2M ep:9K epch:45.21 loss:4.482 grdn:0.917 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:11:42 celerate.py:304 step:95K smpl:2M ep:9K epch:45.26 loss:4.445 grdn:0.933 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 06:11:42 celerate.py:329 Checkpoint policy after step 95000
+INFO 2026-02-09 06:13:28 celerate.py:304 step:95K smpl:2M ep:9K epch:45.30 loss:4.436 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:14:52 celerate.py:304 step:95K smpl:2M ep:9K epch:45.35 loss:4.484 grdn:0.910 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:16:16 celerate.py:304 step:95K smpl:2M ep:9K epch:45.40 loss:4.499 grdn:0.909 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 06:17:40 celerate.py:304 step:95K smpl:2M ep:9K epch:45.45 loss:4.505 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:19:04 celerate.py:304 step:96K smpl:2M ep:9K epch:45.49 loss:4.521 grdn:0.892 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 06:20:27 celerate.py:304 step:96K smpl:2M ep:9K epch:45.54 loss:4.430 grdn:0.893 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:21:51 celerate.py:304 step:96K smpl:2M ep:9K epch:45.59 loss:4.479 grdn:0.896 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:23:15 celerate.py:304 step:96K smpl:2M ep:9K epch:45.64 loss:4.468 grdn:0.898 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 06:24:39 celerate.py:304 step:96K smpl:2M ep:9K epch:45.68 loss:4.487 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:26:03 celerate.py:304 step:96K smpl:2M ep:9K epch:45.73 loss:4.524 grdn:0.919 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:27:27 celerate.py:304 step:96K smpl:2M ep:9K epch:45.78 loss:4.524 grdn:0.919 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:28:51 celerate.py:304 step:96K smpl:2M ep:9K epch:45.83 loss:4.487 grdn:0.904 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 06:30:15 celerate.py:304 step:96K smpl:2M ep:9K epch:45.87 loss:4.517 grdn:0.902 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 06:31:39 celerate.py:304 step:96K smpl:2M ep:9K epch:45.92 loss:4.508 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:33:03 celerate.py:304 step:96K smpl:2M ep:9K epch:45.97 loss:4.493 grdn:0.912 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 06:34:29 celerate.py:304 step:97K smpl:2M ep:9K epch:46.02 loss:4.480 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.024
+INFO 2026-02-09 06:35:53 celerate.py:304 step:97K smpl:2M ep:9K epch:46.07 loss:4.464 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:37:17 celerate.py:304 step:97K smpl:2M ep:9K epch:46.11 loss:4.511 grdn:0.897 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:38:41 celerate.py:304 step:97K smpl:2M ep:9K epch:46.16 loss:4.491 grdn:0.895 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:40:05 celerate.py:304 step:97K smpl:2M ep:9K epch:46.21 loss:4.507 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:41:29 celerate.py:304 step:97K smpl:2M ep:9K epch:46.26 loss:4.482 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:42:52 celerate.py:304 step:97K smpl:2M ep:9K epch:46.30 loss:4.470 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:44:16 celerate.py:304 step:97K smpl:2M ep:9K epch:46.35 loss:4.531 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:45:40 celerate.py:304 step:97K smpl:2M ep:9K epch:46.40 loss:4.454 grdn:0.889 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:47:04 celerate.py:304 step:98K smpl:2M ep:9K epch:46.45 loss:4.484 grdn:0.922 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:48:28 celerate.py:304 step:98K smpl:2M ep:9K epch:46.49 loss:4.490 grdn:0.894 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:49:52 celerate.py:304 step:98K smpl:2M ep:9K epch:46.54 loss:4.501 grdn:0.911 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:51:16 celerate.py:304 step:98K smpl:2M ep:9K epch:46.59 loss:4.489 grdn:0.892 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:52:40 celerate.py:304 step:98K smpl:2M ep:9K epch:46.64 loss:4.485 grdn:0.919 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:54:04 celerate.py:304 step:98K smpl:2M ep:9K epch:46.68 loss:4.502 grdn:0.924 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:55:28 celerate.py:304 step:98K smpl:2M ep:9K epch:46.73 loss:4.511 grdn:0.919 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:56:52 celerate.py:304 step:98K smpl:2M ep:9K epch:46.78 loss:4.452 grdn:0.904 lr:2.5e-06 updt_s:0.825 data_s:0.007
+INFO 2026-02-09 06:58:16 celerate.py:304 step:98K smpl:2M ep:9K epch:46.83 loss:4.470 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 06:59:40 celerate.py:304 step:98K smpl:2M ep:9K epch:46.88 loss:4.522 grdn:0.918 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:01:04 celerate.py:304 step:98K smpl:2M ep:9K epch:46.92 loss:4.458 grdn:0.902 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:02:28 celerate.py:304 step:99K smpl:2M ep:9K epch:46.97 loss:4.512 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+ - Avoid using `tokenizers` before the fork if possible
+ - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/torchvision/io/_video_deprecation_warning.py:5: UserWarning: The video decoding and encoding capabilities of torchvision are deprecated from version 0.22 and will be removed in version 0.24. We recommend that you migrate to TorchCodec, where we'll consolidate the future decoding/encoding capabilities of PyTorch: https://github.com/pytorch/torchcodec
+ warnings.warn(
+INFO 2026-02-09 07:03:53 celerate.py:304 step:99K smpl:2M ep:9K epch:47.02 loss:4.505 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.024
+INFO 2026-02-09 07:05:17 celerate.py:304 step:99K smpl:2M ep:9K epch:47.07 loss:4.494 grdn:0.908 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:06:41 celerate.py:304 step:99K smpl:2M ep:9K epch:47.11 loss:4.464 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:08:05 celerate.py:304 step:99K smpl:2M ep:9K epch:47.16 loss:4.493 grdn:0.906 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:09:29 celerate.py:304 step:99K smpl:2M ep:9K epch:47.21 loss:4.478 grdn:0.915 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:10:53 celerate.py:304 step:99K smpl:2M ep:9K epch:47.26 loss:4.530 grdn:0.898 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:12:17 celerate.py:304 step:99K smpl:2M ep:9K epch:47.30 loss:4.485 grdn:0.900 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:13:41 celerate.py:304 step:99K smpl:2M ep:9K epch:47.35 loss:4.446 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:15:05 celerate.py:304 step:100K smpl:2M ep:9K epch:47.40 loss:4.518 grdn:0.918 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:16:29 celerate.py:304 step:100K smpl:2M ep:9K epch:47.45 loss:4.461 grdn:0.901 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:17:53 celerate.py:304 step:100K smpl:2M ep:9K epch:47.49 loss:4.503 grdn:0.904 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:19:16 celerate.py:304 step:100K smpl:2M ep:10K epch:47.54 loss:4.511 grdn:0.909 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:20:40 celerate.py:304 step:100K smpl:2M ep:10K epch:47.59 loss:4.484 grdn:0.916 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:22:04 celerate.py:304 step:100K smpl:2M ep:10K epch:47.64 loss:4.518 grdn:0.907 lr:2.5e-06 updt_s:0.824 data_s:0.007
+INFO 2026-02-09 07:22:04 celerate.py:329 Checkpoint policy after step 100000
+INFO 2026-02-09 07:22:24 celerate.py:386 End of training
+[rank0]:[W209 07:22:25.525998228 ProcessGroupNCCL.cpp:1479] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
diff --git a/scripts/logs/pi0fast_baseline_libero_h100.out b/scripts/logs/pi0fast_baseline_libero_h100.out
new file mode 100644
index 0000000000000000000000000000000000000000..c06324691dc3d751d0d3639977321e930e9c5483
--- /dev/null
+++ b/scripts/logs/pi0fast_baseline_libero_h100.out
@@ -0,0 +1,569 @@
+================================================================================
+JobID = 12538472
+User = u24877, Account = kisski-spath
+Partition = kisski, Nodelist = ggpu174
+================================================================================
+===========================================
+Experiment: pi0fast_baseline_libero_30k
+Codebase: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid
+Data: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10
+Policy Config Source: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/configs/policy_config/pi0fast_baseline.json
+Temp Config Dir: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/temp/configs/pi0fast_baseline_libero_30k
+Output Dir: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/outputs/train/2026-02-08/07-52-25_pi0fast_baseline_libero_30k
+===========================================
+{'batch_size': 4,
+ 'dataset': {'episodes': None,
+ 'image_transforms': {'enable': True,
+ 'image_tfs': {'brightness': {'kwargs': {'brightness': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'contrast': {'kwargs': {'contrast': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'crop_resize': {'kwargs': {'ratio': [1,
+ 1],
+ 'scale': [0.9,
+ 0.95],
+ 'size': [256,
+ 256]},
+ 'type': 'RandomResizedCrop',
+ 'weight': 1.0},
+ 'hue': {'kwargs': {'hue': [-0.05,
+ 0.05]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'rotate': {'kwargs': {'degrees': [-5,
+ 5]},
+ 'type': 'RandomRotate',
+ 'weight': 1.0},
+ 'saturation': {'kwargs': {'saturation': [0.5,
+ 1.5]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'sharpness': {'kwargs': {'sharpness': [0.5,
+ 1.5]},
+ 'type': 'SharpnessJitter',
+ 'weight': 1.0}},
+ 'max_num_transforms': 3,
+ 'random_order': False,
+ 'wrist_tfs': {'brightness': {'kwargs': {'brightness': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'contrast': {'kwargs': {'contrast': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'hue': {'kwargs': {'hue': [-0.05,
+ 0.05]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'saturation': {'kwargs': {'saturation': [0.5,
+ 1.5]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'sharpness': {'kwargs': {'sharpness': [0.5,
+ 1.5]},
+ 'type': 'SharpnessJitter',
+ 'weight': 1.0}}},
+ 'repo_id': '.',
+ 'revision': None,
+ 'root': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10',
+ 'use_imagenet_stats': True,
+ 'video_backend': 'pyav',
+ 'vqa_data_path': None},
+ 'env': None,
+ 'eval': {'batch_size': 50, 'n_episodes': 50, 'use_async_envs': False},
+ 'eval_freq': 20000,
+ 'job_name': 'pi0fast_baseline_libero_30k',
+ 'log_freq': 100,
+ 'num_workers': 8,
+ 'optimizer': {'betas': [0.9, 0.95],
+ 'eps': 1e-08,
+ 'grad_clip_norm': 1,
+ 'lr': 0.0001,
+ 'type': 'adamw',
+ 'weight_decay': 1e-05},
+ 'output_dir': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/outputs/train/2026-02-08/07-52-25_pi0fast_baseline_libero_30k',
+ 'policy': {'adapt_to_pi_aloha': False,
+ 'checkpoint_path': None,
+ 'chunk_size': 10,
+ 'device': 'cuda',
+ 'empty_cameras': 0,
+ 'fast_skip_tokens': 128,
+ 'fast_tokenizer_path': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi/fast',
+ 'freeze_lm_head': True,
+ 'freeze_vision_encoder': True,
+ 'grad_clip_norm': 1,
+ 'gradient_accumulation_steps': 4,
+ 'input_features': {},
+ 'interpolate_like_pi': False,
+ 'max_action_dim': 32,
+ 'max_decoding_steps': 256,
+ 'max_input_seq_len': 256,
+ 'max_state_dim': 32,
+ 'n_action_steps': 10,
+ 'n_obs_steps': 1,
+ 'normalization_mapping': {'ACTION': ,
+ 'STATE': ,
+ 'VISUAL': },
+ 'optimizer_betas': [0.9, 0.95],
+ 'optimizer_eps': 1e-08,
+ 'optimizer_lr': 0.0001,
+ 'optimizer_weight_decay': 1e-05,
+ 'output_features': {},
+ 'padding_side': 'right',
+ 'pi0_paligemma_path': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/paligemma-3b-pt-224',
+ 'precision': 'bfloat16',
+ 'proj_width': 1024,
+ 'relaxed_action_decoding': True,
+ 'resize_imgs_with_padding': [224, 224],
+ 'scheduler_decay_lr': 2.5e-06,
+ 'scheduler_decay_steps': 30000,
+ 'scheduler_warmup_steps': 1000,
+ 'tokenizer_max_length': 48,
+ 'type': 'pi0fast',
+ 'use_amp': False,
+ 'use_cache': True,
+ 'use_delta_joint_actions_aloha': False},
+ 'resume': False,
+ 'save_checkpoint': True,
+ 'save_freq': 5000,
+ 'scheduler': {'decay_lr': 2.5e-06,
+ 'num_decay_steps': 30000,
+ 'num_warmup_steps': 1000,
+ 'peak_lr': 0.0001,
+ 'type': 'cosine_decay_with_warmup'},
+ 'seed': 42,
+ 'steps': 100000,
+ 'use_policy_training_preset': True,
+ 'wandb': {'disable_artifact': True,
+ 'enable': False,
+ 'entity': 'Robotics_VLA',
+ 'mode': 'online',
+ 'notes': None,
+ 'project': 'pi0_lerobot',
+ 'run_id': None}}
+{'batch_size': 4,
+ 'dataset': {'episodes': None,
+ 'image_transforms': {'enable': True,
+ 'image_tfs': {'brightness': {'kwargs': {'brightness': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'contrast': {'kwargs': {'contrast': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'crop_resize': {'kwargs': {'ratio': [1,
+ 1],
+ 'scale': [0.9,
+ 0.95],
+ 'size': [256,
+ 256]},
+ 'type': 'RandomResizedCrop',
+ 'weight': 1.0},
+ 'hue': {'kwargs': {'hue': [-0.05,
+ 0.05]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'rotate': {'kwargs': {'degrees': [-5,
+ 5]},
+ 'type': 'RandomRotate',
+ 'weight': 1.0},
+ 'saturation': {'kwargs': {'saturation': [0.5,
+ 1.5]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'sharpness': {'kwargs': {'sharpness': [0.5,
+ 1.5]},
+ 'type': 'SharpnessJitter',
+ 'weight': 1.0}},
+ 'max_num_transforms': 3,
+ 'random_order': False,
+ 'wrist_tfs': {'brightness': {'kwargs': {'brightness': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'contrast': {'kwargs': {'contrast': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'hue': {'kwargs': {'hue': [-0.05,
+ 0.05]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'saturation': {'kwargs': {'saturation': [0.5,
+ 1.5]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'sharpness': {'kwargs': {'sharpness': [0.5,
+ 1.5]},
+ 'type': 'SharpnessJitter',
+ 'weight': 1.0}}},
+ 'repo_id': '.',
+ 'revision': None,
+ 'root': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10',
+ 'use_imagenet_stats': True,
+ 'video_backend': 'pyav',
+ 'vqa_data_path': None},
+ 'env': None,
+ 'eval': {'batch_size': 50, 'n_episodes': 50, 'use_async_envs': False},
+ 'eval_freq': 20000,
+ 'job_name': 'pi0fast_baseline_libero_30k',
+ 'log_freq': 100,
+ 'num_workers': 8,
+ 'optimizer': {'betas': [0.9, 0.95],
+ 'eps': 1e-08,
+ 'grad_clip_norm': 1,
+ 'lr': 0.0001,
+ 'type': 'adamw',
+ 'weight_decay': 1e-05},
+ 'output_dir': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/outputs/train/2026-02-08/07-52-25_pi0fast_baseline_libero_30k',
+ 'policy': {'adapt_to_pi_aloha': False,
+ 'checkpoint_path': None,
+ 'chunk_size': 10,
+ 'device': 'cuda',
+ 'empty_cameras': 0,
+ 'fast_skip_tokens': 128,
+ 'fast_tokenizer_path': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi/fast',
+ 'freeze_lm_head': True,
+ 'freeze_vision_encoder': True,
+ 'grad_clip_norm': 1,
+ 'gradient_accumulation_steps': 4,
+ 'input_features': {},
+ 'interpolate_like_pi': False,
+ 'max_action_dim': 32,
+ 'max_decoding_steps': 256,
+ 'max_input_seq_len': 256,
+ 'max_state_dim': 32,
+ 'n_action_steps': 10,
+ 'n_obs_steps': 1,
+ 'normalization_mapping': {'ACTION': ,
+ 'STATE': ,
+ 'VISUAL': },
+ 'optimizer_betas': [0.9, 0.95],
+ 'optimizer_eps': 1e-08,
+ 'optimizer_lr': 0.0001,
+ 'optimizer_weight_decay': 1e-05,
+ 'output_features': {},
+ 'padding_side': 'right',
+ 'pi0_paligemma_path': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/paligemma-3b-pt-224',
+ 'precision': 'bfloat16',
+ 'proj_width': 1024,
+ 'relaxed_action_decoding': True,
+ 'resize_imgs_with_padding': [224, 224],
+ 'scheduler_decay_lr': 2.5e-06,
+ 'scheduler_decay_steps': 30000,
+ 'scheduler_warmup_steps': 1000,
+ 'tokenizer_max_length': 48,
+ 'type': 'pi0fast',
+ 'use_amp': False,
+ 'use_cache': True,
+ 'use_delta_joint_actions_aloha': False},
+ 'resume': False,
+ 'save_checkpoint': True,
+ 'save_freq': 5000,
+ 'scheduler': {'decay_lr': 2.5e-06,
+ 'num_decay_steps': 30000,
+ 'num_warmup_steps': 1000,
+ 'peak_lr': 0.0001,
+ 'type': 'cosine_decay_with_warmup'},
+ 'seed': 42,
+ 'steps': 100000,
+ 'use_policy_training_preset': True,
+ 'wandb': {'disable_artifact': True,
+ 'enable': False,
+ 'entity': 'Robotics_VLA',
+ 'mode': 'online',
+ 'notes': None,
+ 'project': 'pi0_lerobot',
+ 'run_id': None}}
+{'batch_size': 4,
+ 'dataset': {'episodes': None,
+ 'image_transforms': {'enable': True,
+ 'image_tfs': {'brightness': {'kwargs': {'brightness': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'contrast': {'kwargs': {'contrast': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'crop_resize': {'kwargs': {'ratio': [1,
+ 1],
+ 'scale': [0.9,
+ 0.95],
+ 'size': [256,
+ 256]},
+ 'type': 'RandomResizedCrop',
+ 'weight': 1.0},
+ 'hue': {'kwargs': {'hue': [-0.05,
+ 0.05]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'rotate': {'kwargs': {'degrees': [-5,
+ 5]},
+ 'type': 'RandomRotate',
+ 'weight': 1.0},
+ 'saturation': {'kwargs': {'saturation': [0.5,
+ 1.5]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'sharpness': {'kwargs': {'sharpness': [0.5,
+ 1.5]},
+ 'type': 'SharpnessJitter',
+ 'weight': 1.0}},
+ 'max_num_transforms': 3,
+ 'random_order': False,
+ 'wrist_tfs': {'brightness': {'kwargs': {'brightness': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'contrast': {'kwargs': {'contrast': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'hue': {'kwargs': {'hue': [-0.05,
+ 0.05]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'saturation': {'kwargs': {'saturation': [0.5,
+ 1.5]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'sharpness': {'kwargs': {'sharpness': [0.5,
+ 1.5]},
+ 'type': 'SharpnessJitter',
+ 'weight': 1.0}}},
+ 'repo_id': '.',
+ 'revision': None,
+ 'root': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10',
+ 'use_imagenet_stats': True,
+ 'video_backend': 'pyav',
+ 'vqa_data_path': None},
+ 'env': None,
+ 'eval': {'batch_size': 50, 'n_episodes': 50, 'use_async_envs': False},
+ 'eval_freq': 20000,
+ 'job_name': 'pi0fast_baseline_libero_30k',
+ 'log_freq': 100,
+ 'num_workers': 8,
+ 'optimizer': {'betas': [0.9, 0.95],
+ 'eps': 1e-08,
+ 'grad_clip_norm': 1,
+ 'lr': 0.0001,
+ 'type': 'adamw',
+ 'weight_decay': 1e-05},
+ 'output_dir': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/outputs/train/2026-02-08/07-52-25_pi0fast_baseline_libero_30k',
+ 'policy': {'adapt_to_pi_aloha': False,
+ 'checkpoint_path': None,
+ 'chunk_size': 10,
+ 'device': 'cuda',
+ 'empty_cameras': 0,
+ 'fast_skip_tokens': 128,
+ 'fast_tokenizer_path': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi/fast',
+ 'freeze_lm_head': True,
+ 'freeze_vision_encoder': True,
+ 'grad_clip_norm': 1,
+ 'gradient_accumulation_steps': 4,
+ 'input_features': {},
+ 'interpolate_like_pi': False,
+ 'max_action_dim': 32,
+ 'max_decoding_steps': 256,
+ 'max_input_seq_len': 256,
+ 'max_state_dim': 32,
+ 'n_action_steps': 10,
+ 'n_obs_steps': 1,
+ 'normalization_mapping': {'ACTION': ,
+ 'STATE': ,
+ 'VISUAL': },
+ 'optimizer_betas': [0.9, 0.95],
+ 'optimizer_eps': 1e-08,
+ 'optimizer_lr': 0.0001,
+ 'optimizer_weight_decay': 1e-05,
+ 'output_features': {},
+ 'padding_side': 'right',
+ 'pi0_paligemma_path': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/paligemma-3b-pt-224',
+ 'precision': 'bfloat16',
+ 'proj_width': 1024,
+ 'relaxed_action_decoding': True,
+ 'resize_imgs_with_padding': [224, 224],
+ 'scheduler_decay_lr': 2.5e-06,
+ 'scheduler_decay_steps': 30000,
+ 'scheduler_warmup_steps': 1000,
+ 'tokenizer_max_length': 48,
+ 'type': 'pi0fast',
+ 'use_amp': False,
+ 'use_cache': True,
+ 'use_delta_joint_actions_aloha': False},
+ 'resume': False,
+ 'save_checkpoint': True,
+ 'save_freq': 5000,
+ 'scheduler': {'decay_lr': 2.5e-06,
+ 'num_decay_steps': 30000,
+ 'num_warmup_steps': 1000,
+ 'peak_lr': 0.0001,
+ 'type': 'cosine_decay_with_warmup'},
+ 'seed': 42,
+ 'steps': 100000,
+ 'use_policy_training_preset': True,
+ 'wandb': {'disable_artifact': True,
+ 'enable': False,
+ 'entity': 'Robotics_VLA',
+ 'mode': 'online',
+ 'notes': None,
+ 'project': 'pi0_lerobot',
+ 'run_id': None}}
+{'batch_size': 4,
+ 'dataset': {'episodes': None,
+ 'image_transforms': {'enable': True,
+ 'image_tfs': {'brightness': {'kwargs': {'brightness': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'contrast': {'kwargs': {'contrast': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'crop_resize': {'kwargs': {'ratio': [1,
+ 1],
+ 'scale': [0.9,
+ 0.95],
+ 'size': [256,
+ 256]},
+ 'type': 'RandomResizedCrop',
+ 'weight': 1.0},
+ 'hue': {'kwargs': {'hue': [-0.05,
+ 0.05]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'rotate': {'kwargs': {'degrees': [-5,
+ 5]},
+ 'type': 'RandomRotate',
+ 'weight': 1.0},
+ 'saturation': {'kwargs': {'saturation': [0.5,
+ 1.5]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'sharpness': {'kwargs': {'sharpness': [0.5,
+ 1.5]},
+ 'type': 'SharpnessJitter',
+ 'weight': 1.0}},
+ 'max_num_transforms': 3,
+ 'random_order': False,
+ 'wrist_tfs': {'brightness': {'kwargs': {'brightness': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'contrast': {'kwargs': {'contrast': [0.8,
+ 1.2]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'hue': {'kwargs': {'hue': [-0.05,
+ 0.05]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'saturation': {'kwargs': {'saturation': [0.5,
+ 1.5]},
+ 'type': 'ColorJitter',
+ 'weight': 1.0},
+ 'sharpness': {'kwargs': {'sharpness': [0.5,
+ 1.5]},
+ 'type': 'SharpnessJitter',
+ 'weight': 1.0}}},
+ 'repo_id': '.',
+ 'revision': None,
+ 'root': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10',
+ 'use_imagenet_stats': True,
+ 'video_backend': 'pyav',
+ 'vqa_data_path': None},
+ 'env': None,
+ 'eval': {'batch_size': 50, 'n_episodes': 50, 'use_async_envs': False},
+ 'eval_freq': 20000,
+ 'job_name': 'pi0fast_baseline_libero_30k',
+ 'log_freq': 100,
+ 'num_workers': 8,
+ 'optimizer': {'betas': [0.9, 0.95],
+ 'eps': 1e-08,
+ 'grad_clip_norm': 1,
+ 'lr': 0.0001,
+ 'type': 'adamw',
+ 'weight_decay': 1e-05},
+ 'output_dir': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/outputs/train/2026-02-08/07-52-25_pi0fast_baseline_libero_30k',
+ 'policy': {'adapt_to_pi_aloha': False,
+ 'checkpoint_path': None,
+ 'chunk_size': 10,
+ 'device': 'cuda',
+ 'empty_cameras': 0,
+ 'fast_skip_tokens': 128,
+ 'fast_tokenizer_path': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi/fast',
+ 'freeze_lm_head': True,
+ 'freeze_vision_encoder': True,
+ 'grad_clip_norm': 1,
+ 'gradient_accumulation_steps': 4,
+ 'input_features': {},
+ 'interpolate_like_pi': False,
+ 'max_action_dim': 32,
+ 'max_decoding_steps': 256,
+ 'max_input_seq_len': 256,
+ 'max_state_dim': 32,
+ 'n_action_steps': 10,
+ 'n_obs_steps': 1,
+ 'normalization_mapping': {'ACTION': ,
+ 'STATE': ,
+ 'VISUAL': },
+ 'optimizer_betas': [0.9, 0.95],
+ 'optimizer_eps': 1e-08,
+ 'optimizer_lr': 0.0001,
+ 'optimizer_weight_decay': 1e-05,
+ 'output_features': {},
+ 'padding_side': 'right',
+ 'pi0_paligemma_path': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/paligemma-3b-pt-224',
+ 'precision': 'bfloat16',
+ 'proj_width': 1024,
+ 'relaxed_action_decoding': True,
+ 'resize_imgs_with_padding': [224, 224],
+ 'scheduler_decay_lr': 2.5e-06,
+ 'scheduler_decay_steps': 30000,
+ 'scheduler_warmup_steps': 1000,
+ 'tokenizer_max_length': 48,
+ 'type': 'pi0fast',
+ 'use_amp': False,
+ 'use_cache': True,
+ 'use_delta_joint_actions_aloha': False},
+ 'resume': False,
+ 'save_checkpoint': True,
+ 'save_freq': 5000,
+ 'scheduler': {'decay_lr': 2.5e-06,
+ 'num_decay_steps': 30000,
+ 'num_warmup_steps': 1000,
+ 'peak_lr': 0.0001,
+ 'type': 'cosine_decay_with_warmup'},
+ 'seed': 42,
+ 'steps': 100000,
+ 'use_policy_training_preset': True,
+ 'wandb': {'disable_artifact': True,
+ 'enable': False,
+ 'entity': 'Robotics_VLA',
+ 'mode': 'online',
+ 'notes': None,
+ 'project': 'pi0_lerobot',
+ 'run_id': None}}
+load fresh policy
+load fresh policy
+load fresh policy
+load fresh policy
+===========================================
+Training completed!
+Outputs in: /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/outputs/train/2026-02-08/07-52-25_pi0fast_baseline_libero_30k
+===========================================
+============ Job Information ===================================================
+Submitted: 2026-02-08T07:52:07
+Started: 2026-02-08T07:52:16
+Ended: 2026-02-09T07:22:29
+Elapsed: 1411 min, Limit: 2880 min, Difference: 1469 min
+CPUs: 32, Nodes: 1
+Estimated Consumption: 14110.00 core-hours
+================================================================================
diff --git a/scripts/serve_policy_ricl.py b/scripts/serve_policy_ricl.py
new file mode 100644
index 0000000000000000000000000000000000000000..5683c47657ca6e5081de3232ef51e34a6996f9d0
--- /dev/null
+++ b/scripts/serve_policy_ricl.py
@@ -0,0 +1,90 @@
+import dataclasses
+import enum
+import logging
+import socket
+
+import tyro
+
+from openpi.policies import policy as _policy
+from openpi.policies import policy_config as _policy_config
+from openpi.serving import websocket_policy_server
+from openpi.training import config as _config
+
+
+class EnvMode(enum.Enum):
+    """Supported environments.
+
+    NOTE(review): in this RICL variant the selected environment is currently
+    informational only — ``create_policy`` always loads from an explicit
+    checkpoint and never consults ``Args.env``.
+    """
+
+    ALOHA = "aloha"
+    ALOHA_SIM = "aloha_sim"
+    DROID = "droid"
+    LIBERO = "libero"
+
+
+@dataclasses.dataclass
+class Checkpoint:
+    """Load a policy from a trained checkpoint.
+
+    All three fields are required; ``demos_dir`` is the RICL-specific addition
+    (it is forwarded to ``create_trained_ricl_policy`` for in-context demos).
+    """
+
+    # Training config name (e.g., "pi0_aloha_sim").
+    config: str
+    # Checkpoint directory (e.g., "checkpoints/pi0_aloha_sim/exp/10000").
+    dir: str
+    # Demos directory (e.g., "ricl_droid_preprocessing/collected_demos/2025-03-04").
+    demos_dir: str
+
+
+@dataclasses.dataclass
+class Default:
+    """Use the default policy for the given environment.
+
+    NOTE(review): despite being the declared default for ``Args.policy``, this
+    selector is not actually handled by ``create_policy`` in this script, which
+    unconditionally reads Checkpoint fields — selecting Default will fail.
+    """
+
+
+@dataclasses.dataclass
+class Args:
+    """Arguments for the serve_policy script (parsed from the CLI via tyro)."""
+
+    # Environment to serve the policy for. This is only used when serving default policies.
+    # NOTE(review): currently unused by create_policy in this RICL variant.
+    env: EnvMode = EnvMode.DROID
+
+    # If provided, will be used in case the "prompt" key is not present in the data, or if the model doesn't have a default
+    # prompt.  NOTE(review): currently not read anywhere in this script.
+    default_prompt: str | None = None
+
+    # Port to serve the policy on.
+    port: int = 8000
+    # Record the policy's behavior for debugging.
+    record: bool = False
+
+    # Specifies how to load the policy. If not provided, the default policy for the environment will be used.
+    policy: Checkpoint | Default = dataclasses.field(default_factory=Default)
+
+
+def create_policy(args: Args) -> _policy.Policy:
+ """Create a policy from the given arguments."""
+ return _policy_config.create_trained_ricl_policy(
+ _config.get_config(args.policy.config), args.policy.dir, demos_dir=args.policy.demos_dir
+ )
+
+
+def main(args: Args) -> None:
+    """Build the policy from CLI args and serve it over a websocket.
+
+    Blocks in ``serve_forever`` until the process is interrupted.
+    """
+    policy = create_policy(args)
+    # Capture metadata before any wrapping, so clients see the trained
+    # policy's metadata.
+    policy_metadata = policy.metadata
+
+    # Record the policy's behavior.
+    if args.record:
+        policy = _policy.PolicyRecorder(policy, "policy_records")
+
+    # Log host name/IP so operators know where to point robot clients.
+    hostname = socket.gethostname()
+    local_ip = socket.gethostbyname(hostname)
+    logging.info("Creating server (host: %s, ip: %s)", hostname, local_ip)
+
+    # Bind on all interfaces so remote clients can connect.
+    server = websocket_policy_server.WebsocketPolicyServer(
+        policy=policy,
+        host="0.0.0.0",
+        port=args.port,
+        metadata=policy_metadata,
+    )
+    server.serve_forever()
+
+
+if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO, force=True)
+    main(tyro.cli(Args))
diff --git a/scripts/test_ricl_data_pipeline.py b/scripts/test_ricl_data_pipeline.py
new file mode 100644
index 0000000000000000000000000000000000000000..77d43fe0671ae1d39a8b76725cd2ac603b0e3788
--- /dev/null
+++ b/scripts/test_ricl_data_pipeline.py
@@ -0,0 +1,219 @@
+#!/usr/bin/env python3
+"""
+Quick test script to validate LIBERO dataset loading and RICL precomputation logic.
+
+This runs a minimal version of the precomputation to verify:
+1. Parquet files can be loaded
+2. Video frames can be extracted
+3. DINOv2 embeddings can be built
+4. FAISS index works correctly
+5. Self-exclusion in retrieval works
+
+Usage:
+ python scripts/test_ricl_data_pipeline.py
+"""
+
+import sys
+from pathlib import Path
+
+# Add src to path
+sys.path.insert(0, str(Path(__file__).parent.parent / "src"))
+
+import numpy as np
+import pandas as pd
+from glob import glob
+from tqdm import tqdm
+import torch
+import torchvision
+
+print("=" * 60)
+print("RICL Data Pipeline Test")
+print("=" * 60)
+
+# Paths - Using smaller test dataset
+# NOTE(review): hard-coded cluster path — this script only runs on the
+# kisski-spath project share.
+DATA_DIR = Path("/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10")
+VIDEOS_DIR = DATA_DIR / "videos"
+
+print(f"Testing with dataset: {DATA_DIR}")
+print(f"Dataset exists: {DATA_DIR.exists()}")
+
+# Test 1: Load parquet files
+# Recursive glob covers the LeRobot chunked layout (data/chunk-*/... .parquet).
+print("\nTest 1: Loading parquet files...")
+parquet_files = sorted(glob(str(DATA_DIR / "data" / "**" / "*.parquet"), recursive=True))
+print(f" Found {len(parquet_files)} parquet files")
+
+# Load first few files
+dfs = []
+for f in parquet_files[:2]: # Just first 2 chunks
+ df = pd.read_parquet(f)
+ dfs.append(df)
+ print(f" Loaded {f.split('/')[-1]}: {len(df)} rows")
+
+# `df` holds the concatenated frames and is reused by Tests 2 and 5 below.
+df = pd.concat(dfs, ignore_index=True)
+print(f" Total frames: {len(df)}")
+print(f" ✓ PASSED")
+
+# Test 2: Extract state and action
+# Validates the per-row schema: a 6-dim end-effector state and a 7-dim action.
+print("\nTest 2: Extracting state and action...")
+row = df.iloc[0]
+state_ee = np.array(row["observation.states.ee_state"], dtype=np.float32)
+action = np.array(row["action"], dtype=np.float32)
+
+print(f" State (ee_state) shape: {state_ee.shape}")
+print(f" Action shape: {action.shape}")
+print(f" State sample: {state_ee[:3]}")
+print(f" Action sample: {action[:3]}")
+
+assert state_ee.shape == (6,), f"Expected state shape (6,), got {state_ee.shape}"
+assert action.shape == (7,), f"Expected action shape (7,), got {action.shape}"
+print(f" ✓ PASSED")
+
+# Test 3: Load video frame
+# Locates the episode's mp4 under videos/chunk-*/<image_key>/ and decodes
+# the frame referenced by the first dataframe row.
+print("\nTest 3: Loading video frame...")
+ep_idx = int(row["episode_index"])
+frame_idx = int(row["frame_index"])
+
+video_file = f"episode_{ep_idx:06d}.mp4"
+image_key = "observation.images.image"
+
+# Find video file
+video_path = None
+for chunk_dir in sorted(VIDEOS_DIR.glob("chunk-*")):
+ candidate = chunk_dir / image_key / video_file
+ if candidate.exists():
+ video_path = candidate
+ break
+
+if video_path is None:
+ print(f" ✗ FAILED: Video not found for episode {ep_idx}")
+else:
+ print(f" Video path: {video_path}")
+
+ # Load frame
+ # NOTE: decodes the whole video into memory; fine for a smoke test.
+ vframes, _, _ = torchvision.io.read_video(str(video_path), pts_unit='sec')
+ # Clamp an out-of-range frame index to the last decoded frame.
+ if frame_idx >= len(vframes):
+ frame_idx = len(vframes) - 1
+
+ # `frame` (H, W, 3 uint8 per the assertions/prints) is reused by Test 4.
+ frame = vframes[frame_idx].numpy()
+ print(f" Frame shape: {frame.shape}")
+ print(f" Frame dtype: {frame.dtype}")
+ print(f" Frame range: [{frame.min()}, {frame.max()}]")
+
+ assert frame.shape[2] == 3, f"Expected RGB image, got shape {frame.shape}"
+ print(f" ✓ PASSED")
+
+# Test 4: DINOv2 encoding (if model available)
+print("\nTest 4: DINOv2 encoding...")
+DINO_MODEL_PATH = Path("/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/rag/dinov2-base")
+
+if not DINO_MODEL_PATH.exists():
+    print(f" SKIPPED: DINOv2 model not found at {DINO_MODEL_PATH}")
+else:
+    from transformers import AutoImageProcessor, AutoModel
+
+    # NOTE(review): `frame` comes from Test 3 and is only bound when the video
+    # was found there — if Test 3 hit its failure branch this raises NameError.
+    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+    processor = AutoImageProcessor.from_pretrained(str(DINO_MODEL_PATH), local_files_only=True)
+    model = AutoModel.from_pretrained(str(DINO_MODEL_PATH), local_files_only=True).to(device)
+    model.eval()
+
+    with torch.no_grad():
+        inputs = processor(images=frame, return_tensors="pt")
+        inputs = {k: v.to(device) for k, v in inputs.items()}
+        outputs = model(**inputs)
+        # Use the CLS token (position 0) of the last hidden state as the embedding.
+        embedding = outputs.last_hidden_state[:, 0, :].cpu().numpy().squeeze()
+
+    print(f" Embedding shape: {embedding.shape}")
+    print(f" Embedding norm: {np.linalg.norm(embedding):.4f}")
+
+    # dinov2-base hidden size is 768.
+    assert embedding.shape == (768,), f"Expected embedding shape (768,), got {embedding.shape}"
+    print(f" ✓ PASSED")
+
+# Test 5: Action chunk building
+print("\nTest 5: Action chunk building...")
+ACTION_HORIZON = 50
+
+# Get all actions for this episode
+episode_mask = df["episode_index"] == ep_idx
+ep_df = df[episode_mask]
+ep_actions = np.array([
+    np.array(row["action"], dtype=np.float32)
+    for _, row in ep_df.iterrows()
+])
+
+print(f" Episode {ep_idx} length: {len(ep_df)} frames")
+print(f" Episode actions shape: {ep_actions.shape}")
+
+# Build chunk
+# The chunk starts at the episode beginning (index 0), not at the sampled
+# frame_idx; positions past the episode end repeat the final action.
+local_frame_idx = 0
+action_chunk = []
+for i in range(ACTION_HORIZON):
+    if local_frame_idx + i < len(ep_actions):
+        action_chunk.append(ep_actions[local_frame_idx + i])
+    else:
+        action_chunk.append(ep_actions[-1]) # Pad with last action
+
+action_chunk = np.stack(action_chunk, axis=0)
+print(f" Action chunk shape: {action_chunk.shape}")
+
+assert action_chunk.shape == (ACTION_HORIZON, 7), f"Expected shape ({ACTION_HORIZON}, 7), got {action_chunk.shape}"
+print(f" ✓ PASSED")
+
+# Test 6: FAISS index (if faiss available)
+print("\nTest 6: FAISS index and retrieval...")
+try:
+    import faiss
+
+    # Create dummy embeddings
+    # Synthetic data: this exercises index mechanics, not real features.
+    N = 100
+    D = 768
+    embeddings = np.random.randn(N, D).astype(np.float32)
+
+    # L2 normalize
+    # With unit-norm vectors, L2 distance is monotonic in cosine similarity.
+    norms = np.linalg.norm(embeddings, axis=1, keepdims=True)
+    embeddings = embeddings / (norms + 1e-8)
+
+    # Build index
+    index = faiss.IndexFlatL2(D)
+    index.add(embeddings)
+
+    # Search for top-3 nearest neighbors
+    top_k = 3
+    search_k = top_k + 1 # +1 to exclude self
+
+    # Query with a vector that is itself in the index, so the nearest hit is the query.
+    query_idx = 0
+    query_emb = embeddings[query_idx:query_idx+1]
+
+    D_result, I_result = index.search(query_emb, search_k)
+
+    print(f" Index built with {index.ntotal} vectors")
+    print(f" Query index: {query_idx}")
+    print(f" Retrieved indices: {I_result[0]}")
+    print(f" Retrieved distances: {D_result[0]}")
+
+    # Verify self is first result (distance ~0)
+    assert I_result[0, 0] == query_idx, "Self should be first result"
+    assert D_result[0, 0] < 1e-5, f"Self-distance should be ~0, got {D_result[0, 0]}"
+
+    # Filter out self
+    mask = I_result[0] != query_idx
+    nn_indices = I_result[0][mask][:top_k]
+    nn_distances = D_result[0][mask][:top_k]
+
+    print(f" After self-exclusion:")
+    print(f" NN indices: {nn_indices}")
+    print(f" NN distances: {nn_distances}")
+
+    assert query_idx not in nn_indices, "Self should be excluded"
+    assert len(nn_indices) == top_k, f"Should have {top_k} NNs, got {len(nn_indices)}"
+
+    print(f" ✓ PASSED")
+
+except ImportError:
+    print(f" SKIPPED: faiss not available")
+
+print("\n" + "=" * 60)
+print("✓ All tests passed!")
+print("=" * 60)
+print("\nData pipeline is ready. You can now run:")
+print(" sbatch slurm/precompute_ricl_context_libero.slurm")
+print("=" * 60)
diff --git a/scripts/test_ricl_data_pipeline_v2.py b/scripts/test_ricl_data_pipeline_v2.py
new file mode 100644
index 0000000000000000000000000000000000000000..7dfb255c131a9a72d7244dcc42b0bf945264458a
--- /dev/null
+++ b/scripts/test_ricl_data_pipeline_v2.py
@@ -0,0 +1,206 @@
+#!/usr/bin/env python3
+"""
+Quick test script for RICL LIBERO data pipeline.
+
+Tests:
+1. Dataset loading
+2. Video frame loading
+3. Action chunk building
+4. Transform pipeline
+5. Batch creation
+"""
+
+import sys
+from pathlib import Path
+
+# Add src to path
+sys.path.insert(0, str(Path(__file__).parent.parent / "src"))
+
+import numpy as np
+
+def test_video_loading():
+    """Test that videos can be loaded.
+
+    Decodes the first frame of the first episode's video with PyAV and prints
+    its shape/dtype/range. Returns True on success, False otherwise.
+    """
+    print("\n" + "="*60)
+    print("TEST 1: Video Loading")
+    print("="*60)
+
+    # Imported here so the rest of the suite can run without PyAV installed.
+    import av
+    from pathlib import Path
+
+    data_dir = Path("/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10")
+
+    # Try to load first frame from first episode
+    video_path = data_dir / "videos" / "chunk-000" / "observation.images.image" / "episode_000000.mp4"
+
+    if not video_path.exists():
+        print(f"✗ Video not found: {video_path}")
+        return False
+
+    try:
+        with av.open(str(video_path)) as container:
+            # Decode only the first frame of the first video stream.
+            frame = next(container.decode(video=0))
+            img = frame.to_ndarray(format='rgb24')
+            print(f"✓ Loaded frame shape: {img.shape}")
+            print(f"✓ Frame dtype: {img.dtype}")
+            print(f"✓ Frame range: [{img.min()}, {img.max()}]")
+            return True
+    except Exception as e:
+        print(f"✗ Failed to load video: {e}")
+        return False
+
+
+def test_dataset_loading():
+    """Test RiclLiberoDataset loading.
+
+    Returns:
+        The constructed dataset on success, or None on failure. NOTE(review):
+        unlike the other tests this returns the dataset itself rather than a
+        bool — callers rely on its truthiness / None-ness.
+    """
+    print("\n" + "="*60)
+    print("TEST 2: Dataset Loading")
+    print("="*60)
+
+    from openpi.data.ricl_libero_dataset import RiclLiberoDataset
+
+    data_dir = "/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10"
+    context_dir = "rag/ricl_training_context_libero_10_test"
+
+    try:
+        dataset = RiclLiberoDataset(
+            data_dir=data_dir,
+            context_dir=context_dir,
+            action_horizon=15, # Use 15 like DROID
+            lambda_decay=10.0,
+            num_retrieved_observations=1,
+        )
+
+        print(f"✓ Dataset loaded: {len(dataset)} samples")
+        return dataset
+    except Exception as e:
+        print(f"✗ Failed to load dataset: {e}")
+        import traceback
+        traceback.print_exc()
+        return None
+
+
+def test_sample_loading(dataset):
+    """Test loading a single sample.
+
+    Checks image/state/action entries of dataset[0] for expected shapes and
+    dtypes. Returns True on success, False on failure or when dataset is None.
+    """
+    print("\n" + "="*60)
+    print("TEST 3: Sample Loading")
+    print("="*60)
+
+    if dataset is None:
+        print("✗ Skipping (dataset not loaded)")
+        return False
+
+    try:
+        sample = dataset[0]
+
+        print("✓ Sample loaded successfully")
+        print(f" Keys: {list(sample.keys())}")
+
+        # Check images
+        # Every image entry present is expected to be an HWC uint8 RGB array.
+        for key in ['query_top_image', 'query_wrist_image', 'retrieved_0_top_image', 'retrieved_0_wrist_image']:
+            if key in sample:
+                img = sample[key]
+                print(f" {key}: shape={img.shape}, dtype={img.dtype}")
+                assert img.shape[2] == 3, f"Expected RGB image, got shape {img.shape}"
+                assert img.dtype == np.uint8, f"Expected uint8, got {img.dtype}"
+
+        # Check states
+        for key in ['query_state', 'retrieved_0_state']:
+            if key in sample:
+                state = sample[key]
+                print(f" {key}: shape={state.shape}")
+
+        # Check actions
+        # Action chunks are (horizon, 7).
+        for key in ['query_actions', 'retrieved_0_actions']:
+            if key in sample:
+                actions = sample[key]
+                print(f" {key}: shape={actions.shape}")
+                assert actions.shape[1] == 7, f"Expected 7-dim actions, got {actions.shape}"
+
+        return True
+
+    except Exception as e:
+        print(f"✗ Failed to load sample: {e}")
+        import traceback
+        traceback.print_exc()
+        return False
+
+
+def test_batch_loading(dataset):
+    """Test loading a batch.
+
+    Loads up to 4 samples and verifies they all expose the same keys.
+    Returns True on success, False on failure or when dataset is None.
+    """
+    print("\n" + "="*60)
+    print("TEST 4: Batch Loading")
+    print("="*60)
+
+    if dataset is None:
+        print("✗ Skipping (dataset not loaded)")
+        return False
+
+    try:
+        batch_size = 4
+        batch = [dataset[i] for i in range(min(batch_size, len(dataset)))]
+
+        print(f"✓ Loaded batch of {len(batch)} samples")
+
+        # Check consistency
+        # Every sample must expose exactly the same key set as sample 0.
+        keys = set(batch[0].keys())
+        for i, sample in enumerate(batch[1:], 1):
+            assert set(sample.keys()) == keys, f"Sample {i} has different keys"
+
+        print(f" All samples have consistent keys: {len(keys)} keys")
+        return True
+
+    except Exception as e:
+        print(f"✗ Failed to load batch: {e}")
+        import traceback
+        traceback.print_exc()
+        return False
+
+
+def main():
+ print("\n" + "#"*60)
+ print("# RICL LIBERO Data Pipeline Test")
+ print("#"*60)
+
+ results = {}
+
+ # Run tests
+ results['video_loading'] = test_video_loading()
+ results['dataset_loading'] = test_dataset_loading()
+
+ # Get dataset for subsequent tests
+ if results['dataset_loading']:
+ from openpi.data.ricl_libero_dataset import RiclLiberoDataset
+ dataset = RiclLiberoDataset(
+ data_dir="/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10",
+ context_dir="rag/ricl_training_context_libero_10_test",
+ action_horizon=15,
+ lambda_decay=10.0,
+ num_retrieved_observations=1,
+ )
+ else:
+ dataset = None
+
+ results['sample_loading'] = test_sample_loading(dataset)
+ results['batch_loading'] = test_batch_loading(dataset)
+
+ # Print summary
+ print("\n" + "="*60)
+ print("TEST SUMMARY")
+ print("="*60)
+
+ for test_name, passed in results.items():
+ status = "✓ PASS" if passed else "✗ FAIL"
+ print(f" {test_name:20s}: {status}")
+
+ all_passed = all(results.values())
+ print()
+ if all_passed:
+ print("🎉 ALL TESTS PASSED!")
+ else:
+ print("❌ SOME TESTS FAILED")
+
+ print("="*60)
+ return 0 if all_passed else 1
+
+
+# Propagate the test result as the process exit code.
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/scripts/train.py b/scripts/train.py
new file mode 100644
index 0000000000000000000000000000000000000000..2792156cf91db2c7637e1e53b77d9ab662cf068f
--- /dev/null
+++ b/scripts/train.py
@@ -0,0 +1,273 @@
+import dataclasses
+import functools
+import logging
+import platform
+from typing import Any
+
+import etils.epath as epath
+import flax.nnx as nnx
+from flax.training import common_utils
+import flax.traverse_util as traverse_util
+import jax
+import jax.experimental
+import jax.numpy as jnp
+import optax
+import tqdm_loggable.auto as tqdm
+import wandb
+
+import openpi.models.model as _model
+import openpi.shared.array_typing as at
+import openpi.shared.nnx_utils as nnx_utils
+import openpi.training.checkpoints as _checkpoints
+import openpi.training.config as _config
+import openpi.training.data_loader as _data_loader
+import openpi.training.optimizer as _optimizer
+import openpi.training.sharding as sharding
+import openpi.training.utils as training_utils
+import openpi.training.weight_loaders as _weight_loaders
+
+
+def init_logging():
+ """Custom logging format for better readability."""
+ level_mapping = {"DEBUG": "D", "INFO": "I", "WARNING": "W", "ERROR": "E", "CRITICAL": "C"}
+
+ class CustomFormatter(logging.Formatter):
+ def format(self, record):
+ record.levelname = level_mapping.get(record.levelname, record.levelname)
+ return super().format(record)
+
+ formatter = CustomFormatter(
+ fmt="%(asctime)s.%(msecs)03d [%(levelname)s] %(message)-80s (%(process)d:%(filename)s:%(lineno)s)",
+ datefmt="%H:%M:%S",
+ )
+
+ logger = logging.getLogger()
+ logger.setLevel(logging.INFO)
+ logger.handlers[0].setFormatter(formatter)
+
+
+def init_wandb(config: _config.TrainConfig, *, resuming: bool, log_code: bool = False, enabled: bool = True):
+    """Initialize the wandb run for this training job.
+
+    Args:
+        config: Training config; supplies checkpoint_dir, exp_name, project_name.
+        resuming: If True, resume the run whose id a previous fresh run stored
+            in <checkpoint_dir>/wandb_id.txt.
+        log_code: If True, upload the repository code to wandb.
+        enabled: If False, initialize wandb in disabled mode and return.
+
+    Raises:
+        FileNotFoundError: If the checkpoint directory does not exist.
+    """
+    if not enabled:
+        wandb.init(mode="disabled")
+        return
+
+    ckpt_dir = config.checkpoint_dir
+    if not ckpt_dir.exists():
+        raise FileNotFoundError(f"Checkpoint directory {ckpt_dir} does not exist.")
+    if resuming:
+        # Reuse the persisted run id so metrics continue on the same run.
+        run_id = (ckpt_dir / "wandb_id.txt").read_text().strip()
+        wandb.init(id=run_id, resume="must", project=config.project_name)
+    else:
+        wandb.init(
+            name=config.exp_name,
+            config=dataclasses.asdict(config),
+            project=config.project_name,
+        )
+        # Persist the run id so a later restart can resume this run.
+        (ckpt_dir / "wandb_id.txt").write_text(wandb.run.id)
+
+    if log_code:
+        wandb.run.log_code(epath.Path(__file__).parent.parent)
+
+
+def _load_weights_and_validate(loader: _weight_loaders.WeightLoader, params_shape: at.Params) -> at.Params:
+    """Loads and validates the weights. Returns a loaded subset of the weights.
+
+    Args:
+        loader: Weight loader that materializes (part of) the params pytree.
+        params_shape: Pytree of jax.ShapeDtypeStruct describing the full model.
+
+    Returns:
+        Only the entries the loader actually materialized; leaves that are
+        still ShapeDtypeStruct (i.e. not loaded) are dropped.
+    """
+    loaded_params = loader.load(params_shape)
+    at.check_pytree_equality(expected=params_shape, got=loaded_params, check_shapes=True, check_dtypes=True)
+
+    # Remove jax.ShapeDtypeStruct from the loaded params. This makes sure that only the loaded params are returned.
+    return traverse_util.unflatten_dict(
+        {k: v for k, v in traverse_util.flatten_dict(loaded_params).items() if not isinstance(v, jax.ShapeDtypeStruct)}
+    )
+
+
+@at.typecheck
+def init_train_state(
+    config: _config.TrainConfig, init_rng: at.KeyArrayLike, mesh: jax.sharding.Mesh, *, resume: bool
+) -> tuple[training_utils.TrainState, Any]:
+    """Create the initial TrainState and its FSDP sharding spec.
+
+    When resume=True only the abstract (shape-level) state and sharding are
+    returned; the caller restores concrete values from a checkpoint.
+    """
+    tx = _optimizer.create_optimizer(config.optimizer, config.lr_schedule, weight_decay_mask=None)
+
+    def init(rng: at.KeyArrayLike, partial_params: at.Params | None = None) -> training_utils.TrainState:
+        rng, model_rng = jax.random.split(rng)
+        # initialize the model (and its parameters).
+        model = config.model.create(model_rng)
+
+        # Merge the partial params into the model.
+        if partial_params is not None:
+            graphdef, state = nnx.split(model)
+            # This will produce an error if the partial params are not a subset of the state.
+            state.replace_by_pure_dict(partial_params)
+            model = nnx.merge(graphdef, state)
+
+        params = nnx.state(model)
+        # Convert frozen params to bfloat16.
+        params = nnx_utils.state_map(params, config.freeze_filter, lambda p: p.replace(p.value.astype(jnp.bfloat16)))
+
+        return training_utils.TrainState(
+            step=0,
+            params=params,
+            model_def=nnx.graphdef(model),
+            tx=tx,
+            opt_state=tx.init(params.filter(config.trainable_filter)),
+            ema_decay=config.ema_decay,
+            ema_params=None if config.ema_decay is None else params,
+        )
+
+    # Trace init() to obtain shapes without allocating device memory.
+    train_state_shape = jax.eval_shape(init, init_rng)
+    state_sharding = sharding.fsdp_sharding(train_state_shape, mesh, log=True)
+
+    if resume:
+        return train_state_shape, state_sharding
+
+    partial_params = _load_weights_and_validate(config.weight_loader, train_state_shape.params.to_pure_dict())
+    replicated_sharding = jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec())
+
+    # Initialize the train state and mix in the partial params.
+    train_state = jax.jit(
+        init,
+        donate_argnums=(1,), # donate the partial params buffer.
+        in_shardings=replicated_sharding,
+        out_shardings=state_sharding,
+    )(init_rng, partial_params)
+
+    return train_state, state_sharding
+
+
+@at.typecheck
+def train_step(
+    config: _config.TrainConfig,
+    rng: at.KeyArrayLike,
+    state: training_utils.TrainState,
+    batch: tuple[_model.Observation, _model.Actions],
+) -> tuple[training_utils.TrainState, dict[str, at.Array]]:
+    """One optimizer step; returns the updated state and scalar metrics."""
+    model = nnx.merge(state.model_def, state.params)
+    model.train()
+
+    @at.typecheck
+    def loss_fn(
+        model: _model.BaseModel, rng: at.KeyArrayLike, observation: _model.Observation, actions: _model.Actions
+    ):
+        chunked_loss = model.compute_loss(rng, observation, actions, train=True)
+        return jnp.mean(chunked_loss)
+
+    # Fold the step count into the rng so each step uses fresh randomness.
+    train_rng = jax.random.fold_in(rng, state.step)
+    observation, actions = batch
+
+    # Filter out frozen params.
+    diff_state = nnx.DiffState(0, config.trainable_filter)
+    loss, grads = nnx.value_and_grad(loss_fn, argnums=diff_state)(model, train_rng, observation, actions)
+
+    params = state.params.filter(config.trainable_filter)
+    updates, new_opt_state = state.tx.update(grads, state.opt_state, params)
+    new_params = optax.apply_updates(params, updates)
+
+    # Update the model in place and return the new full state.
+    nnx.update(model, new_params)
+    new_params = nnx.state(model)
+
+    new_state = dataclasses.replace(state, step=state.step + 1, params=new_params, opt_state=new_opt_state)
+    if state.ema_decay is not None:
+        # Exponential moving average over the full parameter set.
+        new_state = dataclasses.replace(
+            new_state,
+            ema_params=jax.tree.map(
+                lambda old, new: state.ema_decay * old + (1 - state.ema_decay) * new, state.ema_params, new_params
+            ),
+        )
+
+    # Filter out params that aren't kernels.
+    kernel_params = nnx.state(
+        model,
+        nnx.All(
+            nnx.Param,
+            nnx.Not(nnx_utils.PathRegex(".*/(bias|scale|pos_embedding|input_embedding)")),
+            lambda _, x: x.value.ndim > 1,
+        ),
+    )
+    info = {
+        "loss": loss,
+        "grad_norm": optax.global_norm(grads),
+        "param_norm": optax.global_norm(kernel_params),
+    }
+    return new_state, info
+
+
+def main(config: _config.TrainConfig):
+    """Entry point: train the model described by `config` to completion."""
+    init_logging()
+    logging.info(f"Running on: {platform.node()}")
+
+    if config.batch_size % jax.device_count() != 0:
+        raise ValueError(
+            f"Batch size {config.batch_size} must be divisible by the number of devices {jax.device_count()}."
+        )
+
+    # Persistent XLA compilation cache across runs.
+    jax.config.update("jax_compilation_cache_dir", str(epath.Path("~/.cache/jax").expanduser()))
+
+    rng = jax.random.key(config.seed)
+    train_rng, init_rng = jax.random.split(rng)
+
+    mesh = sharding.make_mesh(config.fsdp_devices)
+    data_sharding = jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec(sharding.DATA_AXIS))
+    replicated_sharding = jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec())
+
+    checkpoint_manager, resuming = _checkpoints.initialize_checkpoint_dir(
+        config.checkpoint_dir,
+        keep_period=config.keep_period,
+        overwrite=config.overwrite,
+        resume=config.resume,
+    )
+    init_wandb(config, resuming=resuming, enabled=config.wandb_enabled)
+
+    data_loader = _data_loader.create_data_loader(
+        config,
+        sharding=data_sharding,
+        num_workers=config.num_workers,
+        shuffle=True,
+    )
+    data_iter = iter(data_loader)
+    # Pull one batch up front so its structure can be logged before training.
+    batch = next(data_iter)
+    logging.info(f"Initialized data loader:\n{training_utils.array_tree_to_info(batch)}")
+
+    train_state, train_state_sharding = init_train_state(config, init_rng, mesh, resume=resuming)
+    jax.block_until_ready(train_state)
+    logging.info(f"Initialized train state:\n{training_utils.array_tree_to_info(train_state.params)}")
+
+    if resuming:
+        train_state = _checkpoints.restore_state(checkpoint_manager, train_state, data_loader)
+
+    ptrain_step = jax.jit(
+        functools.partial(train_step, config),
+        in_shardings=(replicated_sharding, train_state_sharding, data_sharding),
+        out_shardings=(train_state_sharding, replicated_sharding),
+        # Donate the previous train state buffers to the updated state.
+        donate_argnums=(1,),
+    )
+
+    start_step = int(train_state.step)
+    pbar = tqdm.tqdm(
+        range(start_step, config.num_train_steps),
+        initial=start_step,
+        total=config.num_train_steps,
+        dynamic_ncols=True,
+    )
+
+    infos = []
+    for step in pbar:
+        with sharding.set_mesh(mesh):
+            train_state, info = ptrain_step(train_rng, train_state, batch)
+        infos.append(info)
+        if step % config.log_interval == 0:
+            # Average the per-step metrics accumulated since the last log.
+            stacked_infos = common_utils.stack_forest(infos)
+            reduced_info = jax.device_get(jax.tree.map(jnp.mean, stacked_infos))
+            info_str = ", ".join(f"{k}={v:.4f}" for k, v in reduced_info.items())
+            pbar.write(f"Step {step}: {info_str}")
+            wandb.log(reduced_info, step=step)
+            infos = []
+        batch = next(data_iter)
+
+        if (step % config.save_interval == 0 and step > start_step) or step == config.num_train_steps - 1:
+            _checkpoints.save_state(checkpoint_manager, train_state, data_loader, step)
+
+    logging.info("Waiting for checkpoint manager to finish")
+    checkpoint_manager.wait_until_finished()
+
+
+# Parse the training config from the CLI and start training.
+if __name__ == "__main__":
+    main(_config.cli())
diff --git a/scripts/train_debug_interactive.sh b/scripts/train_debug_interactive.sh
new file mode 100644
index 0000000000000000000000000000000000000000..035edd163c6ad40fa858cef8e5c841af805da0d4
--- /dev/null
+++ b/scripts/train_debug_interactive.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+# Interactive debug training (no SLURM - runs directly on allocated node)
+
+# Fail fast on errors, unset variables, and pipeline failures so a broken
+# step (e.g. a failed cd) doesn't silently continue into training.
+set -euo pipefail
+
+cd /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi
+
+# ${PYTHONPATH:-} keeps this safe under `set -u` when PYTHONPATH is unset.
+export PYTHONPATH="/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi/src:${PYTHONPATH:-}"
+export HF_HOME="/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/hf_cache"
+
+# Memory optimization for JAX/XLA
+export XLA_PYTHON_CLIENT_PREALLOCATE=false
+
+echo "=========================================="
+echo "RICL Interactive Debug Training"
+echo "=========================================="
+echo "Config:"
+echo " Steps: 500 (quick test)"
+echo " Batch: 2 (reduced to fit single GPU memory)"
+echo " Action Horizon: 15"
+echo " Lambda: 10.0"
+echo "=========================================="
+
+python scripts/train_pi0_fast_ricl.py \
+  pi0_fast_ricl_libero \
+  --exp-name="ricl_libero_debug_interactive" \
+  --project-name="ricl_openpi" \
+  --libero-data-dir="/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10" \
+  --libero-context-dir="rag/ricl_training_context_libero_10_test" \
+  --model.lamda=10.0 \
+  --model.use-action-interpolation \
+  --model.num-retrieved-observations=1 \
+  --model.action-horizon=15 \
+  --model.action-dim=7 \
+  --model.max-token-len=250 \
+  --batch-size=2 \
+  --num-train-steps=500 \
+  --save-interval=500 \
+  --log-interval=10 \
+  --lr-schedule.peak-lr=1e-4 \
+  --seed=42 \
+  --no-wandb-enabled \
+  --overwrite \
+  --no-resume
+
+echo ""
+echo "=========================================="
+echo "Debug Training Complete!"
+echo "=========================================="
diff --git a/scripts/train_pi0_fast_baseline_libero.slurm b/scripts/train_pi0_fast_baseline_libero.slurm
new file mode 100644
index 0000000000000000000000000000000000000000..d960211bc0fc7fc4aface1c7b9483f39c83b2ca9
--- /dev/null
+++ b/scripts/train_pi0_fast_baseline_libero.slurm
@@ -0,0 +1,85 @@
+#!/bin/bash
+#SBATCH --job-name=pi0fast_baseline_libero
+#SBATCH --output=logs/pi0fast_baseline_libero_h100.out
+#SBATCH --error=logs/pi0fast_baseline_libero_h100.err
+#SBATCH --nodes=1
+#SBATCH --ntasks=1
+#SBATCH --gres=gpu:4
+#SBATCH --cpus-per-task=32
+#SBATCH --time=48:00:00
+#SBATCH --mem=400G
+#SBATCH --partition=kisski
+
+# ==========================================
+# Baseline Pi0-FAST Training for LIBERO
+# Using VLA-Humanoid Codebase (LeRobot)
+# ==========================================
+
+# Activate Environment
+source /projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/etc/profile.d/conda.sh
+conda activate pitorch_doanh
+
+# Environment Variables
+export PROJECT_DIR="/projects/extern/kisski/kisski-spath/dir.project"
+export PYTHONPATH="$PROJECT_DIR/VLA_Groot/in_context_learning/VLA-Humanoid:$PYTHONPATH"
+export HF_HOME="$PROJECT_DIR/VLA_Groot/hf_cache"
+
+# Paths
+VLA_HUMANOID_DIR="$PROJECT_DIR/VLA_Groot/in_context_learning/VLA-Humanoid"
+DATA_DIR="$PROJECT_DIR/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10"
+POLICY_CONFIG_SOURCE="$VLA_HUMANOID_DIR/configs/policy_config/pi0fast_baseline.json"
+LIBERO_CONFIG_PATH="$VLA_HUMANOID_DIR/configs/libero_config/default.json"
+
+EXP_NAME="pi0fast_baseline_libero_pretrained_fix"
+# Output directory
+OUTPUT_DIR="$VLA_HUMANOID_DIR/outputs/train/$(date +%Y-%m-%d)/$(date +%H-%M-%S)_$EXP_NAME"
+# Temp directory for initializing config (must separate from output to avoid FileExistsError)
+TEMP_CONFIG_DIR="$VLA_HUMANOID_DIR/temp/configs/$EXP_NAME"
+
+echo "==========================================="
+echo "Experiment: $EXP_NAME"
+echo "Codebase: $VLA_HUMANOID_DIR"
+echo "Data: $DATA_DIR"
+echo "Policy Config Source: $POLICY_CONFIG_SOURCE"
+echo "Temp Config Dir: $TEMP_CONFIG_DIR"
+echo "Output Dir: $OUTPUT_DIR"
+echo "==========================================="
+
+# Move to codebase directory
+cd $VLA_HUMANOID_DIR
+
+# 1. Run Verification Script (CRITICAL: Fail if setup is wrong)
+# NOTE(review): the explicit `$?` check below relies on `set -e` NOT being
+# enabled in this script; do not add `set -e` without reworking this branch.
+echo "==========================================="
+echo "Running verification script..."
+echo "==========================================="
+python scripts/verify_training_setup.py
+if [ $? -ne 0 ]; then
+    echo "❌ ERROR: Verification failed! Aborting training."
+    exit 1
+fi
+echo "✓ Verification passed. Proceeding to training..."
+
+# 2. Setup Temp Config Directory (Source for initialization)
+mkdir -p $TEMP_CONFIG_DIR
+cp $POLICY_CONFIG_SOURCE $TEMP_CONFIG_DIR/config.json
+
+# 3. Run Training
+# Note: OUTPUT_DIR is created by the script, it should not exist beforehand if resume=False
+export PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True
+accelerate launch --num_processes=4 --main_process_port 29600 lerobot/scripts/train_accelerate.py \
+  --policy.type=pi0fast \
+  --dataset.root=$DATA_DIR \
+  --dataset.video_backend=pyav \
+  --output_dir=$OUTPUT_DIR \
+  --job_name=$EXP_NAME \
+  --config_path=$LIBERO_CONFIG_PATH \
+  --batch_size=4 \
+  --policy.gradient_accumulation_steps=4 \
+  --save_freq=5000 \
+  --log_freq=100 \
+  --wandb.enable=false
+
+echo "==========================================="
+echo "Training completed!"
+echo "Outputs in: $OUTPUT_DIR"
+echo "==========================================="
diff --git a/scripts/train_pi0_fast_ricl.py b/scripts/train_pi0_fast_ricl.py
new file mode 100644
index 0000000000000000000000000000000000000000..649743c4493b301a46ab31936b968758f433e916
--- /dev/null
+++ b/scripts/train_pi0_fast_ricl.py
@@ -0,0 +1,317 @@
+import dataclasses
+import functools
+import logging
+import platform
+from typing import Any
+
+import etils.epath as epath
+import flax.nnx as nnx
+from flax.training import common_utils
+import flax.traverse_util as traverse_util
+import jax
+import jax.experimental
+import jax.numpy as jnp
+import optax
+import tqdm_loggable.auto as tqdm
+import wandb
+from torch.utils.tensorboard import SummaryWriter
+
+import openpi.models.model as _model
+import openpi.shared.array_typing as at
+import openpi.shared.nnx_utils as nnx_utils
+import openpi.training.checkpoints as _checkpoints
+import openpi.training.config as _config
+import openpi.training.data_loader as _data_loader
+import openpi.training.optimizer as _optimizer
+import openpi.training.sharding as sharding
+import openpi.training.utils as training_utils
+import openpi.training.weight_loaders as _weight_loaders
+
+
+def init_logging():
+ """Custom logging format for better readability."""
+ level_mapping = {"DEBUG": "D", "INFO": "I", "WARNING": "W", "ERROR": "E", "CRITICAL": "C"}
+
+ class CustomFormatter(logging.Formatter):
+ def format(self, record):
+ record.levelname = level_mapping.get(record.levelname, record.levelname)
+ return super().format(record)
+
+ formatter = CustomFormatter(
+ fmt="%(asctime)s.%(msecs)03d [%(levelname)s] %(message)-80s (%(process)d:%(filename)s:%(lineno)s)",
+ datefmt="%H:%M:%S",
+ )
+
+ logger = logging.getLogger()
+ logger.setLevel(logging.INFO)
+ logger.handlers[0].setFormatter(formatter)
+
+
+def init_wandb(config: _config.TrainConfig, *, resuming: bool, log_code: bool = False, enabled: bool = True):
+    """Initialize the wandb run for this training job.
+
+    Args:
+        config: Training config; supplies checkpoint_dir, exp_name, project_name.
+        resuming: If True, resume the run whose id a previous fresh run stored
+            in <checkpoint_dir>/wandb_id.txt.
+        log_code: If True, upload the repository code to wandb.
+        enabled: If False, initialize wandb in disabled mode and return.
+
+    Raises:
+        FileNotFoundError: If the checkpoint directory does not exist.
+    """
+    if not enabled:
+        wandb.init(mode="disabled")
+        return
+
+    ckpt_dir = config.checkpoint_dir
+    if not ckpt_dir.exists():
+        raise FileNotFoundError(f"Checkpoint directory {ckpt_dir} does not exist.")
+    if resuming:
+        # Reuse the persisted run id so metrics continue on the same run.
+        run_id = (ckpt_dir / "wandb_id.txt").read_text().strip()
+        wandb.init(id=run_id, resume="must", project=config.project_name)
+    else:
+        wandb.init(
+            name=config.exp_name,
+            config=dataclasses.asdict(config),
+            project=config.project_name,
+        )
+        # Persist the run id so a later restart can resume this run.
+        (ckpt_dir / "wandb_id.txt").write_text(wandb.run.id)
+
+    if log_code:
+        wandb.run.log_code(epath.Path(__file__).parent.parent)
+
+
+def _load_weights_and_validate(loader: _weight_loaders.WeightLoader, params_shape: at.Params) -> at.Params:
+    """Loads and validates the weights. Returns a loaded subset of the weights.
+
+    Args:
+        loader: Weight loader that materializes (part of) the params pytree.
+        params_shape: Pytree of jax.ShapeDtypeStruct describing the full model.
+
+    Returns:
+        Only the entries the loader actually materialized; leaves that are
+        still ShapeDtypeStruct (i.e. not loaded) are dropped.
+    """
+    loaded_params = loader.load(params_shape)
+    at.check_pytree_equality(expected=params_shape, got=loaded_params, check_shapes=True, check_dtypes=True)
+
+    # Remove jax.ShapeDtypeStruct from the loaded params. This makes sure that only the loaded params are returned.
+    return traverse_util.unflatten_dict(
+        {k: v for k, v in traverse_util.flatten_dict(loaded_params).items() if not isinstance(v, jax.ShapeDtypeStruct)}
+    )
+
+
+def count_parameters(params, trainable_filter):
+    """Counts total and trainable parameters.
+
+    Args:
+        params: nnx state pytree of model parameters (leaves expose `.size`).
+        trainable_filter: nnx filter selecting the trainable subset of `params`.
+
+    Returns:
+        Tuple (total_params, trainable_params) as element counts.
+    """
+    total_params = sum(p.size for p in jax.tree_util.tree_leaves(params))
+
+    trainable_params = sum(
+        p.size for p in jax.tree_util.tree_leaves(params.filter(trainable_filter))
+    )
+
+    return total_params, trainable_params
+
+
+@at.typecheck
+def init_train_state(
+    config: _config.TrainConfig, init_rng: at.KeyArrayLike, mesh: jax.sharding.Mesh, *, resume: bool
+) -> tuple[training_utils.TrainState, Any]:
+    """Create the initial TrainState and its FSDP sharding spec.
+
+    When resume=True only the abstract (shape-level) state and sharding are
+    returned; the caller restores concrete values from a checkpoint.
+    """
+    tx = _optimizer.create_optimizer(config.optimizer, config.lr_schedule, weight_decay_mask=None)
+
+    def init(rng: at.KeyArrayLike, partial_params: at.Params | None = None) -> training_utils.TrainState:
+        rng, model_rng = jax.random.split(rng)
+        # initialize the model (and its parameters).
+        model = config.model.create(model_rng)
+
+        # Merge the partial params into the model.
+        if partial_params is not None:
+            graphdef, state = nnx.split(model)
+            # This will produce an error if the partial params are not a subset of the state.
+            state.replace_by_pure_dict(partial_params)
+            model = nnx.merge(graphdef, state)
+
+        params = nnx.state(model)
+        # Convert frozen params to bfloat16.
+        params = nnx_utils.state_map(params, config.freeze_filter, lambda p: p.replace(p.value.astype(jnp.bfloat16)))
+
+        # Count total and trainable parameters
+        # NOTE(review): init() is traced by jax.eval_shape and jax.jit below, so
+        # these prints execute at trace time (possibly more than once). Also,
+        # `// 1e6` floor-divides by a float, printing e.g. `3000.0M`.
+        total_params, trainable_params = count_parameters(params, config.trainable_filter)
+        print(f"Total Parameters: {total_params // 1e6}M")
+        print(f"Trainable Parameters: {trainable_params // 1e6}M")
+        print(f"Trainable Parameters %: {100 * trainable_params / total_params:.2f}%")
+
+        return training_utils.TrainState(
+            step=0,
+            params=params,
+            model_def=nnx.graphdef(model),
+            tx=tx,
+            opt_state=tx.init(params.filter(config.trainable_filter)),
+            ema_decay=config.ema_decay,
+            ema_params=None if config.ema_decay is None else params,
+        )
+
+    # Trace init() to obtain shapes without allocating device memory.
+    train_state_shape = jax.eval_shape(init, init_rng)
+    state_sharding = sharding.fsdp_sharding(train_state_shape, mesh, log=True)
+
+    if resume:
+        return train_state_shape, state_sharding
+
+    partial_params = _load_weights_and_validate(config.weight_loader, train_state_shape.params.to_pure_dict())
+    replicated_sharding = jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec())
+
+    # Initialize the train state and mix in the partial params.
+    train_state = jax.jit(
+        init,
+        donate_argnums=(1,), # donate the partial params buffer.
+        in_shardings=replicated_sharding,
+        out_shardings=state_sharding,
+    )(init_rng, partial_params)
+
+    return train_state, state_sharding
+
+
+def create_decode_indices(config: _config.TrainConfig) -> at.Int[at.Array, "decode_len"]:
+    # create the indices to decide which tokens to decode: i.e. only those belonging to each retrieved/query "prompt, state, action" prompt and not the images
+    image_token_len = 256*2 # number of image tokens times number of images
+    prompt_token_len = config.model.max_token_len # max token len for each retrieved/query "prompt, state, action" prompt
+    total_token_len = image_token_len + prompt_token_len
+    decode_indices = []
+    # One (images + prompt) segment per retrieved observation plus one for the
+    # query; keep only the prompt positions within each segment.
+    # NOTE(review): the `+ 1` also skips the first prompt token of each segment
+    # — presumably a begin-of-prompt token; confirm against the tokenization.
+    for i in range(config.model.num_retrieved_observations + 1):
+        decode_indices.extend(list(range(i * total_token_len + image_token_len + 1, (i+1) * total_token_len)))
+    decode_indices = jnp.asarray(decode_indices)
+    print(f'decode_indices shape: {decode_indices.shape}')
+    return decode_indices
+
+
+# @at.typecheck
+# NOTE(review): the typecheck decorators are commented out here (unlike
+# scripts/train.py) — presumably because RiclObservation does not satisfy the
+# annotations; confirm before re-enabling.
+def train_step(
+    config: _config.TrainConfig,
+    rng: at.KeyArrayLike,
+    state: training_utils.TrainState,
+    batch: tuple[_model.RiclObservation, _model.Actions],
+    decode_indices: at.Int[at.Array, "decode_len"],
+) -> tuple[training_utils.TrainState, dict[str, at.Array]]:
+    """One optimizer step; returns the updated state and scalar metrics.
+
+    `decode_indices` selects which token positions contribute to the loss
+    (prompt/state/action tokens, not image tokens — see create_decode_indices).
+    """
+    model = nnx.merge(state.model_def, state.params)
+    model.train()
+
+    # @at.typecheck
+    def loss_fn(
+        model: _model.BaseModel, rng: at.KeyArrayLike, observation: _model.RiclObservation, actions: _model.Actions
+    ):
+        chunked_loss = model.compute_loss(rng, observation, actions, train=True, decode_indices=decode_indices)
+        return jnp.mean(chunked_loss)
+
+    # Fold the step count into the rng so each step uses fresh randomness.
+    train_rng = jax.random.fold_in(rng, state.step)
+    observation, actions = batch
+
+    # Filter out frozen params.
+    diff_state = nnx.DiffState(0, config.trainable_filter)
+    loss, grads = nnx.value_and_grad(loss_fn, argnums=diff_state)(model, train_rng, observation, actions)
+
+    params = state.params.filter(config.trainable_filter)
+    updates, new_opt_state = state.tx.update(grads, state.opt_state, params)
+    new_params = optax.apply_updates(params, updates)
+
+    # Update the model in place and return the new full state.
+    nnx.update(model, new_params)
+    new_params = nnx.state(model)
+
+    new_state = dataclasses.replace(state, step=state.step + 1, params=new_params, opt_state=new_opt_state)
+    if state.ema_decay is not None:
+        # Exponential moving average over the full parameter set.
+        new_state = dataclasses.replace(
+            new_state,
+            ema_params=jax.tree.map(
+                lambda old, new: state.ema_decay * old + (1 - state.ema_decay) * new, state.ema_params, new_params
+            ),
+        )
+
+    # Filter out params that aren't kernels.
+    kernel_params = nnx.state(
+        model,
+        nnx.All(
+            nnx.Param,
+            nnx.Not(nnx_utils.PathRegex(".*/(bias|scale|pos_embedding|input_embedding)")),
+            lambda _, x: x.value.ndim > 1,
+        ),
+    )
+    info = {
+        "loss": loss,
+        "grad_norm": optax.global_norm(grads),
+        "param_norm": optax.global_norm(kernel_params),
+    }
+    return new_state, info
+
+
+def main(config: _config.TrainConfig):
+    """Entry point: sets up sharding, data, and train state, then runs training.
+
+    Args:
+        config: Fully-resolved training configuration.
+
+    Raises:
+        ValueError: If the global batch size is not divisible by the device count.
+    """
+    init_logging()
+    logging.info(f"Running on: {platform.node()}")
+
+    if config.batch_size % jax.device_count() != 0:
+        raise ValueError(
+            f"Batch size {config.batch_size} must be divisible by the number of devices {jax.device_count()}."
+        )
+
+    # Persistent compilation cache avoids re-jitting across process restarts.
+    jax.config.update("jax_compilation_cache_dir", str(epath.Path("~/.cache/jax").expanduser()))
+
+    rng = jax.random.key(config.seed)
+    train_rng, init_rng = jax.random.split(rng)
+
+    # Batches are sharded along the data axis; small values are fully replicated.
+    mesh = sharding.make_mesh(config.fsdp_devices)
+    data_sharding = jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec(sharding.DATA_AXIS))
+    replicated_sharding = jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec())
+
+    checkpoint_manager, resuming = _checkpoints.initialize_checkpoint_dir(
+        config.checkpoint_dir,
+        keep_period=config.keep_period,
+        overwrite=config.overwrite,
+        resume=config.resume,
+    )
+    init_wandb(config, resuming=resuming, enabled=config.wandb_enabled)
+
+    # Fall back to TensorBoard when WandB logging is disabled.
+    writer = None
+    if not config.wandb_enabled:
+        log_dir = config.checkpoint_dir / "tensorboard"
+        logging.info(f"WandB disabled. Using TensorBoard logging to {log_dir}")
+        writer = SummaryWriter(log_dir=str(log_dir))
+
+    data_loader = _data_loader.create_data_loader(
+        config,
+        sharding=data_sharding,
+        num_workers=config.num_workers,
+        shuffle=True,
+    )
+    data_iter = iter(data_loader)
+    # Pull one batch up front so shapes are known before jit compilation.
+    batch = next(data_iter)
+    logging.info(f"Initialized data loader:\n{training_utils.array_tree_to_info(batch)}")
+
+    train_state, train_state_sharding = init_train_state(config, init_rng, mesh, resume=resuming)
+    jax.block_until_ready(train_state)
+    logging.info(f"Initialized train state:\n{training_utils.array_tree_to_info(train_state.params)}")
+
+    if resuming:
+        train_state = _checkpoints.restore_state(checkpoint_manager, train_state, data_loader)
+
+    # Static decode positions; computed once and baked into the jitted step.
+    decode_indices = create_decode_indices(config)
+
+    ptrain_step = jax.jit(
+        functools.partial(train_step, config, decode_indices=decode_indices),
+        in_shardings=(replicated_sharding, train_state_sharding, data_sharding),
+        out_shardings=(train_state_sharding, replicated_sharding),
+        # Donate the train-state buffers (positional arg 1) to reduce peak memory.
+        donate_argnums=(1,),
+    )
+
+    start_step = int(train_state.step)
+    pbar = tqdm.tqdm(
+        range(start_step, config.num_train_steps),
+        initial=start_step,
+        total=config.num_train_steps,
+        dynamic_ncols=True,
+    )
+
+    infos = []
+    for step in pbar:
+        with sharding.set_mesh(mesh):
+            train_state, info = ptrain_step(train_rng, train_state, batch)
+        infos.append(info)
+        if step % config.log_interval == 0:
+            # Average the per-step metrics accumulated since the last log.
+            stacked_infos = common_utils.stack_forest(infos)
+            reduced_info = jax.device_get(jax.tree.map(jnp.mean, stacked_infos))
+            info_str = ", ".join(f"{k}={v}" for k, v in reduced_info.items())
+            pbar.write(f"Step {step}: {info_str}")
+            if config.wandb_enabled:
+                wandb.log(reduced_info, step=step)
+            elif writer is not None:
+                for k, v in reduced_info.items():
+                    writer.add_scalar(k, v, step)
+            infos = []
+        batch = next(data_iter)
+
+        if (step % config.save_interval == 0 and step > start_step) or step == config.num_train_steps - 1:
+            _checkpoints.save_state(checkpoint_manager, train_state, data_loader, step)
+
+    logging.info("Waiting for checkpoint manager to finish")
+    checkpoint_manager.wait_until_finished()
+
+
+if __name__ == "__main__":
+ main(_config.cli())
diff --git a/scripts/train_pi0_fast_ricl_libero_80k.slurm b/scripts/train_pi0_fast_ricl_libero_80k.slurm
new file mode 100644
index 0000000000000000000000000000000000000000..81b847e56250b47ffb60444b0acd9f3026321ca8
--- /dev/null
+++ b/scripts/train_pi0_fast_ricl_libero_80k.slurm
@@ -0,0 +1,69 @@
+#!/bin/bash
+#SBATCH --job-name=ricl_libero_80k
+#SBATCH --output=logs/ricl_libero_80k_%j.out
+#SBATCH --error=logs/ricl_libero_80k_%j.err
+#SBATCH --nodes=1
+#SBATCH --ntasks=1
+#SBATCH --gres=gpu:4
+#SBATCH --cpus-per-task=32
+#SBATCH --time=48:00:00
+#SBATCH --mem=400G
+#SBATCH --partition=kisski
+
+# Activate Conda environment
+source /projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/etc/profile.d/conda.sh
+conda activate pitorch_doanh
+
+# Set PYTHONPATH to include the current directory
+export PYTHONPATH=/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi/src:$PYTHONPATH
+
+# Workdir
+cd /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi
+
+# Create the logs directory used by the #SBATCH --output/--error paths above.
+# NOTE: Slurm opens the output files when the job starts, so logs/ must already
+# exist at submission time; this mkdir only guarantees it for later submissions.
+mkdir -p logs
+
+# Configuration
+EXP_NAME="ricl_libero_lambda10_80k_bs16"
+PROJECT_NAME="ricl_libero_bs16"
+LIBERO_DATA_DIR="/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10"
+LIBERO_CONTEXT_DIR="rag/ricl_training_context_libero_10_test/"
+
+# Hyperparameters (Recommended Config)
+LAMBDA=10.0
+ACTION_HORIZON=15  # Reduced from 50 to match DROID (prevents token overflow)
+NUM_RETRIEVED=1
+ACTION_DIM=7
+MAX_TOKEN_LEN=250
+
+# Training parameters
+BATCH_SIZE=16  # Global batch size (4 per GPU for 4 GPUs)
+NUM_STEPS=80000
+SAVE_INTERVAL=10000
+LOG_INTERVAL=100
+
+echo "Starting training with Experiment Name: ${EXP_NAME}"
+echo "Data Dir: ${LIBERO_DATA_DIR}"
+echo "Context Dir: ${LIBERO_CONTEXT_DIR}"
+
+python scripts/train_pi0_fast_ricl.py pi0_fast_ricl_libero \
+    --exp-name="${EXP_NAME}" \
+    --project-name="${PROJECT_NAME}" \
+    --libero-data-dir="${LIBERO_DATA_DIR}" \
+    --libero-context-dir="${LIBERO_CONTEXT_DIR}" \
+    --model.lamda=${LAMBDA} \
+    --model.use-action-interpolation \
+    --model.num-retrieved-observations=${NUM_RETRIEVED} \
+    --model.action-horizon=${ACTION_HORIZON} \
+    --model.action-dim=${ACTION_DIM} \
+    --model.max-token-len=${MAX_TOKEN_LEN} \
+    --batch-size=${BATCH_SIZE} \
+    --num-train-steps=${NUM_STEPS} \
+    --save-interval=${SAVE_INTERVAL} \
+    --log-interval=${LOG_INTERVAL} \
+    --lr-schedule.peak-lr=1e-4 \
+    --seed=42 \
+    --no-wandb-enabled \
+    --overwrite \
+    --no-resume \
+    --fsdp_devices=4
diff --git a/scripts/train_pi0_fast_ricl_libero_80k_2nn.slurm b/scripts/train_pi0_fast_ricl_libero_80k_2nn.slurm
new file mode 100644
index 0000000000000000000000000000000000000000..537920356152458e239759bb2b94b10e91c4efdd
--- /dev/null
+++ b/scripts/train_pi0_fast_ricl_libero_80k_2nn.slurm
@@ -0,0 +1,70 @@
+#!/bin/bash
+#SBATCH --job-name=ricl_libero_2nn_80k
+#SBATCH --output=logs/ricl_libero_2nn_80k_%j.out
+#SBATCH --error=logs/ricl_libero_2nn_80k_%j.err
+#SBATCH --mem=128G
+#SBATCH --cpus-per-task=32
+#SBATCH --partition=kisski
+#SBATCH --time=48:00:00
+#SBATCH --gres=gpu:4
+
+# Environment setup
+source /projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/etc/profile.d/conda.sh
+conda activate pitorch_doanh
+
+export PYTHONPATH=/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi/src:$PYTHONPATH
+
+# Create the logs directory used by the #SBATCH --output/--error paths above.
+# NOTE: Slurm opens the output files when the job starts, so logs/ must already
+# exist at submission time; this mkdir only guarantees it for later submissions.
+mkdir -p logs
+
+# Configuration
+EXP_NAME="ricl_libero_lambda10_80k_2nn_fsdp"
+PROJECT_NAME="ricl_libero_2nn"
+LIBERO_DATA_DIR="/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10"
+LIBERO_CONTEXT_DIR="rag/ricl_training_context_libero_10_test/"
+
+# Model hyperparameters
+LAMBDA=10.0
+NUM_RETRIEVED=2  # Increased to 2 neighbors
+ACTION_HORIZON=50
+ACTION_DIM=7
+MAX_TOKEN_LEN=250
+
+# Training parameters
+BATCH_SIZE=16  # Global batch size (4 per GPU for 4 GPUs)
+NUM_STEPS=80000
+SAVE_INTERVAL=10000
+LOG_INTERVAL=100
+
+echo "Starting training job: $SLURM_JOB_ID"
+echo "Experiment: $EXP_NAME"
+echo "Batch Size: $BATCH_SIZE"
+echo "Num Retrieved: $NUM_RETRIEVED"
+
+cd /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi
+
+# Run training
+# Note: FSDP is enabled to handle memory usage
+python scripts/train_pi0_fast_ricl.py pi0_fast_ricl_libero \
+    --exp-name="$EXP_NAME" \
+    --project-name="$PROJECT_NAME" \
+    --libero-data-dir="$LIBERO_DATA_DIR" \
+    --libero-context-dir="$LIBERO_CONTEXT_DIR" \
+    --model.lamda=$LAMBDA \
+    --model.use-action-interpolation \
+    --model.num-retrieved-observations=$NUM_RETRIEVED \
+    --model.action-horizon=$ACTION_HORIZON \
+    --model.action-dim=$ACTION_DIM \
+    --model.max-token-len=$MAX_TOKEN_LEN \
+    --batch-size=$BATCH_SIZE \
+    --num-train-steps=$NUM_STEPS \
+    --save-interval=$SAVE_INTERVAL \
+    --log-interval=$LOG_INTERVAL \
+    --lr-schedule.peak-lr=1e-4 \
+    --seed=42 \
+    --no-wandb-enabled \
+    --overwrite \
+    --no-resume \
+    --fsdp_devices=4
+
+echo "Training completed"
diff --git a/scripts/train_test.py b/scripts/train_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a1e1fd6e0c8549e3d4117c240a171a398afa7c8
--- /dev/null
+++ b/scripts/train_test.py
@@ -0,0 +1,30 @@
+import dataclasses
+import os
+import pathlib
+
+import pytest
+
+os.environ["JAX_PLATFORMS"] = "cpu"
+
+from openpi.training import config as _config
+
+from . import train
+
+
+@pytest.mark.parametrize("config_name", ["debug"])
+def test_train(tmp_path: pathlib.Path, config_name: str):
+    """Smoke-tests train.main: a short fresh run, then a resumed continuation.
+
+    Uses a tiny batch size and step count so the test stays fast on CPU
+    (JAX_PLATFORMS=cpu is forced at module import time).
+    """
+    config = dataclasses.replace(
+        _config._CONFIGS_DICT[config_name],  # noqa: SLF001
+        batch_size=2,
+        checkpoint_base_dir=tmp_path / "checkpoint",
+        exp_name="test",
+        overwrite=False,
+        resume=False,
+        num_train_steps=2,
+        log_interval=1,
+    )
+    train.main(config)
+
+    # test resuming from the checkpoint written by the first run
+    config = dataclasses.replace(config, resume=True, num_train_steps=4)
+    train.main(config)
diff --git a/scripts/verify_ricl_training_setup.py b/scripts/verify_ricl_training_setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7ba088e20b939799a033a01001f92f3cfd70388
--- /dev/null
+++ b/scripts/verify_ricl_training_setup.py
@@ -0,0 +1,514 @@
+#!/usr/bin/env python3
+"""
+RICL Training Verification Script
+Run this BEFORE training to ensure everything is configured correctly.
+
+Usage:
+ cd /projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi
+ python scripts/verify_ricl_training_setup.py
+"""
+
+import os
+import sys
+import json
+import numpy as np
+from pathlib import Path
+
+# ============================================================
+# CONFIGURATION
+# ============================================================
+# Hard-coded cluster paths; adjust here if the deployment layout moves.
+WORK_DIR = "/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/ricl_openpi"
+DATA_DIR = "/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10"
+CONTEXT_DIR = os.path.join(WORK_DIR, "rag/ricl_training_context_libero_10_test")
+CHECKPOINT_PATH = os.path.join(WORK_DIR, "pi0_fast_base_params")
+ASSETS_DIR = os.path.join(WORK_DIR, "assets")
+
+# Make the repo importable (openpi.*) without installing it as a package.
+sys.path.insert(0, WORK_DIR)
+
+# Running pass/fail tallies; only updated via check_pass/check_fail below.
+passed = 0
+failed = 0
+
+def check_pass(msg):
+    # Record a passing check and print it with a check mark.
+    global passed
+    passed += 1
+    print(f" ✓ {msg}")
+
+def check_fail(msg):
+    # Record a failing check and print it with a cross mark.
+    global failed
+    failed += 1
+    print(f" ❌ {msg}")
+
+# ============================================================
+# CHECK 1: Verify all paths exist
+# ============================================================
+print("=" * 60)
+print("CHECK 1: Verify all paths exist")
+print("=" * 60)
+
+paths_to_check = {
+ "WORK_DIR": WORK_DIR,
+ "DATA_DIR": DATA_DIR,
+ "CONTEXT_DIR": CONTEXT_DIR,
+ "CHECKPOINT_PATH (pi0_fast_base_params)": CHECKPOINT_PATH,
+ "ASSETS_DIR": ASSETS_DIR,
+}
+
+for name, path in paths_to_check.items():
+ if os.path.exists(path):
+ check_pass(f"{name}: {path}")
+ else:
+ check_fail(f"{name} MISSING: {path}")
+print()
+
+# ============================================================
+# CHECK 2: Verify JAX/Orbax checkpoint structure
+# ============================================================
+print("=" * 60)
+print("CHECK 2: Verify base model checkpoint (JAX/Orbax)")
+print("=" * 60)
+
+required_ckpt_files = ["_METADATA", "_sharding", "manifest.ocdbt"]
+for f in required_ckpt_files:
+ fpath = os.path.join(CHECKPOINT_PATH, f)
+ if os.path.exists(fpath):
+ size = os.path.getsize(fpath)
+ check_pass(f"{f}: {size} bytes")
+ else:
+ check_fail(f"{f} MISSING")
+
+ocdbt_dir = os.path.join(CHECKPOINT_PATH, "ocdbt.process_0")
+if os.path.exists(ocdbt_dir):
+ num_files = len(os.listdir(ocdbt_dir))
+ check_pass(f"ocdbt.process_0/: {num_files} shard files")
+else:
+ check_fail("ocdbt.process_0/ directory MISSING")
+print()
+
+# ============================================================
+# CHECK 3: Verify RICL context directory
+# ============================================================
+print("=" * 60)
+print("CHECK 3: Verify RICL retrieval context")
+print("=" * 60)
+
+required_context_files = {
+ "nn_indices.npy": "Nearest neighbor indices",
+ "nn_distances.npy": "Nearest neighbor distances",
+ "actions.npy": "Action chunks",
+ "states.npy": "State vectors",
+ "metadata.json": "Frame metadata",
+ "embeddings.npy": "Visual embeddings",
+ "index.faiss": "FAISS index",
+}
+
+for fname, desc in required_context_files.items():
+ fpath = os.path.join(CONTEXT_DIR, fname)
+ if os.path.exists(fpath):
+ size_mb = os.path.getsize(fpath) / 1e6
+ check_pass(f"{fname} ({desc}): {size_mb:.1f} MB")
+ else:
+ check_fail(f"{fname} ({desc}) MISSING")
+print()
+
+# ============================================================
+# CHECK 4: Load and validate context data shapes
+# ============================================================
+print("=" * 60)
+print("CHECK 4: Validate context data shapes and contents")
+print("=" * 60)
+
+try:
+ nn_indices = np.load(os.path.join(CONTEXT_DIR, "nn_indices.npy"))
+ nn_distances = np.load(os.path.join(CONTEXT_DIR, "nn_distances.npy"))
+ actions = np.load(os.path.join(CONTEXT_DIR, "actions.npy"))
+ states = np.load(os.path.join(CONTEXT_DIR, "states.npy"))
+
+ with open(os.path.join(CONTEXT_DIR, "metadata.json"), "r") as f:
+ metadata = json.load(f)
+
+ num_frames = len(metadata)
+ print(f" Total frames: {num_frames}")
+ print(f" nn_indices shape: {nn_indices.shape}")
+ print(f" nn_distances shape: {nn_distances.shape}")
+ print(f" actions shape: {actions.shape}")
+ print(f" states shape: {states.shape}")
+
+ # Validate shapes match
+ if nn_indices.shape[0] == num_frames:
+ check_pass(f"nn_indices rows ({nn_indices.shape[0]}) match metadata ({num_frames})")
+ else:
+ check_fail(f"nn_indices rows ({nn_indices.shape[0]}) != metadata ({num_frames})")
+
+ if nn_distances.shape[0] == num_frames:
+ check_pass(f"nn_distances rows ({nn_distances.shape[0]}) match metadata ({num_frames})")
+ else:
+ check_fail(f"nn_distances rows ({nn_distances.shape[0]}) != metadata ({num_frames})")
+
+ if actions.shape[0] == num_frames:
+ check_pass(f"actions rows ({actions.shape[0]}) match metadata ({num_frames})")
+ else:
+ check_fail(f"actions rows ({actions.shape[0]}) != metadata ({num_frames})")
+
+ # Check action dimensions
+ action_dim = actions.shape[-1] if len(actions.shape) > 1 else 0
+ print(f" Action dimension: {action_dim}")
+ if action_dim == 7:
+ check_pass(f"Action dimension is 7 (LIBERO standard)")
+ else:
+ check_fail(f"Action dimension is {action_dim}, expected 7")
+
+ # Action horizon
+ if len(actions.shape) == 3:
+ action_horizon = actions.shape[1]
+ print(f" Action horizon: {action_horizon}")
+
+ # Check nn_indices are valid (no out-of-range)
+ max_idx = nn_indices.max()
+ if max_idx < num_frames:
+ check_pass(f"nn_indices max ({max_idx}) < num_frames ({num_frames})")
+ else:
+ check_fail(f"nn_indices max ({max_idx}) >= num_frames ({num_frames}) - OUT OF RANGE!")
+
+ # Check no self-retrieval (query != demo)
+ if nn_indices.shape[1] >= 1:
+ self_retrieval_count = np.sum(nn_indices[:, 0] == np.arange(num_frames))
+ self_pct = 100 * self_retrieval_count / num_frames
+ if self_pct < 5:
+ check_pass(f"Self-retrieval rate: {self_pct:.1f}% (low, good)")
+ else:
+ check_fail(f"Self-retrieval rate: {self_pct:.1f}% (HIGH - might be a bug!)")
+
+ # Distance statistics
+ top1_dist = nn_distances[:, 0]
+ print(f"\n Top-1 distance statistics:")
+ print(f" Min: {top1_dist.min():.4f}")
+ print(f" Max: {top1_dist.max():.4f}")
+ print(f" Mean: {top1_dist.mean():.4f}")
+ print(f" Median: {np.median(top1_dist):.4f}")
+ print(f" Std: {top1_dist.std():.4f}")
+
+ if top1_dist.mean() > 0:
+ check_pass(f"Distances are non-zero (mean={top1_dist.mean():.4f})")
+ else:
+ check_fail(f"Distances are all zero - retrieval might be broken!")
+
+except Exception as e:
+ check_fail(f"Error loading context: {e}")
+ import traceback
+ traceback.print_exc()
+
+print()
+
+# ============================================================
+# CHECK 5: Verify dataset structure
+# ============================================================
+print("=" * 60)
+print("CHECK 5: Verify dataset structure")
+print("=" * 60)
+
+meta_path = os.path.join(DATA_DIR, "meta")
+if os.path.exists(meta_path):
+ info_path = os.path.join(meta_path, "info.json")
+ if os.path.exists(info_path):
+ with open(info_path, "r") as f:
+ info = json.load(f)
+ check_pass(f"Dataset info found")
+ print(f" Total episodes: {info.get('total_episodes', 'N/A')}")
+ print(f" Total frames: {info.get('total_frames', 'N/A')}")
+
+ features = info.get("features", {})
+ image_keys = [k for k in features if "image" in k.lower() and "mask" not in k.lower() and "depth" not in k.lower()]
+ print(f" RGB image keys: {image_keys}")
+
+ # Check video files exist
+ video_dir = os.path.join(DATA_DIR, "videos", "chunk-000")
+ if os.path.exists(video_dir):
+ check_pass(f"Video directory exists: {video_dir}")
+ for img_key in ["observation.images.image", "observation.images.wrist_image"]:
+ key_dir = os.path.join(video_dir, img_key)
+ if os.path.exists(key_dir):
+ num_vids = len([f for f in os.listdir(key_dir) if f.endswith(".mp4")])
+ check_pass(f"{img_key}: {num_vids} video files")
+ else:
+ check_fail(f"{img_key} video directory MISSING")
+ else:
+ check_fail(f"Video directory MISSING: {video_dir}")
+ else:
+ check_fail(f"info.json MISSING")
+else:
+ check_fail(f"meta directory MISSING")
+
+# Check tasks.jsonl
+tasks_file = os.path.join(DATA_DIR, "meta", "tasks.jsonl")
+if os.path.exists(tasks_file):
+ task_count = 0
+ with open(tasks_file, "r") as f:
+ for line in f:
+ task_count += 1
+ check_pass(f"tasks.jsonl: {task_count} tasks")
+else:
+ check_fail(f"tasks.jsonl MISSING")
+
+print()
+
+# ============================================================
+# CHECK 6: Retrieval quality debug (sample pairs)
+# ============================================================
+print("=" * 60)
+print("CHECK 6: Retrieval quality debug (sample pairs)")
+print("=" * 60)
+
+try:
+ import random
+ random.seed(42)
+
+ # Load task mapping
+ task_mapping = {}
+ with open(os.path.join(DATA_DIR, "meta", "tasks.jsonl"), "r") as f:
+ for line in f:
+ item = json.loads(line)
+ if "task_index" in item and "task" in item:
+ task_mapping[item["task_index"]] = item["task"]
+
+ # Load parquet for task indices
+ import pandas as pd
+ parquet_files = sorted(list(Path(DATA_DIR, "data").rglob("*.parquet")))
+ dfs = [pd.read_parquet(f) for f in parquet_files[:5]] # Load a subset for speed
+ df = pd.concat(dfs, ignore_index=True)
+
+ # Sample and display pairs
+ num_samples = 5
+ sample_indices = random.sample(range(min(len(metadata), len(df))), min(num_samples, len(metadata), len(df)))
+
+ same_task_count = 0
+ for i, query_idx in enumerate(sample_indices):
+ demo_idx = int(nn_indices[query_idx, 0])
+ distance = nn_distances[query_idx, 0]
+
+ query_meta = metadata[query_idx]
+ demo_meta = metadata[demo_idx]
+
+ # Get tasks
+ query_global = query_meta.get("global_frame_idx", query_idx)
+ demo_global = demo_meta.get("global_frame_idx", demo_idx)
+
+ query_task_idx = -1
+ demo_task_idx = -1
+ if query_global < len(df):
+ query_row = df.iloc[query_global]
+ query_task_idx = int(query_row.get("task_index", -1))
+ if demo_global < len(df):
+ demo_row = df.iloc[demo_global]
+ demo_task_idx = int(demo_row.get("task_index", -1))
+
+ query_task = task_mapping.get(query_task_idx, "Unknown")
+ demo_task = task_mapping.get(demo_task_idx, "Unknown")
+ same_task = query_task_idx == demo_task_idx
+ if same_task:
+ same_task_count += 1
+
+ # Action similarity
+ action_mse = np.mean((actions[query_idx] - actions[demo_idx]) ** 2)
+
+ print(f"\n Pair {i+1}/{num_samples}:")
+ print(f" Query: ep={query_meta.get('episode_idx', '?')}, frame={query_meta.get('frame_idx', '?')}")
+ print(f" Task: '{query_task[:70]}'")
+ print(f" Demo: ep={demo_meta.get('episode_idx', '?')}, frame={demo_meta.get('frame_idx', '?')}")
+ print(f" Task: '{demo_task[:70]}'")
+ print(f" Distance: {distance:.4f} | Action MSE: {action_mse:.4f} | Same task: {same_task}")
+
+ same_task_pct = 100 * same_task_count / num_samples
+ print(f"\n Same-task retrieval rate (in sample): {same_task_pct:.0f}%")
+ if same_task_pct >= 50:
+ check_pass(f"Same-task retrieval is reasonable ({same_task_pct:.0f}%)")
+ else:
+ print(f" ⚠ Low same-task retrieval - this could indicate cross-task retrieval (may be intended)")
+
+except Exception as e:
+ check_fail(f"Error in retrieval debug: {e}")
+ import traceback
+ traceback.print_exc()
+
+print()
+
+# ============================================================
+# CHECK 7: Verify RiclLiberoDataset can load
+# ============================================================
+print("=" * 60)
+print("CHECK 7: Verify RiclLiberoDataset loads correctly")
+print("=" * 60)
+
+try:
+ from openpi.data.ricl_libero_dataset import RiclLiberoDataset
+
+ TARGET_ACTION_HORIZON = 10 # Must match pi0fast-LIBERO default
+
+ dataset = RiclLiberoDataset(
+ data_dir=DATA_DIR,
+ context_dir=CONTEXT_DIR,
+ action_horizon=TARGET_ACTION_HORIZON, # Truncate from precomputed 50 → 10
+ use_action_interpolation=True,
+ lambda_decay=10.0,
+ num_retrieved_observations=1,
+ )
+
+ check_pass(f"Dataset created successfully with {len(dataset)} samples")
+
+ # Try loading one sample
+ print(" Loading sample [0]...")
+ sample = dataset[0]
+ print(f" Sample keys: {sorted(sample.keys())}")
+
+ # Check expected keys (matching actual RiclLiberoDataset output format)
+ expected_keys = [
+ "query_observation.images.image",
+ "query_observation.images.wrist_image",
+ "query_observation.state",
+ "query_actions",
+ "query_prompt",
+ ]
+ for key in expected_keys:
+ if key in sample:
+ val = sample[key]
+ if isinstance(val, np.ndarray):
+ check_pass(f"{key}: shape={val.shape}, dtype={val.dtype}")
+ else:
+ check_pass(f"{key}: type={type(val).__name__}")
+ else:
+ check_fail(f"{key} MISSING from sample")
+
+ # Check demo keys
+ demo_keys = [k for k in sample.keys() if "retrieved" in k or "demo" in k]
+ print(f" Demo keys: {demo_keys}")
+ for key in demo_keys:
+ val = sample[key]
+ if isinstance(val, np.ndarray):
+ check_pass(f"{key}: shape={val.shape}, dtype={val.dtype}")
+ else:
+ check_pass(f"{key}: type={type(val).__name__}")
+
+ # Check interpolation weights
+ interp_keys = [k for k in sample.keys() if "lamda" in k or "lambda" in k or "interp" in k]
+ if interp_keys:
+ for key in interp_keys:
+ val = sample[key]
+ if isinstance(val, np.ndarray):
+ print(f" {key}: shape={val.shape}, range=[{val.min():.4f}, {val.max():.4f}]")
+ else:
+ print(f" {key}: {val}")
+ check_pass("Interpolation weights present")
+ else:
+ print(f" ⚠ No interpolation weight keys found (checked: lamda, lambda, interp)")
+
+ # ============================================================
+ # CHECK 7b: Verify action truncation (50 → 10)
+ # ============================================================
+ print()
+ print("=" * 60)
+ print(f"CHECK 7b: Verify action truncation (precomputed=50 → target={TARGET_ACTION_HORIZON})")
+ print("=" * 60)
+
+ # Precomputed actions shape
+ precomputed_horizon = actions.shape[1]
+ print(f" Precomputed context action_horizon: {precomputed_horizon}")
+ print(f" Target action_horizon: {TARGET_ACTION_HORIZON}")
+
+ # Check query_actions shape
+ query_actions = sample["query_actions"]
+ if query_actions.shape[0] == TARGET_ACTION_HORIZON:
+ check_pass(f"query_actions truncated correctly: shape={query_actions.shape} (horizon={TARGET_ACTION_HORIZON})")
+ else:
+ check_fail(f"query_actions NOT truncated: shape={query_actions.shape} (expected horizon={TARGET_ACTION_HORIZON})")
+
+ # Check demo actions shape
+ demo_actions = sample["retrieved_0_actions"]
+ if demo_actions.shape[0] == TARGET_ACTION_HORIZON:
+ check_pass(f"retrieved_0_actions truncated correctly: shape={demo_actions.shape} (horizon={TARGET_ACTION_HORIZON})")
+ else:
+ check_fail(f"retrieved_0_actions NOT truncated: shape={demo_actions.shape} (expected horizon={TARGET_ACTION_HORIZON})")
+
+ # Verify truncation preserves data (first 10 of 50 should match)
+ raw_demo_actions_full = actions[int(nn_indices[0, 0])] # Full 50-step from context
+ raw_demo_truncated = raw_demo_actions_full[:TARGET_ACTION_HORIZON]
+ if np.allclose(demo_actions, raw_demo_truncated, atol=1e-5):
+ check_pass(f"Truncated demo actions match first {TARGET_ACTION_HORIZON} steps of precomputed context")
+ else:
+ check_fail(f"Truncated demo actions DO NOT match precomputed context first {TARGET_ACTION_HORIZON} steps!")
+
+ # Verify multiple samples to ensure consistency
+ num_verify = 5
+ all_correct = True
+ for vi in range(1, min(num_verify + 1, len(dataset))):
+ s = dataset[vi]
+ if s["query_actions"].shape[0] != TARGET_ACTION_HORIZON:
+ all_correct = False
+ break
+ if s["retrieved_0_actions"].shape[0] != TARGET_ACTION_HORIZON:
+ all_correct = False
+ break
+ if all_correct:
+ check_pass(f"Truncation verified across {num_verify} additional samples")
+ else:
+ check_fail(f"Truncation inconsistent across samples!")
+
+except Exception as e:
+ check_fail(f"Error loading dataset: {e}")
+ import traceback
+ traceback.print_exc()
+
+print()
+
+# ============================================================
+# CHECK 8: Verify config resolution
+# ============================================================
+print("=" * 60)
+print("CHECK 8: Verify training config resolution")
+print("=" * 60)
+
+try:
+ import etils.epath as epath
+
+ # Check PI0_FAST_BASE_CHECKPOINT resolution
+ local_ckpt = epath.Path("pi0_fast_base_params")
+ if local_ckpt.exists():
+ check_pass(f"PI0_FAST_BASE_CHECKPOINT resolves to LOCAL: pi0_fast_base_params")
+ else:
+ check_fail(f"pi0_fast_base_params NOT found locally - will try S3 (requires network!)")
+
+ # Check assets
+ assets_franka = os.path.join(WORK_DIR, "pi0_fast_base", "assets", "franka")
+ if os.path.exists(assets_franka):
+ check_pass(f"Assets dir (franka) exists")
+ else:
+ # Check alternative location
+ assets_alt = os.path.join(WORK_DIR, "assets")
+ if os.path.exists(assets_alt):
+ check_pass(f"Assets base dir exists: {assets_alt}")
+ else:
+ check_fail(f"No assets directory found")
+
+except Exception as e:
+ check_fail(f"Config check error: {e}")
+
+print()
+
+# ============================================================
+# SUMMARY
+# ============================================================
+print("=" * 60)
+print("VERIFICATION COMPLETE")
+print("=" * 60)
+total = passed + failed
+print(f"\n ✓ Passed: {passed}/{total}")
+print(f" ❌ Failed: {failed}/{total}")
+
+if failed == 0:
+ print("\n 🎉 ALL CHECKS PASSED! Ready to train.")
+else:
+ print(f"\n ⚠ {failed} check(s) failed. Fix before training.")
+
+print(f"""
+To train RICL, submit:
+ cd {WORK_DIR}
+ sbatch slurm/train_ricl_libero.slurm
+""")
diff --git a/src/openpi/__pycache__/__init__.cpython-313.pyc b/src/openpi/__pycache__/__init__.cpython-313.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a6e38c225d4c8fd9ed3adc29567580fd1c2bf5b5
Binary files /dev/null and b/src/openpi/__pycache__/__init__.cpython-313.pyc differ
diff --git a/src/openpi/data/__pycache__/ricl_libero_dataset.cpython-310.pyc b/src/openpi/data/__pycache__/ricl_libero_dataset.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6ad6c1a4a4acfd5fa547025b60399306b1fef3a5
Binary files /dev/null and b/src/openpi/data/__pycache__/ricl_libero_dataset.cpython-310.pyc differ
diff --git a/src/openpi/data/__pycache__/ricl_libero_dataset.cpython-313.pyc b/src/openpi/data/__pycache__/ricl_libero_dataset.cpython-313.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bbe60b3800ea46ac3f9ae75eede6edb8a444ee1b
Binary files /dev/null and b/src/openpi/data/__pycache__/ricl_libero_dataset.cpython-313.pyc differ
diff --git a/src/openpi/models/__pycache__/__init__.cpython-310.pyc b/src/openpi/models/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3e5bd1cabcaad14fb5ef1b5fb43a430156d99767
Binary files /dev/null and b/src/openpi/models/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/openpi/models/__pycache__/gemma.cpython-310.pyc b/src/openpi/models/__pycache__/gemma.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a3f8c87f14681f18c3c8cd28cbe4dd224e67a13c
Binary files /dev/null and b/src/openpi/models/__pycache__/gemma.cpython-310.pyc differ
diff --git a/src/openpi/models/__pycache__/gemma_fast.cpython-310.pyc b/src/openpi/models/__pycache__/gemma_fast.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f01063d26530842b5156eeaefbf004693a57030c
Binary files /dev/null and b/src/openpi/models/__pycache__/gemma_fast.cpython-310.pyc differ
diff --git a/src/openpi/models/__pycache__/lora.cpython-310.pyc b/src/openpi/models/__pycache__/lora.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..53c3414e2a1d09931601bc65b1a446dc13971b7e
Binary files /dev/null and b/src/openpi/models/__pycache__/lora.cpython-310.pyc differ
diff --git a/src/openpi/models/__pycache__/model.cpython-310.pyc b/src/openpi/models/__pycache__/model.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..df92145a2df7b1ee8c8769f26e1984f9cfd29933
Binary files /dev/null and b/src/openpi/models/__pycache__/model.cpython-310.pyc differ
diff --git a/src/openpi/models/__pycache__/pi0.cpython-310.pyc b/src/openpi/models/__pycache__/pi0.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ad9bceabdc2913d18f41a36759229c5c4d7541cc
Binary files /dev/null and b/src/openpi/models/__pycache__/pi0.cpython-310.pyc differ
diff --git a/src/openpi/models/__pycache__/pi0_fast.cpython-310.pyc b/src/openpi/models/__pycache__/pi0_fast.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..83af0ca100cadf72ef9db8926040f52d24b9f655
Binary files /dev/null and b/src/openpi/models/__pycache__/pi0_fast.cpython-310.pyc differ
diff --git a/src/openpi/models/__pycache__/pi0_fast_ricl.cpython-310.pyc b/src/openpi/models/__pycache__/pi0_fast_ricl.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..641520c7a76cd3ee3d9f12e618c13faa2dc1497f
Binary files /dev/null and b/src/openpi/models/__pycache__/pi0_fast_ricl.cpython-310.pyc differ
diff --git a/src/openpi/models/__pycache__/siglip.cpython-310.pyc b/src/openpi/models/__pycache__/siglip.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..57cee3e5a3180c0e41ef33d1ff90399c9014efe0
Binary files /dev/null and b/src/openpi/models/__pycache__/siglip.cpython-310.pyc differ
diff --git a/src/openpi/models/__pycache__/tokenizer.cpython-310.pyc b/src/openpi/models/__pycache__/tokenizer.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..55c41c083dacfa2afee8d73793c6c1e00ed4d3e8
Binary files /dev/null and b/src/openpi/models/__pycache__/tokenizer.cpython-310.pyc differ
diff --git a/src/openpi/models/pi0_fast.py b/src/openpi/models/pi0_fast.py
new file mode 100644
index 0000000000000000000000000000000000000000..2eed3d56553db4c070b9c547a8467b50393a9111
--- /dev/null
+++ b/src/openpi/models/pi0_fast.py
@@ -0,0 +1,326 @@
+import dataclasses
+import logging
+
+import einops
+import flax.nnx as nnx
+import flax.nnx.bridge as nnx_bridge
+import jax
+import jax.numpy as jnp
+from typing_extensions import override
+
+from openpi.models import model as _model
+import openpi.models.gemma_fast as _gemma
+import openpi.models.siglip as _siglip
+from openpi.shared import array_typing as at
+import openpi.shared.nnx_utils as nnx_utils
+
+logger = logging.getLogger("openpi")
+
+PALIGEMMA_EOS_TOKEN = 1
+
+
def make_attn_mask(input_mask, mask_ar):
    """Adapted from big_vision.

    Builds a bool[B, N, N] attention mask from a validity mask and an
    autoregressive block mask. Tokens can attend to valid input tokens whose
    cumulative `mask_ar` is smaller or equal to theirs, so `mask_ar` bool[?B, N]
    can set up several attention patterns, for example:

      [[1 1 1 1 1 1]]: pure causal attention.

      [[0 0 0 1 1 1]]: prefix-lm attention. The first 3 tokens can attend
        between themselves and the last 3 tokens have a causal attention.
        The first entry could also be a 1 without changing behaviour.

      [[1 0 1 0 1 0 0 1 0 0]]: causal attention between 4 blocks. Tokens of a
        block can attend all previous blocks and all tokens on the same block.

    Args:
        input_mask: bool[B, N] true if its part of the input, false if padding.
        mask_ar: bool[?B, N] mask that's true where previous tokens cannot
            depend on it and false where it shares the same attention mask as
            the previous token.
    """
    # Cumulative sum assigns every token a "block id"; a query may look at a
    # key iff the key's block id does not exceed the query's.
    block_ids = jnp.cumsum(jnp.broadcast_to(mask_ar, input_mask.shape), axis=1)
    causal = block_ids[:, None, :] <= block_ids[:, :, None]
    # Padding positions can neither attend nor be attended to.
    valid = input_mask[:, None, :] * input_mask[:, :, None]
    return jnp.logical_and(causal, valid)
+
+
@jax.vmap
def left_to_right_align(x, input_mask, attn_mask):
    """Converts input from left-align to right-aligned (per example via vmap)."""
    # The vmap decorator makes this operate on a single example, not a batch.
    assert x.ndim == 2
    assert input_mask.ndim == 1
    assert attn_mask.ndim == 2
    assert x.shape[0] == input_mask.shape[0]
    assert attn_mask.shape[0] == attn_mask.shape[1], attn_mask.shape
    # Index of the last valid position + 1 — assumes valid tokens are packed at
    # the left; rolling left by this amount pushes them to the right edge.
    shift = jnp.max(input_mask * jnp.arange(input_mask.shape[0])) + 1
    return (
        jnp.roll(x, -shift, axis=0),
        jnp.roll(input_mask, -shift, axis=0),
        jnp.roll(attn_mask, -shift, axis=(0, 1)),
    )
+
+
def put_along_last_axis(arr, indices, values):
    """Like np.put_along_axis(..., axis=-1), since jax is missing it."""
    assert arr.ndim == indices.ndim == values.ndim, (arr.ndim, indices.ndim, values.ndim)
    # One-hot over the last axis marks the destination slot of each index.
    onehot = jax.nn.one_hot(indices, arr.shape[-1], dtype=values.dtype)
    # Scatter the values into place, and record which slots were written.
    scattered = jnp.einsum("...i,...in->...n", values, onehot)
    written = jnp.einsum("...i,...in->...n", jnp.ones(values.shape, jnp.int32), onehot)
    return jnp.where(written, scattered, arr)
+
+
@dataclasses.dataclass(frozen=True)
class Pi0FASTConfig(_model.BaseModelConfig):
    """Configuration for Pi0-FAST (PaliGemma backbone decoding FAST action tokens)."""

    dtype: str = "bfloat16"
    paligemma_variant: _gemma.Variant = "gemma_2b"

    # Set the model specific defaults.
    action_dim: int = 32
    action_horizon: int = 32
    max_token_len: int = 250

    @property
    @override
    def model_type(self) -> _model.ModelType:
        return _model.ModelType.PI0_FAST

    @override
    def create(self, rng: at.KeyArrayLike) -> "Pi0FAST":
        return Pi0FAST(self, rngs=nnx.Rngs(rng))

    @override
    def inputs_spec(self, *, batch_size: int = 1) -> tuple[_model.Observation, _model.Actions]:
        """Returns shape/dtype specs for one (observation, actions) input pair."""
        image_spec = jax.ShapeDtypeStruct([batch_size, *_model.IMAGE_RESOLUTION, 3], jnp.float32)
        image_mask_spec = jax.ShapeDtypeStruct([batch_size], jnp.bool_)

        with at.disable_typechecking():
            observation_spec = _model.Observation(
                images={
                    "base_0_rgb": image_spec,
                    "base_1_rgb": image_spec,
                    "wrist_0_rgb": image_spec,
                },
                image_masks={
                    "base_0_rgb": image_mask_spec,
                    "base_1_rgb": image_mask_spec,
                    "wrist_0_rgb": image_mask_spec,
                },
                state=jax.ShapeDtypeStruct([batch_size, self.action_dim], jnp.float32),
                tokenized_prompt=jax.ShapeDtypeStruct([batch_size, self.max_token_len], jnp.int32),
                tokenized_prompt_mask=jax.ShapeDtypeStruct([batch_size, self.max_token_len], bool),
                token_ar_mask=jax.ShapeDtypeStruct([batch_size, self.max_token_len], jnp.int32),
                token_loss_mask=jax.ShapeDtypeStruct([batch_size, self.max_token_len], jnp.bool_),
            )
        action_spec = jax.ShapeDtypeStruct([batch_size, self.action_horizon, self.action_dim], jnp.float32)

        return observation_spec, action_spec

    def get_freeze_filter(self) -> nnx.filterlib.Filter:
        """Returns the freeze filter based on the model config."""
        if "lora" in self.paligemma_variant:
            # LoRA fine-tuning: freeze the base LLM weights except the LoRA adapters.
            return nnx.All(nnx_utils.PathRegex(".*llm.*"), nnx.Not(nnx_utils.PathRegex(".*lora.*")))
        return nnx.Nothing

    def get_freeze_filter_with_frozen_img_encoder(self) -> nnx.filterlib.Filter:
        """Returns the freeze filter (image encoder frozen) based on the model config."""
        # FIX: removed an unreachable `return nnx.Nothing` that followed an
        # if/else in which both branches already returned.
        if "lora" in self.paligemma_variant:
            # Freeze both llm (except lora parts) and img components.
            return nnx.Any(
                nnx.All(nnx_utils.PathRegex(".*llm.*"), nnx.Not(nnx_utils.PathRegex(".*lora.*"))),
                nnx_utils.PathRegex(".*img.*"),
            )
        # Freeze only the image encoder.
        return nnx.All(nnx_utils.PathRegex(".*img.*"), nnx.Not(nnx_utils.PathRegex(".*llm.*")))
+
class Pi0FAST(_model.BaseModel):
    """PaliGemma-based model (SigLIP vision tower + Gemma LLM, wrapped from
    linen into NNX via `nnx_bridge.ToNNX`) that autoregressively decodes
    FAST action tokens."""

    def __init__(self, config: Pi0FASTConfig, rngs: nnx.Rngs):
        super().__init__(config.action_dim, config.action_horizon, config.max_token_len)
        paligemma_config = _gemma.get_config(config.paligemma_variant)
        # TODO: rewrite gemma in NNX. For now, use bridge.
        llm = nnx_bridge.ToNNX(
            _gemma.Module(
                **paligemma_config,
                embed_dtype=config.dtype,
                cache_dtype=config.dtype,
            )
        )
        llm.lazy_init(rngs=rngs, method="init")
        img = nnx_bridge.ToNNX(
            _siglip.Module(
                num_classes=paligemma_config.width,
                variant="So400m/14",
                pool_type="none",
                scan=True,
                dtype_mm=config.dtype,
            )
        )
        # Materialize the vision tower's variables by running it once on a fake image.
        img.lazy_init(next(iter(config.fake_obs().images.values())), train=False, rngs=rngs)
        self.PaliGemma = nnx.Dict(llm=llm, img=img)

    @at.typecheck
    def embed_inputs(
        self, obs: _model.Observation
    ) -> tuple[at.Float[at.Array, "b s emb"], at.Bool[at.Array, "b s"], at.Int[at.Array, "b s"]]:
        """Embeds all images followed by the tokenized prompt into one sequence.

        Returns:
            (token embeddings, validity mask, autoregressive mask), each with
            image tokens first and prompt tokens last, concatenated on axis 1.
        """
        input_mask = []
        ar_mask = []
        token_embeddings = []
        # embed images
        for name in obs.images:
            image_token_embeddings, _ = self.PaliGemma.img(obs.images[name], train=False)

            token_embeddings.append(image_token_embeddings)
            # Broadcast the per-image validity bit over that image's token positions.
            input_mask.append(
                einops.repeat(
                    obs.image_masks[name],
                    "b -> b s",
                    s=image_token_embeddings.shape[1],
                )
            )
            # image tokens attend to each other --> AR mask = 0
            ar_mask.append(0 * input_mask[-1])

        # add tokenized inputs
        assert obs.tokenized_prompt is not None, "Tokenized prompt is required"
        assert obs.tokenized_prompt_mask is not None, "Tokenized prompt mask is required"
        assert obs.token_ar_mask is not None, "Token auto-regressive mask is required"
        tokenized_inputs_embeddings = self.PaliGemma.llm(obs.tokenized_prompt, embed_only=True)
        token_embeddings.append(tokenized_inputs_embeddings)
        input_mask.append(obs.tokenized_prompt_mask)
        ar_mask.append(obs.token_ar_mask)

        # return embeddings, input mask, and ar mask
        return (
            jnp.concatenate(token_embeddings, axis=1),
            jnp.concatenate(input_mask, axis=1),
            jnp.concatenate(ar_mask, axis=1),
        )

    @override
    def compute_loss(
        self, rng: at.KeyArrayLike, observation: _model.Observation, actions: _model.Actions, *, train: bool = False
    ) -> at.Float[at.Array, "*b ah"]:
        """Next-token cross-entropy over the positions selected by `token_loss_mask`."""
        observation = _model.preprocess_observation(
            rng, observation, train=train, image_keys=list(observation.images.keys())
        )

        # Compute inputs: one big forward pass of prefix + suffix at once
        input_token_embeddings, input_mask, ar_mask = self.embed_inputs(observation)
        attn_mask = make_attn_mask(input_mask, ar_mask)

        # Compute one-hot targets: we predict *next* token, so shift the input tokens by one.
        targets = jax.nn.one_hot(
            observation.tokenized_prompt[:, 1:],
            self.PaliGemma.llm.module.vocab_size,
        )

        # Each input predicts *next* token, so we don't input the last token.
        pre_logits, _, _ = self.PaliGemma.llm(
            embedded_prefix=input_token_embeddings[:, :-1],
            mask=attn_mask[:, :-1, :-1],
            return_prelogits=True,
        )

        # Only decode logits for the target tokens to save memory
        # (decoding matmul is large because it is a seq_len x vocab_size dense layer).
        logits, _ = self.PaliGemma.llm(
            pre_logits=pre_logits[:, -targets.shape[1] :],
        )
        logp = jax.nn.log_softmax(logits, axis=-1)

        # Compute CE loss on token targets
        assert observation.token_loss_mask is not None, "Token loss mask is required"
        loss_mask = observation.token_loss_mask[:, 1:]
        token_pplx = jnp.sum(targets * logp, axis=-1)
        # Mean over masked positions; clip guards against division by zero when
        # an example has no loss-masked tokens.
        return -jnp.sum(token_pplx * loss_mask, axis=-1) / jnp.clip(jnp.sum(loss_mask, -1), 1)

    @override
    def sample_actions(
        self,
        rng: at.KeyArrayLike,
        observation: _model.Observation,
        *,
        max_decoding_steps: int | at.Int[at.Array, ""] = 256,
        temperature: float = 0.0,
    ) -> _model.Actions:
        """Autoregressively decodes up to `max_decoding_steps` output token ids.

        Returns the raw token id array (to be parsed by the FAST tokenizer),
        not continuous actions, despite the `_model.Actions` annotation.
        """
        # TODO: this is a hack to get the image keys.
        observation = _model.preprocess_observation(
            None, observation, train=False, image_keys=list(observation.images.keys())
        )

        # embed inputs
        prefix_token_embeddings, prefix_mask, prefix_ar_mask = self.embed_inputs(observation)
        prefix_attn_mask = make_attn_mask(prefix_mask, prefix_ar_mask)

        # left to right align all input token sequences
        prefix_token_embeddings, prefix_mask, prefix_attn_mask = left_to_right_align(
            prefix_token_embeddings, prefix_mask, prefix_attn_mask
        )
        prefill_size = prefix_token_embeddings.shape[1]
        prefill_len = jnp.sum(prefix_mask, axis=-1)
        # After right-alignment, valid prefix tokens occupy [prefix_start, prefill_size).
        prefix_start = prefill_size - prefill_len

        # first fill KV cache with a forward pass of the prefix
        # pad attention mask to set the size of the KV cache (prefill_size + max_decoding_steps)
        prefix_attn_mask = jnp.pad(prefix_attn_mask, ((0, 0), (0, 0), (0, max_decoding_steps)))
        # NOTE(review): debug print left in; under jit this prints tracers — consider removing.
        print(f'prefix_attn_mask shape: {prefix_attn_mask.shape}')
        prefix_positions = jnp.cumsum(prefix_mask, axis=-1) - 1
        print(f'prefix_positions shape: {prefix_positions.shape}')
        prefix_logits, kv_cache, _ = self.PaliGemma.llm(
            embedded_prefix=prefix_token_embeddings, mask=prefix_attn_mask, positions=prefix_positions, decode=True
        )

        # prepare decoding -- final logit decodes the first token
        last_logit = prefix_logits[:, -1:]
        # NOTE(review): default dtype (float32) — token ids are stored as floats here.
        output_tokens = jnp.zeros((last_logit.shape[0], max_decoding_steps))

        def step(carry):
            # One decode step: sample a token, store it, extend the KV cache.
            last_logit, output_tokens, cache, _, step = carry

            # Sample token from last logit
            if temperature > 0.0:
                last_logit = last_logit / temperature
                token = jax.random.categorical(rng, last_logit, axis=-1)
            else:
                token = jnp.argmax(last_logit, axis=-1)
            output_tokens = put_along_last_axis(output_tokens, jnp.broadcast_to(step, (token.shape[0], 1)), token)

            # Check for early stopping --> stop if all batch elements have EOS token
            has_eos = jnp.any(token == PALIGEMMA_EOS_TOKEN, axis=-1)
            all_eos = jnp.all(has_eos)

            # Decode one step
            token_embedding = self.PaliGemma.llm(token, embed_only=True)
            positions = prefill_len[:, None] + step + 1
            # Attend to every cache slot from the start of the right-aligned
            # prefix up to (and including) the token decoded this step.
            mask = jnp.logical_and(
                jnp.arange(prefill_size + max_decoding_steps)[None, None, :] >= prefix_start[:, None, None],
                jnp.arange(prefill_size + max_decoding_steps)[None, None, :]
                < (jnp.broadcast_to(prefill_size + step + 1, (prefix_start.shape[0], 1, 1))),
            )
            # NOTE(review): debug prints left in; under jit these print tracers.
            print(f'mask shape: {mask.shape}')
            print(f'mask values: {mask}')
            print(f'positions shape: {positions.shape}')
            print(f'positions values: {positions}')
            print(f'token_embedding shape: {token_embedding.shape}')
            last_logit, kv_cache, _ = self.PaliGemma.llm(
                embedded_prefix=token_embedding, mask=mask, positions=positions, decode=True, kv_cache=cache
            )

            return last_logit, output_tokens, kv_cache, all_eos, step + 1

        def cond(carry):
            # Keep decoding while some sequence lacks EOS and steps remain.
            _, _, _, all_eos, step = carry
            return (~all_eos) & (step < max_decoding_steps)

        # Use lax.while_loop so we can jit the full decoding loop.
        _, output_tokens, _, _, _ = jax.lax.while_loop(cond, step, (last_logit, output_tokens, kv_cache, False, 0))

        # carry = (last_logit, output_tokens, kv_cache, False, 0)
        # while cond(carry):
        #     carry = step(carry)

        return output_tokens
diff --git a/src/openpi/models/tokenizer.py b/src/openpi/models/tokenizer.py
new file mode 100644
index 0000000000000000000000000000000000000000..0be322a6f735718ba70fd8760632fd2ec0534fdb
--- /dev/null
+++ b/src/openpi/models/tokenizer.py
@@ -0,0 +1,278 @@
+import logging
+
+import numpy as np
+import sentencepiece
+from transformers import AutoProcessor
+
+import etils.epath as epath
+import openpi.shared.download as download
+
+
class PaligemmaTokenizer:
    """Tokenizes prompts with the PaliGemma SentencePiece model, padding or
    truncating the result to a fixed length."""

    def __init__(self, max_len: int = 48):
        self._max_len = max_len
        self._tokenizer = sentencepiece.SentencePieceProcessor(model_proto=self._load_model_proto())

    @staticmethod
    def _load_model_proto() -> bytes:
        """Reads the SentencePiece model, preferring local copies over the GCS download."""
        for candidate in (
            epath.Path("assets/paligemma_tokenizer.model"),
            epath.Path("/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/paligemma-3b-pt-224/tokenizer.model"),
        ):
            if candidate.exists():
                with candidate.open("rb") as f:
                    return f.read()
        downloaded = download.maybe_download("gs://big_vision/paligemma_tokenizer.model", gs={"token": "anon"})
        with downloaded.open("rb") as f:
            return f.read()

    def tokenize(self, prompt: str) -> tuple[np.ndarray, np.ndarray]:
        """Returns (tokens, mask), each of length max_len; mask is True on real tokens."""
        cleaned_text = prompt.strip().replace("_", " ").replace("\n", " ")
        # tokenize "\n" separately as the "start of answer" token
        tokens = self._tokenizer.encode(cleaned_text, add_bos=True) + self._tokenizer.encode("\n")

        if len(tokens) > self._max_len:
            logging.warning(
                f"Token length ({len(tokens)}) exceeds max length ({self._max_len}), truncating. "
                "Consider increasing the `max_token_len` in your model config if this happens frequently."
            )
        n_pad = max(self._max_len - len(tokens), 0)
        mask = [True] * min(len(tokens), self._max_len) + [False] * n_pad
        # Padding reuses False (== 0) as the pad token id, matching the original convention.
        tokens = (tokens + [False] * n_pad)[: self._max_len]
        return np.asarray(tokens), np.asarray(mask)
+
+
class FASTTokenizer:
    """Tokenizer for Pi0-FAST.

    The prompt and discretized state are encoded with the PaliGemma
    SentencePiece tokenizer (prefix); actions are encoded with the FAST
    tokenizer and remapped into the tail of the PaliGemma vocabulary (postfix).
    """

    def __init__(self, max_len: int = 256, fast_tokenizer_path: str = "physical-intelligence/fast"):
        self._max_len = max_len

        # Download base PaliGemma tokenizer
        local_path = epath.Path("assets/paligemma_tokenizer.model")
        hf_path = epath.Path("/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/paligemma-3b-pt-224/tokenizer.model")
        if local_path.exists():
            path = local_path
        elif hf_path.exists():
            path = hf_path
        else:
            path = download.maybe_download("gs://big_vision/paligemma_tokenizer.model", gs={"token": "anon"})
        with path.open("rb") as f:
            self._paligemma_tokenizer = sentencepiece.SentencePieceProcessor(model_proto=f.read())

        # Instantiate FAST tokenizer - check for local path first
        local_fast_path = epath.Path("fast")
        if local_fast_path.exists():
            fast_tokenizer_path = str(local_fast_path)
        self._fast_tokenizer = AutoProcessor.from_pretrained(fast_tokenizer_path, trust_remote_code=True)
        self._fast_skip_tokens = 128  # Skip last 128 tokens in PaliGemma vocab since they are special tokens

    def tokenize(
        self, prompt: str, state: np.ndarray, actions: np.ndarray | None,
        dont_pad: bool = False,
        dont_loss: bool = False,
    ) -> tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
        """Builds the token sequence and masks for one example.

        Args:
            prompt: natural-language task description.
            state: normalized state vector (assumed in [-1, 1] per dimension).
            actions: action chunk to tokenize, or None at inference time.
            dont_pad: if True and the sequence is shorter than max_len, return it unpadded.
            dont_loss: if True, the loss mask is all False.

        Returns:
            (tokens, token_mask, ar_mask, loss_mask), each of length max_len
            unless `dont_pad` short-circuits padding.
        """
        cleaned_text = prompt.lower().strip().replace("_", " ")

        # Convention: state gets discretized into 256 discrete bins (assumed range after normalization: [-1, 1])
        discretized_state = np.digitize(state, bins=np.linspace(-1, 1, 256 + 1)[:-1]) - 1

        # Convention: prefix includes prompt and string-representation of state, followed by ';'
        state_str = " ".join(map(str, discretized_state))
        prefix = f"Task: {cleaned_text}, State: {state_str};\n"
        prefix_tokens = self._paligemma_tokenizer.encode(prefix, add_bos=True)

        if actions is not None:
            # Tokenize actions with FAST tokenizer --> map to last tokens in PaliGemma vocab
            action_tokens = self._fast_tokenizer(actions[None])[0]
            action_tokens_in_pg = self._act_tokens_to_paligemma_tokens(action_tokens)

            # Convention: postfix contains 'Action:' followed by FAST tokens, followed by '|'
            postfix_tokens = (
                self._paligemma_tokenizer.encode("Action: ")
                + action_tokens_in_pg.tolist()
                + self._paligemma_tokenizer.encode("|")
            )
        else:
            postfix_tokens = []

        # Create output token sequence & masks
        # AR mask is 0 on prefix (bidirectional attention) and 1 on postfix (causal attention to all previous tokens)
        tokens = prefix_tokens + postfix_tokens
        token_mask = [True] * len(tokens)
        ar_mask = [0] * len(prefix_tokens) + [1] * len(postfix_tokens)
        if dont_loss:
            loss_mask = [False] * len(prefix_tokens) + [False] * len(postfix_tokens)  # no loss on prefix or postfix
        else:
            loss_mask = [False] * len(prefix_tokens) + [True] * len(postfix_tokens)  # Loss on postfix only

        # Pad tokens to max length
        tokens_len = len(tokens)
        if tokens_len < self._max_len:
            # When padding is not desired
            if dont_pad:
                return np.asarray(tokens), np.asarray(token_mask), np.asarray(ar_mask), np.asarray(loss_mask)

            padding = [False] * (self._max_len - tokens_len)
            tokens = tokens + padding
            token_mask = token_mask + padding
            ar_mask = ar_mask + padding
            loss_mask = loss_mask + padding
        else:
            if len(tokens) > self._max_len:
                logging.warning(
                    f"Token length ({len(tokens)}) exceeds max length ({self._max_len}), truncating. "
                    "Consider increasing the `max_token_len` in your model config if this happens frequently."
                )
            tokens = tokens[: self._max_len]
            token_mask = token_mask[: self._max_len]
            ar_mask = ar_mask[: self._max_len]
            loss_mask = loss_mask[: self._max_len]

        return np.asarray(tokens), np.asarray(token_mask), np.asarray(ar_mask), np.asarray(loss_mask)

    def extract_actions(self, tokens: np.ndarray, action_horizon: int, action_dim: int) -> np.ndarray:
        """Parses generated tokens back into an (action_horizon, action_dim) float array.

        Returns zeros when no `Action: ...` span is present in the decoded text.
        """
        # Decode predicted output tokens
        decoded_tokens = self._paligemma_tokenizer.decode(tokens.tolist())

        # Extract actions from FAST model outputs
        if "Action: " not in decoded_tokens:
            # Consistency fix: use the logging module (as `tokenize` does) instead of print.
            logging.warning(f"No `Action: ` found in decoded tokens: {decoded_tokens}, so returning zeros")
            return np.zeros((action_horizon, action_dim), dtype=np.float32)

        # Extract actions from decoded tokens
        raw_action_tokens = np.array(
            self._paligemma_tokenizer.encode(decoded_tokens.split("Action: ")[1].split("|")[0].strip())
        )
        action_tokens = self._act_tokens_to_paligemma_tokens(raw_action_tokens)
        return self._fast_tokenizer.decode(
            [action_tokens.tolist()], time_horizon=action_horizon, action_dim=action_dim
        )[0]

    def _act_tokens_to_paligemma_tokens(self, tokens: np.ndarray | list[int]) -> np.ndarray:
        """Maps FAST token ids onto the tail of the PaliGemma vocab; the mapping
        t -> C - t is its own inverse, so the same helper also decodes."""
        if isinstance(tokens, list):
            tokens = np.array(tokens)
        return self._paligemma_tokenizer.vocab_size() - 1 - self._fast_skip_tokens - tokens
+
+
class FASTTokenizerRicl:
    """RICL variant of `FASTTokenizer`: returns prefix and postfix token arrays
    separately, with the prefix always padded to exactly half of `max_len`."""

    def __init__(self, max_len: int = 256, fast_tokenizer_path: str = "physical-intelligence/fast", action_horizon: int = 10, action_dim: int = 8):
        self._max_len = max_len
        self._action_horizon = action_horizon
        self._action_dim = action_dim

        # Download base PaliGemma tokenizer
        local_path = epath.Path("assets/paligemma_tokenizer.model")
        hf_path = epath.Path("/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/in_context_learning/VLA-Humanoid/paligemma-3b-pt-224/tokenizer.model")
        if local_path.exists():
            path = local_path
        elif hf_path.exists():
            path = hf_path
        else:
            path = download.maybe_download("gs://big_vision/paligemma_tokenizer.model", gs={"token": "anon"})
        with path.open("rb") as f:
            self._paligemma_tokenizer = sentencepiece.SentencePieceProcessor(model_proto=f.read())

        # Instantiate FAST tokenizer - check for local path first
        local_fast_path = epath.Path("fast")
        if local_fast_path.exists():
            fast_tokenizer_path = str(local_fast_path)
        self._fast_tokenizer = AutoProcessor.from_pretrained(fast_tokenizer_path, trust_remote_code=True)
        self._fast_skip_tokens = 128  # Skip last 128 tokens in PaliGemma vocab since they are special tokens

    def tokenize(
        self, prompt: str, state: np.ndarray, actions: np.ndarray | None,
        dont_pad: bool = False,
        dont_loss: bool = False,
    ) -> tuple[np.ndarray, np.ndarray | None, np.ndarray, np.ndarray, np.ndarray]:
        """Builds prefix/postfix token sequences and masks for one example.

        FIX: the return annotation previously claimed a 4-tuple; this method
        returns 5 values.

        Returns:
            (prefix_tokens, postfix_tokens, token_mask, ar_mask, loss_mask);
            `postfix_tokens` is None at inference time (no actions, dont_pad=True).
        """
        cleaned_text = prompt.lower().strip().replace("_", " ")

        # Convention: state gets discretized into 256 discrete bins (assumed range after normalization: [-1, 1])
        discretized_state = np.digitize(state, bins=np.linspace(-1, 1, 256 + 1)[:-1]) - 1

        # Convention: prefix includes prompt and string-representation of state, followed by ';'
        state_str = " ".join(map(str, discretized_state))
        prefix = f"Task: {cleaned_text}, State: {state_str};\n"
        prefix_tokens = self._paligemma_tokenizer.encode(prefix, add_bos=True)

        if actions is not None:
            # Tokenize actions with FAST tokenizer --> map to last tokens in PaliGemma vocab
            assert actions.shape == (self._action_horizon, self._action_dim), f"{actions.shape=}"
            action_tokens = self._fast_tokenizer(actions[None])[0]
            action_tokens_in_pg = self._act_tokens_to_paligemma_tokens(action_tokens)

            # Convention: postfix contains 'Action:' followed by FAST tokens, followed by '|'
            postfix_tokens = (
                self._paligemma_tokenizer.encode("Action: ")
                + action_tokens_in_pg.tolist()
                + self._paligemma_tokenizer.encode("|")
            )
        else:
            postfix_tokens = []

        # always pad prefix tokens to 1/2 the max length
        assert self._max_len % 2 == 0, "max_len must be divisible by 2 to pad prefix tokens to 1/2 the max length and postfix tokens to the rest"
        # FIX: was `<`, which wrongly rejected a prefix of exactly max_len // 2
        # even though it fits without padding.
        if len(prefix_tokens) <= self._max_len // 2:
            prefix_padding = [False] * (self._max_len // 2 - len(prefix_tokens))
        else:
            raise ValueError(f"Prefix tokens length ({len(prefix_tokens)}) exceeds 1/2 the max length ({self._max_len // 2})! Increase the `max_token_len` in your model config.")
        # pad postfix tokens if not dont_pad
        if dont_pad:
            postfix_padding = []
        else:
            postfix_padding = [False] * (self._max_len - len(prefix_tokens) - len(prefix_padding) - len(postfix_tokens))

        # Create output token sequence & masks
        # AR mask is 0 on prefix (bidirectional attention) and 1 on postfix (causal attention to all previous tokens)
        tokens_len = len(prefix_tokens) + len(prefix_padding) + len(postfix_tokens) + len(postfix_padding)
        if not dont_pad:
            assert tokens_len == self._max_len
        token_mask = [True] * len(prefix_tokens) + [False] * len(prefix_padding) + [True] * len(postfix_tokens) + [False] * len(postfix_padding)
        # False doubles as integer 0 in the padded spans of the AR mask.
        ar_mask = [0] * len(prefix_tokens) + [False] * len(prefix_padding) + [1] * len(postfix_tokens) + [False] * len(postfix_padding)
        if dont_loss:
            loss_mask = [False] * tokens_len  # no loss on prefix or postfix
        else:
            loss_mask = [False] * len(prefix_tokens) + [False] * len(prefix_padding) + [True] * len(postfix_tokens) + [False] * len(postfix_padding)  # Loss on postfix_tokens only

        # pad prefix and postfix tokens
        prefix_tokens = prefix_tokens + prefix_padding
        postfix_tokens = postfix_tokens + postfix_padding

        if len(postfix_tokens) == 0:
            # happens at inference time when actions are not provided and dont_pad is True
            postfix_tokens = None
        else:
            postfix_tokens = np.asarray(postfix_tokens)

        return np.asarray(prefix_tokens), postfix_tokens, np.asarray(token_mask), np.asarray(ar_mask), np.asarray(loss_mask)

    def extract_actions(self, tokens: np.ndarray, action_horizon: int, action_dim: int) -> np.ndarray:
        """Parses generated tokens back into an (action_horizon, action_dim) float array.

        Returns zeros when no `Action: ...` span is present in the decoded text.
        """
        assert action_horizon == self._action_horizon and action_dim == self._action_dim, f"{action_horizon=}, {action_dim=}, {self._action_horizon=}, {self._action_dim=}"
        # Decode predicted output tokens
        decoded_tokens = self._paligemma_tokenizer.decode(tokens.tolist())

        # Extract actions from FAST model outputs
        if "Action: " not in decoded_tokens:
            # Consistency fix: use the logging module instead of print.
            logging.warning(f"No `Action: ` found in decoded tokens: {decoded_tokens}, so returning zeros")
            return np.zeros((action_horizon, action_dim), dtype=np.float32)

        # Extract actions from decoded tokens
        # Debug prints demoted to logging.debug so normal runs stay quiet.
        logging.debug(f'decoded_tokens: {decoded_tokens}')
        raw_action_tokens = np.array(
            self._paligemma_tokenizer.encode(decoded_tokens.split("Action: ")[1].split("|")[0].strip())
        )
        logging.debug(f'raw_action_tokens: {raw_action_tokens}')
        action_tokens = self._act_tokens_to_paligemma_tokens(raw_action_tokens)
        logging.debug(f'action_tokens: {action_tokens}')
        outputs = self._fast_tokenizer.decode(
            [action_tokens.tolist()], time_horizon=action_horizon, action_dim=action_dim
        )
        assert outputs.shape == (1, action_horizon, action_dim), f"{outputs.shape=}"
        outputs = outputs[0]
        logging.debug(f'outputs before normalization: {outputs}')
        return outputs

    def _act_tokens_to_paligemma_tokens(self, tokens: np.ndarray | list[int]) -> np.ndarray:
        """Maps FAST token ids onto the tail of the PaliGemma vocab; the mapping
        t -> C - t is its own inverse, so the same helper also decodes."""
        if isinstance(tokens, list):
            tokens = np.array(tokens)
        return self._paligemma_tokenizer.vocab_size() - 1 - self._fast_skip_tokens - tokens
diff --git a/src/openpi/models/vit.py b/src/openpi/models/vit.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7901d097b86d36e3564d2b664ed50af8a194080
--- /dev/null
+++ b/src/openpi/models/vit.py
@@ -0,0 +1,307 @@
+# Copyright 2024 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""ViT implementation adapted from https://github.com/google-research/vision_transformer/blob/main/vit_jax/models_vit.py."""
+
+from collections.abc import Callable
+from typing import Any
+
+import flax.linen as nn
+import jax
+import jax.numpy as jnp
+
+from openpi.models import resnet as models_resnet
+
+Array = Any
+PRNGKey = Any
+Shape = tuple[int]
+Dtype = Any
+
+
class IdentityLayer(nn.Module):
    """Identity layer, convenient for giving a name to an array."""

    @nn.compact
    def __call__(self, x):
        # Pure pass-through; exists only so the array appears under a named
        # module scope.
        return x
+
+
class AddPositionEmbs(nn.Module):
    """Adds learned positional embeddings to the inputs.

    Attributes:
      posemb_init: positional embedding initializer.
      param_dtype: dtype of the learned embedding parameter.
    """

    posemb_init: Callable[[PRNGKey, Shape, Dtype], Array]
    param_dtype: Dtype = jnp.float32

    @nn.compact
    def __call__(self, inputs):
        """Applies the AddPositionEmbs module.

        Args:
          inputs: Inputs to the layer.

        Returns:
          Output tensor with shape `(bs, timesteps, in_dim)`.
        """
        # inputs.shape is (batch_size, seq_len, emb_dim).
        assert inputs.ndim == 3, f"Number of dimensions should be 3, but it is: {inputs.ndim}"
        # Parameter has shape (1, seq_len, emb_dim) and is shared across the
        # batch via broadcasting in the addition below.
        pos_emb_shape = (1, inputs.shape[1], inputs.shape[2])
        pe = self.param("pos_embedding", self.posemb_init, pos_emb_shape, self.param_dtype)
        return inputs + pe
+
+
class MlpBlock(nn.Module):
    """Transformer MLP / feed-forward block.

    Dense -> GELU -> dropout -> Dense -> dropout.

    Attributes:
      mlp_dim: hidden width of the feed-forward expansion.
      out_dim: output width; defaults to the input width when None.
      dropout_rate: dropout probability applied after each Dense layer.
    """

    mlp_dim: int
    dtype: Dtype = jnp.float32
    param_dtype: Dtype = jnp.float32
    out_dim: int | None = None
    dropout_rate: float = 0.1
    kernel_init: Callable[[PRNGKey, Shape, Dtype], Array] = nn.initializers.xavier_uniform()
    bias_init: Callable[[PRNGKey, Shape, Dtype], Array] = nn.initializers.normal(stddev=1e-6)

    @nn.compact
    def __call__(self, inputs, *, deterministic):
        """Applies Transformer MlpBlock module."""
        # Default the output width to the input width (residual-friendly).
        actual_out_dim = inputs.shape[-1] if self.out_dim is None else self.out_dim
        x = nn.Dense(
            features=self.mlp_dim,
            dtype=self.dtype,
            param_dtype=self.param_dtype,
            kernel_init=self.kernel_init,
            bias_init=self.bias_init,
        )(  # pytype: disable=wrong-arg-types
            inputs
        )
        x = nn.gelu(x)
        x = nn.Dropout(rate=self.dropout_rate)(x, deterministic=deterministic)
        output = nn.Dense(
            features=actual_out_dim,
            dtype=self.dtype,
            param_dtype=self.param_dtype,
            kernel_init=self.kernel_init,
            bias_init=self.bias_init,
        )(  # pytype: disable=wrong-arg-types
            x
        )
        return nn.Dropout(rate=self.dropout_rate)(output, deterministic=deterministic)
+
+
class Encoder1DBlock(nn.Module):
    """Transformer encoder layer (pre-LN self-attention + MLP, both residual).

    Attributes:
      mlp_dim: dimension of the mlp on top of attention block.
      num_heads: Number of heads in nn.MultiHeadDotProductAttention.
      dtype: the dtype of the computation (default: float32).
      dropout_rate: dropout rate.
      attention_dropout_rate: dropout for attention heads.
    """

    mlp_dim: int
    num_heads: int
    dtype: Dtype = jnp.float32
    dropout_rate: float = 0.1
    attention_dropout_rate: float = 0.1

    @nn.compact
    def __call__(self, inputs, deterministic):
        """Applies Encoder1DBlock module.

        Args:
          inputs: Inputs to the layer.
          deterministic: Dropout will not be applied when set to true.

        Returns:
          `(output, None)` — the second element makes the block usable as a
          scan body (see the `nn.scan` call in `Encoder`).
        """

        # Attention block.
        assert inputs.ndim == 3, f"Expected (batch, seq, hidden) got {inputs.shape}"
        x = nn.LayerNorm(dtype=self.dtype)(inputs)
        x = nn.MultiHeadDotProductAttention(
            dtype=self.dtype,
            kernel_init=nn.initializers.xavier_uniform(),
            broadcast_dropout=False,
            deterministic=deterministic,
            dropout_rate=self.attention_dropout_rate,
            num_heads=self.num_heads,
            # Keep the attention softmax in float32 for numerical stability
            # even when the rest of the computation uses a lower precision.
            force_fp32_for_softmax=True,
        )(x, x)
        x = nn.Dropout(rate=self.dropout_rate)(x, deterministic=deterministic)
        x = x + inputs

        # MLP block.
        y = nn.LayerNorm(dtype=self.dtype)(x)
        y = MlpBlock(mlp_dim=self.mlp_dim, dtype=self.dtype, dropout_rate=self.dropout_rate)(
            y, deterministic=deterministic
        )

        return x + y, None
+
+
class Encoder(nn.Module):
    """Transformer Model Encoder for sequence to sequence translation.

    Attributes:
      num_layers: number of layers
      mlp_dim: dimension of the mlp on top of attention block
      num_heads: Number of heads in nn.MultiHeadDotProductAttention
      dropout_rate: dropout rate.
      attention_dropout_rate: dropout rate in self attention.
      add_position_embedding: whether to add learned positional embeddings.
    """

    dtype: jax.typing.DTypeLike
    num_layers: int
    mlp_dim: int
    num_heads: int
    dropout_rate: float = 0.1
    attention_dropout_rate: float = 0.1
    add_position_embedding: bool = True

    @nn.compact
    def __call__(self, x, *, train):
        """Applies Transformer model on the inputs.

        Args:
          x: Inputs to the layer.
          train: Set to `True` when training.

        Returns:
          output of a transformer encoder.
        """
        assert x.ndim == 3  # (batch, len, emb)

        if self.add_position_embedding:
            x = AddPositionEmbs(
                posemb_init=nn.initializers.normal(stddev=0.02),  # from BERT.
                name="posembed_input",
            )(x)
        x = nn.Dropout(rate=self.dropout_rate)(x, deterministic=not train)

        # Cast activations to the compute dtype before entering the stack.
        x = x.astype(self.dtype)
        # Input Encoder
        # remat: rematerialize activations in the backward pass to save memory.
        # scan: run `num_layers` instances of the block with per-layer stacked
        # parameters (variable_axes={"params": 0}) and per-layer RNG splits.
        block = nn.remat(Encoder1DBlock, prevent_cse=False, static_argnums=(2,))
        x, _ = nn.scan(
            block,
            variable_axes={"params": 0},
            split_rngs={"params": True, "dropout": True},
            in_axes=nn.broadcast,
            length=self.num_layers,
        )(
            name="encoderblock",
            mlp_dim=self.mlp_dim,
            dropout_rate=self.dropout_rate,
            attention_dropout_rate=self.attention_dropout_rate,
            dtype=self.dtype,
            num_heads=self.num_heads,
        )(x, not train)
        return nn.LayerNorm(name="encoder_norm", dtype=self.dtype)(x)
+
+
class VisionTransformer(nn.Module):
    """VisionTransformer.

    ViT with an optional (possibly partial) ResNet root (hybrid model), an
    optional Transformer encoder, and a configurable classification head.
    """

    # Computation dtype used by the encoder and final layers.
    dtype: jax.typing.DTypeLike
    # Number of output classes; a falsy value (e.g. 0) disables the head.
    num_classes: int
    # Patch config; `patches.size` is used as both conv kernel and stride.
    patches: Any
    # Kwargs forwarded to the encoder (num_layers, mlp_dim, num_heads, ...);
    # None skips the Transformer entirely.
    transformer: Any
    # Embedding dimension of the patch tokens.
    hidden_size: int
    # Optional ResNet root config for the hybrid model; None for pure ViT.
    resnet: Any | None = None
    # If set, adds a `pre_logits` Dense + tanh of this size before the head.
    representation_size: int | None = None
    # One of: "token", "gap", "unpooled", "token_unpooled".
    classifier: str = "token"
    head_bias_init: float = 0.0
    encoder: type[nn.Module] = Encoder
    model_name: str | None = None

    @nn.compact
    def __call__(self, inputs, *, train):
        x = inputs
        # (Possibly partial) ResNet root.
        if self.resnet is not None:
            width = int(64 * self.resnet.width_factor)

            # Root block.
            x = models_resnet.StdConv(
                features=width, kernel_size=(7, 7), strides=(2, 2), use_bias=False, name="conv_root"
            )(x)
            x = nn.GroupNorm(name="gn_root")(x)
            x = nn.relu(x)
            x = nn.max_pool(x, window_shape=(3, 3), strides=(2, 2), padding="SAME")

            # ResNet stages.
            if self.resnet.num_layers:
                x = models_resnet.ResNetStage(
                    block_size=self.resnet.num_layers[0], nout=width, first_stride=(1, 1), name="block1"
                )(x)
                for i, block_size in enumerate(self.resnet.num_layers[1:], 1):
                    x = models_resnet.ResNetStage(
                        block_size=block_size, nout=width * 2**i, first_stride=(2, 2), name=f"block{i + 1}"
                    )(x)

        # NOTE(review): these values are unused here (recomputed below after the
        # embedding conv); kept for parity with the upstream implementation.
        n, h, w, c = x.shape

        # We can merge s2d+emb into a single conv; it's the same.
        x = nn.Conv(
            features=self.hidden_size,
            kernel_size=self.patches.size,
            strides=self.patches.size,
            padding="VALID",
            name="embedding",
        )(x)

        # Here, x is a grid of embeddings.

        # (Possibly partial) Transformer.
        if self.transformer is not None:
            n, h, w, c = x.shape
            x = jnp.reshape(x, [n, h * w, c])

            # If we want to add a class token, add it here.
            if self.classifier in ["token", "token_unpooled"]:
                cls = self.param("cls", nn.initializers.zeros, (1, 1, c))
                cls = jnp.tile(cls, [n, 1, 1])
                x = jnp.concatenate([cls, x], axis=1)

            x = self.encoder(name="Transformer", **self.transformer, dtype=self.dtype)(x, train=train)

        if self.classifier == "token":
            # Keep only the class-token embedding.
            x = x[:, 0]
        elif self.classifier == "gap":
            # Global average pool over all non-batch, non-feature axes.
            x = jnp.mean(x, axis=list(range(1, x.ndim - 1)))  # (1,) or (1,2)
        elif self.classifier in ["unpooled", "token_unpooled"]:
            pass
        else:
            raise ValueError(f"Invalid classifier={self.classifier}")

        if self.representation_size is not None:
            x = nn.Dense(features=self.representation_size, name="pre_logits")(x)
            x = nn.tanh(x)
        else:
            x = IdentityLayer(name="pre_logits")(x)

        if self.num_classes:
            # Zero-initialized head (common ViT fine-tuning convention).
            x = nn.Dense(
                features=self.num_classes,
                name="head",
                kernel_init=nn.initializers.zeros,
                bias_init=nn.initializers.constant(self.head_bias_init),
            )(x)
        return x
diff --git a/src/openpi/policies/__pycache__/aloha_policy.cpython-310.pyc b/src/openpi/policies/__pycache__/aloha_policy.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d400345f243267cdda700979fb2383da99dd7180
Binary files /dev/null and b/src/openpi/policies/__pycache__/aloha_policy.cpython-310.pyc differ
diff --git a/src/openpi/policies/__pycache__/droid_policy.cpython-310.pyc b/src/openpi/policies/__pycache__/droid_policy.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e6eab4b7ff0f43f6b6a9ec4e14c4c2c848b9df86
Binary files /dev/null and b/src/openpi/policies/__pycache__/droid_policy.cpython-310.pyc differ
diff --git a/src/openpi/policies/__pycache__/libero_policy.cpython-310.pyc b/src/openpi/policies/__pycache__/libero_policy.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f507a88a6f4c3bafc310d2c1a7118f65d4ad6f02
Binary files /dev/null and b/src/openpi/policies/__pycache__/libero_policy.cpython-310.pyc differ
diff --git a/src/openpi/policies/__pycache__/utils.cpython-310.pyc b/src/openpi/policies/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b7bf16c31b839f696ca0bf368fe55b9a23cc34e8
Binary files /dev/null and b/src/openpi/policies/__pycache__/utils.cpython-310.pyc differ
diff --git a/src/openpi/policies/aloha_policy.py b/src/openpi/policies/aloha_policy.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1e53265b83d2715b67b64ccdd66075335ff0843
--- /dev/null
+++ b/src/openpi/policies/aloha_policy.py
@@ -0,0 +1,206 @@
+import dataclasses
+from typing import ClassVar
+
+import einops
+import numpy as np
+
+from openpi import transforms
+
+
def make_aloha_example() -> dict:
    """Creates a random input example for the Aloha policy."""
    camera_names = ("cam_high", "cam_low", "cam_left_wrist", "cam_right_wrist")
    random_images = {
        name: np.random.randint(256, size=(3, 224, 224), dtype=np.uint8) for name in camera_names
    }
    return {
        "state": np.ones((14,)),
        "images": random_images,
        "prompt": "do something",
    }
+
+
@dataclasses.dataclass(frozen=True)
class AlohaInputs(transforms.DataTransformFn):
    """Inputs for the Aloha policy.

    Expected inputs:
    - images: dict[name, img] where img is [channel, height, width]. name must be in EXPECTED_CAMERAS.
    - state: [14]
    - actions: [action_horizon, 14]
    """

    # The action dimension of the model. Will be used to pad state and actions.
    action_dim: int

    # If true, this will convert the joint and gripper values from the standard Aloha space to
    # the space used by the pi internal runtime which was used to train the base model.
    adapt_to_pi: bool = True

    # The expected cameras names. All input cameras must be in this set. Missing cameras will be
    # replaced with black images and the corresponding `image_mask` will be set to False.
    EXPECTED_CAMERAS: ClassVar[tuple[str, ...]] = ("cam_high", "cam_low", "cam_left_wrist", "cam_right_wrist")

    def __call__(self, data: dict) -> dict:
        """Repacks one raw Aloha example into the model's input schema."""
        data = _decode_aloha(data, adapt_to_pi=self.adapt_to_pi)

        # Get the state. We are padding from 14 to the model action dim.
        state = transforms.pad_to_dim(data["state"], self.action_dim)

        in_images = data["images"]
        if unknown := set(in_images) - set(self.EXPECTED_CAMERAS):
            # Bug fix: the old message claimed the expected cameras were missing,
            # but this branch actually fires on *unknown* camera names.
            raise ValueError(f"Unknown camera(s) {sorted(unknown)}; expected a subset of {self.EXPECTED_CAMERAS}")

        # Assume that base image always exists.
        base_image = in_images["cam_high"]

        images = {
            "base_0_rgb": base_image,
        }
        image_masks = {
            "base_0_rgb": np.True_,
        }

        # Add the extra images. Missing wrist cameras are replaced with black
        # images and masked out so the model can ignore them.
        extra_image_names = {
            "left_wrist_0_rgb": "cam_left_wrist",
            "right_wrist_0_rgb": "cam_right_wrist",
        }
        for dest, source in extra_image_names.items():
            if source in in_images:
                images[dest] = in_images[source]
                image_masks[dest] = np.True_
            else:
                images[dest] = np.zeros_like(base_image)
                image_masks[dest] = np.False_

        inputs = {
            "image": images,
            "image_mask": image_masks,
            "state": state,
        }

        # Actions are only available during training.
        if "actions" in data:
            actions = np.asarray(data["actions"])
            actions = _encode_actions_inv(actions, adapt_to_pi=self.adapt_to_pi)
            inputs["actions"] = transforms.pad_to_dim(actions, self.action_dim)

        if "prompt" in data:
            inputs["prompt"] = data["prompt"]

        return inputs
+
+
@dataclasses.dataclass(frozen=True)
class AlohaOutputs(transforms.DataTransformFn):
    """Converts model outputs back to the Aloha action space."""

    # If true, this will convert the joint and gripper values from the standard Aloha space to
    # the space used by the pi internal runtime which was used to train the base model.
    adapt_to_pi: bool = True

    def __call__(self, data: dict) -> dict:
        # The model may emit padded actions; keep only the 14 Aloha dims.
        aloha_actions = np.asarray(data["actions"][:, :14])
        return {"actions": _encode_actions(aloha_actions, adapt_to_pi=self.adapt_to_pi)}
+
+
+def _joint_flip_mask() -> np.ndarray:
+ """Used to convert between aloha and pi joint angles."""
+ return np.array([1, -1, -1, 1, 1, 1, 1, 1, -1, -1, 1, 1, 1, 1])
+
+
+def _normalize(x, min_val, max_val):
+ return (x - min_val) / (max_val - min_val)
+
+
+def _unnormalize(x, min_val, max_val):
+ return x * (max_val - min_val) + min_val
+
+
def _gripper_to_angular(value):
    """Converts a normalized Aloha (linear) gripper position into a normalized angular one.

    Aloha transforms the gripper positions into a linear space. The following code
    reverses this transformation to be consistent with pi0 which is pretrained in
    angular space.
    """
    # These values are coming from the Aloha code:
    # PUPPET_GRIPPER_POSITION_OPEN, PUPPET_GRIPPER_POSITION_CLOSED
    value = _unnormalize(value, min_val=0.01844, max_val=0.05800)

    # This is the inverse of the angular to linear transformation inside the Interbotix code.
    def linear_to_radian(linear_position, arm_length, horn_radius):
        # clip keeps the argument inside arcsin's valid domain [-1, 1].
        value = (horn_radius**2 + linear_position**2 - arm_length**2) / (2 * horn_radius * linear_position)
        return np.arcsin(np.clip(value, -1.0, 1.0))

    # The constants are taken from the Interbotix code.
    value = linear_to_radian(value, arm_length=0.036, horn_radius=0.022)

    # Normalize to [0, 1].
    # The values 0.4 and 1.5 were measured on an actual Trossen robot.
    return _normalize(value, min_val=0.4, max_val=1.5)
+
+
def _gripper_from_angular(value):
    """Converts from the gripper position used by pi0 to the gripper position used by Aloha.

    Note that the units are still angular but the range is different.
    """
    # The values 0.4 and 1.5 were measured on an actual Trossen robot.
    value = _unnormalize(value, min_val=0.4, max_val=1.5)

    # These values are coming from the Aloha code:
    # PUPPET_GRIPPER_JOINT_OPEN, PUPPET_GRIPPER_JOINT_CLOSE
    return _normalize(value, min_val=-0.6213, max_val=1.4910)
+
+
def _gripper_from_angular_inv(value):
    """Directly inverts the `_gripper_from_angular` transformation."""
    value = _unnormalize(value, min_val=-0.6213, max_val=1.4910)
    return _normalize(value, min_val=0.4, max_val=1.5)
+
+
def _decode_aloha(data: dict, *, adapt_to_pi: bool = False) -> dict:
    """Normalizes a raw Aloha observation (state layout + image format).

    State layout is [left_arm_joints(6), left_gripper(1), right_arm_joints(6),
    right_gripper(1)] — i.e. dim sizes [6, 1, 6, 1], grippers at indices 6/13.

    Args:
        data: Raw observation with "state" and "images" keys.
        adapt_to_pi: Whether to convert joints/grippers into the pi space.

    Returns:
        A new dict with decoded state and channel-last uint8 images.
        Bug fix: previously this function mutated the caller's dict in place;
        it now returns a shallow copy and leaves the input untouched.
    """
    state = np.asarray(data["state"])
    state = _decode_state(state, adapt_to_pi=adapt_to_pi)

    def convert_image(img):
        img = np.asarray(img)
        # Convert to uint8 if using float images.
        if np.issubdtype(img.dtype, np.floating):
            img = (255 * img).astype(np.uint8)
        # Convert from [channel, height, width] to [height, width, channel].
        return einops.rearrange(img, "c h w -> h w c")

    images_dict = {name: convert_image(img) for name, img in data["images"].items()}

    # Build a new dict instead of assigning into the caller's input.
    return {**data, "images": images_dict, "state": state}
+
+
+def _decode_state(state: np.ndarray, *, adapt_to_pi: bool = False) -> np.ndarray:
+ if adapt_to_pi:
+ # Flip the joints.
+ state = _joint_flip_mask() * state
+ # Reverse the gripper transformation that is being applied by the Aloha runtime.
+ state[[6, 13]] = _gripper_to_angular(state[[6, 13]])
+ return state
+
+
+def _encode_actions(actions: np.ndarray, *, adapt_to_pi: bool = False) -> np.ndarray:
+ if adapt_to_pi:
+ # Flip the joints.
+ actions = _joint_flip_mask() * actions
+ actions[:, [6, 13]] = _gripper_from_angular(actions[:, [6, 13]])
+ return actions
+
+
+def _encode_actions_inv(actions: np.ndarray, *, adapt_to_pi: bool = False) -> np.ndarray:
+ if adapt_to_pi:
+ actions = _joint_flip_mask() * actions
+ actions[:, [6, 13]] = _gripper_from_angular_inv(actions[:, [6, 13]])
+ return actions
diff --git a/src/openpi/policies/libero_policy.py b/src/openpi/policies/libero_policy.py
new file mode 100644
index 0000000000000000000000000000000000000000..d21a117ef1edd45aaba2d9d3715c2ea55bd7b78c
--- /dev/null
+++ b/src/openpi/policies/libero_policy.py
@@ -0,0 +1,223 @@
+import dataclasses
+
+import einops
+import numpy as np
+
+from openpi import transforms
+from openpi.models import model as _model
+
+
def make_libero_example() -> dict:
    """Creates a random input example for the Libero policy."""
    image_shape = (224, 224, 3)
    return {
        "observation/state": np.random.rand(8),
        "observation/image": np.random.randint(256, size=image_shape, dtype=np.uint8),
        "observation/wrist_image": np.random.randint(256, size=image_shape, dtype=np.uint8),
        "prompt": "do something",
    }
+
+
+def _parse_image(image) -> np.ndarray:
+ image = np.asarray(image)
+ if np.issubdtype(image.dtype, np.floating):
+ image = (255 * image).astype(np.uint8)
+ if image.shape[0] == 3:
+ image = einops.rearrange(image, "c h w -> h w c")
+ return image
+
+
@dataclasses.dataclass(frozen=True)
class LiberoInputs(transforms.DataTransformFn):
    """
    This class is used to convert inputs to the model to the expected format. It is used for both training and inference.

    For your own dataset, you can copy this class and modify the keys based on the comments below to pipe
    the correct elements of your dataset into the model.
    """

    # The action dimension of the model. Will be used to pad state and actions for pi0 model (not pi0-FAST).
    # Do not change this for your own dataset.
    action_dim: int

    # Determines which model will be used.
    # Do not change this for your own dataset.
    model_type: _model.ModelType = _model.ModelType.PI0

    def __call__(self, data: dict) -> dict:
        """Repacks one raw Libero example into the model's input schema."""
        # We only mask padding for pi0 model, not pi0-FAST. Do not change this for your own dataset.
        mask_padding = self.model_type == _model.ModelType.PI0

        # We pad the proprioceptive input to the action dimension of the model.
        # For pi0-FAST, we don't pad the state. For Libero, we don't need to differentiate
        # since the pi0-FAST action_dim = 7, which is < state_dim = 8, so pad is skipped.
        # Keep this for your own dataset, but if your dataset stores the proprioceptive input
        # in a different key than "observation/state", you should change it below.
        state = transforms.pad_to_dim(data["observation/state"], self.action_dim)

        # Possibly need to parse images to uint8 (H,W,C) since LeRobot automatically
        # stores as float32 (C,H,W), gets skipped for policy inference.
        # Keep this for your own dataset, but if your dataset stores the images
        # in a different key than "observation/image" or "observation/wrist_image",
        # you should change it below.
        # Pi0 models support three image inputs at the moment: one third-person view,
        # and two wrist views (left and right). If your dataset does not have a particular type
        # of image, e.g. wrist images, you can comment it out here and replace it with zeros like we do for the
        # right wrist image below.
        base_image = _parse_image(data["observation/image"])
        wrist_image = _parse_image(data["observation/wrist_image"])

        # Create inputs dict. Do not change the keys in the dict below.
        inputs = {
            "state": state,
            "image": {
                "base_0_rgb": base_image,
                "left_wrist_0_rgb": wrist_image,
                # Pad any non-existent images with zero-arrays of the appropriate shape.
                "right_wrist_0_rgb": np.zeros_like(base_image),
            },
            "image_mask": {
                "base_0_rgb": np.True_,
                "left_wrist_0_rgb": np.True_,
                # Mask any non-existent images with False (if ``mask_padding`` is True).
                "right_wrist_0_rgb": np.False_ if mask_padding else np.True_,
            },
        }

        # Pad actions to the model action dimension. Keep this for your own dataset.
        # Actions are only available during training.
        if "actions" in data:
            # We are padding to the model action dim.
            # For pi0-FAST, this is a no-op (since action_dim = 7).
            actions = transforms.pad_to_dim(data["actions"], self.action_dim)
            inputs["actions"] = actions

        # Pass the prompt (aka language instruction) to the model.
        # Keep this for your own dataset (but modify the key if the instruction is not
        # stored in "prompt"; the output dict always needs to have the key "prompt").
        if "prompt" in data:
            inputs["prompt"] = data["prompt"]

        return inputs
+
+
@dataclasses.dataclass(frozen=True)
class LiberoOutputs(transforms.DataTransformFn):
    """
    This class is used to convert outputs from the model back the the dataset specific format. It is
    used for inference only.

    For your own dataset, you can copy this class and modify the action dimension based on the comments below.
    """

    def __call__(self, data: dict) -> dict:
        # The model emits actions padded to its own action dimension; Libero
        # itself is 7-DoF, so everything past the first 7 dims is padding.
        # For your own dataset, replace `7` with your dataset's action dimension.
        libero_action_dim = 7
        return {"actions": np.asarray(data["actions"][:, :libero_action_dim])}
+
+
@dataclasses.dataclass(frozen=True)
class RiclLiberoInputs(transforms.DataTransformFn):
    """Inputs transform for RICL Libero policy.

    Repacks a flat dict holding one "query_" observation plus
    `num_retrieved_observations` "retrieved_i_" observations into the keyed
    structure the RICL model expects.
    """

    # Model action dimension used to pad states and actions.
    action_dim: int
    # How many retrieved in-context observations accompany each query.
    num_retrieved_observations: int
    model_type: _model.ModelType = _model.ModelType.PI0

    def __call__(self, data: dict) -> dict:
        # Prefixes for every observation in the context window, query last.
        all_prefix = [f"retrieved_{i}_" for i in range(self.num_retrieved_observations)] + ["query_"]

        # Helper mapping one prefixed source observation to model keys.
        def map_single_obs(prefix, data):
            # Source keys may use dot notation ("observation.state", LeRobot
            # style) or slash notation ("observation/state", as in the plain
            # Libero transform). Try dots first, fall back to slashes.
            src_state = f"{prefix}observation.state"
            src_image = f"{prefix}observation.images.image"
            src_wrist = f"{prefix}observation.images.wrist_image"

            if src_state not in data and f"{prefix}observation/state" in data:
                src_state = f"{prefix}observation/state"
                src_image = f"{prefix}observation/image"
                src_wrist = f"{prefix}observation/wrist_image"

            state = transforms.pad_to_dim(data[src_state], self.action_dim)
            base_image = _parse_image(data[src_image])

            # Missing wrist camera -> zero image, masked out below.
            wrist_image = np.zeros_like(base_image)
            if src_wrist in data:
                wrist_image = _parse_image(data[src_wrist])

            return {
                f"{prefix}state": state,
                f"{prefix}image": {
                    "base_0_rgb": base_image,
                    "left_wrist_0_rgb": wrist_image,
                    "right_wrist_0_rgb": np.zeros_like(base_image),
                },
                f"{prefix}image_mask": {
                    "base_0_rgb": np.True_,
                    "left_wrist_0_rgb": np.True_ if src_wrist in data else np.False_,
                    "right_wrist_0_rgb": np.False_,
                },
            }

        inputs_dicts = [map_single_obs(prefix, data) for prefix in all_prefix]

        # collapse to single dict
        inputs = {k: v for d in inputs_dicts for k, v in d.items()}

        # include retrieved actions and, if present, include query actions
        # (query actions are only present during training).
        for prefix in all_prefix[:-1]:
            inputs[f"{prefix}actions"] = transforms.pad_to_dim(data[f"{prefix}actions"], self.action_dim)

        if "query_actions" in data:
            inputs["query_actions"] = transforms.pad_to_dim(data["query_actions"], self.action_dim)

        # Pass through any per-observation prompts (query and retrieved).
        for prefix in all_prefix:
            task_key = f"{prefix}prompt"
            if task_key in data:
                inputs[f"{prefix}prompt"] = data[task_key]

        # Retrieval distances used for action interpolation, if provided.
        if "exp_lamda_distances" in data:
            inputs["exp_lamda_distances"] = data["exp_lamda_distances"]

        # Propagate inference flag
        if "inference_time" in data:
            inputs["inference_time"] = data["inference_time"]

        return inputs
+
@dataclasses.dataclass(frozen=True)
class RiclLiberoOutputs(transforms.DataTransformFn):
    """Converts RICL model outputs back to the 7-DoF Libero action space."""

    def __call__(self, data: dict) -> dict:
        query_actions = data["query_actions"]
        # Drop padded dims beyond Libero's 7 action dims.
        return {"query_actions": np.asarray(query_actions[:, :7])}
+
diff --git a/src/openpi/policies/policy.py b/src/openpi/policies/policy.py
new file mode 100644
index 0000000000000000000000000000000000000000..5aace8d571de9a95cc2d3390bc7713f9762d158e
--- /dev/null
+++ b/src/openpi/policies/policy.py
@@ -0,0 +1,268 @@
+from collections.abc import Sequence
+import logging
+import pathlib
+from typing import Any, TypeAlias
+
+import flax
+import flax.traverse_util
+import jax
+import jax.numpy as jnp
+import numpy as np
+from openpi_client import base_policy as _base_policy
+from typing_extensions import override
+
+from openpi import transforms as _transforms
+from openpi.models import model as _model
+from openpi.models import pi0_fast_ricl as _pi0_fast_ricl
+from openpi.shared import array_typing as at
+from openpi.shared import nnx_utils
+from openpi.policies.utils import embed, embed_with_batches, load_dinov2, EMBED_DIM
+import os
+from autofaiss import build_index
+import logging
+from datetime import datetime
+import json
+from PIL import Image
+logger = logging.getLogger()
+BasePolicy: TypeAlias = _base_policy.BasePolicy
+
+
class Policy(BasePolicy):
    """Runs a model behind input/output transform stacks.

    `infer` applies the input transforms, batches the observation, samples
    actions from the (jitted) model, then unbatches and applies the output
    transforms.
    """

    def __init__(
        self,
        model: _model.BaseModel,
        *,
        rng: at.KeyArrayLike | None = None,
        transforms: Sequence[_transforms.DataTransformFn] = (),
        output_transforms: Sequence[_transforms.DataTransformFn] = (),
        sample_kwargs: dict[str, Any] | None = None,
        metadata: dict[str, Any] | None = None,
    ):
        """Args:
            model: Model whose `sample_actions` is jitted and invoked.
            rng: Optional PRNG key; a fixed default key is used when omitted.
            transforms: Input transforms applied to each observation.
            output_transforms: Output transforms applied to sampled actions.
            sample_kwargs: Extra kwargs forwarded to `sample_actions`.
            metadata: Arbitrary metadata exposed via the `metadata` property.
        """
        self._sample_actions = nnx_utils.module_jit(model.sample_actions)
        self._input_transform = _transforms.compose(transforms)
        self._output_transform = _transforms.compose(output_transforms)
        # Bug fix: `rng or ...` forces a truthiness check on a JAX key array;
        # compare against None explicitly instead.
        self._rng = rng if rng is not None else jax.random.key(0)
        self._sample_kwargs = sample_kwargs or {}
        self._metadata = metadata or {}
        self._model = model

    @override
    def infer(self, obs: dict) -> dict:  # type: ignore[misc]
        # Make a copy since transformations may modify the inputs in place.
        inputs = jax.tree.map(lambda x: x, obs)
        inputs = self._input_transform(inputs)
        # Make a batch and convert to jax.Array.
        inputs = jax.tree.map(lambda x: jnp.asarray(x)[np.newaxis, ...], inputs)

        self._rng, sample_rng = jax.random.split(self._rng)
        outputs = {
            "state": inputs["state"],
            "actions": self._sample_actions(sample_rng, _model.Observation.from_dict(inputs), **self._sample_kwargs),
        }

        # Unbatch and convert to np.ndarray.
        outputs = jax.tree.map(lambda x: np.asarray(x[0, ...]), outputs)
        # Bug fix: replaced a stray print() with debug-level logging.
        logger.debug(f'outputs: {outputs}')
        final_outputs = self._output_transform(outputs)
        logger.info(f'final_outputs: {final_outputs}')
        return final_outputs

    @property
    def metadata(self) -> dict[str, Any]:
        """Arbitrary metadata supplied at construction time."""
        return self._metadata
+
+
def get_action_chunk_at_inference_time(actions, step_idx, action_horizon):
    """Extracts an `action_horizon`-long chunk of actions starting at `step_idx`.

    Steps past the end of the trajectory are padded with zero joint velocities
    combined with the final step's gripper position (last action dim).

    Args:
        actions: [num_steps, action_dim] array of demo actions.
        step_idx: Starting step within the trajectory.
        action_horizon: Number of steps in the returned chunk.

    Returns:
        [action_horizon, action_dim] array.
    """
    num_steps = len(actions)
    action_dim = actions.shape[-1]
    action_chunk = []
    for i in range(action_horizon):
        if step_idx + i < num_steps:
            action_chunk.append(actions[step_idx + i])
        else:
            # Pad: zero joint velocities + the last observed gripper position.
            padding = np.concatenate([np.zeros(action_dim - 1, dtype=np.float32), actions[-1, -1:]], axis=0)
            action_chunk.append(padding)
    action_chunk = np.stack(action_chunk, axis=0)
    # Generalized: previously hard-coded the action dim to 8.
    assert action_chunk.shape == (action_horizon, action_dim), f"{action_chunk.shape=}"
    return action_chunk
+
+
class RiclPolicy(BasePolicy):
    """Retrieval-in-context-learning policy.

    Embeds the query's top-camera image with DINOv2, retrieves the k nearest
    demo steps from a FAISS index built over the demos' top-image embeddings,
    and feeds the retrieved (observation, action) context alongside the query
    observation to the RICL model.
    """

    def __init__(
        self,
        model: _pi0_fast_ricl.Pi0FASTRicl,
        *,
        rng: at.KeyArrayLike | None = None,
        transforms: Sequence[_transforms.DataTransformFn] = (),
        output_transforms: Sequence[_transforms.DataTransformFn] = (),
        sample_kwargs: dict[str, Any] | None = None,
        metadata: dict[str, Any] | None = None,
        demos_dir: str | None = None,
        use_action_interpolation: bool | None = None,
        lamda: float | None = None,
        action_horizon: int | None = None,
    ):
        self._sample_actions = nnx_utils.module_jit(model.sample_actions)
        self._input_transform = _transforms.compose(transforms)
        self._output_transform = _transforms.compose(output_transforms)
        # NOTE(review): `rng or ...` relies on truthiness of a JAX key array;
        # `rng if rng is not None else ...` would be safer — confirm.
        self._rng = rng or jax.random.key(0)
        self._sample_kwargs = sample_kwargs or {}
        self._metadata = metadata or {}
        self._model = model
        self._use_action_interpolation = use_action_interpolation
        self._lamda = lamda
        self._action_horizon = action_horizon
        # setup demos for retrieval: one processed_demo.npz per demo subfolder.
        # NOTE(review): os.listdir order is platform-dependent, so demo_idx
        # assignment is not deterministic across machines; sorted() would fix.
        print()
        logger.info(f'loading demos from {demos_dir}...')
        self._demos = {demo_idx: np.load(f"{demos_dir}/{folder}/processed_demo.npz") for demo_idx, folder in enumerate(os.listdir(demos_dir)) if os.path.isdir(f"{demos_dir}/{folder}")}
        # (episode_idx, step_idx) pair for every demo step, row-aligned with
        # the concatenated embeddings below.
        self._all_indices = np.array([(ep_idx, step_idx) for ep_idx in list(self._demos.keys()) for step_idx in range(self._demos[ep_idx]["actions"].shape[0])])
        _all_embeddings = np.concatenate([self._demos[ep_idx]["top_image_embeddings"] for ep_idx in list(self._demos.keys())])
        assert _all_embeddings.shape == (len(self._all_indices), EMBED_DIM), f"{_all_embeddings.shape=}"
        self._knn_k = self._model.num_retrieved_observations
        print()
        logger.info(f'building retrieval index...')
        # NOTE(review): `knn_index_infos` is never used after this call.
        self._knn_index, knn_index_infos = build_index(embeddings=_all_embeddings, # Note: embeddings have to be float to avoid errors in autofaiss / embedding_reader!
                                                       save_on_disk=False,
                                                       min_nearest_neighbors_to_retrieve=self._knn_k + 5, # default: 20
                                                       max_index_query_time_ms=10, # default: 10
                                                       max_index_memory_usage="25G", # default: "16G"
                                                       current_memory_available="50G", # default: "32G"
                                                       metric_type='l2',
                                                       nb_cores=8, # default: None # "The number of cores to use, by default will use all cores" as seen in https://criteo.github.io/autofaiss/getting_started/quantization.html#the-build-index-command
                                                       )
        # setup the dinov2 model for embedding only
        logger.info('loading dinov2 for image embedding...')
        self._dinov2 = load_dinov2()
        # NOTE(review): this open() handle is never closed; prefer a
        # `with open(...)` context manager.
        self._max_dist = json.load(open(f"assets/max_distance.json", 'r'))['distances']['max']
        print(f'self._max_dist: {self._max_dist} [helpful to carefully check this value in case of any issues]')

    def retrieve(self, obs: dict) -> dict:
        """Augments `obs` with the k nearest demo steps for the query image.

        Returns a new dict containing the original obs plus, per retrieved
        neighbor: state/images/actions/prompt, and optionally the normalized
        exp(-lamda * distance) weights used for action interpolation.
        """
        more_obs = {"inference_time": True}
        # embed the query's top-camera image with DINOv2
        query_embedding = embed(obs["query_top_image"], self._dinov2)
        assert query_embedding.shape == (1, EMBED_DIM), f"{query_embedding.shape=}"
        # retrieve the k nearest demo steps by L2 distance in embedding space
        topk_distance, topk_indices = self._knn_index.search(query_embedding, self._knn_k)
        retrieved_indices = self._all_indices[topk_indices]
        assert retrieved_indices.shape == (1, self._knn_k, 2), f"{retrieved_indices.shape=}"
        # collect retrieved info (observation, action chunk, prompt) per neighbor
        for ct, (ep_idx, step_idx) in enumerate(retrieved_indices[0]):
            for key in ["state", "wrist_image", "top_image", "right_image"]:
                more_obs[f"retrieved_{ct}_{key}"] = self._demos[ep_idx][key][step_idx]
            more_obs[f"retrieved_{ct}_actions"] = get_action_chunk_at_inference_time(self._demos[ep_idx]["actions"], step_idx, self._action_horizon)
            more_obs[f"retrieved_{ct}_prompt"] = self._demos[ep_idx]["prompt"].item()
        # Compute exp_lamda_distances if use_action_interpolation.
        # Distances are measured to the *first* retrieved neighbor's embedding,
        # clipped to the precomputed max distance and normalized to [0, 1].
        if self._use_action_interpolation:
            first_embedding = self._demos[retrieved_indices[0, 0, 0]]["top_image_embeddings"][retrieved_indices[0, 0, 1]]
            distances = [0.0] + [np.linalg.norm(self._demos[ep_idx]["top_image_embeddings"][step_idx:step_idx+1] - first_embedding) for ep_idx, step_idx in retrieved_indices[0, 1:]]
            distances.append(np.linalg.norm(query_embedding - first_embedding))
            distances = np.clip(np.array(distances), 0, self._max_dist) / self._max_dist
            print(f'distances: {distances}')
            more_obs["exp_lamda_distances"] = np.exp(-self._lamda * distances).reshape(-1, 1)
            print(f'exp_lamda_distances: {more_obs["exp_lamda_distances"]}')
        return {**obs, **more_obs}

    def save_obs(self, obs: dict, date: str, prefix: str):
        """Debug helper: dumps all images to one PNG and the rest to JSON.

        Returns the timestamp string used for the output filenames.
        """
        fol = f"obs_logs/{date}/{prefix}"
        os.makedirs(fol, exist_ok=True)
        current_datettime = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        # save all images in one png: one row per camera, one column per
        # retrieved neighbor plus the query (last column).
        big_top_image = []
        big_right_image = []
        big_wrist_image = []
        for ct in range(self._knn_k):
            big_top_image.append(obs[f"retrieved_{ct}_top_image"])
            big_right_image.append(obs[f"retrieved_{ct}_right_image"])
            big_wrist_image.append(obs[f"retrieved_{ct}_wrist_image"])
        big_top_image.append(obs["query_top_image"])
        big_right_image.append(obs["query_right_image"])
        big_wrist_image.append(obs["query_wrist_image"])
        final_image = np.concatenate((np.concatenate(big_top_image, axis=1), np.concatenate(big_right_image, axis=1), np.concatenate(big_wrist_image, axis=1)), axis=0)
        Image.fromarray(final_image).save(f"{fol}/{current_datettime}.png")
        # save everything else (non-image entries) to json
        with open(f"{fol}/{current_datettime}.json", "w") as f:
            everything_else = {k: v.tolist() if isinstance(v, np.ndarray) else v for k, v in obs.items() if "image" not in k}
            everything_else["final_image_shape"] = list(final_image.shape)
            json.dump(everything_else, f, indent=4)
        return current_datettime

    def save_tokenized_inputs(self, inputs: dict, current_datettime: str, date: str, prefix: str):
        """Debug helper: dumps all token-related transformed inputs to JSON."""
        fol = f"obs_logs/{date}/{prefix}"
        os.makedirs(fol, exist_ok=True)
        every_tokenized_input = {}
        for k, v in inputs.items():
            if "token" in k:
                if v is None:
                    every_tokenized_input[k] = v
                    continue
                assert isinstance(v, np.ndarray) and v.dtype in [np.bool_, np.int64], f"{k=}, {v.dtype=}"
                # int32 keeps the JSON small while preserving token ids/masks.
                every_tokenized_input[k] = v.astype(np.int32).tolist()
        with open(f"{fol}/{current_datettime}_token_inputs.json", "w") as f:
            json.dump(every_tokenized_input, f, indent=4)

    @override
    def infer(self, obs: dict, debug: bool = True) -> dict:  # type: ignore[misc]
        """Retrieves context for `obs`, then samples and post-processes actions."""
        # Remove the prefix from the obs; get date; below for saving folder only
        prefix = obs.pop("prefix", "temp")
        date = datetime.now().strftime("%m%d")
        # Retrieval
        print()
        logger.info(f'retrieving...')
        obs = self.retrieve(obs)
        # for debugging, save everything in obs
        if debug:
            logger.info(f'saving obs...')
            current_datettime = self.save_obs(obs, date, prefix)
        # Make a copy since transformations may modify the inputs in place.
        logger.info(f'transforming...')
        inputs = jax.tree.map(lambda x: x, obs)
        inputs = self._input_transform(inputs)
        # for debugging, save tokenized inputs
        if debug:
            logger.info(f'saving tokenized inputs...')
            self.save_tokenized_inputs(inputs, current_datettime, date, prefix)
        # Make a batch and convert to jax.Array.
        logger.info(f'batching...')
        inputs = jax.tree.map(lambda x: jnp.asarray(x)[np.newaxis, ...], inputs)

        self._rng, sample_rng = jax.random.split(self._rng)
        logger.info(f'sampling...')
        outputs = {
            "query_state": inputs["query_state"],
            "query_actions": self._sample_actions(sample_rng, _model.RiclObservation.from_dict(inputs, num_retrieved_observations=self._knn_k), **self._sample_kwargs),
        }

        # Unbatch and convert to np.ndarray.
        logger.info(f'unbatching...')
        outputs = jax.tree.map(lambda x: np.asarray(x[0, ...]), outputs)
        final_outputs = self._output_transform(outputs)
        print(f'final_outputs: {final_outputs}')
        return final_outputs

    @property
    def metadata(self) -> dict[str, Any]:
        """Arbitrary metadata supplied at construction time."""
        return self._metadata
+
+
class PolicyRecorder(_base_policy.BasePolicy):
    """Records the policy's behavior to disk."""

    def __init__(self, policy: _base_policy.BasePolicy, record_dir: str):
        self._policy = policy

        logging.info(f"Dumping policy records to: {record_dir}")
        self._record_dir = pathlib.Path(record_dir)
        self._record_dir.mkdir(parents=True, exist_ok=True)
        self._record_step = 0

    @override
    def infer(self, obs: dict) -> dict:  # type: ignore[misc]
        # Delegate to the wrapped policy, then persist the (input, output) pair.
        results = self._policy.infer(obs)

        record = flax.traverse_util.flatten_dict({"inputs": obs, "outputs": results}, sep="/")

        step_path = self._record_dir / f"step_{self._record_step}"
        self._record_step += 1

        np.save(step_path, np.asarray(record))
        return results
diff --git a/src/openpi/policies/policy_config.py b/src/openpi/policies/policy_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..144eba515746f1c0711b9a04afaf2eaf1bdf29b2
--- /dev/null
+++ b/src/openpi/policies/policy_config.py
@@ -0,0 +1,130 @@
from collections.abc import Sequence
import dataclasses
import logging
import pathlib
from typing import Any

import jax.numpy as jnp

import openpi.models.model as _model
import openpi.policies.policy as _policy
import openpi.shared.download as download
from openpi.training import checkpoints as _checkpoints
from openpi.training import config as _config
import openpi.transforms as transforms

# Fix: `import logging` was previously duplicated after the project imports.
logger = logging.getLogger()
+
@dataclasses.dataclass
class PolicyConfig:
    """Bundle of everything needed to assemble a runnable policy."""

    # Trained model used for action sampling.
    model: _model.BaseModel
    # Normalization statistics applied to inputs / inverted on outputs.
    norm_stats: dict[str, transforms.NormStats]

    # Transforms applied to raw observations before they reach the model.
    input_layers: Sequence[transforms.DataTransformFn]
    # Transforms applied to model outputs before they are returned to the caller.
    output_layers: Sequence[transforms.DataTransformFn]

    model_type: _model.ModelType = _model.ModelType.PI0
    # Prompt injected into inputs that don't already carry one.
    default_prompt: str | None = None
    # Extra kwargs forwarded to the model's `sample_actions`.
    sample_kwargs: dict[str, Any] | None = None
+
+
def create_trained_policy(
    train_config: _config.TrainConfig,
    checkpoint_dir: pathlib.Path | str,
    *,
    repack_transforms: transforms.Group | None = None,
    sample_kwargs: dict[str, Any] | None = None,
    default_prompt: str | None = None,
    norm_stats: dict[str, transforms.NormStats] | None = None,
) -> _policy.Policy:
    """Create a policy from a trained checkpoint.

    Args:
        train_config: The training config to use to create the model.
        checkpoint_dir: The directory to load the model from (local path or remote URL).
        repack_transforms: Optional transforms that will be applied before any other transforms.
        sample_kwargs: The kwargs to pass to the `sample_actions` method. If not provided, the default
            kwargs will be used.
        default_prompt: The default prompt to use for the policy. Will inject the prompt into the input
            data if it doesn't already exist.
        norm_stats: The norm stats to use for the policy. If not provided, the norm stats will be loaded
            from the checkpoint directory.
    """
    repack_transforms = repack_transforms or transforms.Group()
    # Remote URLs (e.g. s3://) are resolved into the local cache first.
    checkpoint_dir = download.maybe_download(str(checkpoint_dir))

    logging.info("Loading model...")
    model = train_config.model.load(_model.restore_params(checkpoint_dir / "params", dtype=jnp.bfloat16))

    data_config = train_config.data.create(train_config.assets_dirs, train_config.model)
    if norm_stats is None:
        # Load norm stats from the checkpoint (not the config assets dir) so inference uses
        # exactly the normalization the original training run used.
        if data_config.asset_id is None:
            raise ValueError("Asset id is required to load norm stats.")
        norm_stats = _checkpoints.load_norm_stats(checkpoint_dir / "assets", data_config.asset_id)

    # Input pipeline: repack -> prompt injection -> data transforms -> normalize -> model transforms.
    input_transforms = [
        *repack_transforms.inputs,
        transforms.InjectDefaultPrompt(default_prompt),
        *data_config.data_transforms.inputs,
        transforms.Normalize(norm_stats, use_quantiles=data_config.use_quantile_norm),
        *data_config.model_transforms.inputs,
    ]
    # Output pipeline mirrors the input pipeline in reverse order.
    output_transforms = [
        *data_config.model_transforms.outputs,
        transforms.Unnormalize(norm_stats, use_quantiles=data_config.use_quantile_norm),
        *data_config.data_transforms.outputs,
        *repack_transforms.outputs,
    ]

    return _policy.Policy(
        model,
        transforms=input_transforms,
        output_transforms=output_transforms,
        sample_kwargs=sample_kwargs,
        metadata=train_config.policy_metadata,
    )
+
+
def create_trained_ricl_policy(
    train_config: _config.TrainConfig,
    checkpoint_dir: pathlib.Path | str,
    demos_dir: str,
    norm_stats: dict[str, transforms.NormStats] | None = None,
) -> _policy.RiclPolicy:
    """Create a ricl policy from a trained checkpoint.

    Args:
        train_config: The training config to use to create the model.
        checkpoint_dir: The directory to load the model from. Accepts a local path or a
            remote URL (e.g. s3://), matching `create_trained_policy`.
        demos_dir: The directory to load the demos from.
        norm_stats: The norm stats to use for the policy. If not provided, the norm stats will be loaded
            from the checkpoint directory.
    """
    # Consistency fix: mirror `create_trained_policy` and resolve remote checkpoints
    # into the local cache instead of assuming `checkpoint_dir` is a local string path.
    checkpoint_dir = download.maybe_download(str(checkpoint_dir))

    logging.info("Loading model...")
    model = train_config.model.load(_model.restore_params(checkpoint_dir / "params", dtype=jnp.bfloat16))

    data_config = train_config.data.create(train_config.assets_dirs, train_config.model)
    if norm_stats is None:
        # We are loading the norm stats from the checkpoint instead of the config assets dir to make sure
        # that the policy is using the same normalization stats as the original training process.
        if data_config.asset_id is None:
            raise ValueError("Asset id is required to load norm stats.")
        norm_stats = _checkpoints.load_norm_stats(checkpoint_dir / "assets", data_config.asset_id)

    return _policy.RiclPolicy(
        model,
        transforms=[
            *data_config.data_transforms.inputs,
            transforms.Normalize(norm_stats, use_quantiles=data_config.use_quantile_norm),
            *data_config.model_transforms.inputs,
        ],
        output_transforms=[
            *data_config.model_transforms.outputs,
            transforms.UnnormalizeRicl(norm_stats, use_quantiles=data_config.use_quantile_norm),
            *data_config.data_transforms.outputs,
        ],
        metadata=train_config.policy_metadata,
        demos_dir=demos_dir,
        use_action_interpolation=train_config.model.use_action_interpolation,
        lamda=train_config.model.lamda,
        action_horizon=train_config.model.action_horizon,
    )
\ No newline at end of file
diff --git a/src/openpi/policies/policy_test.py b/src/openpi/policies/policy_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..f729c50aea0a79a2c17884a04ed94da097321c67
--- /dev/null
+++ b/src/openpi/policies/policy_test.py
@@ -0,0 +1,34 @@
+from openpi_client import action_chunk_broker
+import pytest
+
+from openpi.policies import aloha_policy
+from openpi.policies import policy_config as _policy_config
+from openpi.training import config as _config
+
+
@pytest.mark.manual
def test_infer():
    """Smoke test: a trained aloha-sim policy returns a full action chunk."""
    config = _config.get_config("pi0_aloha_sim")
    policy = _policy_config.create_trained_policy(config, "s3://openpi-assets/checkpoints/pi0_aloha_sim")

    outputs = policy.infer(aloha_policy.make_aloha_example())

    expected_shape = (config.model.action_horizon, 14)
    assert outputs["actions"].shape == expected_shape
+
+
@pytest.mark.manual
def test_broker():
    """Smoke test: the chunk broker yields one 14-dim action per call."""
    config = _config.get_config("pi0_aloha_sim")
    policy = _policy_config.create_trained_policy(config, "s3://openpi-assets/checkpoints/pi0_aloha_sim")

    # Only execute the first half of the chunk.
    half_horizon = config.model.action_horizon // 2
    broker = action_chunk_broker.ActionChunkBroker(policy, action_horizon=half_horizon)

    example = aloha_policy.make_aloha_example()
    for _ in range(config.model.action_horizon):
        assert broker.infer(example)["actions"].shape == (14,)
diff --git a/src/openpi/policies/utils.py b/src/openpi/policies/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..012316b286fbf6a26969658ddad9f494afd1ff2f
--- /dev/null
+++ b/src/openpi/policies/utils.py
@@ -0,0 +1,116 @@
+"""
+This is a copy of the utils.py file in the ricl_droid_preprocessing directory.
+Also includes init_logging function from the scripts/train_pi0_fast_ricl.py file.
+"""
+
+import os
+from datetime import datetime
+import numpy as np
+from openpi_client.image_tools import resize_with_pad as resize_with_pad_numpy
+import logging
+import torch
+import torchvision.transforms as TorchVT
+import math
# Per-channel RGB statistics used by torchvision's ImageNet-pretrained models.
IMAGENET_DEFAULT_MEAN = (0.485, 0.456, 0.406)
IMAGENET_DEFAULT_STD = (0.229, 0.224, 0.225)
# How to reduce DINOv2 patch tokens into a single embedding vector.
EMBEDDING_TYPE = '64PATCHES' # 'CLS', 'AVG', '16PATCHES'
# NOTE(review): this expression only works for '<N>PATCHES' values; choosing
# 'CLS' or 'AVG' would raise ValueError here ("int('CLS')") — confirm intended.
EMBED_DIM = int(EMBEDDING_TYPE.split('PATCHES')[0])*768 # based on the choice of the embedding type arg above
+
def init_logging():
    """Custom logging format for better readability.

    Installs a compact formatter (single-letter level names) on the root
    logger's first handler. Fix: the previous version indexed
    `logger.handlers[0]` unconditionally, which raises IndexError when the
    root logger has no handlers yet (i.e. `logging.basicConfig` was never
    called); we now add a default StreamHandler in that case.
    """
    level_mapping = {"DEBUG": "D", "INFO": "I", "WARNING": "W", "ERROR": "E", "CRITICAL": "C"}

    class CustomFormatter(logging.Formatter):
        def format(self, record):
            # Shorten the level name in-place before standard formatting.
            record.levelname = level_mapping.get(record.levelname, record.levelname)
            return super().format(record)

    formatter = CustomFormatter(
        fmt="%(asctime)s.%(msecs)03d [%(levelname)s] %(message)-80s (%(process)d:%(filename)s:%(lineno)s)",
        datefmt="%H:%M:%S",
    )

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    if not logger.handlers:
        # Root logger is unconfigured: give it a stderr handler to attach the formatter to.
        logger.addHandler(logging.StreamHandler())
    logger.handlers[0].setFormatter(formatter)
+
def get_time():
    """Return the current local wall-clock time as 'YYYY-MM-DD HH:MM:SS'."""
    return f"{datetime.now():%Y-%m-%d %H:%M:%S}"
+
def myprint(s):
    """Print `s` prefixed with the current timestamp from `get_time()`."""
    print("{}: {}".format(get_time(), s))
+
def load_dinov2():
    """Load the DINOv2 ViT-B/14 backbone in eval mode (on GPU when available).

    Note: `torch.hub.load` downloads the model from GitHub on first use, so
    this requires network access (or a warm torch hub cache).
    """
    dinov2 = torch.hub.load('facebookresearch/dinov2', 'dinov2_vitb14')
    dinov2.eval()
    if torch.cuda.is_available():
        dinov2 = dinov2.cuda()
    return dinov2
+
def process_dinov2(images):
    """Convert a uint8 image (or batch) into a normalized float tensor for DINOv2.

    Accepts channel-first or channel-last layouts at any resolution; returns a
    (N, 3, 224, 224) float tensor normalized with ImageNet statistics, moved to
    GPU when available. Assumes 3-channel RGB input — the layout heuristics below
    key off a channel dimension of exactly 3 (TODO confirm for other inputs).
    """
    assert isinstance(images, np.ndarray)
    assert images.dtype == np.uint8
    # if batch dimension not present, add it
    if len(images.shape) == 3:
        images = images[np.newaxis, ...]
    # if resolution is not 224x224, change resolution to 224x224.
    if not (images.shape[1:3] == (224, 224) or images.shape[2:4] == (224, 224)):
        # if channel first, convert to channel last before resolution change
        if images.shape[1] == 3:
            images = images.transpose(0, 2, 3, 1)
        # actual resolution change
        images = resize_with_pad_numpy(images, 224, 224)
    # if channel last, convert to channel first before pytorch steps
    if images.shape[3] == 3:
        images = images.transpose(0, 3, 1, 2)
    # convert uint8 numpy arrays to float32 tensors and normalize from [0,255] to [0,1]
    images = torch.from_numpy(images).float() / 255.0
    # normalize with imagenet mean and std
    normalize = TorchVT.Normalize(IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD)
    images = normalize(images)
    # if gpu is available, move to gpu
    if torch.cuda.is_available():
        images = images.cuda()
    return images
+
def embed(images, dinov2):
    """Embed a batch of uint8 images with DINOv2, reduced per EMBEDDING_TYPE.

    Returns a numpy array of shape (batch, 768) for 'CLS'/'AVG', or
    (batch, N_patches*768) for '<N>PATCHES' (patch tokens pooled over an
    evenly-tiled sqrt(N) x sqrt(N) spatial grid of the 16x16 token map).
    """
    images = process_dinov2(images)

    with torch.no_grad():
        features = dinov2.forward_features(images) # dict_keys(['x_norm_clstoken', 'x_norm_regtokens', 'x_norm_patchtokens', 'x_prenorm', 'masks']) # shape of x_norm_regtokens = (batch_size, 0, 768)
        if EMBEDDING_TYPE == 'CLS': # output of the CLS token
            batch_embeddings = features["x_norm_clstoken"] # (batch_size, 768)
        elif EMBEDDING_TYPE == 'AVG': # average of num_tokens (e.g., num_tokens = 256 for 224x224 image since patch size is 14)
            batch_embeddings = features["x_norm_patchtokens"] # (batch_size, num_tokens, 768)
            batch_embeddings = batch_embeddings.mean(dim=1) # (batch_size, 768)
        elif 'PATCHES' in EMBEDDING_TYPE: # reduces 256 patches to N patches
            batch_embeddings = features["x_norm_patchtokens"] # (batch_size, 256, 768)
            batch_size = batch_embeddings.shape[0]
            N_patches = int(EMBEDDING_TYPE.split('PATCHES')[0])
            assert 256 % N_patches == 0, f"256 is not divisible by {N_patches=}"
            assert math.sqrt(N_patches) ** 2 == N_patches, f"{N_patches=} must be a perfect square"
            patches = []
            rows, cols = 16, 16 # since 16*16 == 256
            patch_rows, patch_cols = int(rows // math.sqrt(N_patches)), int(cols // math.sqrt(N_patches))
            # Tile the 16x16 token grid into sqrt(N) x sqrt(N) rectangular cells;
            # each cell's tokens are mean-pooled into one 768-dim vector.
            for i in range(0, rows, patch_rows): # Step by patch height
                for j in range(0, cols, patch_cols): # Step by patch width
                    patch_indices_2d = [(r, c) for r in range(i, i + patch_rows) for c in range(j, j + patch_cols)]
                    patch_indices_in_flattened = [r * cols + c for r, c in patch_indices_2d]
                    # print(patch_indices_in_flattened)
                    patch = batch_embeddings[:, patch_indices_in_flattened, :] # (batch_size, 16, 768)
                    assert patch.shape == (batch_size, patch_rows*patch_cols, 768), f"{patch.shape=}"
                    patch = patch.mean(dim=1) # (batch_size, 768)
                    assert patch.shape == (batch_size, 768), f"{patch.shape=}"
                    patches.append(patch)
            assert len(patches) == N_patches, f"{len(patches)=} {N_patches=}"
            batch_embeddings = torch.cat(patches, dim=1) # (batch_size, 16*768)

    return batch_embeddings.cpu().numpy()
+
def embed_with_batches(images, dinov2, batch_size=256):
    """Embed `images` in chunks of `batch_size` to bound peak memory, then
    concatenate the per-chunk embeddings along the batch axis."""
    chunks = [
        embed(images[start:start + batch_size], dinov2)
        for start in range(0, len(images), batch_size)
    ]
    return np.concatenate(chunks, axis=0)
diff --git a/src/openpi/serving/websocket_policy_server.py b/src/openpi/serving/websocket_policy_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..33ebd14560b7fa08ed5ffb27b98a24b94712b755
--- /dev/null
+++ b/src/openpi/serving/websocket_policy_server.py
@@ -0,0 +1,63 @@
+import asyncio
+import logging
+import traceback
+
+from openpi_client import base_policy as _base_policy
+from openpi_client import msgpack_numpy
+import websockets.asyncio.server
+import websockets.frames
+
+
class WebsocketPolicyServer:
    """Serves a policy using the websocket protocol. See websocket_client_policy.py for a client implementation.

    Currently only implements the `load` and `infer` methods.
    """

    def __init__(
        self,
        policy: _base_policy.BasePolicy,
        host: str = "0.0.0.0",
        port: int = 8000,
        metadata: dict | None = None,
    ) -> None:
        self._policy = policy
        self._host = host
        self._port = port
        # Metadata is sent to every client immediately after it connects.
        self._metadata = metadata or {}
        logging.getLogger("websockets.server").setLevel(logging.INFO)

    def serve_forever(self) -> None:
        """Blocking entry point: runs the async server until interrupted."""
        asyncio.run(self.run())

    async def run(self):
        """Start the websocket server and serve connections forever."""
        # compression=None and max_size=None: observations are large binary
        # payloads, so avoid per-message compression and the default size cap.
        async with websockets.asyncio.server.serve(
            self._handler,
            self._host,
            self._port,
            compression=None,
            max_size=None,
        ) as server:
            await server.serve_forever()

    async def _handler(self, websocket: websockets.asyncio.server.ServerConnection):
        """Per-connection loop: recv msgpack observation -> infer -> send msgpack action."""
        logging.info(f"Connection from {websocket.remote_address} opened")
        packer = msgpack_numpy.Packer()

        # First frame of every connection is the server metadata.
        await websocket.send(packer.pack(self._metadata))

        while True:
            try:
                obs = msgpack_numpy.unpackb(await websocket.recv())
                action = self._policy.infer(obs)
                await websocket.send(packer.pack(action))
            except websockets.ConnectionClosed:
                logging.info(f"Connection from {websocket.remote_address} closed")
                break
            except Exception:
                # NOTE(review): the full traceback is sent to the client; fine for
                # trusted local clients, but an information leak if exposed publicly.
                await websocket.send(traceback.format_exc())
                await websocket.close(
                    code=websockets.frames.CloseCode.INTERNAL_ERROR,
                    reason="Internal server error. Traceback included in previous frame.",
                )
                raise
diff --git a/src/openpi/shared/__init__.py b/src/openpi/shared/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/openpi/shared/__pycache__/__init__.cpython-310.pyc b/src/openpi/shared/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..435ae3e48dd2f14fd2f98e384d2f1df1130c3a25
Binary files /dev/null and b/src/openpi/shared/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src/openpi/shared/__pycache__/array_typing.cpython-310.pyc b/src/openpi/shared/__pycache__/array_typing.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bd20d3c3f948e688439373f0fa0a96cc285d80c4
Binary files /dev/null and b/src/openpi/shared/__pycache__/array_typing.cpython-310.pyc differ
diff --git a/src/openpi/shared/__pycache__/download.cpython-310.pyc b/src/openpi/shared/__pycache__/download.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b7e50047d57e5031283ca86aba47ed0aa4ad417e
Binary files /dev/null and b/src/openpi/shared/__pycache__/download.cpython-310.pyc differ
diff --git a/src/openpi/shared/__pycache__/image_tools.cpython-310.pyc b/src/openpi/shared/__pycache__/image_tools.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bb8538af464ed06f88a136319ba4a9889cbe17e7
Binary files /dev/null and b/src/openpi/shared/__pycache__/image_tools.cpython-310.pyc differ
diff --git a/src/openpi/shared/__pycache__/nnx_utils.cpython-310.pyc b/src/openpi/shared/__pycache__/nnx_utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..44c2955eb5ec3e1f650f66746ebc244f0922ddaf
Binary files /dev/null and b/src/openpi/shared/__pycache__/nnx_utils.cpython-310.pyc differ
diff --git a/src/openpi/shared/__pycache__/normalize.cpython-310.pyc b/src/openpi/shared/__pycache__/normalize.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ddbd5a34dedbabd24ae7dca2137f41cf007a5fcf
Binary files /dev/null and b/src/openpi/shared/__pycache__/normalize.cpython-310.pyc differ
diff --git a/src/openpi/shared/array_typing.py b/src/openpi/shared/array_typing.py
new file mode 100644
index 0000000000000000000000000000000000000000..47619c8b5be20a033a350ef1be6437611a59b2b7
--- /dev/null
+++ b/src/openpi/shared/array_typing.py
@@ -0,0 +1,73 @@
+import contextlib
+import functools as ft
+import inspect
+from typing import TypeAlias, TypeVar, cast
+
+import beartype
+import jax
+import jax._src.tree_util as private_tree_util
+import jax.core
+from jaxtyping import Array # noqa: F401
+from jaxtyping import ArrayLike
+from jaxtyping import Bool # noqa: F401
+from jaxtyping import DTypeLike # noqa: F401
+from jaxtyping import Float
+from jaxtyping import Int # noqa: F401
+from jaxtyping import Key # noqa: F401
+from jaxtyping import Num # noqa: F401
+from jaxtyping import PyTree
+from jaxtyping import Real # noqa: F401
+from jaxtyping import UInt8 # noqa: F401
+from jaxtyping import config
+from jaxtyping import jaxtyped
+import jaxtyping._decorator
+
+# patch removed as it is incompatible with jaxtyping >= 0.3.x
+
# PRNG key or anything accepted by jax.random APIs.
KeyArrayLike: TypeAlias = jax.typing.ArrayLike
# Arbitrary nested structure (pytree) of float arrays, e.g. model weights.
Params: TypeAlias = PyTree[Float[ArrayLike, "..."]]

T = TypeVar("T")


# Runtime type-checking decorator. Currently a no-op: the jaxtyping/beartype
# wrapper is kept commented out below (disabled after the jaxtyping >= 0.3.x
# patch removal above) — restore the commented line to re-enable checking.
def typecheck(t: T) -> T:
    # return cast(T, ft.partial(jaxtyped, typechecker=beartype.beartype)(t))
    return t
+
+
@contextlib.contextmanager
def disable_typechecking():
    """Context manager that temporarily disables jaxtyping runtime checks.

    Fix: the restore step now runs in a `finally` block, so the previous
    `jaxtyping_disable` value is reinstated even when the wrapped body raises
    (the original version would leave typechecking permanently disabled).
    """
    initial = config.jaxtyping_disable
    config.update("jaxtyping_disable", True)  # noqa: FBT003
    try:
        yield
    finally:
        config.update("jaxtyping_disable", initial)
+
+
def check_pytree_equality(*, expected: PyTree, got: PyTree, check_shapes: bool = False, check_dtypes: bool = False):
    """Checks that two PyTrees have the same structure and optionally checks shapes and dtypes. Creates a much nicer
    error message than if `jax.tree.map` is naively used on PyTrees with different structures.
    """
    structure_errors = list(private_tree_util.equality_errors(expected, got))
    if structure_errors:
        # One bullet line per mismatching keypath.
        detail_lines = [
            f" - at keypath '{jax.tree_util.keystr(path)}': expected {thing1}, got {thing2}, so {explanation}.\n"
            for path, thing1, thing2, explanation in structure_errors
        ]
        raise ValueError("PyTrees have different structure:\n" + "\n".join(detail_lines))

    if not (check_shapes or check_dtypes):
        return

    def _check_leaf(kp, x, y):
        # Leaf-wise comparison; raises on the first mismatch encountered.
        if check_shapes and x.shape != y.shape:
            raise ValueError(f"Shape mismatch at {jax.tree_util.keystr(kp)}: expected {x.shape}, got {y.shape}")

        if check_dtypes and x.dtype != y.dtype:
            raise ValueError(f"Dtype mismatch at {jax.tree_util.keystr(kp)}: expected {x.dtype}, got {y.dtype}")

    jax.tree_util.tree_map_with_path(_check_leaf, expected, got)
diff --git a/src/openpi/shared/download.py b/src/openpi/shared/download.py
new file mode 100644
index 0000000000000000000000000000000000000000..8f3d6f986b976c4c3763d0e3dea70855571afe41
--- /dev/null
+++ b/src/openpi/shared/download.py
@@ -0,0 +1,330 @@
+import concurrent.futures
+import datetime
+import getpass
+import logging
+import os
+import pathlib
+import re
+import shutil
+import stat
+import time
+import urllib.parse
+
+import boto3
+import boto3.s3.transfer as s3_transfer
+import botocore
+import filelock
+import fsspec
+import fsspec.generic
+import s3transfer.futures as s3_transfer_futures
+import tqdm_loggable.auto as tqdm
+from types_boto3_s3.service_resource import ObjectSummary
+
+# Environment variable to control cache directory path, ~/.cache/openpi will be used by default.
+_OPENPI_DATA_HOME = "OPENPI_DATA_HOME"
+
+logger = logging.getLogger(__name__)
+
+
def get_cache_dir() -> pathlib.Path:
    """Resolve, create, and open up permissions on the local openpi cache dir.

    Honors the OPENPI_DATA_HOME env var; otherwise defaults to ~/.cache/openpi,
    or a per-user directory under /mnt/weka when that mount exists.
    """
    if os.path.exists("/mnt/weka"):  # noqa: PTH110
        default_dir = f"/mnt/weka/{getpass.getuser()}/.cache/openpi"
    else:
        default_dir = "~/.cache/openpi"

    cache_dir = pathlib.Path(os.getenv(_OPENPI_DATA_HOME, default_dir)).expanduser().resolve()
    cache_dir.mkdir(parents=True, exist_ok=True)
    _set_folder_permission(cache_dir)
    return cache_dir
+
+
def maybe_download(url: str, *, force_download: bool = False, **kwargs) -> pathlib.Path:
    """Download a file or directory from a remote filesystem to the local cache, and return the local path.

    If the local file already exists, it will be returned directly.

    It is safe to call this function concurrently from multiple processes.
    See `get_cache_dir` for more details on the cache directory.

    Args:
        url: URL to the file to download.
        force_download: If True, the file will be downloaded even if it already exists in the cache.
        **kwargs: Additional arguments to pass to fsspec.

    Returns:
        Local path to the downloaded file or directory. That path is guaranteed to exist and is absolute.
    """
    # Don't use fsspec to parse the url to avoid unnecessary connection to the remote filesystem.
    parsed = urllib.parse.urlparse(url)

    # Short circuit if this is a local path.
    if parsed.scheme == "":
        path = pathlib.Path(url)
        if not path.exists():
            raise FileNotFoundError(f"File not found at {url}")
        return path.resolve()

    cache_dir = get_cache_dir()

    # Cache layout mirrors the remote: <cache>/<netloc>/<path>.
    local_path = cache_dir / parsed.netloc / parsed.path.strip("/")
    local_path = local_path.resolve()

    # Check if the cache should be invalidated.
    invalidate_cache = False
    if local_path.exists():
        if force_download or _should_invalidate_cache(cache_dir, local_path):
            invalidate_cache = True
        else:
            # Fast path: cache hit, no lock needed.
            return local_path

    try:
        # NOTE(review): with_suffix(".lock") *replaces* an existing suffix, so
        # "a.bin" and "a.tar" would share the lock "a.lock" — confirm acceptable.
        lock_path = local_path.with_suffix(".lock")
        with filelock.FileLock(lock_path):
            # Ensure consistent permissions for the lock file.
            _ensure_permissions(lock_path)
            # First, remove the existing cache if it is expired.
            if invalidate_cache:
                logger.info(f"Removing expired cached entry: {local_path}")
                if local_path.is_dir():
                    shutil.rmtree(local_path)
                else:
                    local_path.unlink()

            # Download the data to a local cache. Downloading into a ".partial"
            # scratch path and renaming afterwards keeps half-finished downloads
            # out of the cache location other processes check.
            logger.info(f"Downloading {url} to {local_path}")
            scratch_path = local_path.with_suffix(".partial")

            if _is_openpi_url(url):
                # Download without credentials.
                _download_boto3(
                    url,
                    scratch_path,
                    boto_session=boto3.Session(
                        region_name="us-west-1",
                    ),
                    botocore_config=botocore.config.Config(signature_version=botocore.UNSIGNED),
                )
            elif url.startswith("s3://"):
                # Download with default boto3 credentials.
                _download_boto3(url, scratch_path)
            else:
                _download_fsspec(url, scratch_path, **kwargs)

            shutil.move(scratch_path, local_path)
            _ensure_permissions(local_path)

    except PermissionError as e:
        msg = (
            f"Local file permission error was encountered while downloading {url}. "
            f"Please try again after removing the cached data using: `rm -rf {local_path}*`"
        )
        raise PermissionError(msg) from e

    return local_path
+
+
def _download_fsspec(url: str, local_path: pathlib.Path, **kwargs) -> None:
    """Download a file or directory via fsspec with a polling progress bar.

    The transfer runs on a worker thread while the main thread polls the bytes
    written to `local_path` once per second to update the progress bar.
    """
    fs, _ = fsspec.core.url_to_fs(url, **kwargs)
    info = fs.info(url)
    if is_dir := (info["type"] == "directory"):  # noqa: SIM108
        total_size = fs.du(url)
    else:
        total_size = info["size"]
    with tqdm.tqdm(total=total_size, unit="iB", unit_scale=True, unit_divisor=1024) as pbar:
        executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
        future = executor.submit(fs.get, url, local_path, recursive=is_dir)
        while not future.done():
            # Progress = bytes currently on disk under local_path.
            current_size = sum(f.stat().st_size for f in [*local_path.rglob("*"), local_path] if f.is_file())
            pbar.update(current_size - pbar.n)
            time.sleep(1)
        # Snap the bar to 100% once the transfer future completes.
        pbar.update(total_size - pbar.n)
+
+
def _download_boto3(
    url: str,
    local_path: pathlib.Path,
    *,
    boto_session: boto3.Session | None = None,
    botocore_config: botocore.config.Config | None = None,
    workers: int = 16,
) -> None:
    """Download a file from the OpenPI S3 bucket using boto3. This is a more performant version of download but can
    only handle s3 urls. In openpi repo, this is mainly used to access assets in S3 with higher throughput.

    Input:
        url: URL to openpi checkpoint path.
        local_path: local path to the downloaded file.
        boto_session: Optional boto3 session, will create by default if not provided.
        botocore_config: Optional botocore config.
        workers: number of workers for downloading.

    Raises:
        ValueError: if `url` is not an s3:// URL.
        FileNotFoundError: if no objects exist under the prefix.
    """

    def validate_and_parse_url(maybe_s3_url: str) -> tuple[str, str]:
        # Returns (bucket, key-prefix) from an s3://bucket/prefix URL.
        parsed = urllib.parse.urlparse(maybe_s3_url)
        if parsed.scheme != "s3":
            raise ValueError(f"URL must be an S3 URL (s3://), got: {maybe_s3_url}")
        bucket_name = parsed.netloc
        prefix = parsed.path.strip("/")
        return bucket_name, prefix

    bucket_name, prefix = validate_and_parse_url(url)
    session = boto_session or boto3.Session()

    s3api = session.resource("s3", config=botocore_config)
    bucket = s3api.Bucket(bucket_name)

    # Check if prefix points to an object and if not, assume that it's a directory and add a trailing slash.
    try:
        bucket.Object(prefix).load()
    except botocore.exceptions.ClientError:
        # Make sure to append a "/" to prevent getting objects from a different directory that shares the same prefix.
        # For example, if we are downloading from s3://bucket/foo, we don't want to also download from s3://bucket/foobar.
        if not prefix.endswith("/"):
            prefix = prefix + "/"

    # Get all candidate objects, filter out directories.
    objects = [x for x in bucket.objects.filter(Prefix=prefix) if not x.key.endswith("/")]
    if not objects:
        raise FileNotFoundError(f"No objects found at {url}")

    total_size = sum(obj.size for obj in objects)

    s3t = _get_s3_transfer_manager(session, workers, botocore_config=botocore_config)

    def transfer(
        s3obj: ObjectSummary, dest_path: pathlib.Path, progress_func
    ) -> s3_transfer_futures.TransferFuture | None:
        # Skip objects already on disk with a matching size (size-only check —
        # no ETag/mtime comparison), but still advance the progress bar.
        if dest_path.exists():
            dest_stat = dest_path.stat()
            if s3obj.size == dest_stat.st_size:
                progress_func(s3obj.size)
                return None
        dest_path.parent.mkdir(parents=True, exist_ok=True)
        return s3t.download(
            bucket_name,
            s3obj.key,
            str(dest_path),
            subscribers=[
                s3_transfer.ProgressCallbackInvoker(progress_func),
            ],
        )

    try:
        with tqdm.tqdm(total=total_size, unit="iB", unit_scale=True, unit_divisor=1024) as pbar:
            if os.getenv("IS_DOCKER", "false").lower() == "true":
                # tqdm is bugged when using docker-compose. See https://github.com/tqdm/tqdm/issues/771
                def update_progress(size: int) -> None:
                    pbar.update(size)
                    print(pbar)
            else:

                def update_progress(size: int) -> None:
                    pbar.update(size)

            # Queue all transfers, then wait; the transfer manager fans the
            # downloads out across `workers` threads.
            futures = []
            for obj in objects:
                relative_path = pathlib.Path(obj.key).relative_to(prefix)
                dest_path = local_path / relative_path
                if future := transfer(obj, dest_path, update_progress):
                    futures.append(future)
            for future in futures:
                future.result()
    finally:
        s3t.shutdown()
+
+
def _get_s3_transfer_manager(
    session: boto3.Session, workers: int, botocore_config: botocore.config.Config | None = None
) -> s3_transfer.TransferManager:
    """Build a threaded S3 TransferManager sized for `workers` concurrent downloads."""
    # Add a few extra connections to prevent exceeding the pool size.
    pool_config = botocore.config.Config(max_pool_connections=workers + 2)
    if botocore_config is not None:
        pool_config = pool_config.merge(botocore_config)
    client = session.client("s3", config=pool_config)
    return s3_transfer.create_transfer_manager(
        client,
        s3_transfer.TransferConfig(
            use_threads=True,
            max_concurrency=workers,
        ),
    )
+
+
def _set_permission(path: pathlib.Path, target_permission: int):
    """chmod requires executable permission to be set, so we skip if the permission is already match with the target."""
    current_mode = path.stat().st_mode
    if current_mode & target_permission == target_permission:
        logger.debug(f"Skipping {path} because it already has correct permissions")
        return
    path.chmod(target_permission)
    logger.debug(f"Set {path} to {target_permission}")
+
+
def _set_folder_permission(folder_path: pathlib.Path) -> None:
    """Set folder permission to be read, write and searchable."""
    # 0o777: host user and containerized user both need full access to the shared cache.
    _set_permission(folder_path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
+
+
def _ensure_permissions(path: pathlib.Path) -> None:
    """Since we are sharing cache directory with containerized runtime as well as training script, we need to
    ensure that the cache directory has the correct permissions.
    """

    def _open_up_parents(target: pathlib.Path) -> None:
        # Make every folder between the cache root and `target` fully accessible.
        cache_dir = get_cache_dir()
        current = cache_dir
        for part in target.relative_to(cache_dir).parts:
            current = current / part
            _set_folder_permission(current)

    def _make_file_accessible(file_path: pathlib.Path) -> None:
        """Set all files to be read & writable, if it is a script, keep it as a script."""
        rw_bits = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH | stat.S_IWOTH
        if file_path.stat().st_mode & 0o100:
            _set_permission(file_path, rw_bits | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        else:
            _set_permission(file_path, rw_bits)

    _open_up_parents(path)
    for root, dir_names, file_names in os.walk(str(path)):
        root_path = pathlib.Path(root)
        for file_name in file_names:
            _make_file_accessible(root_path / file_name)

        for dir_name in dir_names:
            _set_folder_permission(root_path / dir_name)
+
+
+def _is_openpi_url(url: str) -> bool:
+ """Check if the url is an OpenPI S3 bucket url."""
+ return url.startswith("s3://openpi-assets/")
+
+
+def _get_mtime(year: int, month: int, day: int) -> float:
+ """Get the mtime of a given date at midnight UTC."""
+ date = datetime.datetime(year, month, day, tzinfo=datetime.timezone.utc)
+ return time.mktime(date.timetuple())
+
+
# Map of relative paths, defined as regular expressions, to expiration timestamps (mtime format).
# Partial matching will be used from top to bottom and the first match will be chosen.
# Cached entries will be retained only if they are newer than the expiration timestamp.
# Keep more specific prefixes above the catch-all "checkpoints/" entry so they win the match.
_INVALIDATE_CACHE_DIRS: dict[re.Pattern, float] = {
    re.compile("openpi-assets/checkpoints/pi0_aloha_pen_uncap"): _get_mtime(2025, 2, 17),
    re.compile("openpi-assets/checkpoints/pi0_libero"): _get_mtime(2025, 2, 6),
    re.compile("openpi-assets/checkpoints/"): _get_mtime(2025, 2, 3),
}
+
+
def _should_invalidate_cache(cache_dir: pathlib.Path, local_path: pathlib.Path) -> bool:
    """Invalidate the cache if it is expired. Return True if the cache was invalidated."""

    assert local_path.exists(), f"File not found at {local_path}"

    relative = str(local_path.relative_to(cache_dir))
    # First pattern that matches decides; remaining patterns are ignored.
    expire_time = next(
        (stamp for pattern, stamp in _INVALIDATE_CACHE_DIRS.items() if pattern.match(relative)),
        None,
    )
    if expire_time is None:
        return False
    # Remove if not newer than the expiration timestamp.
    return local_path.stat().st_mtime <= expire_time
diff --git a/src/openpi/shared/download_test.py b/src/openpi/shared/download_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..0bfcdce3405ba6aac90bbbb50b9e900fb5a1a3b9
--- /dev/null
+++ b/src/openpi/shared/download_test.py
@@ -0,0 +1,54 @@
+import pathlib
+
+import pytest
+
+import openpi.shared.download as download
+
+
@pytest.fixture(scope="session", autouse=True)
def set_openpi_data_home(tmp_path_factory):
    # Redirect the openpi download cache into a session-scoped temp directory so
    # the tests below never read from (or pollute) the user's real cache dir.
    temp_dir = tmp_path_factory.mktemp("openpi_data")
    with pytest.MonkeyPatch().context() as mp:
        mp.setenv("OPENPI_DATA_HOME", str(temp_dir))
        yield
+
+
def test_download_local(tmp_path: pathlib.Path):
    """A local path is returned as-is (resolved); a missing path raises."""
    local_file = tmp_path / "local"
    local_file.touch()

    assert download.maybe_download(str(local_file)) == local_file

    with pytest.raises(FileNotFoundError):
        download.maybe_download("bogus")
+
+
def test_download_s3_dir():
    """Downloading an S3 directory twice resolves to the same cached path."""
    remote_path = "s3://openpi-assets/testdata/random"

    cached = download.maybe_download(remote_path)
    assert cached.exists()

    assert download.maybe_download(remote_path) == cached
+
+
def test_download_s3():
    """Downloading a single S3 object twice resolves to the same cached path."""
    remote_path = "s3://openpi-assets/testdata/random/random_512kb.bin"

    cached = download.maybe_download(remote_path)
    assert cached.exists()

    assert download.maybe_download(remote_path) == cached
+
+
def test_download_fsspec():
    """Non-S3 URLs go through the fsspec path and are cached the same way."""
    remote_path = "gs://big_vision/paligemma_tokenizer.model"

    cached = download.maybe_download(remote_path, gs={"token": "anon"})
    assert cached.exists()

    assert download.maybe_download(remote_path, gs={"token": "anon"}) == cached
diff --git a/src/openpi/shared/image_tools.py b/src/openpi/shared/image_tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d63e1c710cfeb64bf994b2c6323cd26888a80a4
--- /dev/null
+++ b/src/openpi/shared/image_tools.py
@@ -0,0 +1,50 @@
+import functools
+
+import jax
+import jax.numpy as jnp
+
+import openpi.shared.array_typing as at
+
+
@functools.partial(jax.jit, static_argnums=(1, 2, 3))
@at.typecheck
def resize_with_pad(
    images: at.UInt8[at.Array, "*b h w c"] | at.Float[at.Array, "*b h w c"],
    height: int,
    width: int,
    method: jax.image.ResizeMethod = jax.image.ResizeMethod.LINEAR,
) -> at.UInt8[at.Array, "*b {height} {width} c"] | at.Float[at.Array, "*b {height} {width} c"]:
    """Replicates tf.image.resize_with_pad. Resizes an image to a target height and width without distortion
    by padding with black. If the image is float32, it must be in the range [-1, 1].

    NOTE(review): despite the `*b` annotation, the body only handles unbatched (h, w, c) or
    singly-batched (b, h, w, c) inputs — a 5D input would get an extra axis added. Confirm
    whether multi-dim batches are required.
    """
    has_batch_dim = images.ndim == 4
    if not has_batch_dim:
        images = images[None]  # type: ignore
    cur_height, cur_width = images.shape[1:3]
    # Scale factor that fits the image inside the target box while preserving aspect ratio.
    ratio = max(cur_width / width, cur_height / height)
    resized_height = int(cur_height / ratio)
    resized_width = int(cur_width / ratio)
    resized_images = jax.image.resize(
        images, (images.shape[0], resized_height, resized_width, images.shape[3]), method=method
    )
    if images.dtype == jnp.uint8:
        # round from float back to uint8
        resized_images = jnp.round(resized_images).clip(0, 255).astype(jnp.uint8)
    elif images.dtype == jnp.float32:
        # Interpolation can overshoot; clamp back to the valid [-1, 1] range.
        resized_images = resized_images.clip(-1.0, 1.0)
    else:
        raise ValueError(f"Unsupported image dtype: {images.dtype}")

    # Split the leftover space evenly; any odd pixel goes to the bottom/right side.
    pad_h0, remainder_h = divmod(height - resized_height, 2)
    pad_h1 = pad_h0 + remainder_h
    pad_w0, remainder_w = divmod(width - resized_width, 2)
    pad_w1 = pad_w0 + remainder_w
    # Pad with "black": 0 for uint8 images, -1.0 for float images in [-1, 1].
    padded_images = jnp.pad(
        resized_images,
        ((0, 0), (pad_h0, pad_h1), (pad_w0, pad_w1), (0, 0)),
        constant_values=0 if images.dtype == jnp.uint8 else -1.0,
    )

    if not has_batch_dim:
        padded_images = padded_images[0]
    return padded_images
diff --git a/src/openpi/shared/image_tools_test.py b/src/openpi/shared/image_tools_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..c19bee2ed1ca8aacb1f29cb8c7154037c7ce8d0c
--- /dev/null
+++ b/src/openpi/shared/image_tools_test.py
@@ -0,0 +1,37 @@
+import jax.numpy as jnp
+
+from openpi.shared import image_tools
+
+
def test_resize_with_pad_shapes():
    """Shape-only checks for resize_with_pad; all-zero inputs also verify the pad color is 0."""
    # Test case 1: Resize image with larger dimensions
    images = jnp.zeros((2, 10, 10, 3), dtype=jnp.uint8)  # Input images of shape (batch_size, height, width, channels)
    height = 20
    width = 20
    resized_images = image_tools.resize_with_pad(images, height, width)
    assert resized_images.shape == (2, height, width, 3)
    assert jnp.all(resized_images == 0)

    # Test case 2: Resize image with smaller dimensions
    images = jnp.zeros((3, 30, 30, 3), dtype=jnp.uint8)
    height = 15
    width = 15
    resized_images = image_tools.resize_with_pad(images, height, width)
    assert resized_images.shape == (3, height, width, 3)
    assert jnp.all(resized_images == 0)

    # Test case 3: Resize image with the same dimensions
    images = jnp.zeros((1, 50, 50, 3), dtype=jnp.uint8)
    height = 50
    width = 50
    resized_images = image_tools.resize_with_pad(images, height, width)
    assert resized_images.shape == (1, height, width, 3)
    assert jnp.all(resized_images == 0)

    # Test case 4: Resize image with odd-numbered padding
    images = jnp.zeros((1, 256, 320, 3), dtype=jnp.uint8)
    height = 60
    width = 80
    resized_images = image_tools.resize_with_pad(images, height, width)
    assert resized_images.shape == (1, height, width, 3)
    assert jnp.all(resized_images == 0)
diff --git a/src/openpi/shared/nnx_utils.py b/src/openpi/shared/nnx_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..29df222bcbc0e815d0b6bec046a2893e3b2b18a5
--- /dev/null
+++ b/src/openpi/shared/nnx_utils.py
@@ -0,0 +1,69 @@
+from collections.abc import Callable
+import dataclasses
+import functools
+import inspect
+import re
+from typing import Any, ParamSpec, TypeVar
+
+import flax.nnx as nnx
+import jax
+
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
def module_jit(meth: Callable[P, R], *jit_args, **jit_kwargs) -> Callable[P, R]:
    """JIT-compile a bound `nnx.Module` method against a snapshot of the module's state.

    Unlike `nnx.jit`, which tracks module mutations and pays a per-call graph-traversal
    cost on top of extra memory overhead (see https://github.com/google/flax/discussions/4224),
    this splits the module once into (graphdef, state) and compiles a plain `jax.jit`
    function over that frozen state. The returned callable behaves exactly like the
    original method except that the module's state is pinned to its value at wrap time;
    mutations performed inside `meth` are discarded when the call returns.
    """
    if not (inspect.ismethod(meth) and isinstance(meth.__self__, nnx.Module)):
        raise ValueError("module_jit must only be used on bound methods of nnx.Modules.")

    graphdef, frozen_state = nnx.split(meth.__self__)

    def run(state: nnx.State, *args: P.args, **kwargs: P.kwargs) -> R:
        # Rebuild a module instance from the snapshot for the traced call.
        return meth.__func__(nnx.merge(graphdef, state), *args, **kwargs)

    compiled = jax.jit(run, *jit_args, **jit_kwargs)

    @functools.wraps(meth)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        return compiled(frozen_state, *args, **kwargs)

    return wrapper
+
+
+@dataclasses.dataclass(frozen=True)
+class PathRegex:
+ """NNX Filter that matches paths using a regex.
+
+ By default, paths are joined with a `/` separator. This can be overridden by setting the `sep` argument.
+ """
+
+ pattern: str | re.Pattern
+ sep: str = "/"
+
+ def __post_init__(self):
+ if not isinstance(self.pattern, re.Pattern):
+ object.__setattr__(self, "pattern", re.compile(self.pattern))
+
+ def __call__(self, path: nnx.filterlib.PathParts, x: Any) -> bool:
+ joined_path = self.sep.join(str(x) for x in path)
+ assert isinstance(self.pattern, re.Pattern)
+ return self.pattern.fullmatch(joined_path) is not None
+
+
def state_map(state: nnx.State, filter: nnx.filterlib.Filter, fn: Callable[[Any], Any]) -> nnx.State:
    """Apply a function to the leaves of the state that match the filter."""
    selected = set(state.filter(filter).flat_state())

    def maybe_apply(path, leaf):
        # Only transform leaves whose flattened path matched the filter.
        return fn(leaf) if path in selected else leaf

    return state.map(maybe_apply)
diff --git a/src/openpi/shared/normalize.py b/src/openpi/shared/normalize.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c6049a3ed63ac9643ae2ce42ae4628b6f3cb18c
--- /dev/null
+++ b/src/openpi/shared/normalize.py
@@ -0,0 +1,147 @@
+import json
+import pathlib
+
+import numpy as np
+import numpydantic
+import pydantic
+
+
@pydantic.dataclasses.dataclass
class NormStats:
    """Per-dimension normalization statistics; all arrays share the same shape."""

    mean: numpydantic.NDArray
    std: numpydantic.NDArray
    q01: numpydantic.NDArray | None = None  # 1st quantile
    q99: numpydantic.NDArray | None = None  # 99th quantile
+
+
class RunningStats:
    """Compute running statistics of a batch of vectors.

    Tracks mean, std, min/max, and approximate quantiles over a stream of vectors.
    Quantiles are estimated from fixed-size per-dimension histograms that are rebuilt
    (approximately) whenever the observed min/max range grows.
    """

    def __init__(self):
        self._count = 0  # total number of vectors seen so far
        self._mean = None  # running mean, shape (vector_length,)
        self._mean_of_squares = None  # running E[x^2], used to derive the variance
        self._min = None  # per-dimension minimum seen so far
        self._max = None  # per-dimension maximum seen so far
        self._histograms = None  # per-dimension bin counts backing the quantile estimates
        self._bin_edges = None  # per-dimension bin edges matching _histograms
        self._num_quantile_bins = 5000  # for computing quantiles on the fly

    def update(self, batch: np.ndarray) -> None:
        """
        Update the running statistics with a batch of vectors.

        Args:
            batch (np.ndarray): A 2D array where each row is a new vector. A 1D array
                is treated as a batch of scalars (reshaped to (-1, 1)).

        Raises:
            ValueError: if the vector length differs from the first batch's length.
        """
        if batch.ndim == 1:
            batch = batch.reshape(-1, 1)
        num_elements, vector_length = batch.shape
        if self._count == 0:
            # First batch: initialize every statistic from the data itself.
            self._mean = np.mean(batch, axis=0)
            self._mean_of_squares = np.mean(batch**2, axis=0)
            self._min = np.min(batch, axis=0)
            self._max = np.max(batch, axis=0)
            self._histograms = [np.zeros(self._num_quantile_bins) for _ in range(vector_length)]
            # Tiny margins keep values equal to min/max strictly inside the outer bins.
            self._bin_edges = [
                np.linspace(self._min[i] - 1e-10, self._max[i] + 1e-10, self._num_quantile_bins + 1)
                for i in range(vector_length)
            ]
        else:
            if vector_length != self._mean.size:
                raise ValueError("The length of new vectors does not match the initialized vector length.")
            new_max = np.max(batch, axis=0)
            new_min = np.min(batch, axis=0)
            max_changed = np.any(new_max > self._max)
            min_changed = np.any(new_min < self._min)
            self._max = np.maximum(self._max, new_max)
            self._min = np.minimum(self._min, new_min)

            # Histograms only cover the previously seen range; stretch them first.
            if max_changed or min_changed:
                self._adjust_histograms()

        self._count += num_elements

        batch_mean = np.mean(batch, axis=0)
        batch_mean_of_squares = np.mean(batch**2, axis=0)

        # Update running mean and mean of squares (incremental weighted average).
        # NOTE(review): deriving variance from E[x^2] - E[x]^2 can lose precision when
        # std << |mean|; a Welford-style update would be more robust — confirm acceptable.
        self._mean += (batch_mean - self._mean) * (num_elements / self._count)
        self._mean_of_squares += (batch_mean_of_squares - self._mean_of_squares) * (num_elements / self._count)

        self._update_histograms(batch)

    def get_statistics(self) -> NormStats:
        """
        Compute and return the statistics of the vectors processed so far.

        Returns:
            NormStats: per-dimension mean, std, and approximate 1st/99th percentiles.

        Raises:
            ValueError: if fewer than 2 vectors have been processed.
        """
        if self._count < 2:
            raise ValueError("Cannot compute statistics for less than 2 vectors.")

        variance = self._mean_of_squares - self._mean**2
        # Clamp at 0 to guard against tiny negative variances from floating-point error.
        stddev = np.sqrt(np.maximum(0, variance))
        q01, q99 = self._compute_quantiles([0.01, 0.99])
        return NormStats(mean=self._mean, std=stddev, q01=q01, q99=q99)

    def _adjust_histograms(self):
        """Adjust histograms when min or max changes."""
        for i in range(len(self._histograms)):
            old_edges = self._bin_edges[i]
            new_edges = np.linspace(self._min[i], self._max[i], self._num_quantile_bins + 1)

            # Redistribute the existing histogram counts to the new bins.
            # Approximation: each old bin's whole mass is assigned to the new bin that
            # contains the old bin's *left* edge.
            new_hist, _ = np.histogram(old_edges[:-1], bins=new_edges, weights=self._histograms[i])

            self._histograms[i] = new_hist
            self._bin_edges[i] = new_edges

    def _update_histograms(self, batch: np.ndarray) -> None:
        """Update histograms with new vectors."""
        for i in range(batch.shape[1]):
            hist, _ = np.histogram(batch[:, i], bins=self._bin_edges[i])
            self._histograms[i] += hist

    def _compute_quantiles(self, quantiles):
        """Compute quantiles based on histograms.

        Returns one array per requested quantile, each of shape (vector_length,). The
        value reported is the left edge of the first bin whose cumulative count reaches
        the target rank, so precision is limited by the bin width.
        """
        results = []
        for q in quantiles:
            target_count = q * self._count
            q_values = []
            for hist, edges in zip(self._histograms, self._bin_edges, strict=True):
                cumsum = np.cumsum(hist)
                idx = np.searchsorted(cumsum, target_count)
                q_values.append(edges[idx])
            results.append(np.array(q_values))
        return results
+
+
class _NormStatsDict(pydantic.BaseModel):
    """Wrapper model so a dict of NormStats round-trips through pydantic JSON (de)serialization."""

    norm_stats: dict[str, NormStats]
+
+
def serialize_json(norm_stats: dict[str, NormStats]) -> str:
    """Render a mapping of NormStats as a pretty-printed JSON string."""
    wrapper = _NormStatsDict(norm_stats=norm_stats)
    return wrapper.model_dump_json(indent=2)
+
+
def deserialize_json(data: str) -> dict[str, NormStats]:
    """Deserialize the running statistics from a JSON string.

    Uses pydantic v2's JSON validator directly (the file already uses the v2
    `model_dump_json` API), which avoids the intermediate `json.loads` + kwargs-splat
    round trip and yields clearer validation errors on malformed input.
    """
    return _NormStatsDict.model_validate_json(data).norm_stats
+
+
def save(directory: pathlib.Path | str, norm_stats: dict[str, NormStats]) -> None:
    """Write `norm_stats` to <directory>/norm_stats.json, creating parent dirs as needed."""
    out_file = pathlib.Path(directory) / "norm_stats.json"
    out_file.parent.mkdir(parents=True, exist_ok=True)
    out_file.write_text(serialize_json(norm_stats))
+
+
def load(directory: pathlib.Path | str) -> dict[str, NormStats]:
    """Read normalization stats from <directory>/norm_stats.json.

    Raises:
        FileNotFoundError: if the stats file does not exist.
    """
    stats_file = pathlib.Path(directory) / "norm_stats.json"
    if not stats_file.exists():
        raise FileNotFoundError(f"Norm stats file not found at: {stats_file}")
    return deserialize_json(stats_file.read_text())
diff --git a/src/openpi/shared/normalize_test.py b/src/openpi/shared/normalize_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..65722d8bc35aeccc14f7bbd415fd3320ee529f99
--- /dev/null
+++ b/src/openpi/shared/normalize_test.py
@@ -0,0 +1,25 @@
+import numpy as np
+
+import openpi.shared.normalize as normalize
+
+
def test_normalize_update():
    """Streaming updates in chunks of 3 must reproduce the full-array mean and std."""
    values = np.arange(12)

    stats = normalize.RunningStats()
    for chunk in np.split(values, 4):
        stats.update(chunk)
    results = stats.get_statistics()

    assert np.allclose(results.mean, np.mean(values))
    assert np.allclose(results.std, np.std(values))
+
+
def test_serialize_deserialize():
    """Serializing then deserializing must preserve mean and std."""
    stats = normalize.RunningStats()
    stats.update(np.arange(12))

    original = {"test": stats.get_statistics()}
    round_tripped = normalize.deserialize_json(normalize.serialize_json(original))
    assert np.allclose(original["test"].mean, round_tripped["test"].mean)
    assert np.allclose(original["test"].std, round_tripped["test"].std)
diff --git a/src/openpi/training/__pycache__/checkpoints.cpython-310.pyc b/src/openpi/training/__pycache__/checkpoints.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a7217c673d8c83f85a88bb3311b808a874f15d33
Binary files /dev/null and b/src/openpi/training/__pycache__/checkpoints.cpython-310.pyc differ
diff --git a/src/openpi/training/__pycache__/config.cpython-310.pyc b/src/openpi/training/__pycache__/config.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1eca3ef0f6d98b566ed3ef869b261ebceafae04d
Binary files /dev/null and b/src/openpi/training/__pycache__/config.cpython-310.pyc differ
diff --git a/src/openpi/training/__pycache__/config.cpython-313.pyc b/src/openpi/training/__pycache__/config.cpython-313.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9daa57234c9a6a09c84b2235ceb68fa4bf0d3188
Binary files /dev/null and b/src/openpi/training/__pycache__/config.cpython-313.pyc differ
diff --git a/src/openpi/training/__pycache__/data_loader.cpython-310.pyc b/src/openpi/training/__pycache__/data_loader.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7ff0357dea50a9659804bafad6589d7f29d4d5f3
Binary files /dev/null and b/src/openpi/training/__pycache__/data_loader.cpython-310.pyc differ
diff --git a/src/openpi/training/__pycache__/optimizer.cpython-310.pyc b/src/openpi/training/__pycache__/optimizer.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..07c540d824eeff9940395000505437055355bce4
Binary files /dev/null and b/src/openpi/training/__pycache__/optimizer.cpython-310.pyc differ
diff --git a/src/openpi/training/__pycache__/sharding.cpython-310.pyc b/src/openpi/training/__pycache__/sharding.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7f0ef0f229ad17fc5a9d1b0d6ab5a1702b07fa09
Binary files /dev/null and b/src/openpi/training/__pycache__/sharding.cpython-310.pyc differ
diff --git a/src/openpi/training/__pycache__/utils.cpython-310.pyc b/src/openpi/training/__pycache__/utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..48f26f90854c2a603a7fe72fcd7ff660b0e9dbca
Binary files /dev/null and b/src/openpi/training/__pycache__/utils.cpython-310.pyc differ
diff --git a/src/openpi/training/__pycache__/weight_loaders.cpython-310.pyc b/src/openpi/training/__pycache__/weight_loaders.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..89fe0ac8903a3608136c9a1bbcfdb1ebe479f663
Binary files /dev/null and b/src/openpi/training/__pycache__/weight_loaders.cpython-310.pyc differ
diff --git a/src/openpi/training/checkpoints.py b/src/openpi/training/checkpoints.py
new file mode 100644
index 0000000000000000000000000000000000000000..671732dd83b258ce90f883c568820a5bae7733c3
--- /dev/null
+++ b/src/openpi/training/checkpoints.py
@@ -0,0 +1,161 @@
+import concurrent.futures as futures
+import dataclasses
+import logging
+from typing import Protocol
+
+from etils import epath
+import jax
+import orbax.checkpoint as ocp
+
+from openpi.shared import array_typing as at
+import openpi.shared.normalize as _normalize
+import openpi.training.data_loader as _data_loader
+import openpi.training.utils as training_utils
+
+
def initialize_checkpoint_dir(
    checkpoint_dir: epath.Path | str, *, keep_period: int | None, overwrite: bool, resume: bool
) -> tuple[ocp.CheckpointManager, bool]:
    """Create (or reuse) a checkpoint directory and build its orbax CheckpointManager.

    Args:
        checkpoint_dir: Directory where checkpoints are written.
        keep_period: If set, additionally keep every checkpoint whose step is a multiple of this.
        overwrite: Wipe any existing directory and start fresh.
        resume: Reuse an existing directory and resume from its latest checkpoint.

    Returns:
        (manager, resuming): the checkpoint manager, and whether training should restore state.

    Raises:
        FileExistsError: if the directory exists and neither `overwrite` nor `resume` was given.
    """
    checkpoint_dir = epath.Path(checkpoint_dir).resolve()
    resuming = False
    if checkpoint_dir.exists():
        if overwrite:
            checkpoint_dir.rmtree()
            checkpoint_dir.mkdir(parents=True, exist_ok=True)
            logging.info(f"Wiped checkpoint directory {checkpoint_dir}")
        elif resume:
            resuming = True
        else:
            raise FileExistsError(
                f"Checkpoint directory {checkpoint_dir} already exists. Use --overwrite or --resume "
                "to indicate how to handle it."
            )

    # No-op when the directory already exists (exist_ok=True); covers the fresh-start case.
    checkpoint_dir.mkdir(parents=True, exist_ok=True)

    mngr = ocp.CheckpointManager(
        checkpoint_dir,
        item_handlers={
            "assets": CallbackHandler(),
            "train_state": ocp.PyTreeCheckpointHandler(),
            "params": ocp.PyTreeCheckpointHandler(),
        },
        options=ocp.CheckpointManagerOptions(
            max_to_keep=1,
            keep_period=keep_period,
            # The directory is created above, so the manager must not try to create it.
            create=False,
            # Generous timeout: async saves of large param trees can be slow on networked storage.
            async_options=ocp.AsyncOptions(timeout_secs=7200),
        ),
    )

    # special case: the checkpoint directory exists and the user requests to resume training, but the training run did
    # not get to the first checkpoint saved. in this case, we don't actually want the train script to try and restore a
    # checkpoint, since it will fail.
    if resuming and tuple(mngr.all_steps()) in [(), (0,)]:
        logging.info("Checkpoint directory exists, but does not contain any checkpoints. Aborting resume.")
        resuming = False

    return mngr, resuming
+
+
def save_state(
    checkpoint_manager: ocp.CheckpointManager,
    state: training_utils.TrainState,
    data_loader: _data_loader.DataLoader,
    step: int,
):
    """Save a checkpoint at `step`: data-pipeline assets plus inference-ready params."""

    def save_assets(directory: epath.Path):
        # Save the normalization stats.
        data_config = data_loader.data_config()
        norm_stats = data_config.norm_stats
        if norm_stats is not None and data_config.asset_id is not None:
            _normalize.save(directory / data_config.asset_id, norm_stats)

    # Split params that can be used for inference into a separate item.
    with at.disable_typechecking():
        train_state, params = _split_params(state)
    items = {
        "assets": save_assets,
        # "train_state": train_state, # Commented out to reduce saving time and memory
        # NOTE(review): with "train_state" disabled above, `restore_state` still requests a
        # "train_state" item from the manager, so resuming from checkpoints written by this
        # function will likely fail — confirm and reconcile the two functions.
        "params": {"params": params},
    }
    checkpoint_manager.save(step, items)
+
+
def restore_state(
    checkpoint_manager: ocp.CheckpointManager,
    state: training_utils.TrainState,
    data_loader: _data_loader.DataLoader,
    step: int | None = None,
) -> training_utils.TrainState:
    """Restore a TrainState from the checkpoint manager.

    `state` provides the target tree structure that orbax restores into; `step=None`
    restores the latest available checkpoint.
    NOTE(review): this requests a "train_state" item, which `save_state` currently does
    not write (it is commented out there) — verify restore works against checkpoints
    produced by this file.
    """
    del data_loader  # Unused; kept for signature symmetry with `save_state`.

    with at.disable_typechecking():
        # Split params that can be used for inference into a separate item.
        train_state, params = _split_params(state)
        restored = checkpoint_manager.restore(
            step,
            items={
                "train_state": train_state,
                "params": {"params": params},
            },
        )
    return _merge_params(restored["train_state"], restored["params"])
+
+
def load_norm_stats(assets_dir: epath.Path | str, asset_id: str) -> dict[str, _normalize.NormStats] | None:
    """Load normalization stats for `asset_id` from `assets_dir`.

    NOTE(review): despite the Optional return annotation, this never returns None —
    `_normalize.load` raises FileNotFoundError when the stats file is missing. Callers
    expecting None on a miss should confirm the intended contract.
    """
    norm_stats_dir = epath.Path(assets_dir) / asset_id
    norm_stats = _normalize.load(norm_stats_dir)
    logging.info(f"Loaded norm stats from {norm_stats_dir}")
    return norm_stats
+
+
class Callback(Protocol):
    """Callable invoked with the checkpoint item directory during a save."""

    def __call__(self, directory: epath.Path) -> None: ...
+
+
class CallbackHandler(ocp.AsyncCheckpointHandler):
    """A CheckpointHandler for calling an arbitrary function asynchronously. Only for saving, not for restoring."""

    def __init__(self):
        # Single worker: callbacks for one checkpoint run sequentially, in order.
        self._executor = futures.ThreadPoolExecutor(max_workers=1)

    def close(self):
        self._executor.shutdown()

    def save(self, directory: epath.Path, args: "CallbackSave"):
        # Only the primary host runs the callback in a multi-process setup.
        if jax.process_index() == 0:
            args.callback(directory)

    async def async_save(self, directory: epath.Path, args: "CallbackSave") -> list[futures.Future]:
        # Hand the synchronous save off to the executor; orbax awaits the returned futures.
        return [self._executor.submit(self.save, directory, args)]

    def restore(self, *args, **kwargs):
        raise NotImplementedError("CallbackHandler does not support restore")
+
+
@ocp.args.register_with_handler(CallbackHandler, for_save=True)
@dataclasses.dataclass
class CallbackSave(ocp.args.CheckpointArgs):
    """Save-args wrapper carrying the callback to execute against the item directory."""

    callback: Callback
+
+
@ocp.args.register_with_handler(CallbackHandler, for_restore=True)
# Registered only so the handler is discoverable for restore; the handler's restore raises.
class CallbackRestore(ocp.args.CheckpointArgs): ...
+
+
def _split_params(state: training_utils.TrainState) -> tuple[training_utils.TrainState, at.Params]:
    """Pull inference params out of the train state, preferring EMA params when present."""
    if state.ema_params is None:
        # No EMA copy: hand out the raw params and leave an empty placeholder behind.
        return dataclasses.replace(state, params={}), state.params
    # EMA copy available: it is the inference-quality set; strip it from the train state.
    return dataclasses.replace(state, ema_params=None), state.ema_params
+
+
def _merge_params(train_state: training_utils.TrainState, params: dict[str, at.Params]) -> training_utils.TrainState:
    """Invert `_split_params`: a non-empty `train_state.params` implies the split took the EMA branch."""
    restored = params["params"]
    if train_state.params:
        return dataclasses.replace(train_state, ema_params=restored)
    return dataclasses.replace(train_state, params=restored)
diff --git a/src/openpi/training/config.py b/src/openpi/training/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..b1d460ee8e4c8e1441eaa158936bcf3344a0ee7c
--- /dev/null
+++ b/src/openpi/training/config.py
@@ -0,0 +1,933 @@
+"""See _CONFIGS for the list of available configs."""
+
+import abc
+import numpy as np
+from collections.abc import Sequence
+import dataclasses
+import difflib
+import logging
+import pathlib
+from typing import Any, Protocol, TypeAlias
+
+import etils.epath as epath
+import flax.nnx as nnx
+from typing_extensions import override
+import tyro
+
+import openpi.models.model as _model
+import openpi.models.pi0 as pi0
+import openpi.models.pi0_fast as pi0_fast
+import openpi.models.pi0_fast_ricl as pi0_fast_ricl
+
+
# Prefer a locally downloaded copy of the pi0-FAST base checkpoint when present,
# falling back to the canonical S3 location otherwise.
# NOTE(review): the lookup is relative to the process's working directory, so the local
# copy is only found when training is launched from the directory containing it — confirm.
if epath.Path("pi0_fast_base_params").exists():
    PI0_FAST_BASE_CHECKPOINT = "pi0_fast_base_params"
else:
    PI0_FAST_BASE_CHECKPOINT = "s3://openpi-assets/checkpoints/pi0_fast_base/params"
+import openpi.models.tokenizer as _tokenizer
+import openpi.policies.aloha_policy as aloha_policy
+import openpi.policies.droid_policy as droid_policy
+import openpi.policies.libero_policy as libero_policy
+import openpi.shared.download as _download
+import openpi.shared.normalize as _normalize
+import openpi.training.optimizer as _optimizer
+import openpi.training.weight_loaders as weight_loaders
+import openpi.transforms as _transforms
+
+ModelType: TypeAlias = _model.ModelType
+# Work around a tyro issue with using nnx.filterlib.Filter directly.
+Filter: TypeAlias = nnx.filterlib.Filter
+
+
@dataclasses.dataclass(frozen=True)
class AssetsConfig:
    """Determines the location of assets (e.g., norm stats) that will be used to set up the data pipeline.

    These assets will be replicated inside the checkpoint under the `assets/asset_id` directory.

    This can be used to load assets from a different checkpoint (e.g., base model checkpoint) or some other
    centralized location. For example, to load the norm stats for the Trossen robot from the base model checkpoint
    during fine-tuning, use:

    ```
    AssetsConfig(
        assets_dir="s3://openpi-assets/checkpoints/pi0_base/assets",
        asset_id="trossen",
    )
    ```
    """

    # Assets directory. If not provided, the config assets_dirs will be used. This is useful to load assets from
    # a different checkpoint (e.g., base model checkpoint) or some other centralized location.
    assets_dir: str | None = None

    # Asset id. If not provided, the repo id will be used. This allows users to reference assets that describe
    # different robot platforms.
    asset_id: str | None = None
+
+
@dataclasses.dataclass(frozen=True)
class DataConfig:
    """Fully-resolved configuration for one data pipeline: source repo, norm stats, and transforms."""

    # LeRobot repo id. If None, fake data will be created.
    repo_id: str | None = None
    # Directory within the assets directory containing the data assets.
    asset_id: str | None = None
    # Contains precomputed normalization stats. If None, normalization will not be performed.
    norm_stats: dict[str, _transforms.NormStats] | None = None

    # Used to adopt the inputs from a dataset specific format to a common format
    # which is expected by the data transforms.
    repack_transforms: _transforms.Group = dataclasses.field(default_factory=_transforms.Group)
    # Data transforms, typically include robot specific transformations. Will be applied
    # before the data is normalized. See `model.Observation` and `model.Actions` to learn about the
    # normalized data.
    data_transforms: _transforms.Group = dataclasses.field(default_factory=_transforms.Group)
    # Model specific transforms. Will be applied after the data is normalized.
    model_transforms: _transforms.Group = dataclasses.field(default_factory=_transforms.Group)
    # If true, will use quantile normalization. Otherwise, normal z-score normalization will be used.
    use_quantile_norm: bool = False

    # Names of keys that will be used by the data loader to generate the action sequence. The length of the
    # sequence is defined by the `action_horizon` field in the model config. This should be adjusted if your
    # LeRobot dataset is using different keys to represent the action.
    action_sequence_keys: Sequence[str] = ("actions",)

    # If true, will use the LeRobot dataset task to define the prompt.
    prompt_from_task: bool = False

    # If true, will disable syncing the dataset from the Hugging Face Hub. Allows training on local-only datasets.
    local_files_only: bool = False
+
+
class GroupFactory(Protocol):
    """Callable that builds a transform `Group` for a given model config."""

    def __call__(self, model_config: _model.BaseModelConfig) -> _transforms.Group:
        """Create a group."""
+
+
+@dataclasses.dataclass(frozen=True)
+class ModelTransformFactory(GroupFactory):
+ """Creates model transforms for standard pi0 models."""
+
+ # If provided, will determine the default prompt that be used by the model.
+ default_prompt: str | None = None
+
+ def __call__(self, model_config: _model.BaseModelConfig) -> _transforms.Group:
+ match model_config.model_type:
+ case _model.ModelType.PI0:
+ return _transforms.Group(
+ inputs=[
+ _transforms.InjectDefaultPrompt(self.default_prompt),
+ _transforms.ResizeImages(224, 224),
+ _transforms.TokenizePrompt(
+ _tokenizer.PaligemmaTokenizer(model_config.max_token_len),
+ ),
+ ],
+ )
+ case _model.ModelType.PI0_FAST:
+ return _transforms.Group(
+ inputs=[
+ _transforms.InjectDefaultPrompt(self.default_prompt),
+ _transforms.ResizeImages(224, 224),
+ _transforms.TokenizeFASTInputs(
+ _tokenizer.FASTTokenizer(model_config.max_token_len),
+ ),
+ ],
+ outputs=[
+ _transforms.ExtractFASTActions(
+ _tokenizer.FASTTokenizer(model_config.max_token_len),
+ action_horizon=model_config.action_horizon,
+ action_dim=model_config.action_dim,
+ )
+ ],
+ )
+
+
@dataclasses.dataclass(frozen=True)
class DataConfigFactory(abc.ABC):
    """Base factory that resolves repo id, asset id, and norm stats into a `DataConfig`."""

    # The LeRobot repo id.
    repo_id: str = tyro.MISSING
    # Determines how the assets will be loaded.
    assets: AssetsConfig = dataclasses.field(default_factory=AssetsConfig)
    # Base config that will be updated by the factory.
    base_config: tyro.conf.Suppress[DataConfig | None] = None

    @abc.abstractmethod
    def create(self, assets_dirs: pathlib.Path, model_config: _model.BaseModelConfig) -> DataConfig:
        """Create a data config."""

    def create_base_config(self, assets_dirs: pathlib.Path) -> DataConfig:
        """Resolve repo/asset ids and norm stats on top of `base_config` (if any)."""
        repo_id = self.repo_id if self.repo_id is not tyro.MISSING else None
        # Fall back to the repo id when no explicit asset id was configured.
        asset_id = self.assets.asset_id or repo_id
        return dataclasses.replace(
            self.base_config or DataConfig(),
            repo_id=repo_id,
            asset_id=asset_id,
            norm_stats=self._load_norm_stats(epath.Path(self.assets.assets_dir or assets_dirs), asset_id),
        )

    def _load_norm_stats(self, assets_dir: epath.Path, asset_id: str | None) -> dict[str, _transforms.NormStats] | None:
        """Load norm stats, trying openpi's own format first, then LeRobot's meta/stats.json.

        Returns None (with a log message) when no stats can be found.
        """
        if asset_id is None:
            return None
        try:
            data_assets_dir = str(assets_dir / asset_id)
            try:
                norm_stats = _normalize.load(_download.maybe_download(data_assets_dir))
            except FileNotFoundError:
                # Try loading from meta/stats.json (LeRobot format)
                stats_path = pathlib.Path(data_assets_dir) / "meta" / "stats.json"
                if stats_path.exists():
                    import json
                    with open(stats_path, "r") as f:
                        lerobot_stats = json.load(f)

                    # Convert to openpi NormStats
                    # NOTE(review): only this hard-coded whitelist of keys is converted;
                    # datasets using other state/action keys silently lose their stats — confirm.
                    norm_stats = {}
                    for key, stats in lerobot_stats.items():
                        if key in ["observation.state", "observation.states.ee_state", "action", "observation.images.cam_high", "observation.images.cam_left_wrist", "observation.images.cam_right_wrist", "observation.images.top"]:
                            norm_stats[key] = _normalize.NormStats(
                                mean=np.array(stats["mean"], dtype=np.float32),
                                std=np.array(stats["std"], dtype=np.float32),
                                # Fall back to min/max when the dataset provides no quantiles.
                                q01=np.array(stats.get("q01", stats["min"]), dtype=np.float32),
                                q99=np.array(stats.get("q99", stats["max"]), dtype=np.float32),
                            )
                else:
                    # No LeRobot stats either: re-raise so the outer handler logs and skips.
                    raise

            logging.info(f"Loaded norm stats from {data_assets_dir}")
            return norm_stats
        except FileNotFoundError:
            logging.info(f"Norm stats not found in {data_assets_dir}, skipping.")
            return None
+
+
@dataclasses.dataclass(frozen=True)
class FakeDataConfig(DataConfigFactory):
    """Data config whose "fake" repo id makes the data loader generate synthetic data."""

    repo_id: str = "fake"

    @override
    def create(self, assets_dirs: pathlib.Path, model_config: _model.BaseModelConfig) -> DataConfig:
        # No norm stats or transforms needed for fake data.
        return DataConfig(repo_id=self.repo_id)
+
+
@dataclasses.dataclass(frozen=True)
class SimpleDataConfig(DataConfigFactory):
    """Data config factory whose data/model transforms are supplied as factories."""

    # Factory for the data transforms.
    # NOTE(review): `GroupFactory` is a Protocol, so this default_factory raises TypeError
    # ("Protocols cannot be instantiated") if the field is ever left unset — callers must
    # always pass `data_transforms` explicitly. Confirm this is intended.
    data_transforms: tyro.conf.Suppress[GroupFactory] = dataclasses.field(default_factory=GroupFactory)
    # Factory for the model transforms.
    model_transforms: tyro.conf.Suppress[GroupFactory] = dataclasses.field(default_factory=ModelTransformFactory)

    @override
    def create(self, assets_dirs: pathlib.Path, model_config: _model.BaseModelConfig) -> DataConfig:
        return dataclasses.replace(
            self.create_base_config(assets_dirs),
            data_transforms=self.data_transforms(model_config),
            model_transforms=self.model_transforms(model_config),
            # pi0-FAST models are trained with quantile normalization; pi0 uses z-score.
            use_quantile_norm=model_config.model_type == ModelType.PI0_FAST,
        )
+
+
@dataclasses.dataclass(frozen=True)
class LeRobotAlohaDataConfig(DataConfigFactory):
    """Data config for Aloha datasets stored in LeRobot format."""

    # If true, will convert joint dimensions to deltas with respect to the current state before passing to the model.
    # Gripper dimensions will remain in absolute values.
    use_delta_joint_actions: bool = True
    # If provided, will be injected into the input data if the "prompt" key is not present.
    default_prompt: str | None = None
    # If true, this will convert the joint and gripper values from the standard Aloha space to
    # the space used by the pi internal runtime which was used to train the base model. People who
    # use standard Aloha data should set this to true.
    adapt_to_pi: bool = True

    # Repack transforms: map LeRobot dataset keys onto the common input format.
    repack_transforms: tyro.conf.Suppress[_transforms.Group] = dataclasses.field(
        default=_transforms.Group(
            inputs=[
                _transforms.RepackTransform(
                    {
                        "images": {"cam_high": "observation.images.top"},
                        "state": "observation.state",
                        "actions": "action",
                    }
                )
            ]
        )
    )
    # Action keys that will be used to read the action sequence from the dataset.
    action_sequence_keys: Sequence[str] = ("action",)

    @override
    def create(self, assets_dirs: pathlib.Path, model_config: _model.BaseModelConfig) -> DataConfig:
        data_transforms = _transforms.Group(
            inputs=[aloha_policy.AlohaInputs(action_dim=model_config.action_dim, adapt_to_pi=self.adapt_to_pi)],
            outputs=[aloha_policy.AlohaOutputs(adapt_to_pi=self.adapt_to_pi)],
        )
        if self.use_delta_joint_actions:
            # Mask layout: 6 joint dims (delta) + 1 gripper dim (absolute), repeated per arm.
            delta_action_mask = _transforms.make_bool_mask(6, -1, 6, -1)
            data_transforms = data_transforms.push(
                inputs=[_transforms.DeltaActions(delta_action_mask)],
                outputs=[_transforms.AbsoluteActions(delta_action_mask)],
            )

        model_transforms = ModelTransformFactory(default_prompt=self.default_prompt)(model_config)

        return dataclasses.replace(
            self.create_base_config(assets_dirs),
            repack_transforms=self.repack_transforms,
            data_transforms=data_transforms,
            model_transforms=model_transforms,
            action_sequence_keys=self.action_sequence_keys,
        )
+
+
@dataclasses.dataclass(frozen=True)
class LeRobotLiberoDataConfig(DataConfigFactory):
    """
    This config is used to configure transforms that are applied at various parts of the data pipeline.
    For your own dataset, you can copy this class and modify the transforms to match your dataset based on the
    comments below.
    """

    @override
    def create(self, assets_dirs: pathlib.Path, model_config: _model.BaseModelConfig) -> DataConfig:
        # The repack transform is *only* applied to the data coming from the dataset,
        # and *not* during inference. We can use it to make inputs from the dataset look
        # as close as possible to those coming from the inference environment (e.g. match the keys).
        # Below, we match the keys in the dataset (which we defined in the data conversion script) to
        # the keys we use in our inference pipeline (defined in the inference script for libero).
        # For your own dataset, first figure out what keys your environment passes to the policy server
        # and then modify the mappings below so your dataset's keys get matched to those target keys.
        # The repack transform simply remaps key names here.
        action_key = self.base_config.action_sequence_keys[0]
        repack_transform = _transforms.Group(
            inputs=[
                _transforms.RepackTransform(
                    {
                        "observation/image": "image",
                        "observation/wrist_image": "wrist_image",
                        "observation/state": "state",
                        action_key: "actions",
                        "prompt": "prompt",
                    }
                )
            ]
        )

        # The data transforms are applied to the data coming from the dataset *and* during inference.
        # Below, we define the transforms for data going into the model (``inputs``) and the transforms
        # for data coming out of the model (``outputs``) (the latter is only used during inference).
        # We defined these transforms in `libero_policy.py`. You can check the detailed comments there for
        # how to modify the transforms to match your dataset. Once you created your own transforms, you can
        # replace the transforms below with your own.
        data_transforms = _transforms.Group(
            inputs=[libero_policy.LiberoInputs(action_dim=model_config.action_dim, model_type=model_config.model_type)],
            outputs=[libero_policy.LiberoOutputs()],
        )

        # One additional data transform: pi0 models are trained on delta actions (relative to the first
        # state in each action chunk). IF your data has ``absolute`` actions (e.g. target joint angles)
        # you can uncomment the following line to convert the actions to delta actions. The only exception
        # is for the gripper actions which are always absolute.
        # In the example below, we would apply the delta conversion to the first 6 actions (joints) and
        # leave the 7th action (gripper) unchanged, i.e. absolute.
        # In Libero, the raw actions in the dataset are already delta actions, so we *do not* need to
        # apply a separate delta conversion (that's why it's commented out). Choose whether to apply this
        # transform based on whether your dataset uses ``absolute`` or ``delta`` actions out of the box.

        # delta_action_mask = _transforms.make_bool_mask(6, -1)
        # data_transforms = data_transforms.push(
        #     inputs=[_transforms.DeltaActions(delta_action_mask)],
        #     outputs=[_transforms.AbsoluteActions(delta_action_mask)],
        # )

        # Model transforms include things like tokenizing the prompt and action targets
        # You do not need to change anything here for your own dataset.
        model_transforms = ModelTransformFactory()(model_config)

        # We return all data transforms for training and inference. No need to change anything here.
        return dataclasses.replace(
            self.create_base_config(assets_dirs),
            repack_transforms=repack_transform,
            data_transforms=data_transforms,
            model_transforms=model_transforms,
        )
+
+
@dataclasses.dataclass(frozen=True)
class RiclLiberoDataConfig(DataConfigFactory):
    """
    Config for RICL Libero data.

    Items already arrive with the expected keys, so the repack stage is the
    identity and all key handling happens in the RICL-specific transforms.
    """

    # LeRobot repo id of the dataset (None or "fake" means no real dataset is attached).
    repo_id: str | None = None

    @override
    def create(self, assets_dirs: pathlib.Path, model_config: pi0_fast_ricl.Pi0FASTRiclConfig) -> DataConfig:
        # No key remapping needed for this pipeline.
        repack_transform = _transforms.Group(
            inputs=[_transforms.IdentityTransform()],
        )

        # Map raw observations/actions to model inputs, and model outputs back to actions
        # (the latter only runs during inference).
        data_transforms = _transforms.Group(
            inputs=[libero_policy.RiclLiberoInputs(
                action_dim=model_config.action_dim,
                num_retrieved_observations=model_config.num_retrieved_observations
            )],
            outputs=[libero_policy.RiclLiberoOutputs()],
        )

        # The input tokenizer and the output action extractor must use identically
        # configured FAST tokenizers, so both are built from one shared kwargs dict.
        tokenizer_kwargs = {
            "max_len": model_config.max_token_len,
            "action_horizon": model_config.action_horizon,
            "action_dim": model_config.action_dim,
        }
        model_transforms = _transforms.Group(
            inputs=[
                _transforms.ResizeImagesRicl(224, 224, model_config.num_retrieved_observations),
                _transforms.TokenizeFASTInputsRicl(
                    _tokenizer.FASTTokenizerRicl(**tokenizer_kwargs),
                    num_retrieved_observations=model_config.num_retrieved_observations,
                ),
            ],
            outputs=[
                _transforms.ExtractFASTActionsRicl(
                    _tokenizer.FASTTokenizerRicl(**tokenizer_kwargs),
                    action_horizon=model_config.action_horizon,
                    action_dim=model_config.action_dim,
                )
            ],
        )

        # NOTE: norm stats for RICL LIBERO are loaded by the data loader directly from
        # the dataset directory (not from the assets dir), so nothing is injected here.
        return dataclasses.replace(
            self.create_base_config(assets_dirs),
            repack_transforms=repack_transform,
            data_transforms=data_transforms,
            model_transforms=model_transforms,
        )
+
+
@dataclasses.dataclass(frozen=True)
class RiclDroidDataConfig(DataConfigFactory):
    """
    This config is used to configure transforms that are applied at various parts of the data pipeline.
    For your own dataset, you can copy this class and modify the transforms to match your dataset based on the
    comments below.
    """

    @override
    def create(self, assets_dirs: pathlib.Path, model_config: pi0_fast_ricl.Pi0FASTRiclConfig) -> DataConfig:
        # The repack transform is *only* applied to the data coming from the dataset,
        # and *not* during inference. Items already arrive with the expected keys,
        # so the repack stage is the identity here.
        repack_transform = _transforms.Group(
            inputs=[_transforms.IdentityTransform()],
        )

        # The data transforms are applied to the data coming from the dataset *and* during inference.
        # ``inputs`` maps raw observations/actions into model inputs; ``outputs`` maps model outputs
        # back to actions (only used during inference). These are defined in `droid_policy.py`.
        data_transforms = _transforms.Group(
            inputs=[droid_policy.RiclDroidInputs(action_dim=model_config.action_dim, num_retrieved_observations=model_config.num_retrieved_observations)],
            outputs=[droid_policy.RiclDroidOutputs()],
        )

        # Optional delta-action conversion: pi0 models are trained on delta actions (relative to the
        # first state in each action chunk). IF your data has ``absolute`` actions (e.g. target joint
        # angles) you can uncomment the following lines to convert them; gripper actions always stay
        # absolute. It is left disabled here — enable it only if your dataset's actions are absolute.

        # delta_action_mask = _transforms.make_bool_mask(6, -1)
        # data_transforms = data_transforms.push(
        #     inputs=[_transforms.DeltaActions(delta_action_mask)],
        #     outputs=[_transforms.AbsoluteActions(delta_action_mask)],
        # )

        # Model transforms include things like tokenizing the prompt and action targets.
        # The input tokenizer and the output action extractor must use identically configured
        # FAST tokenizers, so both are built from one shared kwargs dict.
        tokenizer_kwargs = {
            "max_len": model_config.max_token_len,
            "action_horizon": model_config.action_horizon,
            "action_dim": model_config.action_dim,
        }
        model_transforms = _transforms.Group(
            inputs=[
                _transforms.ResizeImagesRicl(224, 224, model_config.num_retrieved_observations),
                _transforms.TokenizeFASTInputsRicl(
                    _tokenizer.FASTTokenizerRicl(**tokenizer_kwargs),
                    num_retrieved_observations=model_config.num_retrieved_observations,
                ),
            ],
            outputs=[
                _transforms.ExtractFASTActionsRicl(
                    _tokenizer.FASTTokenizerRicl(**tokenizer_kwargs),
                    action_horizon=model_config.action_horizon,
                    action_dim=model_config.action_dim,
                )
            ],
        )

        # We return all data transforms for training and inference. No need to change anything here.
        return dataclasses.replace(
            self.create_base_config(assets_dirs),
            repack_transforms=repack_transform,
            data_transforms=data_transforms,
            model_transforms=model_transforms,
        )
+
+
@dataclasses.dataclass(frozen=True)
class TrainConfig:
    # Name of the config. Must be unique. Will be used to reference this config.
    name: tyro.conf.Suppress[str]
    # Directory of collected demos for RICL finetuning (consumed by the data loader's
    # finetune dataset). Leave None for non-finetuning configs.
    finetuning_collected_demos_dir: str | None = None
    # Project name.
    project_name: str = "openpi"
    # Experiment name. Will be used to name the metadata and checkpoint directories.
    exp_name: str = tyro.MISSING

    # Defines the model config. Some attributes (action_dim, action_horizon, and max_token_len) are shared by all models
    # -- see BaseModelConfig. Specific model implementations (e.g., Pi0Config) inherit from BaseModelConfig and may
    # define additional attributes.
    model: _model.BaseModelConfig = dataclasses.field(default_factory=pi0.Pi0Config)

    # A weight loader can optionally load (possibly partial) weights from disk after the model is initialized.
    weight_loader: weight_loaders.WeightLoader = dataclasses.field(default_factory=weight_loaders.NoOpWeightLoader)

    # Learning-rate schedule and optimizer used during training.
    lr_schedule: _optimizer.LRScheduleConfig = dataclasses.field(default_factory=_optimizer.CosineDecaySchedule)
    optimizer: _optimizer.OptimizerConfig = dataclasses.field(default_factory=_optimizer.AdamW)
    # Exponential moving average decay for the model weights; None disables EMA.
    ema_decay: float | None = 0.99

    # Specifies which weights should be frozen.
    freeze_filter: tyro.conf.Suppress[Filter] = dataclasses.field(default_factory=nnx.Nothing)

    # Determines the data to be trained on.
    data: DataConfigFactory = dataclasses.field(default_factory=FakeDataConfig)

    # Base directory for config assets (e.g., norm stats).
    assets_base_dir: str = "./assets"
    # Base directory for checkpoints.
    checkpoint_base_dir: str = "./checkpoints"

    # Random seed that will be used by random generators during training.
    seed: int = 42
    # Global batch size.
    batch_size: int = 32
    # Number of workers to use for the data loader. Increasing this number will speed up data loading but
    # will increase memory and CPU usage.
    num_workers: int = 2
    # Number of train steps (batches) to run.
    num_train_steps: int = 30_000

    # How often (in steps) to log training metrics.
    log_interval: int = 100
    # How often (in steps) to save checkpoints.
    save_interval: int = 1000
    # If set, any existing checkpoints matching step % keep_period == 0 will not be deleted.
    keep_period: int | None = 5000

    # If true, will overwrite the checkpoint directory if it already exists.
    overwrite: bool = False
    # If true, will resume training from the last checkpoint.
    resume: bool = False

    # If true, will enable wandb logging.
    wandb_enabled: bool = True

    # Used to pass metadata to the policy server.
    policy_metadata: dict[str, Any] | None = None

    # If the value is greater than 1, FSDP will be enabled and shard across number of specified devices; overall
    # device memory will be reduced but training could potentially be slower.
    # eg. if total device is 4 and fsdp devices is 2; then the model will shard to 2 devices and run
    # data parallel between 2 groups of devices.
    fsdp_devices: int = 1

    # Libero specific fields
    libero_data_dir: str | None = None
    libero_context_dir: str | None = None

    @property
    def assets_dirs(self) -> pathlib.Path:
        """Get the assets directory for this config."""
        return (pathlib.Path(self.assets_base_dir) / self.name).resolve()

    @property
    def checkpoint_dir(self) -> pathlib.Path:
        """Get the checkpoint directory for this config."""
        if not self.exp_name:
            raise ValueError("--exp_name must be set")
        return (pathlib.Path(self.checkpoint_base_dir) / self.name / self.exp_name).resolve()

    @property
    def trainable_filter(self) -> nnx.filterlib.Filter:
        """Get the filter for the trainable parameters."""
        return nnx.All(nnx.Param, nnx.Not(self.freeze_filter))

    def __post_init__(self) -> None:
        """Validate mutually exclusive flags."""
        if self.resume and self.overwrite:
            raise ValueError("Cannot resume and overwrite at the same time.")
+
+
# Use `get_config` if you need to get a config by name in your code.
_CONFIGS = [
    #
    # Inference Aloha configs.
    #
    TrainConfig(
        name="pi0_aloha",
        model=pi0.Pi0Config(),
        data=LeRobotAlohaDataConfig(
            assets=AssetsConfig(asset_id="trossen"),
        ),
    ),
    TrainConfig(
        name="pi0_aloha_towel",
        model=pi0.Pi0Config(),
        data=LeRobotAlohaDataConfig(
            assets=AssetsConfig(asset_id="trossen"),
            default_prompt="fold the towel",
        ),
    ),
    TrainConfig(
        name="pi0_aloha_tupperware",
        model=pi0.Pi0Config(),
        data=LeRobotAlohaDataConfig(
            assets=AssetsConfig(asset_id="trossen"),
            default_prompt="open the tupperware and put the food on the plate",
        ),
    ),
    #
    # Inference DROID configs.
    #
    TrainConfig(
        name="pi0_droid",
        model=pi0.Pi0Config(action_horizon=10),
        data=SimpleDataConfig(
            assets=AssetsConfig(asset_id="droid"),
            data_transforms=lambda model: _transforms.Group(
                inputs=[droid_policy.DroidInputs(action_dim=model.action_dim)],
                outputs=[droid_policy.DroidOutputs()],
            ),
            base_config=DataConfig(
                prompt_from_task=True,
            ),
        ),
    ),
    TrainConfig(
        name="pi0_fast_droid",
        model=pi0_fast.Pi0FASTConfig(action_dim=8, action_horizon=10),
        data=SimpleDataConfig(
            assets=AssetsConfig(asset_id="droid"),
            data_transforms=lambda model: _transforms.Group(
                inputs=[droid_policy.DroidInputs(action_dim=model.action_dim, model_type=ModelType.PI0_FAST)],
                outputs=[droid_policy.DroidOutputs()],
            ),
            base_config=DataConfig(
                prompt_from_task=True,
            ),
        ),
    ),
    #
    # Creating RICL-Pi0-FAST-DROID configs.
    #
    TrainConfig(
        name="pi0_fast_droid_ricl",
        model=pi0_fast_ricl.Pi0FASTRiclConfig(action_dim=8, action_horizon=15, max_token_len=250, num_retrieved_observations=4, use_action_interpolation=True, lamda=10.0),
        data=RiclDroidDataConfig(
            repo_id=None,
            assets=AssetsConfig(asset_id="droid"),
            base_config=DataConfig(
                prompt_from_task=False, # only needed for LeRobot datasets to convert task_index to prompt
            ),
        ),
        weight_loader=weight_loaders.CheckpointWeightLoader("s3://openpi-assets/checkpoints/pi0_fast_droid/params"),
        num_train_steps=10_000,
        batch_size=16,
        # NOTE: the freeze filter must be built from a config with the same arguments as `model` above.
        freeze_filter=pi0_fast_ricl.Pi0FASTRiclConfig(action_dim=8, action_horizon=15, max_token_len=250, num_retrieved_observations=4, use_action_interpolation=True, lamda=10.0).get_freeze_filter_with_frozen_img_encoder(),
        ema_decay=None,
        log_interval=1,
        save_interval=300,
        keep_period=300,
        lr_schedule=_optimizer.CosineDecaySchedule(warmup_steps=300, peak_lr=2.5e-5, decay_steps=3000, decay_lr=2.5e-6),
    ),
    #
    # RICL-Pi0-FAST-DROID Finetuning configs.
    # Please carefully change the name and finetuning_collected_demos_dir to match your experiment.
    #
    TrainConfig(
        name="pi0_fast_droid_ricl___finetune_on_new_task",
        finetuning_collected_demos_dir="ricl_droid_preprocessing/collected_demos/YYYY-MM-DD_new_task_prompt",
        model=pi0_fast_ricl.Pi0FASTRiclConfig(action_dim=8, action_horizon=15, max_token_len=250, num_retrieved_observations=4, use_action_interpolation=True, lamda=10.0),
        data=RiclDroidDataConfig(repo_id=None, assets=AssetsConfig(asset_id="droid"), base_config=DataConfig(prompt_from_task=False)),
        weight_loader=weight_loaders.CheckpointWeightLoader("pi0_fast_droid_ricl_checkpoint/params"),
        num_train_steps=1_000,
        batch_size=16,
        # NOTE: the freeze filter must be built from a config with the same arguments as `model` above.
        freeze_filter=pi0_fast_ricl.Pi0FASTRiclConfig(action_dim=8, action_horizon=15, max_token_len=250, num_retrieved_observations=4, use_action_interpolation=True, lamda=10.0).get_freeze_filter_with_frozen_img_encoder(),
        log_interval=1,
        save_interval=100,
        keep_period=100,
        lr_schedule=_optimizer.CosineDecaySchedule(warmup_steps=50, peak_lr=2.5e-5, decay_steps=1_000, decay_lr=2.5e-6),
    ),
    #
    # Fine-tuning Libero configs.
    #
    # These train configs define the hyperparameters for fine-tuning the base model on your own dataset.
    # They are used to define key elements like the dataset you are training on, the base checkpoint you
    # are using, and other hyperparameters like how many training steps to run or what learning rate to use.
    # For your own dataset, you can copy this class and modify the dataset name, and data transforms based on
    # the comments below.
    TrainConfig(
        # Change the name to reflect your model and dataset.
        name="pi0_libero",
        # Here you define the model config -- In this example we use pi0 as the model
        # architecture and perform *full* finetuning. in the examples below we show how to modify
        # this to perform *low-memory* (LORA) finetuning and use pi0-FAST as an alternative architecture.
        model=pi0.Pi0Config(),
        # Here you define the dataset you are training on. In this example we use the Libero
        # dataset. For your own dataset, you can change the repo_id to point to your dataset.
        # Also modify the DataConfig to use the new config you made for your dataset above.
        data=LeRobotLiberoDataConfig(
            repo_id="physical-intelligence/libero",
            base_config=DataConfig(
                local_files_only=False,  # Set to True for local-only datasets.
                # This flag determines whether we load the prompt (i.e. the task instruction) from the
                # ``task`` field in the LeRobot dataset. If set to True, the prompt will show up in
                # a field called ``prompt`` in the input dict. The recommended setting is True.
                prompt_from_task=True,
            ),
        ),
        # Here you define which pre-trained checkpoint you want to load to initialize the model.
        # This should match the model config you chose above -- i.e. in this case we use the pi0 base model.
        weight_loader=weight_loaders.CheckpointWeightLoader("s3://openpi-assets/checkpoints/pi0_base/params"),
        # Below you can define other hyperparameters like the learning rate, number of training steps, etc.
        # Check the base TrainConfig class for a full list of available hyperparameters.
        num_train_steps=30_000,
    ),
    TrainConfig(
        name="pi0_libero_low_mem_finetune",
        # Here is an example of loading a pi0 model for LoRA fine-tuning.
        model=pi0.Pi0Config(paligemma_variant="gemma_2b_lora", action_expert_variant="gemma_300m_lora"),
        data=LeRobotLiberoDataConfig(
            repo_id="physical-intelligence/libero",
            base_config=DataConfig(
                local_files_only=False,  # Set to True for local-only datasets.
                prompt_from_task=True,
            ),
        ),
        weight_loader=weight_loaders.CheckpointWeightLoader("s3://openpi-assets/checkpoints/pi0_base/params"),
        num_train_steps=30_000,
        # The freeze filter defines which parameters should be frozen during training.
        # We have a convenience function in the model config that returns the default freeze filter
        # for the given model config for LoRA finetuning. Just make sure it matches the model config
        # you chose above.
        freeze_filter=pi0.Pi0Config(
            paligemma_variant="gemma_2b_lora", action_expert_variant="gemma_300m_lora"
        ).get_freeze_filter(),
        # Turn off EMA for LoRA finetuning.
        ema_decay=None,
    ),
    TrainConfig(
        name="pi0_fast_libero",
        # Here is an example of loading a pi0-FAST model for full finetuning.
        # Modify action_dim and action_horizon to match your dataset (action horizon is equal to
        # the desired action chunk length).
        # The max_token_len is the maximum number of (non-image) tokens the model can handle.
        # This includes the tokenized prompt, proprioceptive state, and (FAST-tokenized) action tokens.
        # Choosing this value too small may chop off tokens at the end of your sequence (the code will throw
        # a warning), while choosing it too large will waste memory (since we pad each batch element to the
        # max_token_len). A good rule of thumb is to use approx 180 for single-arm robots, and approx 250 for
        # two-arm robots. Generally, err on the lower side here first, and potentially increase the value if
        # you see many warnings being thrown during training.
        model=pi0_fast.Pi0FASTConfig(action_dim=7, action_horizon=10, max_token_len=180),
        data=LeRobotLiberoDataConfig(
            repo_id="physical-intelligence/libero",
            base_config=DataConfig(
                local_files_only=False,  # Set to True for local-only datasets.
                prompt_from_task=True,
            ),
        ),
        # Note that we load the pi0-FAST base model checkpoint here.
        weight_loader=weight_loaders.CheckpointWeightLoader("s3://openpi-assets/checkpoints/pi0_fast_base/params"),
        num_train_steps=30_000,
    ),
    TrainConfig(
        name="pi0_fast_libero_baseline",
        model=pi0_fast.Pi0FASTConfig(action_dim=7, action_horizon=10, max_token_len=180),
        data=LeRobotLiberoDataConfig(
            repo_id="/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10",
            base_config=DataConfig(
                local_files_only=True,
                prompt_from_task=True,
                action_sequence_keys=("action",),
            ),
        ),
        weight_loader=weight_loaders.CheckpointWeightLoader(PI0_FAST_BASE_CHECKPOINT),
        num_train_steps=30_000,
    ),
    TrainConfig(
        name="pi0_fast_libero_low_mem_finetune",
        # Here is an example of loading a pi0-FAST model for LoRA finetuning.
        # For setting action_dim, action_horizon, and max_token_len, see the comments above.
        model=pi0_fast.Pi0FASTConfig(
            action_dim=7, action_horizon=10, max_token_len=180, paligemma_variant="gemma_2b_lora"
        ),
        data=LeRobotLiberoDataConfig(
            repo_id="physical-intelligence/libero",
            base_config=DataConfig(
                local_files_only=False,  # Set to True for local-only datasets.
                prompt_from_task=True,
            ),
        ),
        weight_loader=weight_loaders.CheckpointWeightLoader("s3://openpi-assets/checkpoints/pi0_fast_base/params"),
        num_train_steps=30_000,
        # Again, make sure to match the model config above when extracting the freeze filter
        # that specifies which parameters should be frozen during LoRA finetuning.
        freeze_filter=pi0_fast.Pi0FASTConfig(
            action_dim=7, action_horizon=10, max_token_len=180, paligemma_variant="gemma_2b_lora"
        ).get_freeze_filter(),
        # Turn off EMA for LoRA finetuning.
        ema_decay=None,
    ),
    TrainConfig(
        name="pi0_fast_ricl_libero",
        model=pi0_fast_ricl.Pi0FASTRiclConfig(
            action_dim=7,
            action_horizon=15,  # Match pi0fast-DROID-RICL default (was 10)
            max_token_len=180,  # Match pi0fast-LIBERO default (was 250)
            num_retrieved_observations=1,
            use_action_interpolation=True,
            lamda=10.0
        ),
        data=RiclLiberoDataConfig(
            repo_id="physical-intelligence/libero",
            base_config=DataConfig(
                prompt_from_task=True,
            ),
        ),
        # Freeze the image encoder to match Droid setup.
        # NOTE: the freeze filter must be built from a config with the same arguments as `model` above.
        freeze_filter=pi0_fast_ricl.Pi0FASTRiclConfig(
            action_dim=7,
            action_horizon=15,
            max_token_len=180,
            num_retrieved_observations=1,
            use_action_interpolation=True,
            lamda=10.0
        ).get_freeze_filter_with_frozen_img_encoder(),
        # Use conservative LR schedule from Droid
        lr_schedule=_optimizer.CosineDecaySchedule(
            warmup_steps=300,
            peak_lr=2.5e-5,
            decay_steps=3000,
            decay_lr=2.5e-6
        ),
        # Disable EMA for fine-tuning (match Droid)
        ema_decay=None,
        save_interval=1000,
        weight_loader=weight_loaders.CheckpointWeightLoader(PI0_FAST_BASE_CHECKPOINT),
        num_train_steps=30_000,
    ),
    #
    # Fine-tuning Aloha configs.
    #
    # This is a test config that is used to illustrate how to train on a custom LeRobot dataset.
    # For instructions on how to convert and train on your own Aloha dataset see examples/aloha_real/README.md
    TrainConfig(
        name="pi0_aloha_pen_uncap",
        model=pi0.Pi0Config(),
        data=LeRobotAlohaDataConfig(
            repo_id="physical-intelligence/aloha_pen_uncap_diverse",
            assets=AssetsConfig(
                assets_dir="s3://openpi-assets/checkpoints/pi0_base/assets",
                asset_id="trossen",
            ),
            default_prompt="uncap the pen",
            repack_transforms=_transforms.Group(
                inputs=[
                    _transforms.RepackTransform(
                        {
                            "images": {
                                "cam_high": "observation.images.cam_high",
                                "cam_left_wrist": "observation.images.cam_left_wrist",
                                "cam_right_wrist": "observation.images.cam_right_wrist",
                            },
                            "state": "observation.state",
                            "actions": "action",
                        }
                    )
                ]
            ),
            base_config=DataConfig(
                local_files_only=False,  # Set to True for local-only datasets.
            ),
        ),
        weight_loader=weight_loaders.CheckpointWeightLoader("s3://openpi-assets/checkpoints/pi0_base/params"),
        num_train_steps=20_000,
    ),
    # This config is used to demonstrate how to train on a simple simulated environment.
    TrainConfig(
        name="pi0_aloha_sim",
        model=pi0.Pi0Config(),
        data=LeRobotAlohaDataConfig(
            repo_id="lerobot/aloha_sim_transfer_cube_human",
            default_prompt="Transfer cube",
            use_delta_joint_actions=False,
        ),
        weight_loader=weight_loaders.CheckpointWeightLoader("s3://openpi-assets/checkpoints/pi0_base/params"),
        num_train_steps=20_000,
    ),
    #
    # Debugging configs.
    #
    TrainConfig(
        name="debug",
        data=FakeDataConfig(),
        batch_size=2,
        model=pi0.Pi0Config(paligemma_variant="dummy", action_expert_variant="dummy"),
        save_interval=100,
        overwrite=True,
        exp_name="debug",
        num_train_steps=10,
        wandb_enabled=False,
    ),
    TrainConfig(
        name="debug_restore",
        data=FakeDataConfig(),
        batch_size=2,
        model=pi0.Pi0Config(paligemma_variant="dummy", action_expert_variant="dummy"),
        weight_loader=weight_loaders.CheckpointWeightLoader("./checkpoints/debug/debug/9/params"),
        overwrite=True,
        exp_name="debug",
        num_train_steps=10,
        wandb_enabled=False,
    ),
]
+
# Reject duplicate config names up front so lookups below are unambiguous.
if len(_CONFIGS) != len({config.name for config in _CONFIGS}):
    raise ValueError("Config names must be unique.")
_CONFIGS_DICT = {config.name: config for config in _CONFIGS}
+
+
def cli() -> TrainConfig:
    """Parse a TrainConfig from the command line, with one subcommand per named config."""
    choices = {name: (name, config) for name, config in _CONFIGS_DICT.items()}
    return tyro.extras.overridable_config_cli(choices)
+
+
def get_config(config_name: str) -> TrainConfig:
    """Get a config by name.

    Raises:
        ValueError: if no config with that name exists; the message suggests the
            closest known name when one can be found.
    """
    if config_name in _CONFIGS_DICT:
        return _CONFIGS_DICT[config_name]

    suggestions = difflib.get_close_matches(config_name, _CONFIGS_DICT.keys(), n=1, cutoff=0.0)
    closest_str = f" Did you mean '{suggestions[0]}'? " if suggestions else ""
    raise ValueError(f"Config '{config_name}' not found.{closest_str}")
diff --git a/src/openpi/training/data_loader.py b/src/openpi/training/data_loader.py
new file mode 100644
index 0000000000000000000000000000000000000000..9e5d777f33ce618b819e6fc12f708ef07bfe2880
--- /dev/null
+++ b/src/openpi/training/data_loader.py
@@ -0,0 +1,588 @@
+from collections.abc import Iterator, Sequence
+import dataclasses
+import multiprocessing
+import os
+import pathlib
+import typing
+from typing import Protocol, SupportsIndex, TypeVar
+
+import jax
+import jax.numpy as jnp
+import lerobot.common.datasets.lerobot_dataset as lerobot_dataset
+import numpy as np
+import torch
+
+import openpi.models.model as _model
+import openpi.models.pi0_fast_ricl as _pi0_fast_ricl
+import openpi.training.config as _config
+import openpi.transforms as _transforms
+import json
+
+# Import LIBERO dataset
+import sys
+from pathlib import Path
+sys.path.insert(0, str(Path(__file__).parent.parent.parent))
+try:
+ from openpi.data.ricl_libero_dataset import RiclLiberoDataset
+except ImportError:
+ RiclLiberoDataset = None
+
+T_co = TypeVar("T_co", covariant=True)
+
+
class Dataset(Protocol[T_co]):
    """Interface for a dataset with random access."""

    def __getitem__(self, index: SupportsIndex) -> T_co:
        # Return a single sample; the Protocol body raises so that accidental
        # direct use of the interface is obvious.
        raise NotImplementedError("Subclasses of Dataset should implement __getitem__.")

    def __len__(self) -> int:
        # Total number of samples in the dataset.
        raise NotImplementedError("Subclasses of Dataset should implement __len__.")
+
+
class DataLoader(Protocol[T_co]):
    """Interface for a data loader."""

    def data_config(self) -> _config.DataConfig:
        """Get the data config for this data loader."""
        raise NotImplementedError("Subclasses of DataLoader should implement data_config.")

    def __iter__(self) -> Iterator[T_co]:
        # Yields batches; iteration semantics (finite vs. infinite) are left to implementations.
        raise NotImplementedError("Subclasses of DataLoader should implement __iter__.")
+
+
class TransformedDataset(Dataset[T_co]):
    """Wraps a dataset and applies a composed sequence of transforms to every item."""

    def __init__(self, dataset: Dataset, transforms: Sequence[_transforms.DataTransformFn]):
        self._wrapped = dataset
        # Compose once at construction time so per-item access is a single call.
        self._apply = _transforms.compose(transforms)

    def __getitem__(self, index: SupportsIndex) -> T_co:
        return self._apply(self._wrapped[index])

    def __len__(self) -> int:
        return len(self._wrapped)
+
+
class FakeDataset(Dataset):
    """Dataset that generates random samples matching a model config's input spec."""

    def __init__(self, model_config: _model.BaseModelConfig, num_samples: int):
        self._num_samples = num_samples
        self._observation_spec, self._action_spec = model_config.inputs_spec()

    def __getitem__(self, index: SupportsIndex) -> dict:
        # Seed from the index so each sample is deterministic.
        rng = jax.random.key(index.__index__())

        def sample_leaf(spec: jax.ShapeDtypeStruct):
            nonlocal rng
            rng, leaf_rng = jax.random.split(rng)
            shape = spec.shape[1:]  # drop the batch dimension
            if spec.dtype == jnp.float32:
                return jax.random.uniform(leaf_rng, shape=shape, minval=-1.0, maxval=1.0)
            if spec.dtype == jnp.int32:
                return jax.random.randint(leaf_rng, shape=shape, minval=0, maxval=2048)
            # Any other dtype gets zeros of the right shape.
            return jnp.zeros(shape=shape, dtype=spec.dtype)

        observation = jax.tree.map(sample_leaf, self._observation_spec)
        action = jax.tree.map(sample_leaf, self._action_spec)
        return {**observation.to_dict(), "actions": action}

    def __len__(self) -> int:
        return self._num_samples
+
+
def get_action_chunk(action_joint_vels, action_gripper_pos, step_idx, action_horizon):
    """Build a fixed-length action chunk starting at ``step_idx``.

    Steps that fall past the end of the episode are padded with zero joint
    velocities while holding the final gripper position.

    Args:
        action_joint_vels: (num_steps, num_joints) array of joint velocities.
            (Generalized from the previous hard-coded 7 joints.)
        action_gripper_pos: (num_steps, 1) array of gripper positions.
        step_idx: Index of the first step of the chunk.
        action_horizon: Number of steps in the returned chunk.

    Returns:
        (action_horizon, num_joints + 1) array with the gripper appended as the
        last dimension of each step.
    """
    num_steps, num_joints = action_joint_vels.shape
    assert action_gripper_pos.shape == (num_steps, 1), f"{action_gripper_pos.shape=}"
    zero_joints = np.zeros(num_joints, dtype=np.float32)
    action_chunk = []
    for i in range(action_horizon):
        step = step_idx + i
        if step < num_steps:
            action_chunk.append(np.concatenate([action_joint_vels[step], action_gripper_pos[step]], axis=0))
        else:
            # Past the episode end: zero joint velocities, hold the last gripper position.
            action_chunk.append(np.concatenate([zero_joints, action_gripper_pos[-1]], axis=0))
    action_chunk = np.stack(action_chunk, axis=0)
    assert action_chunk.shape == (action_horizon, num_joints + 1), f"{action_chunk.shape=}"
    return action_chunk
+
+
class Pi0FastDroidFinetuneDataset(Dataset):
    """Single-step finetuning dataset built from demos collected on a real robot.

    Reads the bookkeeping JSONs in `finetuning_collected_demos_dir`, validates the
    per-episode `indices_and_distances.npz` files, and serves one observation plus
    an `action_horizon`-step action chunk per index. All episodes share a single
    prompt derived from the first episode's folder name.
    """

    def __init__(self, model_config: _pi0_fast_ricl.Pi0FASTRiclConfig, finetuning_collected_demos_dir: str | None):
        assert finetuning_collected_demos_dir is not None
        # Use context managers so the JSON file handles are closed deterministically
        # (the previous json.load(open(...)) pattern leaked descriptors until GC).
        collected_demos_infos = {}
        for k in ['ep_idxs_to_fol', 'fols_to_ep_idxs', 'groups_to_ep_fols', 'groups_to_ep_idxs']:
            with open(f"{finetuning_collected_demos_dir}/{k}.json") as f:
                collected_demos_infos[k] = json.load(f)

        # files from the collected demos for training
        indices_files = []
        for group_name, ep_fols in collected_demos_infos["groups_to_ep_fols"].items():
            for ep_fol in ep_fols:
                indices_files.append(f"ricl_droid_preprocessing/{ep_fol}/indices_and_distances.npz")

        # actual loading...
        count_collected_demos = 0
        all_query_indices = []
        for file_idx, file_path in enumerate(indices_files):
            indices_and_dists = np.load(file_path)
            query_indices = indices_and_dists["query_indices"]
            num_steps = query_indices.shape[0]
            assert query_indices.shape == (num_steps, 2) and query_indices.dtype == np.int32
            # Collected-demo episodes are numbered from 100000 in file order, one
            # query row per timestep.
            expected_query_indices = np.array([[100000 + file_idx, i] for i in range(num_steps)], dtype=np.int32)
            assert np.allclose(query_indices, expected_query_indices), f"{query_indices=}, {expected_query_indices=}"
            all_query_indices.append(query_indices)
            count_collected_demos += num_steps
        print(f"num states in collected demos given by count_collected_demos: {count_collected_demos}")
        all_query_indices = np.concatenate(all_query_indices, axis=0)
        len_dataset = all_query_indices.shape[0]
        print(f"len_dataset: {len_dataset}")
        assert len_dataset == count_collected_demos
        assert all_query_indices.shape == (len_dataset, 2) and all_query_indices.dtype == np.int32

        # load all data paths
        all_ep_idxs = list(np.unique(all_query_indices[:, 0]))
        all_ep_data_paths = {ep_idx:
            f"ricl_droid_preprocessing/{collected_demos_infos['ep_idxs_to_fol'][str(ep_idx)]}/processed_demo.npz"
            for ep_idx in all_ep_idxs}
        # Prompt comes from the first episode's folder name, e.g.
        # "<group>/<id>_pick_up_the_cup" -> "pick up the cup".
        common_prompt = " ".join(collected_demos_infos['ep_idxs_to_fol']['100000'].split("/")[1].split("_")[1:])
        print(f'num episodes: {len(all_ep_idxs)}')
        print(f"common_prompt: {common_prompt}")

        # save
        self.len_dataset = len_dataset
        self.all_ep_data_paths = all_ep_data_paths
        self.common_prompt = common_prompt
        self.all_query_indices = all_query_indices
        self.action_horizon = model_config.action_horizon

    def __getitem__(self, index: SupportsIndex) -> dict:
        """Return one training example: images, proprio state, action chunk, prompt."""
        query_ep_idx, query_step_idx = self.all_query_indices[index, :]
        ep_data = np.load(self.all_ep_data_paths[query_ep_idx])
        # state[:-1] is the joint position, state[-1:] the gripper position
        # (matching the observation keys below).
        return {'observation/exterior_image_1_left': ep_data['right_image'][query_step_idx],
                'observation/wrist_image_left': ep_data['wrist_image'][query_step_idx],
                'observation/joint_position': ep_data['state'][query_step_idx][:-1],
                'observation/gripper_position': ep_data['state'][query_step_idx][-1:],
                'actions': get_action_chunk(ep_data['actions'][:, :-1], ep_data['actions'][:, -1:], query_step_idx, self.action_horizon),
                'prompt': self.common_prompt}

    def __len__(self) -> int:
        return self.len_dataset
+
+
class RiclDroidDataset(Dataset):
    """Retrieval-In-Context-Learning dataset over DROID episodes and collected demos.

    Each example packs `num_retrieved_observations` retrieved (observation, action
    chunk) pairs plus one query pair, keyed as `retrieved_{i}_*` / `query_*`.
    Episode indices >= 100000 refer to collected demos (`processed_demo.npz`);
    smaller indices refer to broken-up DROID episodes.
    """

    def __init__(self, model_config: _pi0_fast_ricl.Pi0FASTRiclConfig, finetuning_collected_demos_dir: str | None):
        # setup
        num_retrieved_observations = model_config.num_retrieved_observations
        knn_k = 100  # number of neighbors stored in the precomputed knn files
        assert num_retrieved_observations <= knn_k
        embedding_type = "embeddings__wrist_image_left" # retrieval based on embeddings of wrist images
        indices_and_dists_fol = f"ricl_droid_preprocessing/droid_new_broken_up_indices_and_distances/chosenIDscene_id_numepisodes20_embtype{embedding_type}_knnk100"
        outer_dir = "ricl_droid_preprocessing/collected_demos_training" if finetuning_collected_demos_dir is None else finetuning_collected_demos_dir
        collected_demos_infos = {k: json.load(open(f"{outer_dir}/{k}.json")) for k in ['ep_idxs_to_fol', 'fols_to_ep_idxs', 'groups_to_ep_fols', 'groups_to_ep_idxs']}
        # load indices_and_dists
        all_retrieved_indices = []
        all_query_indices = []
        all_distances = []

        ## files from the droid dataset
        # indices_files = os.listdir(indices_and_dists_fol)
        # indices_files = [os.path.join(indices_and_dists_fol, f) for f in indices_files]
        indices_files = [] ## no files from droid dataset

        # files from the collected demos for training
        for group_name, ep_fols in collected_demos_infos["groups_to_ep_fols"].items():
            for ep_fol in ep_fols:
                indices_files.append(f"ricl_droid_preprocessing/{ep_fol}/indices_and_distances.npz")
        # actual loading...
        count_droid = 0
        count_collected_demos = 0
        for file_path in indices_files:
            indices_and_dists = np.load(file_path)
            # Keep only the first `num_retrieved_observations` neighbors. For distances,
            # the last column is also kept — NOTE(review): presumably the query's own
            # distance entry used below as the (num_retrieved_observations + 1)-th value; confirm.
            query_indices, retrieved_indices = indices_and_dists["query_indices"], indices_and_dists["retrieved_indices"][:, :num_retrieved_observations, :]
            distances = np.concatenate((indices_and_dists["distances"][:, :num_retrieved_observations], indices_and_dists["distances"][:, -1:]), axis=1)
            num_steps = query_indices.shape[0]
            assert retrieved_indices.shape == (num_steps, num_retrieved_observations, 2) and retrieved_indices.dtype == np.int32
            assert query_indices.shape == (num_steps, 2) and query_indices.dtype == np.int32
            all_retrieved_indices.append(retrieved_indices)
            all_query_indices.append(query_indices)
            all_distances.append(distances)
            if "collected_demos_training" in file_path or "collected_demos" in file_path:
                count_collected_demos += num_steps
            else:
                count_droid += num_steps
        print(f"count_droid: {count_droid}, count_collected_demos: {count_collected_demos}")
        all_retrieved_indices = np.concatenate(all_retrieved_indices, axis=0)
        all_query_indices = np.concatenate(all_query_indices, axis=0)
        all_distances = np.concatenate(all_distances, axis=0)
        len_dataset = all_retrieved_indices.shape[0]
        print(f"len_dataset: {len_dataset}")
        assert len_dataset == count_droid + count_collected_demos
        assert all_retrieved_indices.shape == (len_dataset, num_retrieved_observations, 2) and all_retrieved_indices.dtype == np.int32
        assert all_query_indices.shape == (len_dataset, 2) and all_query_indices.dtype == np.int32
        assert all_distances.shape == (len_dataset, num_retrieved_observations + 1) and all_distances.dtype == np.float64

        # normalize all_distances and convert to float32
        max_dist_value = json.load(open(f"assets/max_distance.json", 'r'))['distances']['max']
        if finetuning_collected_demos_dir is None:
            # Only enforced in the pure-training setup where the norm-stats-time max
            # must match the dataset actually being loaded.
            assert max_dist_value == np.max(all_distances), f"{max_dist_value=} from norm stats time does not match {np.max(all_distances)=} from dataset"
        print(f'max distance value: {max_dist_value}')
        all_distances = all_distances / max_dist_value
        all_distances = all_distances.astype(np.float32)

        # load all data paths
        ds_name = f"droid_new"
        ds_fol = f"ricl_droid_preprocessing/{ds_name}_broken_up"
        all_ep_idxs = list(np.unique(all_retrieved_indices[:, :, 0])) + list(np.unique(all_query_indices[:, 0]))
        # Episodes below 100000 live in the broken-up DROID folder; the rest are
        # collected demos addressed through ep_idxs_to_fol.
        all_ep_data_paths = {ep_idx:
            f"{ds_fol}/episode_{ep_idx}.npz"
            if ep_idx < 100000 else
            f"ricl_droid_preprocessing/{collected_demos_infos['ep_idxs_to_fol'][str(ep_idx)]}/processed_demo.npz"
            for ep_idx in all_ep_idxs}
        all_ep_prompts = {ep_idx:
            json.load(open(f"{ds_fol}/episode_{ep_idx}.json"))["language_instruction"]
            if ep_idx < 100000 else
            " ".join(collected_demos_infos['ep_idxs_to_fol'][str(ep_idx)].split("/")[1].split("_")[1:])
            for ep_idx in all_ep_idxs}

        # if all episode prompts are the same, print the first prompt
        if all(all_ep_prompts[ep_idx] == all_ep_prompts[list(all_ep_prompts.keys())[0]] for ep_idx in all_ep_prompts):
            print(f"all {len(all_ep_prompts)} episode prompts are the same: {all_ep_prompts[list(all_ep_prompts.keys())[0]]}")

        # save
        self.len_dataset = len_dataset
        self.all_ep_data_paths = all_ep_data_paths
        self.all_ep_prompts = all_ep_prompts
        self.all_retrieved_indices = all_retrieved_indices
        self.all_query_indices = all_query_indices
        self.all_distances = all_distances
        self.use_action_interpolation = model_config.use_action_interpolation
        self.lamda = model_config.lamda
        self.action_horizon = model_config.action_horizon

    def __getitem__(self, index: SupportsIndex) -> dict:
        """Assemble the retrieved context pairs plus the query example for one index."""
        retrieved_indices = self.all_retrieved_indices[index, :, :]
        query_ep_idx, query_step_idx = self.all_query_indices[index, :]

        # Open each referenced episode file once and reuse it for every lookup below.
        ep_idxs = list(np.unique(retrieved_indices[:, 0])) + [query_ep_idx]
        ep_data = {ep_idx: np.load(self.all_ep_data_paths[ep_idx]) for ep_idx in ep_idxs}
        data = {}
        # NOTE(review): assigned but never used below — presumably dead code; confirm.
        random_ext_img = np.random.choice(["left", "right"])
        for ct, (ep_idx, step_idx) in enumerate(retrieved_indices):
            prefix = f"retrieved_{ct}_"
            if ep_idx < 100000:
                # DROID episode: keys follow the observation__*/action_dict__* naming.
                data[f"{prefix}top_image"] = ep_data[ep_idx]["observation__exterior_image_1_left"][step_idx]
                data[f"{prefix}right_image"] = ep_data[ep_idx]["observation__exterior_image_2_left"][step_idx]
                data[f"{prefix}wrist_image"] = ep_data[ep_idx]["observation__wrist_image_left"][step_idx]
                data[f"{prefix}state"] = np.concatenate([ep_data[ep_idx]["observation__joint_position"][step_idx], ep_data[ep_idx]["observation__gripper_position"][step_idx]], axis=0)
                data[f"{prefix}actions"] = get_action_chunk(ep_data[ep_idx]["action_dict__joint_velocity"], ep_data[ep_idx]["action_dict__gripper_position"], step_idx, self.action_horizon)
            else:
                # Collected demo: flat keys with state/actions already concatenated.
                data[f"{prefix}top_image"] = ep_data[ep_idx]["top_image"][step_idx]
                data[f"{prefix}right_image"] = ep_data[ep_idx]["right_image"][step_idx]
                data[f"{prefix}wrist_image"] = ep_data[ep_idx]["wrist_image"][step_idx]
                data[f"{prefix}state"] = ep_data[ep_idx]["state"][step_idx]
                data[f"{prefix}actions"] = get_action_chunk(ep_data[ep_idx]["actions"][:, :-1], ep_data[ep_idx]["actions"][:, -1:], step_idx, self.action_horizon)
            data[f"{prefix}prompt"] = self.all_ep_prompts[ep_idx]

        # Same layout for the query example, under the "query_" prefix.
        prefix = "query_"
        if query_ep_idx < 100000:
            data[f"{prefix}top_image"] = ep_data[query_ep_idx]["observation__exterior_image_1_left"][query_step_idx]
            data[f"{prefix}right_image"] = ep_data[query_ep_idx]["observation__exterior_image_2_left"][query_step_idx]
            data[f"{prefix}wrist_image"] = ep_data[query_ep_idx]["observation__wrist_image_left"][query_step_idx]
            data[f"{prefix}state"] = np.concatenate([ep_data[query_ep_idx]["observation__joint_position"][query_step_idx], ep_data[query_ep_idx]["observation__gripper_position"][query_step_idx]], axis=0)
            data[f"{prefix}actions"] = get_action_chunk(ep_data[query_ep_idx]["action_dict__joint_velocity"], ep_data[query_ep_idx]["action_dict__gripper_position"], query_step_idx, self.action_horizon)
        else:
            data[f"{prefix}top_image"] = ep_data[query_ep_idx]["top_image"][query_step_idx]
            data[f"{prefix}right_image"] = ep_data[query_ep_idx]["right_image"][query_step_idx]
            data[f"{prefix}wrist_image"] = ep_data[query_ep_idx]["wrist_image"][query_step_idx]
            data[f"{prefix}state"] = ep_data[query_ep_idx]["state"][query_step_idx]
            data[f"{prefix}actions"] = get_action_chunk(ep_data[query_ep_idx]["actions"][:, :-1], ep_data[query_ep_idx]["actions"][:, -1:], query_step_idx, self.action_horizon)
        data[f"{prefix}prompt"] = self.all_ep_prompts[query_ep_idx]

        if self.use_action_interpolation:
            # read distances
            distances = self.all_distances[index, :]
            # then compute exp(-lamda * distances)
            data["exp_lamda_distances"] = np.exp(-self.lamda * distances).reshape(-1, 1)

        return data

    def __len__(self) -> int:
        return self.len_dataset
+
+
class CleanLeRobotDataset(lerobot_dataset.LeRobotDataset):
    """
    A subclass of LeRobotDataset that overrides __getitem__ to provide a standard implementation,
    bypassing the custom object detection/future frame logic present in the installed version's __getitem__
    which causes KeyErrors with standard datasets.
    """
    def __getitem__(self, idx) -> dict:
        item = self.hf_dataset[idx]
        ep_idx = item["episode_index"].item()

        query_indices = None
        if self.delta_indices is not None:
            # Gather the extra timesteps needed for delta-timestamp keys
            # (e.g. future actions), plus their padding indicators.
            query_indices, padding = self._get_query_indices(idx, ep_idx)
            query_result = self._query_hf_dataset(query_indices)
            item = {**item, **padding}
            for key, val in query_result.items():
                item[key] = val

        if len(self.meta.video_keys) > 0:
            # Decode video frames at the timestamps corresponding to this item.
            current_ts = item["timestamp"].item()
            query_timestamps = self._get_query_timestamps(current_ts, query_indices)
            video_frames = self._query_videos(query_timestamps, ep_idx)
            # NOTE(review): video frames are merged first, so existing keys in
            # `item` take precedence on collision — confirm this is intended.
            item = {**video_frames, **item}

        # Add task as a string
        if "task_index" in item:
            task_idx = item["task_index"].item()
            item["task"] = self.meta.tasks[task_idx]

        return item
+
def create_dataset(data_config: _config.DataConfig, model_config: _model.BaseModelConfig) -> Dataset:
    """Instantiate the training dataset described by `data_config`.

    Returns a FakeDataset when the repo id is "fake"; otherwise loads the
    LeRobot dataset, optionally wrapping it so prompts are derived from task
    labels.
    """
    if data_config.repo_id is None:
        raise ValueError("Repo ID is not set. Cannot create dataset.")
    if data_config.repo_id == "fake":
        return FakeDataset(model_config, num_samples=1024)

    meta = lerobot_dataset.LeRobotDatasetMetadata(data_config.repo_id)
    # One timestamp per future action step, expressed in seconds at the dataset fps.
    delta_timestamps = {
        key: [step / meta.fps for step in range(model_config.action_horizon)]
        for key in data_config.action_sequence_keys
    }
    dataset = CleanLeRobotDataset(
        data_config.repo_id,
        delta_timestamps=delta_timestamps,
        video_backend="pyav",
    )

    if data_config.prompt_from_task:
        dataset = TransformedDataset(dataset, [_transforms.PromptFromLeRobotTask(meta.tasks)])

    return dataset
+
+
def transform_dataset(dataset: Dataset, data_config: _config.DataConfig, *, skip_norm_stats: bool = False) -> Dataset:
    """Wrap `dataset` with the configured repack/data/model transforms plus normalization."""
    if skip_norm_stats or data_config.repo_id == "fake":
        # Fake data never has stats; callers may also explicitly opt out.
        norm_stats = {}
    else:
        if data_config.norm_stats is None:
            raise ValueError(
                "Normalization stats not found. "
                "Make sure to run `scripts/compute_norm_stats.py --config-name=`."
            )
        norm_stats = data_config.norm_stats

    pipeline = [
        *data_config.repack_transforms.inputs,
        *data_config.data_transforms.inputs,
        _transforms.Normalize(norm_stats, use_quantiles=data_config.use_quantile_norm),
        *data_config.model_transforms.inputs,
    ]
    return TransformedDataset(dataset, pipeline)
+
+
def create_data_loader(
    config: _config.TrainConfig,
    *,
    sharding: jax.sharding.Sharding | None = None,
    skip_norm_stats: bool = False,
    shuffle: bool = False,
    num_batches: int | None = None,
    num_workers: int = 0,
) -> DataLoader[tuple[_model.Observation, _model.Actions]]:
    """Create a data loader for training.

    Args:
        config: The training configuration.
        sharding: The sharding to use for the data loader. If None, the data loader will
            use a single device sharding.
        skip_norm_stats: Whether to skip data normalization.
        shuffle: Whether to shuffle the data.
        num_batches: Determines the number of batches to return. If the number exceeds the
            number of batches in the dataset, the data loader will loop over the dataset.
            If not provided, will iterate over the dataset indefinitely.
        num_workers: The number of worker processes to use. If zero, the data loader will
            execute in the main process.
    """
    data_config = config.data.create(config.assets_dirs, config.model)

    # RICL configs bypass the LeRobot pipeline below and build their dataset directly.
    if "ricl" in config.name:
        # Check if using LIBERO dataset
        if hasattr(config, 'libero_data_dir') and config.libero_data_dir is not None:
            if RiclLiberoDataset is None:
                raise ImportError("RiclLiberoDataset not available. Check openpi/data/ricl_libero_dataset.py")
            print(f"Using LIBERO dataset from: {config.libero_data_dir}")
            print(f"Using RICL context from: {config.libero_context_dir}")
            dataset = RiclLiberoDataset(
                data_dir=config.libero_data_dir,
                context_dir=config.libero_context_dir,
                action_horizon=config.model.action_horizon,
                use_action_interpolation=config.model.use_action_interpolation,
                lambda_decay=config.model.lamda,
                num_retrieved_observations=config.model.num_retrieved_observations,
            )

            # Load norm stats from LIBERO dataset directory (LeRobot format)
            import json
            stats_path = pathlib.Path(config.libero_data_dir) / "meta" / "stats.json"
            if stats_path.exists():
                with open(stats_path, 'r') as f:
                    lerobot_stats = json.load(f)

                # Convert LeRobot stats format to openpi NormStats format
                import openpi.shared.normalize as _normalize_module
                norm_stats = {}

                # Map observation fields
                for key in ["observation.state", "observation.states.ee_state", "action"]:
                    if key in lerobot_stats:
                        stats_data = lerobot_stats[key]
                        # Fall back to min/max when q01/q99 are absent from stats.json.
                        norm_stats[key] = _normalize_module.NormStats(
                            mean=np.array(stats_data["mean"], dtype=np.float32),
                            std=np.array(stats_data["std"], dtype=np.float32),
                            q01=np.array(stats_data.get("q01", stats_data["min"]), dtype=np.float32),
                            q99=np.array(stats_data.get("q99", stats_data["max"]), dtype=np.float32),
                        )

                data_config = dataclasses.replace(data_config, norm_stats=norm_stats)
                print(f"Loaded norm stats from {stats_path}")
                print(f" Keys: {list(norm_stats.keys())}")
            else:
                print(f"Warning: Norm stats not found at {stats_path}")
        else:
            # Use DROID dataset
            dataset = RiclDroidDataset(config.model, config.finetuning_collected_demos_dir)
    elif "pi0_fast_droid___finetune_on_" in config.name:
        dataset = Pi0FastDroidFinetuneDataset(config.model, config.finetuning_collected_demos_dir)
    else:
        # Standard LeRobot path: build the dataset, then apply repack/data/model
        # transforms and normalization. NOTE(review): the RICL/finetune branches
        # above skip transform_dataset — presumably their datasets emit
        # model-ready dicts already; confirm.
        dataset = create_dataset(data_config, config.model)
        dataset = transform_dataset(dataset, data_config, skip_norm_stats=skip_norm_stats)

    data_loader = TorchDataLoader(
        dataset,
        local_batch_size=config.batch_size // jax.process_count(),
        sharding=sharding,
        shuffle=shuffle,
        num_batches=num_batches,
        num_workers=num_workers,
        seed=config.seed,
    )

    class DataLoaderImpl(DataLoader):
        # Adapter exposing the DataLoader protocol (data_config + typed iteration)
        # on top of the raw TorchDataLoader batches.
        def __init__(self, data_config: _config.DataConfig, data_loader: TorchDataLoader):
            self._data_config = data_config
            self._data_loader = data_loader

        def data_config(self) -> _config.DataConfig:
            return self._data_config

        def __iter__(self):
            for batch in self._data_loader:
                if "ricl" in config.name:
                    # RICL batches carry retrieved context; targets are the query actions.
                    yield _model.RiclObservation.from_dict(batch, config.model.num_retrieved_observations), batch["query_actions"]
                else:
                    yield _model.Observation.from_dict(batch), batch["actions"]

    return DataLoaderImpl(data_config, data_loader)
+
+
class TorchDataLoader:
    """Wraps torch.utils.data.DataLoader to yield JAX global arrays with a given sharding."""

    def __init__(
        self,
        dataset,
        local_batch_size: int,
        *,
        sharding: jax.sharding.Sharding | None = None,
        shuffle: bool = False,
        num_batches: int | None = None,
        num_workers: int = 0,
        seed: int = 0,
    ):
        """Create a PyTorch data loader.

        Args:
            dataset: The dataset to load.
            local_batch_size: The local batch size for each process.
            sharding: The sharding to use for the data loader.
            shuffle: Whether to shuffle the data.
            num_batches: If provided, determines the number of returned batches. If the
                number is larger than the number of batches in the dataset, the data loader
                will loop over the dataset. If not provided, will iterate over the dataset
                indefinitely.
            num_workers: The number of worker processes to use. If zero, the data loader will
                execute in the main process.
            seed: The seed to use for shuffling the data.
        """
        if jax.process_count() > 1:
            raise NotImplementedError("Data loading with multiple processes is not supported.")

        if len(dataset) < local_batch_size:
            raise ValueError(f"Local batch size ({local_batch_size}) is larger than the dataset size ({len(dataset)}).")

        if sharding is None:
            # Use data parallel sharding by default.
            sharding = jax.sharding.NamedSharding(
                jax.sharding.Mesh(jax.devices(), ("B",)),
                jax.sharding.PartitionSpec("B"),
            )

        self._sharding = sharding
        self._num_batches = num_batches

        mp_context = None
        if num_workers > 0:
            # "spawn" rather than the default "fork" — NOTE(review): presumably to
            # avoid forking a process that already initialized JAX; confirm.
            mp_context = multiprocessing.get_context("spawn")

        generator = torch.Generator()
        generator.manual_seed(seed)
        self._data_loader = torch.utils.data.DataLoader(
            typing.cast(torch.utils.data.Dataset, dataset),
            batch_size=local_batch_size,
            shuffle=shuffle,
            num_workers=num_workers,
            multiprocessing_context=mp_context,
            persistent_workers=num_workers > 0,
            collate_fn=_collate_fn,
            worker_init_fn=_worker_init_fn,
            drop_last=True,
            generator=generator,
        )

    @property
    def torch_loader(self) -> torch.utils.data.DataLoader:
        # Escape hatch to the underlying torch DataLoader.
        return self._data_loader

    def __iter__(self):
        # Yield up to `num_batches` batches (forever when it is None), restarting
        # the underlying torch iterator whenever the dataset is exhausted.
        num_items = 0
        while True:
            data_iter = iter(self._data_loader)
            while True:
                if self._num_batches is not None and num_items >= self._num_batches:
                    return
                try:
                    batch = next(data_iter)
                except StopIteration:
                    break  # We've exhausted the dataset. Create a new iterator and start over.
                num_items += 1
                yield jax.tree.map(lambda x: jax.make_array_from_process_local_data(self._sharding, x), batch)
+
+
+def _collate_fn(items):
+ """Collate the batch elements into batched numpy arrays."""
+ # Make sure to convert to numpy arrays before stacking since some of the incoming elements
+ # may be JAX arrays.
+ return jax.tree.map(lambda *x: np.stack(np.asarray(x), axis=0), *items)
+
+
+def _worker_init_fn(worker_id: int) -> None:
+ """Tell JAX inside the worker process not to preallocate the GPU memory."""
+ # NOTE: This is called after jax is imported inside the worker process. This
+ # means that this approach will not work for selecting the backend.
+ os.environ["XLA_PYTHON_CLIENT_PREALLOCATE"] = "false"
+ os.environ["XLA_PYTHON_CLIENT_ALLOCATOR"] = "platform"
diff --git a/src/openpi/training/data_loader_test.py b/src/openpi/training/data_loader_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ef6f900e4a2cf23e7156584e0470bfbb24b06e3
--- /dev/null
+++ b/src/openpi/training/data_loader_test.py
@@ -0,0 +1,84 @@
+import dataclasses
+
+import jax
+
+from openpi.models import pi0
+from openpi.training import config as _config
+from openpi.training import data_loader as _data_loader
+
+
def test_torch_data_loader():
    """TorchDataLoader yields exactly `num_batches` batches of the local batch size."""
    model_config = pi0.Pi0Config(action_dim=24, action_horizon=50, max_token_len=48)
    fake_data = _data_loader.FakeDataset(model_config, 16)

    loader = _data_loader.TorchDataLoader(
        fake_data,
        local_batch_size=4,
        num_batches=2,
    )
    batches = [batch for batch in loader]

    assert len(batches) == 2
    for batch in batches:
        for leaf in jax.tree.leaves(batch):
            assert leaf.shape[0] == 4
+
+
def test_torch_data_loader_infinite():
    """Without num_batches the loader keeps looping over a small dataset."""
    model_config = pi0.Pi0Config(action_dim=24, action_horizon=50, max_token_len=48)
    fake_data = _data_loader.FakeDataset(model_config, 4)

    # One pass covers only a single batch, so 10 draws force repeated restarts.
    infinite_iter = iter(_data_loader.TorchDataLoader(fake_data, local_batch_size=4))
    for _ in range(10):
        next(infinite_iter)
+
+
def test_torch_data_loader_parallel():
    """Multi-worker loading keeps the same batch count and shape contract."""
    model_config = pi0.Pi0Config(action_dim=24, action_horizon=50, max_token_len=48)
    fake_data = _data_loader.FakeDataset(model_config, 10)

    loader = _data_loader.TorchDataLoader(fake_data, local_batch_size=4, num_batches=2, num_workers=2)
    batches = [batch for batch in loader]

    assert len(batches) == 2
    for batch in batches:
        for leaf in jax.tree.leaves(batch):
            assert leaf.shape[0] == 4
+
+
def test_with_fake_dataset():
    """End-to-end create_data_loader using the debug (fake-data) config."""
    config = _config.get_config("debug")

    loader = _data_loader.create_data_loader(config, skip_norm_stats=True, num_batches=2)
    batches = list(loader)

    assert len(batches) == 2

    for batch in batches:
        for leaf in jax.tree.leaves(batch):
            assert leaf.shape[0] == config.batch_size

    expected_shape = (config.batch_size, config.model.action_horizon, config.model.action_dim)
    for _, actions in batches:
        assert actions.shape == expected_shape
+
+
def test_with_real_dataset():
    """create_data_loader against a real LeRobot dataset config (norm stats skipped)."""
    config = dataclasses.replace(_config.get_config("pi0_aloha_sim"), batch_size=4)

    loader = _data_loader.create_data_loader(
        config,
        # Skip since we may not have the data available.
        skip_norm_stats=True,
        num_batches=2,
        shuffle=True,
    )
    # Make sure that we can get the data config.
    assert loader.data_config().repo_id == config.data.repo_id

    batches = list(loader)
    assert len(batches) == 2

    expected_shape = (config.batch_size, config.model.action_horizon, config.model.action_dim)
    for _, actions in batches:
        assert actions.shape == expected_shape
diff --git a/src/openpi/training/optimizer.py b/src/openpi/training/optimizer.py
new file mode 100644
index 0000000000000000000000000000000000000000..183daa4ecea74864d34546efb69e64233422659d
--- /dev/null
+++ b/src/openpi/training/optimizer.py
@@ -0,0 +1,108 @@
+import dataclasses
+from typing import Protocol, runtime_checkable
+
+import jax.numpy as jnp
+import optax
+
+import openpi.shared.array_typing as at
+
+
@runtime_checkable
class LRScheduleConfig(Protocol):
    """Config protocol: anything that can build an optax learning-rate schedule."""

    def create(self) -> optax.Schedule: ...
+
+
@dataclasses.dataclass(frozen=True)
class CosineDecaySchedule(LRScheduleConfig):
    """Cosine decay schedule with warmup."""

    warmup_steps: int = 1_000  # linear warmup length, in optimizer steps
    peak_lr: float = 2.5e-5  # LR reached at the end of warmup
    decay_steps: int = 30_000  # total steps over which the cosine decay runs
    decay_lr: float = 2.5e-6  # final LR after decay

    def create(self) -> optax.Schedule:
        # Warmup starts slightly above zero (peak / (warmup + 1)) rather than at 0.
        return optax.warmup_cosine_decay_schedule(
            init_value=self.peak_lr / (self.warmup_steps + 1),
            peak_value=self.peak_lr,
            warmup_steps=self.warmup_steps,
            decay_steps=self.decay_steps,
            end_value=self.decay_lr,
        )
+
+
@dataclasses.dataclass(frozen=True)
class RsqrtDecaySchedule(LRScheduleConfig):
    """Inverse square root decay schedule with warmup."""

    warmup_steps: int = 1_000  # linear warmup length, in optimizer steps
    peak_lr: float = 5e-5  # LR reached at the end of warmup
    timescale: float = 10_000  # decay: LR = peak / sqrt((timescale + step) / timescale)

    def create(self) -> optax.Schedule:
        return optax.join_schedules(
            [
                optax.linear_schedule(
                    init_value=self.peak_lr / (self.warmup_steps + 1),
                    end_value=self.peak_lr,
                    transition_steps=self.warmup_steps,
                ),
                # NOTE(review): optax passes later schedules a step offset by the
                # boundary, so `step` here counts from the end of warmup — confirm.
                lambda step: self.peak_lr / jnp.sqrt((self.timescale + step) / self.timescale),
            ],
            [self.warmup_steps],
        )
+
+
@runtime_checkable
class OptimizerConfig(Protocol):
    """Config protocol: builds an optax GradientTransformation from an LR (or schedule)."""

    def create(
        self,
        lr: optax.ScalarOrSchedule,
        weight_decay_mask: at.PyTree | None = None,
    ) -> optax.GradientTransformation: ...
+
+
@dataclasses.dataclass(frozen=True)
class AdamW(OptimizerConfig):
    """AdamW optimizer."""

    b1: float = 0.9  # first-moment decay
    b2: float = 0.95  # second-moment decay
    eps: float = 1e-8
    weight_decay: float = 1e-10
    clip_gradient_norm: float = 1.0  # global-norm clip applied before the AdamW update

    def create(
        self,
        lr: optax.ScalarOrSchedule,
        weight_decay_mask: at.PyTree | None = None,
    ) -> optax.GradientTransformation:
        """Build a clip-by-global-norm -> adamw chain; `weight_decay_mask` selects decayed params."""
        tx = optax.adamw(
            lr, b1=self.b1, b2=self.b2, eps=self.eps, weight_decay=self.weight_decay, mask=weight_decay_mask
        )

        return optax.chain(optax.clip_by_global_norm(self.clip_gradient_norm), tx)
+
+
@dataclasses.dataclass(frozen=True)
class SGD(OptimizerConfig):
    """SGD optimizer (optionally with momentum / Nesterov). Weight decay is not supported."""

    lr: float = 5e-5  # NOTE(review): unused by create(); the passed-in `lr` wins — confirm intended
    momentum: float = 0.9
    nesterov: bool = False

    def create(
        self,
        lr: optax.ScalarOrSchedule,
        weight_decay_mask: at.PyTree | None = None,
    ) -> optax.GradientTransformation:
        """Build plain optax.sgd; raises if a weight-decay mask is supplied.

        Uses an explicit raise instead of `assert` so the check is not stripped
        when Python runs with optimizations (-O).
        """
        if weight_decay_mask is not None:
            raise ValueError("Weight decay is not supported for SGD")
        return optax.sgd(lr, momentum=self.momentum, nesterov=self.nesterov)
+
+
def create_optimizer(
    optimizer: OptimizerConfig, lr_schedule: LRScheduleConfig, weight_decay_mask: at.PyTree | None = None
) -> optax.GradientTransformation:
    """Materialize the LR schedule, then build the optimizer around it."""
    return optimizer.create(lr_schedule.create(), weight_decay_mask=weight_decay_mask)
diff --git a/src/openpi/training/sharding.py b/src/openpi/training/sharding.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c54a5c125ef0e17a8fc0d3afc206e14f9831a1c
--- /dev/null
+++ b/src/openpi/training/sharding.py
@@ -0,0 +1,102 @@
+import contextlib
+import logging
+
+import jax
+import numpy as np
+
+BATCH_AXIS = "batch"
+FSDP_AXIS = "fsdp"
+# In FSDP, we shard the data across both the batch and FSDP axes.
+DATA_AXIS = (BATCH_AXIS, FSDP_AXIS)
+
+
class _MeshState:
    # Process-global holder for the mesh installed by `set_mesh`; None outside the context.
    active_mesh: jax.sharding.Mesh | None = None
+
+
def make_mesh(num_fsdp_devices: int) -> jax.sharding.Mesh:
    """Create a 2D (batch, fsdp) device mesh.

    The total device count must be divisible by `num_fsdp_devices`; the batch
    axis takes the remaining factor.
    """
    num_devices = jax.device_count()
    if num_devices % num_fsdp_devices != 0:
        raise ValueError(
            f"Number of devices {jax.device_count()} must be divisible by the number of FSDP devices {num_fsdp_devices}."
        )
    return jax.make_mesh((num_devices // num_fsdp_devices, num_fsdp_devices), (BATCH_AXIS, FSDP_AXIS))
+
+
@contextlib.contextmanager
def set_mesh(mesh: jax.sharding.Mesh):
    """Plumbing the mesh deep into the module tree is extremely cumbersome; until the JAX team lands a better API, a
    custom context manager like this one is the recommended way to maintain a reference to a global mesh. This is only used
    in `activation_sharding_constraint` below. Nesting is disallowed."""
    if _MeshState.active_mesh is not None:
        raise ValueError("Cannot nest set_mesh context managers.")
    _MeshState.active_mesh = mesh
    try:
        yield
    finally:
        # Always clear, even if the body raised, so a failed run cannot leak the mesh.
        _MeshState.active_mesh = None
+
+
def activation_sharding_constraint(pytree):
    """Constrain activations to the data axes of the active mesh; no-op when no mesh is set."""
    if _MeshState.active_mesh is None:
        return pytree
    return jax.lax.with_sharding_constraint(
        pytree, jax.sharding.NamedSharding(_MeshState.active_mesh, jax.sharding.PartitionSpec(DATA_AXIS))
    )
+
+
def fsdp_sharding(
    pytree,
    mesh: jax.sharding.Mesh,
    *,
    min_size_mbytes: int = 4,  # 4 MiB
    log: bool = False,
):
    """Apply FSDP sharding to a pytree of arrays based on the mesh shape.

    Args:
        pytree: A pytree to be apply sharding specified by the mesh, note that only array types (eg. contains .shape attr)
            will be considered for sharding.
        mesh: The mesh being used for applying sharding on to pytree.
        min_size_mbytes: The minimum size of the array in MiB to be considered for sharding, any array smaller than this
            will be replicated.
        log: If true, will log the sharding decisions for arrays that are being considered for sharding.

    Returns:
        The sharded pytree.
    """
    min_size_bytes = min_size_mbytes * 2**20

    def _shard_arr(kp, array: jax.ShapeDtypeStruct):
        # if fsdp is not actually going to be used, replicate everything to avoid extraneous logging
        if mesh.shape[FSDP_AXIS] == 1:
            return jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec())
        # replicate scalar and vector arrays
        if not hasattr(array, "shape"):
            return jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec())
        if len(array.shape) < 2:
            return jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec())
        # replicate small arrays
        if (arr_size := np.prod(array.shape) * np.dtype(array.dtype).itemsize) < min_size_bytes:
            return jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec())

        # shard matrices and larger tensors along the largest axis that is divisible by the fsdp dimension
        axes = np.argsort(array.shape)[::-1]  # axis indices, largest dimension first
        spec = [None] * len(axes)
        for i in axes:
            if array.shape[i] % mesh.shape[FSDP_AXIS] == 0:
                if log:
                    logging.info(
                        f"Sharding {jax.tree_util.keystr(kp)} of shape {array.shape} ({arr_size / 2**20:.2f} MiB) along axis {i}"
                    )
                # First divisible axis wins; all other axes stay replicated.
                spec[i] = FSDP_AXIS
                return jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec(*spec))

        # replicate if no valid sharding was found
        if log:
            logging.warning(
                f"Could not find a valid sharding for {jax.tree_util.keystr(kp)} of shape {array.shape} with mesh of shape {mesh.shape}"
            )
        return jax.sharding.NamedSharding(mesh, jax.sharding.PartitionSpec())

    return jax.tree_util.tree_map_with_path(_shard_arr, pytree)
diff --git a/src/openpi/training/utils.py b/src/openpi/training/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..6bac8e33d550b57d51f55f8a4095a969fb1c59a8
--- /dev/null
+++ b/src/openpi/training/utils.py
@@ -0,0 +1,43 @@
+from collections.abc import Callable
+from typing import Any
+
+from flax import nnx
+from flax import struct
+import jax
+import optax
+
+from openpi.models import model as _model
+from openpi.shared import array_typing as at
+
+
@at.typecheck
@struct.dataclass
class TrainState:
    """Pytree holding everything needed to run and resume training."""

    step: at.Int[at.ArrayLike, ""]  # current optimizer step (scalar)
    params: nnx.State  # model parameters
    model_def: nnx.GraphDef[_model.BaseModel]  # static model structure
    opt_state: optax.OptState
    tx: optax.GradientTransformation = struct.field(pytree_node=False)  # optimizer (static, not a pytree leaf)

    ema_decay: float | None = struct.field(pytree_node=False)  # None disables EMA tracking
    ema_params: nnx.State | None = None  # EMA copy of params when ema_decay is set
+
+
@at.typecheck
def tree_to_info(tree: at.PyTree, interp_func: Callable[[Any], str] = str) -> str:
    """Converts a PyTree into a human-readable string for logging. Optionally, `interp_func` can be provided to convert
    the leaf values to more meaningful strings.
    """
    leaves_with_paths, _ = jax.tree_util.tree_flatten_with_path(tree)
    lines = [f"{jax.tree_util.keystr(path)}: {interp_func(leaf)}" for path, leaf in leaves_with_paths]
    return "\n".join(lines)
+
+
@at.typecheck
def array_tree_to_info(tree: at.PyTree) -> str:
    """Converts a PyTree of arrays into a human-readable string for logging."""

    def _describe(leaf):
        # Array-like leaves render as shape@dtype; everything else falls back to type + repr.
        if hasattr(leaf, "shape") and hasattr(leaf, "dtype"):
            return f"{leaf.shape}@{leaf.dtype}"
        return f"{type(leaf).__name__}: {leaf}"

    return tree_to_info(tree, _describe)
diff --git a/src/openpi/training/weight_loaders.py b/src/openpi/training/weight_loaders.py
new file mode 100644
index 0000000000000000000000000000000000000000..ebf6c6a168b9720427d22d9cb9fcbef988d0e106
--- /dev/null
+++ b/src/openpi/training/weight_loaders.py
@@ -0,0 +1,102 @@
+import dataclasses
+import logging
+import re
+from typing import Protocol, runtime_checkable
+
+import flax.traverse_util
+import numpy as np
+
+import openpi.models.model as _model
+import openpi.shared.array_typing as at
+import openpi.shared.download as download
+
+logger = logging.getLogger(__name__)
+
+
+@runtime_checkable
+class WeightLoader(Protocol):
+    """Interface for strategies that populate model parameters before training."""
+
+    def load(self, params: at.Params) -> at.Params:
+        """Loads the model weights.
+
+        Args:
+            params: Parameters of the model. This is a nested structure of array-like objects that
+                represent the model's parameters.
+
+        Returns:
+            Loaded parameters. The structure must be identical to `params`. If returning a subset of
+            the parameters the loader must merge the loaded parameters with `params`.
+        """
+
+
+@dataclasses.dataclass(frozen=True)
+class NoOpWeightLoader(WeightLoader):
+ def load(self, params: at.Params) -> at.Params:
+ return params
+
+
+@dataclasses.dataclass(frozen=True)
+class CheckpointWeightLoader(WeightLoader):
+    """Loads an entire set of weights from a checkpoint.
+
+    Compatible with:
+      trained checkpoints:
+        example: "./checkpoints/<config>/<exp_name>/<step>/params"
+      released checkpoints:
+        example: "s3://openpi-assets/checkpoints/<model>/params"
+    """
+
+    # Local path or remote URI (e.g. "s3://...") of the checkpoint's params directory.
+    params_path: str
+
+    def load(self, params: at.Params) -> at.Params:
+        # We are loading np.ndarray and relying on the training code to properly convert and shard the params.
+        loaded_params = _model.restore_params(download.maybe_download(self.params_path), restore_type=np.ndarray)
+        # Add all missing LoRA weights.
+        return _merge_params(loaded_params, params, missing_regex=".*lora.*")
+
+
+@dataclasses.dataclass(frozen=True)
+class PaliGemmaWeightLoader(WeightLoader):
+    """Loads weights from the official PaliGemma checkpoint.
+
+    This will overwrite existing weights with similar names while keeping all extra weights intact.
+    This allows us to support the action expert which is used by the Pi0 model.
+    """
+
+    def load(self, params: at.Params) -> at.Params:
+        # Anonymous GCS token: the bucket is public, so no credentials are required.
+        path = download.maybe_download(
+            "gs://vertex-model-garden-paligemma-us/paligemma/pt_224.npz", gs={"token": "anon"}
+        )
+        with path.open("rb") as f:
+            # allow_pickle=False guards against arbitrary-code-execution payloads in the archive.
+            flat_params = dict(np.load(f, allow_pickle=False))
+        # The npz stores flat "a/b/c" keys; rebuild the nesting and keep only the "params" subtree.
+        loaded_params = {"PaliGemma": flax.traverse_util.unflatten_dict(flat_params, sep="/")["params"]}
+        # Add all missing weights.
+        return _merge_params(loaded_params, params, missing_regex=".*")
+
+
+def _merge_params(loaded_params: at.Params, params: at.Params, *, missing_regex: str) -> at.Params:
+ """Merges the loaded parameters with the reference parameters.
+
+ Args:
+ loaded_params: The parameters to merge.
+ params: The reference parameters.
+ missing_regex: A regex pattern for all missing keys that should be merged from the reference parameters.
+
+ Returns:
+ A new dictionary with the merged parameters.
+ """
+ flat_ref = flax.traverse_util.flatten_dict(params, sep="/")
+ flat_loaded = flax.traverse_util.flatten_dict(loaded_params, sep="/")
+
+ # First, take all weights that are a subset of the reference weights.
+ result = {}
+ for k, v in flat_loaded.items():
+ if k in flat_ref:
+ result[k] = v.astype(flat_ref[k].dtype)
+
+ # Then, merge any missing weights as defined by the missing regex.
+ pattern = re.compile(missing_regex)
+ for k in {k for k in flat_ref if pattern.fullmatch(k)}:
+ if k not in result:
+ result[k] = flat_ref[k]
+
+ return flax.traverse_util.unflatten_dict(result, sep="/")
diff --git a/wandb/run-20260128_201755-g4zbuxt5/logs/debug.log b/wandb/run-20260128_201755-g4zbuxt5/logs/debug.log
new file mode 100644
index 0000000000000000000000000000000000000000..b31dc10cf111a1e7a7c80d65ee65f14cbf02bea7
--- /dev/null
+++ b/wandb/run-20260128_201755-g4zbuxt5/logs/debug.log
@@ -0,0 +1,94 @@
+2026-01-28 20:17:55,645 INFO MainThread:4064435 [wandb_setup.py:_flush():80] Current SDK version is 0.23.0
+2026-01-28 20:17:55,645 INFO MainThread:4064435 [wandb_setup.py:_flush():80] Configure stats pid to 4064435
+2026-01-28 20:17:55,645 INFO MainThread:4064435 [wandb_setup.py:_flush():80] Loading settings from /user/hominhduy.nguyen01/u24877/.config/wandb/settings
+2026-01-28 20:17:55,645 INFO MainThread:4064435 [wandb_setup.py:_flush():80] Loading settings from /mnt/vast-kisski/projects/kisski-spath/VLA_Groot/in_context_learning/ricl_openpi/wandb/settings
+2026-01-28 20:17:55,645 INFO MainThread:4064435 [wandb_setup.py:_flush():80] Loading settings from environment variables
+2026-01-28 20:17:55,645 INFO MainThread:4064435 [wandb_init.py:setup_run_log_directory():713] Logging user logs to /mnt/vast-kisski/projects/kisski-spath/VLA_Groot/in_context_learning/ricl_openpi/wandb/run-20260128_201755-g4zbuxt5/logs/debug.log
+2026-01-28 20:17:55,645 INFO MainThread:4064435 [wandb_init.py:setup_run_log_directory():714] Logging internal logs to /mnt/vast-kisski/projects/kisski-spath/VLA_Groot/in_context_learning/ricl_openpi/wandb/run-20260128_201755-g4zbuxt5/logs/debug-internal.log
+2026-01-28 20:17:55,645 INFO MainThread:4064435 [wandb_init.py:init():840] calling init triggers
+2026-01-28 20:17:55,645 INFO MainThread:4064435 [wandb_init.py:init():845] wandb.init called with sweep_config: {}
+config: {'name': 'pi0_fast_ricl_libero', 'finetuning_collected_demos_dir': None, 'project_name': 'ricl_libero', 'exp_name': 'ricl_libero_test_run', 'model': {'action_dim': 7, 'action_horizon': 50, 'max_token_len': 250, 'dtype': 'bfloat16', 'paligemma_variant': 'dummy', 'num_retrieved_observations': 1, 'use_action_interpolation': True, 'lamda': 10.0}, 'weight_loader': {'params_path': 's3://openpi-assets/checkpoints/pi0_fast_base/params'}, 'lr_schedule': {'warmup_steps': 1000, 'peak_lr': 0.0001, 'decay_steps': 30000, 'decay_lr': 2.5e-06}, 'optimizer': {'b1': 0.9, 'b2': 0.95, 'eps': 1e-08, 'weight_decay': 1e-10, 'clip_gradient_norm': 1.0}, 'ema_decay': 0.99, 'freeze_filter': Nothing(), 'data': {'repo_id': 'physical-intelligence/libero', 'assets': {'assets_dir': None, 'asset_id': None}, 'base_config': {'repo_id': None, 'asset_id': None, 'norm_stats': None, 'repack_transforms': {'inputs': (), 'outputs': ()}, 'data_transforms': {'inputs': (), 'outputs': ()}, 'model_transforms': {'inputs': (), 'outputs': ()}, 'use_quantile_norm': False, 'action_sequence_keys': ('actions',), 'prompt_from_task': True, 'local_files_only': False}}, 'assets_base_dir': './assets', 'checkpoint_base_dir': './checkpoints', 'seed': 42, 'batch_size': 4, 'num_workers': 2, 'num_train_steps': 1000, 'log_interval': 10, 'save_interval': 500, 'keep_period': 5000, 'overwrite': True, 'resume': False, 'wandb_enabled': True, 'policy_metadata': None, 'fsdp_devices': 1, 'libero_data_dir': '/projects/extern/kisski/kisski-spath/dir.project/VLA_Groot/merged_libero_mask_depth_noops_lerobot_10', 'libero_context_dir': 'rag/ricl_training_context_libero_10_test/', '_wandb': {}}
+2026-01-28 20:17:55,645 INFO MainThread:4064435 [wandb_init.py:init():888] starting backend
+2026-01-28 20:17:56,586 INFO MainThread:4064435 [wandb_init.py:init():891] sending inform_init request
+2026-01-28 20:17:56,595 INFO MainThread:4064435 [wandb_init.py:init():899] backend started and connected
+2026-01-28 20:17:56,597 INFO MainThread:4064435 [wandb_init.py:init():969] updated telemetry
+2026-01-28 20:17:56,620 INFO MainThread:4064435 [wandb_init.py:init():993] communicating run to backend with 90.0 second timeout
+2026-01-28 20:18:26,858 INFO Thread-2 (wrapped_target):4064435 [retry.py:__call__():164] [no run ID] Retry attempt failed:
+Traceback (most recent call last):
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/connection.py", line 198, in _new_conn
+ sock = connection.create_connection(
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/util/connection.py", line 85, in create_connection
+ raise err
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/util/connection.py", line 73, in create_connection
+ sock.connect(sa)
+TimeoutError: timed out
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/connectionpool.py", line 787, in urlopen
+ response = self._make_request(
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/connectionpool.py", line 488, in _make_request
+ raise new_e
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/connectionpool.py", line 464, in _make_request
+ self._validate_conn(conn)
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/connectionpool.py", line 1093, in _validate_conn
+ conn.connect()
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/connection.py", line 753, in connect
+ self.sock = sock = self._new_conn()
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/connection.py", line 207, in _new_conn
+ raise ConnectTimeoutError(
+urllib3.exceptions.ConnectTimeoutError: (, 'Connection to api.wandb.ai timed out. (connect timeout=20)')
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/requests/adapters.py", line 644, in send
+ resp = conn.urlopen(
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/connectionpool.py", line 841, in urlopen
+ retries = retries.increment(
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/urllib3/util/retry.py", line 519, in increment
+ raise MaxRetryError(_pool, url, reason) from reason # type: ignore[arg-type]
+urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='api.wandb.ai', port=443): Max retries exceeded with url: /graphql (Caused by ConnectTimeoutError(, 'Connection to api.wandb.ai timed out. (connect timeout=20)'))
+
+During handling of the above exception, another exception occurred:
+
+Traceback (most recent call last):
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/wandb/sdk/lib/retry.py", line 157, in __call__
+ result = self._call_fn(*args, **kwargs)
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/wandb/sdk/internal/internal_api.py", line 391, in execute
+ return self.client.execute(*args, **kwargs) # type: ignore
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/wandb/vendor/gql-0.2.0/wandb_gql/client.py", line 52, in execute
+ result = self._get_result(document, *args, **kwargs)
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/wandb/vendor/gql-0.2.0/wandb_gql/client.py", line 60, in _get_result
+ return self.transport.execute(document, *args, **kwargs)
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/wandb/sdk/lib/gql_request.py", line 70, in execute
+ request = self.session.post(self.url, **post_args)
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/requests/sessions.py", line 637, in post
+ return self.request("POST", url, data=data, json=json, **kwargs)
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/requests/sessions.py", line 589, in request
+ resp = self.send(prep, **send_kwargs)
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/requests/sessions.py", line 703, in send
+ r = adapter.send(request, **kwargs)
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/site-packages/requests/adapters.py", line 665, in send
+ raise ConnectTimeout(e, request=request)
+requests.exceptions.ConnectTimeout: HTTPSConnectionPool(host='api.wandb.ai', port=443): Max retries exceeded with url: /graphql (Caused by ConnectTimeoutError(, 'Connection to api.wandb.ai timed out. (connect timeout=20)'))
+2026-01-28 20:19:05,232 WARNING MainThread:4064435 [wandb_init.py:init():1595] [no run ID] interrupted
+Traceback (most recent call last):
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/wandb/sdk/wandb_init.py", line 1586, in init
+ run = wi.init(run_settings, run_config, run_printer)
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/wandb/sdk/wandb_init.py", line 1004, in init
+ result = wait_with_progress(
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/wandb/sdk/mailbox/wait_with_progress.py", line 23, in wait_with_progress
+ return wait_all_with_progress(
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/wandb/sdk/mailbox/wait_with_progress.py", line 77, in wait_all_with_progress
+ return asyncer.run(progress_loop_with_timeout)
+ File "/user/hominhduy.nguyen01/u24877/.local/lib/python3.10/site-packages/wandb/sdk/lib/asyncio_manager.py", line 136, in run
+ return future.result()
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/concurrent/futures/_base.py", line 453, in result
+ self._condition.wait(timeout)
+ File "/projects/extern/kisski/kisski-spath/dir.project/VLA_3D/miniconda3/envs/pitorch_doanh/lib/python3.10/threading.py", line 320, in wait
+ waiter.acquire()
+KeyboardInterrupt
+2026-01-28 20:19:05,298 INFO wandb-AsyncioManager-main:4064435 [service_client.py:_forward_responses():80] Reached EOF.
+2026-01-28 20:19:05,298 INFO wandb-AsyncioManager-main:4064435 [mailbox.py:close():137] Closing mailbox, abandoning 2 handles.