# pi05tests-openpi-multiarm / openpi / scripts / run_preprocess_twin.sh
# Author: lsnu
# Expand TWIN preprocessing script to remaining PerAct2 tasks
# Commit: ec121d2 (verified)
#!/usr/bin/env bash
# Preprocess TWIN/PerAct2 squashfs archives into LeRobot datasets, upload
# them to the Hugging Face Hub, and compute/upload normalization stats.
set -euo pipefail
IFS=$'\n\t'
# Make user-local tools (uv, huggingface-cli, ...) resolvable.
export PATH="$HOME/.local/bin:$PATH"
# Absolute directory of this script; OPENPI_ROOT defaults to its parent
# but can be overridden from the environment.
SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
OPENPI_ROOT="${OPENPI_ROOT:-$(cd -- "$SCRIPT_DIR/.." && pwd)}"
# Emit a single log line prefixed with a UTC ISO-8601 timestamp.
log() {
  local stamp
  stamp="$(date -u +'%Y-%m-%dT%H:%M:%SZ')"
  printf '%s %s\n' "$stamp" "$*"
}
# EXIT-trap handler: report the script's final exit status via log().
on_exit() {
  local rc=$?
  case "$rc" in
    0) log "Run exited successfully" ;;
    *) log "Run exited with status $rc" ;;
  esac
}
# Report final status on every exit path (success, error, or set -e abort).
trap on_exit EXIT
# Abort the script with a diagnostic unless the named environment variable
# is set and non-empty. $1 - variable name (looked up indirectly).
require_env() {
  local var_name="$1"
  [[ -n "${!var_name:-}" ]] || {
    echo "Required environment variable is not set: $var_name" >&2
    exit 1
  }
}
# Print the path of the project-local virtualenv's python interpreter.
project_python_path() {
  printf '%s/.venv/bin/python\n' "$OPENPI_ROOT"
}
# Run the Hugging Face CLI. Preference order: the project venv's python
# module entry point, then `huggingface-cli` on PATH, then `hf` on PATH.
# Aborts the script if none is available.
hf_cli() {
  local venv_python candidate
  venv_python="$(project_python_path)"
  if [[ -x "$venv_python" ]]; then
    "$venv_python" -m huggingface_hub.commands.huggingface_cli "$@"
    return
  fi
  for candidate in huggingface-cli hf; do
    if command -v "$candidate" >/dev/null 2>&1; then
      "$candidate" "$@"
      return
    fi
  done
  echo "No Hugging Face CLI found in PATH or project venv." >&2
  exit 1
}
# Run python: the project venv interpreter when it exists, otherwise the
# system python3. All arguments are forwarded untouched.
project_python() {
  local interpreter
  interpreter="$(project_python_path)"
  if [[ ! -x "$interpreter" ]]; then
    interpreter=python3
  fi
  "$interpreter" "$@"
}
# Ensure the Hugging Face CLI is authenticated: accept an existing session,
# otherwise log in with HF_TOKEN, otherwise abort with instructions.
ensure_hf_auth() {
  if hf_cli whoami >/dev/null 2>&1; then
    return 0
  fi
  if [[ -z "${HF_TOKEN:-}" ]]; then
    echo "Hugging Face auth is not available. Set HF_TOKEN or login with 'huggingface-cli login' first." >&2
    exit 1
  fi
  hf_cli login --token "$HF_TOKEN" >/dev/null
  hf_cli whoami >/dev/null
  return 0
}
# Disable hf_transfer; plain HTTP transfers are used for all Hub traffic.
export HF_HUB_ENABLE_HF_TRANSFER=0
# Working directories (all overridable from the environment).
export ROOT="${ROOT:-$HOME/pi05prep-work}"
export INBOX="${INBOX:-$ROOT/inbox}"
export TMPDIR="${TMPDIR:-$ROOT/tmp}"
export LOGDIR="${LOGDIR:-$ROOT/logs}"
# Model repo that holds the source squashfs archives and the norm stats.
export MODEL_REPO="${MODEL_REPO:-lsnu/pi05tests-openpi-multiarm}"
# Hugging Face cache layout, rooted under ROOT so the run is self-contained.
export HF_HOME="${HF_HOME:-$ROOT/hf-home}"
export HF_HUB_CACHE="${HF_HUB_CACHE:-$HF_HOME/hub}"
export HF_DATASETS_CACHE="${HF_DATASETS_CACHE:-$HF_HOME/datasets}"
export HF_LEROBOT_HOME="${HF_LEROBOT_HOME:-$HF_HOME/lerobot}"
# Norm-stats computation knobs (passed to compute_norm_stats_repo.py).
export STATS_BATCH_SIZE="${STATS_BATCH_SIZE:-64}"
export STATS_NUM_WORKERS="${STATS_NUM_WORKERS:-0}"
# LeRobot rejects the deprecated env var. Clear it even if inherited.
unset LEROBOT_HOME || true
# Target dataset repo ids, one (train, val, test) triple per task.
export DP_TRAIN="${DP_TRAIN:-lsnu/twin_dual_push_256_train}"
export DP_VAL="${DP_VAL:-lsnu/twin_dual_push_256_val}"
export DP_TEST="${DP_TEST:-lsnu/twin_dual_push_256_test}"
export DP128_TRAIN="${DP128_TRAIN:-lsnu/twin_dual_push_128_train}"
export DP128_VAL="${DP128_VAL:-lsnu/twin_dual_push_128_val}"
export DP128_TEST="${DP128_TEST:-lsnu/twin_dual_push_128_test}"
export HO_TRAIN="${HO_TRAIN:-lsnu/twin_handover_256_train}"
export HO_VAL="${HO_VAL:-lsnu/twin_handover_256_val}"
export HO_TEST="${HO_TEST:-lsnu/twin_handover_256_test}"
export HOE_TRAIN="${HOE_TRAIN:-lsnu/twin_handover_item_easy_256_train}"
export HOE_VAL="${HOE_VAL:-lsnu/twin_handover_item_easy_256_val}"
export HOE_TEST="${HOE_TEST:-lsnu/twin_handover_item_easy_256_test}"
export SR_TRAIN="${SR_TRAIN:-lsnu/twin_straighten_rope_256_train}"
export SR_VAL="${SR_VAL:-lsnu/twin_straighten_rope_256_val}"
export SR_TEST="${SR_TEST:-lsnu/twin_straighten_rope_256_test}"
export LB_TRAIN="${LB_TRAIN:-lsnu/twin_lift_ball_256_train}"
export LB_VAL="${LB_VAL:-lsnu/twin_lift_ball_256_val}"
export LB_TEST="${LB_TEST:-lsnu/twin_lift_ball_256_test}"
export LT_TRAIN="${LT_TRAIN:-lsnu/twin_lift_tray_256_train}"
export LT_VAL="${LT_VAL:-lsnu/twin_lift_tray_256_val}"
export LT_TEST="${LT_TEST:-lsnu/twin_lift_tray_256_test}"
export PL_TRAIN="${PL_TRAIN:-lsnu/twin_pick_laptop_256_train}"
export PL_VAL="${PL_VAL:-lsnu/twin_pick_laptop_256_val}"
export PL_TEST="${PL_TEST:-lsnu/twin_pick_laptop_256_test}"
export PP_TRAIN="${PP_TRAIN:-lsnu/twin_pick_plate_256_train}"
export PP_VAL="${PP_VAL:-lsnu/twin_pick_plate_256_val}"
export PP_TEST="${PP_TEST:-lsnu/twin_pick_plate_256_test}"
export PB_TRAIN="${PB_TRAIN:-lsnu/twin_push_box_256_train}"
export PB_VAL="${PB_VAL:-lsnu/twin_push_box_256_val}"
export PB_TEST="${PB_TEST:-lsnu/twin_push_box_256_test}"
export PBF_TRAIN="${PBF_TRAIN:-lsnu/twin_put_bottle_in_fridge_256_train}"
export PBF_VAL="${PBF_VAL:-lsnu/twin_put_bottle_in_fridge_256_val}"
export PBF_TEST="${PBF_TEST:-lsnu/twin_put_bottle_in_fridge_256_test}"
export PID_TRAIN="${PID_TRAIN:-lsnu/twin_put_item_in_drawer_256_train}"
export PID_VAL="${PID_VAL:-lsnu/twin_put_item_in_drawer_256_val}"
export PID_TEST="${PID_TEST:-lsnu/twin_put_item_in_drawer_256_test}"
export SWEEP_TRAIN="${SWEEP_TRAIN:-lsnu/twin_sweep_to_dustpan_256_train}"
export SWEEP_VAL="${SWEEP_VAL:-lsnu/twin_sweep_to_dustpan_256_val}"
export SWEEP_TEST="${SWEEP_TEST:-lsnu/twin_sweep_to_dustpan_256_test}"
export OVEN_TRAIN="${OVEN_TRAIN:-lsnu/twin_take_tray_out_of_oven_256_train}"
export OVEN_VAL="${OVEN_VAL:-lsnu/twin_take_tray_out_of_oven_256_val}"
export OVEN_TEST="${OVEN_TEST:-lsnu/twin_take_tray_out_of_oven_256_test}"
# Create the whole working-directory tree up front.
mkdir -p "$INBOX" "$TMPDIR" "$LOGDIR" "$HF_HOME" "$HF_HUB_CACHE" "$HF_DATASETS_CACHE" "$HF_LEROBOT_HOME"
# Bootstrap the openpi environment: check HF_TOKEN, sanity-check
# pyproject.toml, authenticate the HF CLI, and sync the uv-managed venv.
# Globals read: OPENPI_ROOT, HF_TOKEN, HF_LEROBOT_HOME.
prepare_openpi_env() {
  require_env HF_TOKEN
  cd "$OPENPI_ROOT"
  # Guard against the unpatched CUDA-pinned jax dependency. Use POSIX grep
  # instead of ripgrep: if `rg` is not installed, the original check failed
  # with command-not-found and silently fell through to "not matched".
  if grep -q 'jax\[cuda12\]==0\.5\.3' pyproject.toml; then
    echo "pyproject.toml still points at jax[cuda12]; patch it before running." >&2
    exit 1
  fi
  log "Authenticating Hugging Face CLI"
  hf_cli login --token "$HF_TOKEN"
  hf_cli whoami
  log "Syncing uv environment"
  # Skip LFS smudge so checkout/sync does not pull large binary artifacts.
  GIT_LFS_SKIP_SMUDGE=1 uv sync --python 3.11
  GIT_LFS_SKIP_SMUDGE=1 uv pip install -e .
  log "LeRobot cache root: $HF_LEROBOT_HOME"
}
# Download one LFS-tracked file from MODEL_REPO into INBOX and verify its
# byte size and sha256 digest against the Hub's LFS metadata.
# Prints two lines on success: the local file path, then a JSON record
# (remote_path/local_path/size/sha256/commit) consumed via mapfile by the
# process_*_split callers.
download_and_verify() {
  local remote_path="$1"
  project_python - "$remote_path" "$INBOX" "$MODEL_REPO" <<'PY'
import hashlib
import json
import os
import sys
from pathlib import Path
from huggingface_hub import HfApi, hf_hub_download

remote_path, inbox, repo_id = sys.argv[1:4]
api = HfApi()
# List only the parent directory; expand=True attaches LFS + commit info.
parent = str(Path(remote_path).parent)
info = None
for item in api.list_repo_tree(repo_id, repo_type="model", path_in_repo=parent, recursive=False, expand=True):
    if getattr(item, "path", None) == remote_path:
        info = item
        break
if info is None:
    raise SystemExit(f"Remote file not found: {remote_path}")
if info.lfs is None:
    raise SystemExit(f"Expected an LFS file for {remote_path}")
local_path = hf_hub_download(
    repo_id=repo_id,
    repo_type="model",
    filename=remote_path,
    local_dir=inbox,
)
# Cheap size check first, then a streamed sha256 (16 MiB chunks to bound
# memory on multi-GB archives).
size = os.path.getsize(local_path)
if size != info.size:
    raise SystemExit(f"Size mismatch for {remote_path}: local={size} remote={info.size}")
digest = hashlib.sha256()
with open(local_path, "rb") as f:
    for chunk in iter(lambda: f.read(16 * 1024 * 1024), b""):
        digest.update(chunk)
sha256 = digest.hexdigest()
if sha256 != info.lfs.sha256:
    raise SystemExit(
        f"SHA256 mismatch for {remote_path}: local={sha256} remote={info.lfs.sha256}"
    )
# Line 1: local path (consumed as dl_out[0]); line 2: JSON metadata.
print(local_path)
print(
    json.dumps(
        {
            "remote_path": remote_path,
            "local_path": local_path,
            "size": size,
            "sha256": sha256,
            "commit": info.last_commit.oid if info.last_commit else None,
        }
    )
)
PY
}
# Print squashfs superblock information for a downloaded archive, teeing
# the output into the given log file. $1 - archive path, $2 - log file.
probe_squashfs() {
  local archive="$1"
  local out_log="$2"
  log "Inspecting squashfs archive: $archive"
  unsquashfs -s "$archive" | tee "$out_log"
}
# Convert a squashfs archive into a local LeRobot dataset, capturing all
# converter output (stdout+stderr) in the given log file.
# $1 - archive path, $2 - target repo id, $3 - log file.
convert_local() {
  local archive="$1"
  local dataset_repo="$2"
  local out_log="$3"
  log "Converting $archive -> $dataset_repo"
  (
    cd "$OPENPI_ROOT"
    .venv/bin/python scripts/convert_twin_squashfs_to_lerobot.py \
      --squashfs-path "$archive" \
      --repo-id "$dataset_repo" \
      --verbose
  ) >"$out_log" 2>&1
  log "Conversion log written to: $out_log"
}
# Sanity-check a freshly converted local LeRobot dataset before upload:
# required metadata files, required parquet columns, and the 16-dim
# state/action schema. Writes the JSON summary (or error) to the log file,
# then echoes the log so the result is visible in the console too.
# $1 - dataset repo id, $2 - log file.
verify_local_dataset() {
  local repo_id="$1"
  local log_file="$2"
  log "Verifying local LeRobot dataset: $repo_id"
  (
    cd "$OPENPI_ROOT"
    .venv/bin/python - "$repo_id" <<'PY'
import json
import sys
from pathlib import Path
import pyarrow.parquet as pq
from lerobot.common.constants import HF_LEROBOT_HOME

repo_id = sys.argv[1]
root = Path(HF_LEROBOT_HOME) / repo_id
if not root.exists():
    raise SystemExit(f"Local dataset directory not found: {root}")
info_path = root / "meta" / "info.json"
episodes_path = root / "meta" / "episodes.jsonl"
episodes_stats_path = root / "meta" / "episodes_stats.jsonl"
if not info_path.exists():
    raise SystemExit(f"Missing info.json: {info_path}")
if not episodes_path.exists():
    raise SystemExit(f"Missing episodes.jsonl: {episodes_path}")
if not episodes_stats_path.exists():
    raise SystemExit(f"Missing episodes_stats.jsonl: {episodes_stats_path}")
parquet_files = sorted(root.rglob("*.parquet"))
if not parquet_files:
    raise SystemExit(f"No parquet files found under {root}")
# Schema spot-check on the first shard only.
table = pq.read_table(parquet_files[0])
columns = set(table.column_names)
required = {"front_image", "wrist_left_image", "wrist_right_image", "state", "action", "task_index"}
missing = sorted(required - columns)
if missing:
    raise SystemExit(f"Missing required parquet columns in {parquet_files[0]}: {missing}")
# The "16" substring check accepts any arrow type whose repr carries the
# 16-element size (e.g. a fixed-size list) — loose on purpose.
state_type = str(table.schema.field("state").type)
action_type = str(table.schema.field("action").type)
if "16" not in state_type:
    raise SystemExit(f"Unexpected state schema: {state_type}")
if "16" not in action_type:
    raise SystemExit(f"Unexpected action schema: {action_type}")
info = json.loads(info_path.read_text())
features = info.get("features", {})
if features.get("state", {}).get("shape") != [16]:
    raise SystemExit(f"Unexpected state feature shape in {info_path}: {features.get('state')}")
if features.get("action", {}).get("shape") != [16]:
    raise SystemExit(f"Unexpected action feature shape in {info_path}: {features.get('action')}")
episode_lines = episodes_path.read_text().splitlines()
episodes_stats_lines = episodes_stats_path.read_text().splitlines()
if not episode_lines:
    raise SystemExit(f"No episodes found in {episodes_path}")
if not episodes_stats_lines:
    raise SystemExit(f"No episode stats found in {episodes_stats_path}")
# Counts may legitimately be absent from info.json; only reject
# explicitly non-positive values.
episodes = info.get("total_episodes")
frames = info.get("total_frames")
if episodes is not None and episodes <= 0:
    raise SystemExit(f"Invalid episode count in {info_path}: {episodes}")
if frames is not None and frames <= 0:
    raise SystemExit(f"Invalid frame count in {info_path}: {frames}")
print(
    json.dumps(
        {
            "repo_id": repo_id,
            "root": str(root),
            "parquet_files": len(parquet_files),
            "first_parquet": str(parquet_files[0]),
            "columns": sorted(columns),
            "total_episodes": episodes,
            "total_frames": frames,
            "episode_lines": len(episode_lines),
            "episodes_stats_lines": len(episodes_stats_lines),
        }
    )
)
PY
  ) >"$log_file" 2>&1
  cat "$log_file"
}
# Push a converted local LeRobot dataset directory to its Hub dataset repo
# using the resumable large-folder uploader. $1 - dataset repo id.
upload_dataset() {
  local dataset_repo="$1"
  log "Uploading dataset repo: $dataset_repo"
  hf_cli upload-large-folder "$dataset_repo" "$HF_LEROBOT_HOME/$dataset_repo" --repo-type dataset --num-workers 16
}
# Confirm the Hub dataset repo contains the required LeRobot metadata files
# and at least one parquet shard. Prints a JSON summary of the file counts;
# exits non-zero (via SystemExit) when anything required is missing.
# $1 - dataset repo id.
verify_remote_dataset() {
  local repo_id="$1"
  project_python - "$repo_id" <<'PY'
import json
import sys
from huggingface_hub import HfApi

repo_id = sys.argv[1]
api = HfApi()
paths = [item.path for item in api.list_repo_tree(repo_id, repo_type="dataset", recursive=True, expand=True)]
required = {"meta/info.json", "meta/episodes.jsonl", "meta/episodes_stats.jsonl"}
missing = sorted(required - set(paths))
if missing:
    raise SystemExit(f"Remote dataset is missing required files: {missing}")
parquet_files = [p for p in paths if p.endswith(".parquet")]
# Video count is informational only; zero videos is not an error here.
video_files = [p for p in paths if p.endswith(".mp4")]
if not parquet_files:
    raise SystemExit(f"Remote dataset {repo_id} has no parquet files")
print(
    json.dumps(
        {
            "repo_id": repo_id,
            "files": len(paths),
            "parquet_files": len(parquet_files),
            "video_files": len(video_files),
        }
    )
)
PY
}
# Compute normalization stats for one (config, repo) pair. All output goes
# to a log file named from both ids ('/' mapped to '_'); afterwards the log
# tail is echoed for quick inspection. $1 - config name, $2 - repo id.
stats_one() {
  local config_name="$1"
  local repo_id="$2"
  local stats_log="$LOGDIR/${config_name//\//_}__${repo_id//\//_}.stats.log"
  log "Computing norm stats: $config_name / $repo_id"
  (
    cd "$OPENPI_ROOT"
    PYTHONUNBUFFERED=1 .venv/bin/python -u scripts/compute_norm_stats_repo.py \
      --config-name "$config_name" \
      --repo-id "$repo_id" \
      --batch-size "$STATS_BATCH_SIZE" \
      --num-workers "$STATS_NUM_WORKERS" \
      --assets-base-dir ./assets
  ) >"$stats_log" 2>&1
  tail -n 40 "$stats_log" || true
}
# Upload the computed norm-stats directory for one (config, repo) pair into
# the model repo under openpi/assets/. $1 - config name, $2 - repo id.
upload_stats() {
  local config_name="$1"
  local repo_id="$2"
  local rel_path="$config_name/$repo_id"
  log "Uploading norm stats: $MODEL_REPO::openpi/assets/$rel_path"
  hf_cli upload "$MODEL_REPO" "$OPENPI_ROOT/assets/$rel_path" "openpi/assets/$rel_path"
}
# Check that norm_stats.json for the given (config, repo) pair exists in
# the model repo on the Hub; prints its repo-relative path on success.
# $1 - config name, $2 - repo id. Reads global MODEL_REPO.
verify_remote_stats() {
  local config_name="$1"
  local repo_id="$2"
  project_python - "$MODEL_REPO" "$config_name" "$repo_id" <<'PY'
import sys
from huggingface_hub import HfApi

model_repo, config_name, repo_id = sys.argv[1:4]
target = f"openpi/assets/{config_name}/{repo_id}/norm_stats.json"
api = HfApi()
paths = [item.path for item in api.list_repo_tree(model_repo, repo_type="model", recursive=True, expand=True)]
if target not in paths:
    raise SystemExit(f"Missing remote norm stats file: {target}")
print(target)
PY
}
# Remove the downloaded archive and the local LeRobot dataset copy once the
# remote artifacts have been verified.
# $1 - local archive path, $2 - dataset repo id (org/name).
cleanup_local() {
  local local_file="$1"
  local repo_id="$2"
  log "Cleaning local artifacts for $repo_id"
  rm -f -- "$local_file"
  # ${var:?} aborts instead of expanding to empty, so an unset cache root
  # or empty repo id can never widen this into `rm -rf /` territory.
  rm -rf -- "${HF_LEROBOT_HOME:?}/${repo_id:?}"
}
# Full pipeline for a train split: download+verify the squashfs, convert to
# LeRobot, upload, then compute/upload norm stats for both finetune
# configs. Every stage is skipped when its remote artifact already
# verifies, so the function is safe to re-run after a partial failure.
# $1 - remote squashfs path, $2 - HF dataset repo id, $3 - log-file prefix.
process_train_split() {
  local remote_path="$1"
  local repo_id="$2"
  local prefix="$3"
  if verify_remote_dataset "$repo_id" >"$LOGDIR/${prefix}_verify_remote.json" 2>/dev/null; then
    log "Remote dataset already verified; skipping conversion/upload for $repo_id"
  else
    log "Train split download+verify: $remote_path -> $repo_id"
    # download_and_verify prints the local path (line 1) then a JSON
    # metadata record (line 2).
    mapfile -t dl_out < <(download_and_verify "$remote_path")
    # NB: bash `local` is function-scoped, so local_file remains visible in
    # the cleanup check at the bottom of this function.
    local local_file="${dl_out[0]}"
    local meta_json="${dl_out[1]}"
    log "Train split download verified: $local_file"
    echo "$meta_json" | tee "$LOGDIR/${prefix}_download.json"
    probe_squashfs "$local_file" "$LOGDIR/${prefix}_unsquashfs.txt"
    convert_local "$local_file" "$repo_id" "$LOGDIR/${prefix}_convert.log"
    verify_local_dataset "$repo_id" "$LOGDIR/${prefix}_verify_local.json"
    upload_dataset "$repo_id"
    verify_remote_dataset "$repo_id" | tee "$LOGDIR/${prefix}_verify_remote.json"
  fi
  # Baseline finetune config stats.
  if verify_remote_stats "pi05_twin_bimanual_finetune" "$repo_id" >"$LOGDIR/${prefix}_baseline_stats_remote.txt" 2>/dev/null; then
    log "Baseline norm stats already verified remotely; skipping compute/upload for $repo_id"
  else
    stats_one "pi05_twin_bimanual_finetune" "$repo_id"
    upload_stats "pi05_twin_bimanual_finetune" "$repo_id"
    verify_remote_stats "pi05_twin_bimanual_finetune" "$repo_id" | tee "$LOGDIR/${prefix}_baseline_stats_remote.txt"
  fi
  # Parallel finetune config stats.
  if verify_remote_stats "pi05_twin_bimanual_parallel_finetune" "$repo_id" >"$LOGDIR/${prefix}_parallel_stats_remote.txt" 2>/dev/null; then
    log "Parallel norm stats already verified remotely; skipping compute/upload for $repo_id"
  else
    stats_one "pi05_twin_bimanual_parallel_finetune" "$repo_id"
    upload_stats "pi05_twin_bimanual_parallel_finetune" "$repo_id"
    verify_remote_stats "pi05_twin_bimanual_parallel_finetune" "$repo_id" | tee "$LOGDIR/${prefix}_parallel_stats_remote.txt"
  fi
  # local_file is set only when a fresh download happened above; on the
  # resume path there is nothing local to clean up.
  if [[ -n "${local_file:-}" ]]; then
    cleanup_local "$local_file" "$repo_id"
  fi
}
# Pipeline for a val/test split: download+verify, convert, upload, verify
# remotely, then delete the local artifacts. Skipped entirely when the
# remote dataset already verifies. Unlike train splits, no norm stats are
# computed for eval splits.
# $1 - remote squashfs path, $2 - HF dataset repo id, $3 - log-file prefix.
process_eval_split() {
  local remote_path="$1"
  local repo_id="$2"
  local prefix="$3"
  if verify_remote_dataset "$repo_id" >"$LOGDIR/${prefix}_verify_remote.json" 2>/dev/null; then
    log "Remote eval dataset already verified; skipping conversion/upload for $repo_id"
  else
    log "Eval split download+verify: $remote_path -> $repo_id"
    # Line 1: local file path; line 2: JSON download metadata.
    mapfile -t dl_out < <(download_and_verify "$remote_path")
    local local_file="${dl_out[0]}"
    local meta_json="${dl_out[1]}"
    log "Eval split download verified: $local_file"
    echo "$meta_json" | tee "$LOGDIR/${prefix}_download.json"
    probe_squashfs "$local_file" "$LOGDIR/${prefix}_unsquashfs.txt"
    convert_local "$local_file" "$repo_id" "$LOGDIR/${prefix}_convert.log"
    verify_local_dataset "$repo_id" "$LOGDIR/${prefix}_verify_local.json"
    upload_dataset "$repo_id"
    verify_remote_dataset "$repo_id" | tee "$LOGDIR/${prefix}_verify_remote.json"
    cleanup_local "$local_file" "$repo_id"
  fi
}
# Process one PerAct2 task end-to-end: the train split first (sequential —
# it also produces norm stats), then the val and test splits in parallel.
# Arguments: $1 task label, then (remote squashfs path, HF repo id) pairs
# for train ($2,$3), val ($4,$5), and test ($6,$7).
process_task() {
  local task_name="$1"
  local train_remote="$2"
  local train_repo="$3"
  local val_remote="$4"
  local val_repo="$5"
  local test_remote="$6"
  local test_repo="$7"
  log "=== Task: $task_name / train split ==="
  process_train_split "$train_remote" "$train_repo" "${task_name}_train"
  log "=== Task: $task_name / val+test splits ==="
  process_eval_split "$val_remote" "$val_repo" "${task_name}_val" &
  local val_pid=$!
  process_eval_split "$test_remote" "$test_repo" "${task_name}_test" &
  local test_pid=$!
  # Reap BOTH background jobs before propagating any failure. Under
  # `set -e` the previous code aborted on the first failed wait, leaving
  # the other split's worker orphaned and still running.
  local rc=0
  wait "$val_pid" || rc=$?
  wait "$test_pid" || rc=$?
  return "$rc"
}
# Entry point: bootstrap (or reuse) the environment, then process every
# PerAct2 TWIN task sequentially. Set SKIP_PREPARE=1 to reuse an already
# bootstrapped environment; only HF auth is re-checked in that mode.
main() {
  if [[ "${SKIP_PREPARE:-0}" == "1" ]]; then
    ensure_hf_auth
    log "Skipping environment bootstrap; using HF_LEROBOT_HOME=$HF_LEROBOT_HOME"
  else
    prepare_openpi_env
  fi
  # Each process_task call: task label, then (remote squashfs path, target
  # HF dataset repo) for the train, val, and test splits.
  # 128px variant of dual_push_buttons lives under image_size_128.
  process_task \
    "dual_push_buttons_128" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_128/bimanual_dual_push_buttons.train.squashfs" \
    "$DP128_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_128/bimanual_dual_push_buttons.val.squashfs" \
    "$DP128_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_128/bimanual_dual_push_buttons.test.squashfs" \
    "$DP128_TEST"
  # 256px dual_push_buttons uses its own task-suffixed directory; the
  # remaining tasks live under the shared image_size_256 tree.
  process_task \
    "dual_push_buttons" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256_dual_push_buttons/bimanual_dual_push_buttons.train.squashfs" \
    "$DP_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256_dual_push_buttons/bimanual_dual_push_buttons.val.squashfs" \
    "$DP_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256_dual_push_buttons/bimanual_dual_push_buttons.test.squashfs" \
    "$DP_TEST"
  process_task \
    "handover_item" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_handover_item/bimanual_handover_item.train.squashfs" \
    "$HO_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_handover_item/bimanual_handover_item.val.squashfs" \
    "$HO_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_handover_item/bimanual_handover_item.test.squashfs" \
    "$HO_TEST"
  process_task \
    "handover_item_easy" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_handover_item_easy/bimanual_handover_item_easy.train.squashfs" \
    "$HOE_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_handover_item_easy/bimanual_handover_item_easy.val.squashfs" \
    "$HOE_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_handover_item_easy/bimanual_handover_item_easy.test.squashfs" \
    "$HOE_TEST"
  process_task \
    "lift_ball" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_lift_ball/bimanual_lift_ball.train.squashfs" \
    "$LB_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_lift_ball/bimanual_lift_ball.val.squashfs" \
    "$LB_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_lift_ball/bimanual_lift_ball.test.squashfs" \
    "$LB_TEST"
  process_task \
    "lift_tray" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_lift_tray/bimanual_lift_tray.train.squashfs" \
    "$LT_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_lift_tray/bimanual_lift_tray.val.squashfs" \
    "$LT_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_lift_tray/bimanual_lift_tray.test.squashfs" \
    "$LT_TEST"
  process_task \
    "pick_laptop" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_pick_laptop/bimanual_pick_laptop.train.squashfs" \
    "$PL_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_pick_laptop/bimanual_pick_laptop.val.squashfs" \
    "$PL_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_pick_laptop/bimanual_pick_laptop.test.squashfs" \
    "$PL_TEST"
  process_task \
    "pick_plate" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_pick_plate/bimanual_pick_plate.train.squashfs" \
    "$PP_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_pick_plate/bimanual_pick_plate.val.squashfs" \
    "$PP_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_pick_plate/bimanual_pick_plate.test.squashfs" \
    "$PP_TEST"
  process_task \
    "push_box" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_push_box/bimanual_push_box.train.squashfs" \
    "$PB_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_push_box/bimanual_push_box.val.squashfs" \
    "$PB_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_push_box/bimanual_push_box.test.squashfs" \
    "$PB_TEST"
  process_task \
    "put_bottle_in_fridge" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_put_bottle_in_fridge/bimanual_put_bottle_in_fridge.train.squashfs" \
    "$PBF_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_put_bottle_in_fridge/bimanual_put_bottle_in_fridge.val.squashfs" \
    "$PBF_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_put_bottle_in_fridge/bimanual_put_bottle_in_fridge.test.squashfs" \
    "$PBF_TEST"
  process_task \
    "put_item_in_drawer" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_put_item_in_drawer/bimanual_put_item_in_drawer.train.squashfs" \
    "$PID_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_put_item_in_drawer/bimanual_put_item_in_drawer.val.squashfs" \
    "$PID_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_put_item_in_drawer/bimanual_put_item_in_drawer.test.squashfs" \
    "$PID_TEST"
  process_task \
    "straighten_rope" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_straighten_rope/bimanual_straighten_rope.train.squashfs" \
    "$SR_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_straighten_rope/bimanual_straighten_rope.val.squashfs" \
    "$SR_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_straighten_rope/bimanual_straighten_rope.test.squashfs" \
    "$SR_TEST"
  process_task \
    "sweep_to_dustpan" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_sweep_to_dustpan/bimanual_sweep_to_dustpan.train.squashfs" \
    "$SWEEP_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_sweep_to_dustpan/bimanual_sweep_to_dustpan.val.squashfs" \
    "$SWEEP_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_sweep_to_dustpan/bimanual_sweep_to_dustpan.test.squashfs" \
    "$SWEEP_TEST"
  process_task \
    "take_tray_out_of_oven" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_take_tray_out_of_oven/bimanual_take_tray_out_of_oven.train.squashfs" \
    "$OVEN_TRAIN" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_take_tray_out_of_oven/bimanual_take_tray_out_of_oven.val.squashfs" \
    "$OVEN_VAL" \
    "datasets/benchmarks/peract2_twin/bimanual/image_size_256/bimanual_take_tray_out_of_oven/bimanual_take_tray_out_of_oven.test.squashfs" \
    "$OVEN_TEST"
  log "All tasks completed"
}
# Run main only when executed directly (not when sourced, e.g. for tests).
if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then
  main "$@"
fi