# dex_fixed_long_episodes / convert_v20_v21.py
# sumanthpashuparthi's picture
# Upload folder using huggingface_hub
# bb4cf72 verified
#!/usr/bin/env python3
"""
Convert a local LeRobot dataset from v2.0 to v2.1, or generate episodes_stats.jsonl
for a dataset already marked as v2.1 but missing the file.
Usage:
uv run convert_v20_v21.py --dataset-path /mnt/ugreen-nfs/jetson-raw-data/lerobot_merged
"""
import argparse
import json
import os
from pathlib import Path
# Set offline mode before importing to avoid HuggingFace Hub access
os.environ["HF_HUB_OFFLINE"] = "1"
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION, LeRobotDataset
from lerobot.common.datasets.utils import EPISODES_STATS_PATH, STATS_PATH, load_stats, write_info
from lerobot.common.datasets.v21.convert_stats import check_aggregate_stats, convert_stats
# Monkey-patch get_safe_version to avoid HuggingFace Hub access for local datasets
from lerobot.common.datasets import utils as lerobot_utils
_original_get_safe_version = lerobot_utils.get_safe_version
def _patched_get_safe_version(repo_id, revision):
"""Patched version that returns revision directly for local datasets."""
if repo_id is None or repo_id.startswith("local"):
return revision
return _original_get_safe_version(repo_id, revision)
lerobot_utils.get_safe_version = _patched_get_safe_version
V20 = "v2.0"
V21 = "v2.1"
def _write_info_json(info_path: Path, info: dict) -> None:
    """Persist ``info`` to meta/info.json with the 4-space indent used elsewhere here."""
    with open(info_path, 'w') as f:
        json.dump(info, f, indent=4)


def convert_local_dataset(dataset_path: str, num_workers: int = 4):
    """Convert a local LeRobot dataset from v2.0 to v2.1, or generate missing episodes_stats.jsonl.

    Args:
        dataset_path: Path to the dataset root directory (must contain meta/info.json).
        num_workers: Number of workers for parallelizing per-episode stats compute.

    Raises:
        ValueError: If ``dataset_path`` or its meta/info.json does not exist.
    """
    dataset_root = Path(dataset_path).resolve()
    if not dataset_root.exists():
        raise ValueError(f"Dataset path does not exist: {dataset_root}")

    # Read info.json to check the current codebase version.
    info_path = dataset_root / "meta" / "info.json"
    if not info_path.exists():
        raise ValueError(f"info.json not found at {info_path}")
    with open(info_path, 'r') as f:
        info = json.load(f)
    current_version = info.get("codebase_version", "unknown")

    episodes_stats_path = dataset_root / "meta" / EPISODES_STATS_PATH

    # Already fully converted: version marker is v2.1 and per-episode stats exist.
    if episodes_stats_path.exists() and current_version == V21:
        print("Dataset is already v2.1 with episodes_stats.jsonl. Nothing to do.")
        return

    # If marked v2.1 but missing episodes_stats.jsonl, temporarily downgrade the
    # on-disk version so LeRobotDataset will load it for stats generation; the
    # marker is restored on any abort/error below.
    needs_version_restore = False
    if current_version == V21 and not episodes_stats_path.exists():
        print(f"Dataset is marked as v2.1 but missing {EPISODES_STATS_PATH}.")
        print("Temporarily setting version to v2.0 to generate stats...")
        info["codebase_version"] = V20
        _write_info_json(info_path, info)
        needs_version_restore = True

    # Load the dataset - the module-level monkey-patch of get_safe_version allows
    # this to work offline for "local" repo ids.
    print("Loading dataset...")
    dataset = LeRobotDataset("local", root=dataset_root, revision=V20)

    # Abort if the loaded dataset is not v2.0 (and was not temporarily set to v2.0).
    if dataset.meta.info.get("codebase_version") != V20:
        loaded_version = dataset.meta.info.get("codebase_version", "unknown")
        print(f"Warning: Dataset is version {loaded_version}, not {V20}. Skipping conversion.")
        if needs_version_restore:
            # Restore the original version marker before bailing out.
            info["codebase_version"] = V21
            _write_info_json(info_path, info)
        return

    print(f"Converting dataset from {V20} to {V21}...")
    print(f"Dataset path: {dataset_root}")

    # Start from a clean slate: drop any stale episodes_stats.jsonl.
    if episodes_stats_path.is_file():
        print(f"Removing existing {EPISODES_STATS_PATH}...")
        episodes_stats_path.unlink()

    # Compute per-episode stats; on failure, restore the version marker before
    # propagating so the dataset is not left in an inconsistent state.
    print("Computing per-episode stats...")
    try:
        convert_stats(dataset, num_workers=num_workers)
    except Exception as e:
        print(f"Error during stats conversion: {e}")
        if needs_version_restore:
            info["codebase_version"] = V21
            _write_info_json(info_path, info)
        raise

    # Sanity-check the new per-episode stats against the aggregate stats.json,
    # when one exists. Failures here are non-fatal (warning only).
    stats_path = dataset_root / "meta" / STATS_PATH
    if stats_path.is_file():
        print("Checking consistency with aggregate stats...")
        ref_stats = load_stats(dataset_root)
        if ref_stats is not None:
            try:
                check_aggregate_stats(dataset, ref_stats)
            except Exception as e:
                print(f"Warning: Consistency check failed: {e}")
        else:
            print("Warning: Could not load reference stats, skipping consistency check.")
    else:
        print("No stats.json found, skipping consistency check.")

    # Stamp the dataset with the current codebase version.
    print("Updating codebase version...")
    info["codebase_version"] = CODEBASE_VERSION
    write_info(info, dataset_root)

    # stats.json is superseded by episodes_stats.jsonl in v2.1; remove it.
    if stats_path.is_file():
        print(f"Removing deprecated {STATS_PATH}...")
        stats_path.unlink()

    print(f"✓ Successfully converted dataset to {V21}!")
    print(f" Updated: {dataset_root / 'meta' / 'info.json'}")
    if episodes_stats_path.exists():
        print(f" Created: {episodes_stats_path}")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Convert a local LeRobot dataset from v2.0 to v2.1"
)
parser.add_argument(
"--dataset-path",
type=str,
required=True,
help="Path to the local LeRobot dataset directory",
)
parser.add_argument(
"--num-workers",
type=int,
default=4,
help="Number of workers for parallelizing stats compute. Defaults to 4.",
)
args = parser.parse_args()
convert_local_dataset(args.dataset_path, num_workers=args.num_workers)