"""
Convert a local LeRobot dataset from v2.0 to v2.1, or generate episodes_stats.jsonl
for a dataset already marked as v2.1 but missing the file.

Usage:
    uv run convert_local_dataset_v20_to_v21.py --dataset-path /mnt/ugreen-nfs/jetson-raw-data/lerobot_merged
"""
|
|
| import argparse |
| import json |
| import os |
| from pathlib import Path |
|
|
| |
# Force Hugging Face Hub offline mode BEFORE importing lerobot, so nothing in
# the import chain (or later dataset loading) attempts a network request.
os.environ["HF_HUB_OFFLINE"] = "1"

from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION, LeRobotDataset
from lerobot.common.datasets.utils import EPISODES_STATS_PATH, STATS_PATH, load_stats, write_info
from lerobot.common.datasets.v21.convert_stats import check_aggregate_stats, convert_stats

# Monkeypatch setup: get_safe_version normally resolves a revision against the
# Hub, which fails for purely local datasets. Keep a handle to the original so
# non-local repo ids still go through it.
from lerobot.common.datasets import utils as lerobot_utils
_original_get_safe_version = lerobot_utils.get_safe_version
|
|
| def _patched_get_safe_version(repo_id, revision): |
| """Patched version that returns revision directly for local datasets.""" |
| if repo_id is None or repo_id.startswith("local"): |
| return revision |
| return _original_get_safe_version(repo_id, revision) |
|
|
# Install the patch so LeRobotDataset() below never hits the Hub when
# resolving the revision of a local repo id.
lerobot_utils.get_safe_version = _patched_get_safe_version

# Codebase version tags handled by this conversion.
V20 = "v2.0"
V21 = "v2.1"
|
|
|
|
def convert_local_dataset(dataset_path: str, num_workers: int = 4):
    """Convert a local LeRobot dataset from v2.0 to v2.1.

    Also handles a dataset already marked v2.1 that is missing
    episodes_stats.jsonl: the version marker is temporarily rolled back to
    v2.0 so per-episode stats can be computed, then restored/updated.

    Args:
        dataset_path: Path to the dataset root (the directory containing
            the ``meta/`` folder).
        num_workers: Number of workers for parallelizing stats computation.

    Raises:
        ValueError: If ``dataset_path`` or its ``meta/info.json`` is missing.
    """
    dataset_root = Path(dataset_path).resolve()

    if not dataset_root.exists():
        raise ValueError(f"Dataset path does not exist: {dataset_root}")

    info_path = dataset_root / "meta" / "info.json"
    if not info_path.exists():
        raise ValueError(f"info.json not found at {info_path}")

    with open(info_path, "r") as f:
        info = json.load(f)

    def _write_version(version: str) -> None:
        # Rewrite info.json in place with the given codebase_version.
        info["codebase_version"] = version
        with open(info_path, "w") as f:
            json.dump(info, f, indent=4)

    current_version = info.get("codebase_version", "unknown")
    # NOTE: EPISODES_STATS_PATH and STATS_PATH already include the "meta/"
    # prefix in lerobot, so they join directly onto the dataset root
    # (joining under dataset_root / "meta" would yield meta/meta/...).
    episodes_stats_path = dataset_root / EPISODES_STATS_PATH
    stats_path = dataset_root / STATS_PATH

    if episodes_stats_path.exists() and current_version == V21:
        print("Dataset is already v2.1 with episodes_stats.jsonl. Nothing to do.")
        return

    # A dataset can claim v2.1 while missing the per-episode stats file
    # (e.g. after a partial conversion). Roll the version back to v2.0 so
    # LeRobotDataset loads it through the v2.0 code path.
    needs_version_restore = False
    if current_version == V21 and not episodes_stats_path.exists():
        print(f"Dataset is marked as v2.1 but missing {EPISODES_STATS_PATH}.")
        print("Temporarily setting version to v2.0 to generate stats...")
        _write_version(V20)
        needs_version_restore = True

    print("Loading dataset...")
    dataset = LeRobotDataset("local", root=dataset_root, revision=V20)

    if dataset.meta.info.get("codebase_version") != V20:
        loaded_version = dataset.meta.info.get("codebase_version", "unknown")
        print(f"Warning: Dataset is version {loaded_version}, not {V20}. Skipping conversion.")
        if needs_version_restore:
            # Leave the dataset in the state we found it.
            _write_version(V21)
        return

    print(f"Converting dataset from {V20} to {V21}...")
    print(f"Dataset path: {dataset_root}")

    # Start from a clean slate: a stale or partial stats file would be
    # appended to / conflict with the fresh computation.
    if episodes_stats_path.is_file():
        print(f"Removing existing {EPISODES_STATS_PATH}...")
        episodes_stats_path.unlink()

    print("Computing per-episode stats...")
    try:
        convert_stats(dataset, num_workers=num_workers)
    except Exception as e:
        print(f"Error during stats conversion: {e}")
        if needs_version_restore:
            # Restore the original version marker before propagating.
            _write_version(V21)
        raise

    # Sanity-check the new per-episode stats against the aggregate
    # stats.json when present. Failures are reported but not fatal: the
    # per-episode stats become the source of truth in v2.1.
    if stats_path.is_file():
        print("Checking consistency with aggregate stats...")
        ref_stats = load_stats(dataset_root)
        if ref_stats is not None:
            try:
                check_aggregate_stats(dataset, ref_stats)
            except Exception as e:
                print(f"Warning: Consistency check failed: {e}")
        else:
            print("Warning: Could not load reference stats, skipping consistency check.")
    else:
        print("No stats.json found, skipping consistency check.")

    print("Updating codebase version...")
    info["codebase_version"] = CODEBASE_VERSION
    write_info(info, dataset_root)

    # stats.json is superseded by episodes_stats.jsonl in v2.1.
    if stats_path.is_file():
        print(f"Removing deprecated {STATS_PATH}...")
        stats_path.unlink()

    print(f"✓ Successfully converted dataset to {V21}!")
    print(f"  Updated: {info_path}")
    if episodes_stats_path.exists():
        print(f"  Created: {episodes_stats_path}")
|
|
|
|
if __name__ == "__main__":
    # CLI entry point: parse arguments and run the conversion.
    cli = argparse.ArgumentParser(
        description="Convert a local LeRobot dataset from v2.0 to v2.1"
    )
    cli.add_argument(
        "--dataset-path",
        type=str,
        required=True,
        help="Path to the local LeRobot dataset directory",
    )
    cli.add_argument(
        "--num-workers",
        type=int,
        default=4,
        help="Number of workers for parallelizing stats compute. Defaults to 4.",
    )
    cli_args = cli.parse_args()
    convert_local_dataset(cli_args.dataset_path, num_workers=cli_args.num_workers)
|
|
|
|
|
|