|
|
|
|
|
import argparse |
|
|
import json |
|
|
import sys |
|
|
from pathlib import Path |
|
|
|
|
|
# Repository root (one directory above this script); exposed so other
# scripts can import project modules after this bootstrap runs.
PROJECT_ROOT = Path(__file__).resolve().parents[1]
_root_str = str(PROJECT_ROOT)
# Prepend the project root exactly once so local packages win over
# any same-named installed packages.
if _root_str not in sys.path:
    sys.path.insert(0, _root_str)
|
|
|
|
|
|
|
|
def main() -> None:
    """Report language-embedding coverage for a LeRobot-style dataset.

    Reads ``meta/episodes.jsonl`` under ``--data_root``, summarizes unique and
    empty instructions, cross-checks them against ``meta/lang_map.json``, and
    counts the ``*.pt`` files in ``meta/lang_embeddings``. All results are
    printed to stdout.

    Raises:
        FileNotFoundError: if ``meta/episodes.jsonl`` does not exist.
    """
    parser = argparse.ArgumentParser(description="Check LeRobot language embeddings coverage.")
    parser.add_argument("--data_root", default="/hfm/data/pick_box")
    args = parser.parse_args()

    data_root = Path(args.data_root)
    meta = data_root / "meta"
    episodes_path = meta / "episodes.jsonl"
    lang_map_path = meta / "lang_map.json"
    embed_dir = meta / "lang_embeddings"

    if not episodes_path.exists():
        raise FileNotFoundError(f"Missing {episodes_path}")

    instructions = []
    # Explicit UTF-8: instructions may contain non-ASCII text and the
    # platform-default encoding is not UTF-8 everywhere (e.g. Windows).
    with episodes_path.open(encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line:
                # Tolerate blank lines (trailing newline, hand edits) —
                # json.loads("") would raise otherwise.
                continue
            row = json.loads(line)
            # Coerce missing/None instructions to "" so they count as empty.
            instructions.append(str(row.get("instruction", "") or "").strip())

    unique_instr = sorted(set(instructions))
    empty_instr = sum(1 for i in instructions if not i)
    print("Unique instructions:", len(unique_instr))
    print("Empty instruction count:", empty_instr)

    if not lang_map_path.exists():
        print("Missing lang_map.json -> embeddings likely not generated.")
        return

    lang_map = json.loads(lang_map_path.read_text(encoding="utf-8"))
    missing = [i for i in unique_instr if i not in lang_map]
    print("lang_map entries:", len(lang_map))
    print("Missing instructions in lang_map:", len(missing))

    if embed_dir.exists():
        embed_files = list(embed_dir.glob("*.pt"))
        print("Embedding files:", len(embed_files))
    else:
        print("Missing lang_embeddings directory.")
|
|
|
|
|
|
|
|
# Script entry point: run the coverage check only when executed directly,
# not when this module is imported.
if __name__ == "__main__":


    main()
|
|
|