File size: 3,622 Bytes
56b1ab6
 
 
 
 
 
 
 
 
 
 
 
d212377
 
56b1ab6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ac61d04
 
 
 
d212377
 
ac61d04
 
56b1ab6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
#!/usr/bin/env python3
"""Utilities to convert the raw metadata dumps into viewer-friendly JSONL files."""
from __future__ import annotations

import json
from dataclasses import asdict, dataclass
from pathlib import Path
from typing import Dict, Iterator, MutableMapping

# Repo root — assumes this file sits one directory below it (TODO confirm layout).
ROOT = Path(__file__).resolve().parents[1]
RAW_DIR = ROOT / "raw"  # raw metadata dumps (inputs)
OUTPUT_DIR = ROOT / "data"  # viewer-friendly JSONL output (written by this script)
HF_DATASET_ID = "ishwarbb23/cuebench"
# Prefix that turns a repo-relative image path into an hf:// dataset URL.
HF_IMAGE_PREFIX = f"hf://datasets/{HF_DATASET_ID}/"

# Dataset config name -> raw metadata JSONL file to convert.
CONFIG_SOURCES: Dict[str, Path] = {
    "clue": RAW_DIR / "clue_metadata.jsonl",
    "mep": RAW_DIR / "mep_metadata.jsonl",
}

@dataclass
class BuildStats:
    """Summary numbers for one built split, surfaced in README/stats.json."""

    num_examples: int  # records written to the output JSONL
    num_bytes: int  # size of the output file on disk
    source_path: str  # input path, relative to the repo root
    output_path: str  # output path, relative to the repo root

    def as_dict(self) -> Dict[str, object]:
        """Return a plain-dict view of the stats, ready for JSON serialization."""
        payload = asdict(self)
        return payload


def _normalize_record(record: MutableMapping[str, object]) -> MutableMapping[str, object]:
    """Add the columns expected by the README and dataset viewer."""

    image_id = record.get("aligned_id") or record.get("image_id")
    if image_id is None:
        seq = record.get("seq_name", "seq")
        frame = record.get("frame_count", 0)
        image_id = f"{seq}.{int(frame):05d}"
    record["image_id"] = image_id

    observed = record.get("observed_classes") or record.get("detected_classes") or []
    record["observed_classes"] = observed
    # Preserve the detected_classes alias so legacy tooling keeps working.
    record.setdefault("detected_classes", observed)

    record["target_classes"] = record.get("target_classes", [])

    image_path = record.get("image_path")
    record["image_path"] = image_path
    if image_path:
        normalized_path = str(Path(image_path))
        record["image"] = f"{HF_IMAGE_PREFIX}{normalized_path}"
    else:
        record["image"] = None
    return record


def _iter_records(path: Path) -> Iterator[MutableMapping[str, object]]:
    with path.open("r", encoding="utf-8") as src:
        for line in src:
            if not line.strip():
                continue
            yield json.loads(line)


def build_split(config_name: str, source_path: Path, output_path: Path) -> BuildStats:
    """Normalize every record in *source_path* and write JSONL to *output_path*.

    *config_name* is accepted for symmetry with the caller but not used here.
    Returns a BuildStats summary with paths relative to the repo root.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)
    written = 0
    with output_path.open("w", encoding="utf-8") as sink:
        for raw in _iter_records(source_path):
            sink.write(json.dumps(_normalize_record(raw), ensure_ascii=False) + "\n")
            written += 1
    return BuildStats(
        num_examples=written,
        num_bytes=output_path.stat().st_size,
        source_path=str(source_path.relative_to(ROOT)),
        output_path=str(output_path.relative_to(ROOT)),
    )


def main() -> None:
    """Build every configured split, then write a combined stats.json summary."""
    summaries: Dict[str, Dict[str, object]] = {}
    for name, raw_path in CONFIG_SOURCES.items():
        # Fail fast with a clear message rather than a bare open() error later.
        if not raw_path.exists():
            raise FileNotFoundError(f"Missing source file for {name}: {raw_path}")
        destination = OUTPUT_DIR / name / "train.jsonl"
        result = build_split(name, raw_path, destination)
        summaries[name] = result.as_dict()
        print(
            f"[{name}] wrote {result.num_examples} examples -> {result.output_path} "
            f"({result.num_bytes} bytes)."
        )
    stats_path = OUTPUT_DIR / "stats.json"
    with stats_path.open("w", encoding="utf-8") as handle:
        json.dump(summaries, handle, indent=2)
        handle.write("\n")  # trailing newline keeps the file POSIX-friendly
    print(f"Wrote summary stats to {stats_path.relative_to(ROOT)}")


# Run the build only when executed as a script, not on import.
if __name__ == "__main__":
    main()