#!/usr/bin/env python3
"""
Scan data/ and create a metadata-only manifest JSONL with
- parcellation, subject, corr_path, ts_path
- corr_shape [n,n] and ts_shape [n] inferred from sidecars if available
- optional sha256/bytes joined from manifests/manifest.jsonl
Examples:
python scripts/scan_to_manifest.py --out manifests/dev.jsonl --parcs AAL116,harvard48 --limit-per-parc 20
python scripts/scan_to_manifest.py --out manifests/all.jsonl
"""
import argparse
import json
import os
from pathlib import Path
from typing import Dict, Iterable, List, Optional, Tuple
def load_inventory(inv_path: Optional[Path]) -> Dict[str, Dict[str, object]]:
    """Index an inventory JSONL by ``target_rel``.

    Each line of the file is a JSON object; rows carrying a ``target_rel``
    key are mapped to ``{"sha256": ..., "bytes": ...}``. Returns an empty
    mapping when *inv_path* is None or the file does not exist.
    """
    index: Dict[str, Dict[str, object]] = {}
    if inv_path is None or not inv_path.exists():
        return index
    with inv_path.open("r", encoding="utf-8") as fh:
        for raw in fh:
            if not raw.strip():
                continue  # skip blank lines
            entry = json.loads(raw)
            key = entry.get("target_rel")
            if key:
                index[key] = {"sha256": entry.get("sha256"), "bytes": entry.get("bytes")}
    return index
def infer_shapes_from_sidecars(repo: Path, corr_path: str, ts_path: str):
    """Infer matrix/timeseries shapes from JSON sidecars next to the .mat files.

    A sidecar is the .mat path with a ``.json`` suffix; its ``NodeCount``
    field (a positive int) yields ``[n, n]`` for the correlation matrix and
    ``[n]`` for the timeseries. Missing or unreadable sidecars produce empty
    lists (shape unknown) — this is deliberately best-effort.
    """

    def _node_count(sidecar: Path) -> Optional[int]:
        # Any I/O or parse problem means "unknown", never an error.
        if not sidecar.exists():
            return None
        try:
            count = json.loads(sidecar.read_text()).get("NodeCount")
        except Exception:
            return None
        return count if isinstance(count, int) and count > 0 else None

    corr_shape: List[int] = []
    ts_shape: List[int] = []
    n = _node_count(repo / Path(corr_path).with_suffix(".json"))
    if n is not None:
        corr_shape = [n, n]
    n = _node_count(repo / Path(ts_path).with_suffix(".json"))
    if n is not None:
        ts_shape = [n]
    return corr_shape, ts_shape
def iter_subjects(repo: Path, parcs: Optional[List[str]]) -> Iterable[Tuple[str, Path]]:
    """Yield ``(parcellation, subject_dir)`` pairs under ``data/parc-*/sub-*``.

    Directories are visited in sorted order. A non-empty *parcs* list
    restricts the yield to those parcellation names; None (or an empty
    list) means "all".
    """
    prefix = "parc-"
    for parc_dir in sorted((repo / "data").glob(prefix + "*")):
        parc_name = parc_dir.name[len(prefix):]
        if parcs and parc_name not in parcs:
            continue
        yield from ((parc_name, subject_dir) for subject_dir in sorted(parc_dir.glob("sub-*")))
def find_pair(sub_dir: Path, parc: str) -> Optional[Tuple[str, str]]:
    """Locate the correlation/timeseries .mat pair for one subject.

    Expects ``<subj>_desc-timeseries_parc-<parc>.mat`` and
    ``<subj>_desc-correlation_matrix_parc-<parc>.mat`` inside *sub_dir*.

    Returns ``(corr_rel, ts_rel)`` as POSIX paths relative to the repo
    root, or None when either file is missing.
    """
    subj = sub_dir.name
    ts = sub_dir / f"{subj}_desc-timeseries_parc-{parc}.mat"
    cm = sub_dir / f"{subj}_desc-correlation_matrix_parc-{parc}.mat"
    if not (ts.exists() and cm.exists()):
        return None
    # sub_dir is <repo>/data/parc-<parc>/<subj> (see iter_subjects), so the
    # repo root is parents[2]. The previous parents[3] pointed one level
    # ABOVE the repo, producing paths like "<repo_name>/data/..." that
    # `repo / corr_path` joins elsewhere in this script could not resolve.
    # (The old conditional had identical branches and the initial
    # as_posix() assignments were dead — both removed.)
    root = sub_dir.parents[2]
    rel_cm = os.path.relpath(cm, root).replace(os.sep, "/")
    rel_ts = os.path.relpath(ts, root).replace(os.sep, "/")
    return rel_cm, rel_ts
def main():
    """CLI entry point: scan data/ and write a metadata-only manifest JSONL.

    Rows carry paths, shapes inferred from sidecars, and (when present in
    the inventory) sha256/bytes per file. The correlation matrix itself is
    never embedded — ``correlation_matrix`` stays empty by design.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--out", type=Path, required=True, help="Output JSONL path")
    parser.add_argument("--parcs", type=str, default="", help="Comma-separated parcellations (default: all)")
    parser.add_argument("--limit-per-parc", type=int, default=0, help="Max rows per parcellation (0 = no limit)")
    parser.add_argument("--inventory", type=Path, default=Path("manifests/manifest.jsonl"))
    args = parser.parse_args()

    # Repo root = parent of the scripts/ directory holding this file.
    repo = Path(__file__).resolve().parents[1]
    inventory = load_inventory(args.inventory if args.inventory and args.inventory.exists() else None)
    wanted = [p.strip() for p in args.parcs.split(",") if p.strip()] or None

    per_parc: Dict[str, int] = {}
    records: List[Dict[str, object]] = []
    for parc, sub_dir in iter_subjects(repo, wanted):
        if args.limit_per_parc and per_parc.get(parc, 0) >= args.limit_per_parc:
            continue
        pair = find_pair(sub_dir, parc)
        if pair is None:
            continue
        corr_path, ts_path = pair
        corr_shape, ts_shape = infer_shapes_from_sidecars(repo, corr_path, ts_path)
        record: Dict[str, object] = {
            "parcellation": parc,
            "subject": sub_dir.name,
            "corr_path": corr_path,
            "ts_path": ts_path,
            "corr_shape": corr_shape,
            "ts_shape": ts_shape,
            "correlation_matrix": [],  # metadata-only
        }
        # Join checksum/size info from the inventory, when available.
        for rel_path, key in ((corr_path, "corr"), (ts_path, "ts")):
            meta = inventory.get(rel_path)
            if meta is not None:
                record[f"{key}_sha256"] = meta.get("sha256")
                record[f"{key}_bytes"] = meta.get("bytes")
        records.append(record)
        per_parc[parc] = per_parc.get(parc, 0) + 1

    args.out.parent.mkdir(parents=True, exist_ok=True)
    with args.out.open("w", encoding="utf-8") as fh:
        for record in records:
            fh.write(json.dumps(record, ensure_ascii=False) + "\n")
    print(f"Wrote {len(records)} rows to {args.out}")
# Script entry point (no side effects on import).
if __name__ == "__main__":
    main()