Fix data-viewer issue
Browse files- manifests/dev.parquet +2 -2
- manifests/preview.parquet +2 -2
- scripts/jsonl_to_parquet_arrow.py +90 -0
manifests/dev.parquet
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d71f9cae09d6310900e09728373f0b7422481f872ad117f7a2fb2cf12a9e6c00
|
| 3 |
+
size 8297
|
manifests/preview.parquet
CHANGED
|
@@ -1,3 +1,3 @@
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
-
oid sha256:
|
| 3 |
-
size
|
|
|
|
| 1 |
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:90bcb110dcc8d1b54b2ec4ab071a3059e5b235959fb3c6fafb41a2f7c047c1cd
|
| 3 |
+
size 6180
|
scripts/jsonl_to_parquet_arrow.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python3
|
| 2 |
+
"""
|
| 3 |
+
Convert JSONL to Parquet using an explicit Arrow schema suitable for the MMDN dataset.
|
| 4 |
+
|
| 5 |
+
Usage:
|
| 6 |
+
python scripts/jsonl_to_parquet_arrow.py manifests/preview.jsonl manifests/preview.parquet
|
| 7 |
+
python scripts/jsonl_to_parquet_arrow.py manifests/dev.jsonl manifests/dev.parquet
|
| 8 |
+
"""
|
| 9 |
+
import argparse
|
| 10 |
+
import json
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from typing import List, Dict, Any
|
| 13 |
+
|
| 14 |
+
import pyarrow as pa
|
| 15 |
+
import pyarrow.parquet as pq
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# Explicit Arrow schema for one manifest row; field order here fixes the
# column order of the written Parquet file.
SCHEMA = pa.schema([
    ("parcellation", pa.string()),
    ("subject", pa.string()),
    ("corr_path", pa.string()),
    ("ts_path", pa.string()),
    ("correlation_matrix", pa.list_(pa.list_(pa.float32()))),
    ("corr_shape", pa.list_(pa.int64())),
    ("ts_shape", pa.list_(pa.int64())),
    ("corr_sha256", pa.string()),
    ("corr_bytes", pa.int64()),
    ("ts_sha256", pa.string()),
    ("ts_bytes", pa.int64()),
])
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def load_jsonl(path: Path) -> List[Dict[str, Any]]:
    """Read *path* as JSON Lines and return the parsed records.

    Blank (whitespace-only) lines are skipped; every other line must be a
    valid JSON document.
    """
    with path.open("r", encoding="utf-8") as handle:
        return [json.loads(raw) for raw in handle if raw.strip()]
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def to_parquet(rows: List[Dict[str, Any]], dst: Path):
    """Write *rows* to *dst* as a Parquet file conforming to SCHEMA.

    Missing fields are first filled in-place on each row with an empty/zero
    default (same in-place normalization as before), then each column is
    built with the type declared in SCHEMA, so the schema remains the single
    source of truth instead of being duplicated in hand-written column code.

    Parameters:
        rows: parsed JSONL records; mutated in place to add missing keys.
        dst:  output path; parent directories are created as needed.
    """
    # Default value for each schema field when a row lacks the key.
    defaults: Dict[str, Any] = {
        "parcellation": "",
        "subject": "",
        "corr_path": "",
        "ts_path": "",
        "correlation_matrix": [],
        "corr_shape": [],
        "ts_shape": [],
        "corr_sha256": "",
        "corr_bytes": 0,
        "ts_sha256": "",
        "ts_bytes": 0,
    }
    for r in rows:
        for key, default in defaults.items():
            if key not in r:
                # Copy list defaults so rows never share one mutable object.
                r[key] = list(default) if isinstance(default, list) else default

    # Build every column with the exact type declared in SCHEMA.
    cols = {}
    for field in SCHEMA:
        if field.name in ("corr_bytes", "ts_bytes"):
            # Byte counts are coerced to int; None becomes 0 (as before).
            values = [int(r[field.name]) if r[field.name] is not None else 0
                      for r in rows]
        else:
            values = [r[field.name] for r in rows]
        cols[field.name] = pa.array(values, type=field.type)

    table = pa.table(cols, schema=SCHEMA)
    dst.parent.mkdir(parents=True, exist_ok=True)
    pq.write_table(table, dst)
    print(f"Wrote {dst} with schema: {table.schema}")
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def main():
    """CLI entry point: convert a JSONL manifest (src) to Parquet (dst)."""
    parser = argparse.ArgumentParser()
    for positional in ("src", "dst"):
        parser.add_argument(positional, type=Path)
    opts = parser.parse_args()

    to_parquet(load_jsonl(opts.src), opts.dst)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
if __name__ == "__main__":
|
| 89 |
+
main()
|
| 90 |
+
|