File size: 1,323 Bytes
a7e6ab2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 |
\
#!/usr/bin/env python3
from __future__ import annotations
import argparse
import json
from pathlib import Path
import pandas as pd
def iter_jsonl(path: Path):
    """Lazily yield one parsed JSON object per non-blank line of *path*.

    The file is read as UTF-8; whitespace-only lines are skipped so that
    trailing newlines or blank separators don't raise a decode error.
    """
    with path.open("r", encoding="utf-8") as handle:
        for raw in handle:
            record = raw.strip()
            if record:
                yield json.loads(record)
def main() -> int:
    """Convert a JSONL file into fixed-size Parquet shards.

    Streams --jsonl line by line, buffering up to --rows_per_shard records,
    and writes each full buffer as ``<stem>-NNNNN.parquet`` under --out_dir.
    A trailing partial buffer becomes the final (smaller) shard.

    Returns:
        0 on success (process exit code).
    """
    ap = argparse.ArgumentParser(description="JSONL → Parquet shards (Within Us AI)")
    ap.add_argument("--jsonl", required=True)
    ap.add_argument("--out_dir", required=True)
    ap.add_argument("--rows_per_shard", type=int, default=5000)
    args = ap.parse_args()

    # Guard against 0/negative values, which would otherwise silently
    # produce one-row shards (len(buf) >= 0 is always true after append).
    if args.rows_per_shard < 1:
        ap.error("--rows_per_shard must be a positive integer")

    src = Path(args.jsonl)
    out_dir = Path(args.out_dir)
    out_dir.mkdir(parents=True, exist_ok=True)

    def _flush(rows: list, index: int) -> None:
        # Write one shard; single point of truth for the naming scheme.
        out = out_dir / f"{src.stem}-{index:05d}.parquet"
        pd.DataFrame(rows).to_parquet(out, index=False)

    buf: list = []
    shard = 0
    for obj in iter_jsonl(src):
        buf.append(obj)
        if len(buf) >= args.rows_per_shard:
            _flush(buf, shard)
            buf.clear()
            shard += 1
    if buf:  # trailing partial shard
        _flush(buf, shard)
    print(f"Wrote shards to: {out_dir}")
    return 0
if __name__ == "__main__":
    raise SystemExit(main())
|