| #!/usr/bin/env python3 | |
| """Convert JSONL data files to HuggingFace-compatible parquet format.""" | |
import importlib
import subprocess
import sys
from pathlib import Path
def ensure_datasets():
    """Ensure the HuggingFace ``datasets`` library is importable.

    Tries to import ``datasets``; on ImportError, installs it into the
    current interpreter with pip and refreshes the import caches so a
    subsequent ``import datasets`` in this same process succeeds.
    """
    try:
        import datasets  # noqa: F401
    except ImportError:
        print("Installing datasets library...")
        subprocess.check_call([sys.executable, "-m", "pip", "install", "datasets"])
        # A package installed after interpreter start may not be visible
        # until the import system's finder caches are invalidated.
        importlib.invalidate_caches()
def main():
    """Convert each known JSONL file under ``data/`` to parquet under ``hf/``.

    Missing input files are skipped with a message; per-file row counts and
    output sizes are printed, followed by a summary of all parquet files.
    """
    ensure_datasets()
    from datasets import Dataset

    # Repo layout assumed: <root>/<this script's dir>/, <root>/data/, <root>/hf/.
    root = Path(__file__).resolve().parent.parent
    data_dir = root / "data"
    hf_dir = root / "hf"
    # parents=True: also create any missing intermediate directories.
    hf_dir.mkdir(parents=True, exist_ok=True)

    configs = ["turns", "conversations", "sft_pairs", "chunks", "lexicon"]
    for name in configs:
        jsonl_path = data_dir / f"{name}.jsonl"
        if not jsonl_path.exists():
            print(f"SKIP {name}: {jsonl_path} not found")
            continue
        print(f"Loading {name}...")
        ds = Dataset.from_json(str(jsonl_path))
        out_path = hf_dir / f"{name}.parquet"
        ds.to_parquet(str(out_path))
        print(f" -> {out_path} ({len(ds)} rows, {out_path.stat().st_size / 1e6:.1f} MB)")

    print("\nDone. Parquet files:")
    for f in sorted(hf_dir.glob("*.parquet")):
        print(f" {f.name}: {f.stat().st_size / 1e6:.1f} MB")
# Run the conversion only when executed as a script, not on import.
if __name__ == "__main__":
    main()