|
|
|
|
|
""" |
|
|
Build Hugging Face dataset artifacts from JSONL. |
|
|
|
|
|
- Loads data/mux_assets.jsonl |
|
|
- Runs a light validation (required fields + raw payload) |
|
|
- Builds a datasets.Dataset |
|
|
- Exports to Parquet at data/mux_assets.parquet |
|
|
|
|
|
Usage: |
|
|
pip install -U datasets pyarrow |
|
|
python scripts/build_dataset.py |
|
|
""" |
|
|
|
|
|
from __future__ import annotations |
|
|
|
|
|
import json |
|
|
import os |
|
|
from typing import Any, Dict, List |
|
|
|
|
|
from datasets import Dataset |
|
|
|
|
|
# Input path: one JSON object per line (JSON-Lines).
JSONL_PATH = os.path.join("data", "mux_assets.jsonl")

# Output path for the Parquet export produced by Dataset.to_parquet.
PARQUET_PATH = os.path.join("data", "mux_assets.parquet")

# Keys that every row must contain; light_validate() aborts if any is absent.
# "raw_mux_json" is additionally required to be a dict (see light_validate).
REQUIRED_FIELDS = [
    "asset_id",
    "upload_id",
    "title",
    "external_id",
    "creator_id",
    "status",
    "video_quality",
    "resolution_tier",
    "duration_seconds",
    "aspect_ratio",
    "max_width",
    "max_height",
    "max_frame_rate",
    "encoding_tier",
    "normalize_audio",
    "mp4_support",
    "playback_policy",
    "playback_id_public",
    "video_track_id",
    "audio_track_id",
    "audio_primary",
    "audio_channels",
    "audio_language_code",
    "created_at_unix",
    "raw_mux_json",
]
|
|
|
|
|
|
|
|
def load_jsonl(path: str) -> List[Dict[str, Any]]:
    """Parse a JSON-Lines file into a list of dicts.

    Blank lines are skipped. A malformed line aborts the run with a
    SystemExit that reports the offending 1-based line number.
    """
    records: List[Dict[str, Any]] = []
    with open(path, "r", encoding="utf-8") as handle:
        for lineno, raw in enumerate(handle, start=1):
            text = raw.strip()
            if not text:
                # Tolerate empty/whitespace-only lines in the export.
                continue
            try:
                records.append(json.loads(text))
            except json.JSONDecodeError as exc:
                raise SystemExit(f"[ERROR] Invalid JSON on line {lineno}: {exc}") from exc
    return records
|
|
|
|
|
|
|
|
def light_validate(rows: List[Dict[str, Any]]) -> None:
    """Abort with SystemExit unless every row carries the required fields.

    Checks, in order: the row list is non-empty; each row has every key in
    REQUIRED_FIELDS; each row's "raw_mux_json" value is a dict.
    """
    if not rows:
        raise SystemExit("[ERROR] No rows found in JSONL.")
    for idx, row in enumerate(rows):
        absent = [field for field in REQUIRED_FIELDS if field not in row]
        if absent:
            raise SystemExit(f"[ERROR] Row {idx} missing required fields: {absent}")
        if not isinstance(row["raw_mux_json"], dict):
            raise SystemExit(f"[ERROR] Row {idx} raw_mux_json must be an object/dict.")
|
|
|
|
|
|
|
|
def main() -> None:
    """Load the JSONL export, validate it, and write a Parquet dataset.

    Exits via SystemExit if the input file is missing or fails validation.
    """
    if not os.path.exists(JSONL_PATH):
        raise SystemExit(f"[ERROR] Missing file: {JSONL_PATH}")

    records = load_jsonl(JSONL_PATH)
    light_validate(records)

    dataset = Dataset.from_list(records)

    # Quick console summary so the run is auditable.
    print("\n=== Dataset Preview ===")
    print(dataset)
    print("\n=== Features ===")
    print(dataset.features)

    dataset.to_parquet(PARQUET_PATH)
    print(f"\n[OK] Wrote Parquet: {PARQUET_PATH}")
|
|
|
|
|
|
|
|
if __name__ == "__main__": |
|
|
main() |
|
|
|