# RocketReviews / scripts / reviews / 02_build_data.py
# Commit 8379a32 (ppak10): Adds compiled data for flights and reviews.
#!/usr/bin/env python3
import json
import logging
import sys
from pathlib import Path
# ---------------------------------------------------------------------------
# Config
# ---------------------------------------------------------------------------
# Repository root: this script lives three levels deep (scripts/reviews/...).
ROOT = Path(__file__).parent.parent.parent
# Sharded per-review JSON detail files live under source/reviews/detail/<shard>/.
SOURCE_DIR = ROOT / "source" / "reviews" / "detail"
# Single compiled JSONL output consumed downstream (one record per line).
OUTPUT_FILE = ROOT / "data" / "reviews.jsonl"
# Namespace prefix for globally unique ChromaDB IDs (e.g. "review:000042").
PREFIX = "review"
# Bare-message logging: this is a CLI build script, timestamps add noise.
logging.basicConfig(level=logging.INFO, format="%(message)s")
log = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
# Mapper
# ---------------------------------------------------------------------------
def transform_review(data: dict) -> dict:
    """Flatten nested review JSON into a ChromaDB-ready record.

    Args:
        data: Raw review dict. Must contain a numeric ``id``; ``kit``,
            ``sections``, ``ratings``, ``manufacturer``, ``type``, ``date``
            and ``url`` are optional and may be null.

    Returns:
        Dict with ``id`` (globally unique, zero-padded), ``document``
        (searchable narrative text), and ``metadata`` (flat, simple-typed,
        nulls removed).

    Raises:
        KeyError/ValueError: if ``data["id"]`` is missing or non-numeric.
    """
    # 1. Generate globally unique ID, e.g. "review:000042".
    chroma_id = f"{PREFIX}:{int(data['id']):06d}"

    # 2. Build searchable document text.
    # "or {}" also covers an explicit null value, which plain
    # .get(key, {}) would pass through and then crash on .items().
    sections = data.get("sections") or {}
    text_blocks = []
    if data.get("kit"):
        text_blocks.append(f"Review of the {data['kit']}.")
    # Add all narrative sections in their original order.
    for title, content in sections.items():
        text_blocks.append(f"{title}: {content}")
    document = " ".join(text_blocks)

    # 3. Flatten metadata (simple types only). Hoist the nested lookups
    # once and normalize nulls, mirroring the manufacturer guard below.
    ratings = data.get("ratings") or {}
    manufacturer = data.get("manufacturer")
    metadata = {
        "id": data["id"],
        "type": data.get("type"),
        "date": data.get("date"),
        "kit_name": data.get("kit"),
        # manufacturer may be a nested object ({"name": ...}) or a bare string.
        "manufacturer_name": manufacturer.get("name") if isinstance(manufacturer, dict) else manufacturer,
        "rating_construction": ratings.get("construction"),
        "rating_flight": ratings.get("flight"),
        "rating_overall": ratings.get("overall"),
        "url": data.get("url"),
    }
    # Remove nulls to keep metadata clean (ChromaDB rejects None values).
    metadata = {k: v for k, v in metadata.items() if v is not None}
    return {
        "id": chroma_id,
        "document": document,
        "metadata": metadata
    }
# ---------------------------------------------------------------------------
# Main
# ---------------------------------------------------------------------------
def main() -> None:
    """Compile all sharded review JSON files into one JSONL output.

    Walks SOURCE_DIR's shard subdirectories in sorted (deterministic)
    order, transforms each review via transform_review, and writes one
    JSON line per record to OUTPUT_FILE. Exits with status 1 when the
    source directory is missing so callers/CI can detect the failure
    (previously this returned silently with exit code 0; `sys` was
    imported but unused, confirming sys.exit was intended).
    """
    if not SOURCE_DIR.exists():
        log.error(f"Source directory {SOURCE_DIR} not found.")
        sys.exit(1)

    OUTPUT_FILE.parent.mkdir(parents=True, exist_ok=True)
    count = 0
    with OUTPUT_FILE.open("w", encoding="utf-8") as out:
        # Walk the sharded detail directory.
        for shard_dir in sorted(SOURCE_DIR.iterdir()):
            if not shard_dir.is_dir():
                continue
            for file_path in sorted(shard_dir.glob("*.json")):
                try:
                    with file_path.open("r", encoding="utf-8") as f:
                        raw_data = json.load(f)
                    processed = transform_review(raw_data)
                    out.write(json.dumps(processed, ensure_ascii=False) + "\n")
                    count += 1
                except Exception as e:
                    # Best-effort: log and skip a bad file, keep building the rest.
                    log.error(f"Error processing {file_path}: {e}")
    log.info(f"Successfully built {count} records in {OUTPUT_FILE}")


if __name__ == "__main__":
    main()