# RocketReviews / scripts/designs/02_build_data.py
# NOTE(review): the following lines were non-Python page chrome captured during
# extraction (uploader "ppak10", commit "Adds data for remaining sections.",
# revision 08b40b0); converted to comments so the file remains valid Python.
#!/usr/bin/env python3
import json
import logging
from pathlib import Path
# ---------------------------------------------------------------------------
# Config
# ---------------------------------------------------------------------------
ROOT = Path(__file__).parent.parent.parent  # repo root: scripts/designs/02_build_data.py -> repo
SOURCE_DIR = ROOT / "source" / "designs" / "detail"  # per-shard design detail JSON files
DEEP_PARSED_DIR = ROOT / "source" / "designs" / "files" / "json"  # optional deep-parsed data, keyed by zero-padded id
OUTPUT_FILE = ROOT / "data" / "designs.jsonl"  # one JSON record per line (JSONL)
PREFIX = "design"  # namespace prefix for generated record ids ("design:000042")
logging.basicConfig(level=logging.INFO, format="%(message)s")  # message-only log output
log = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
# Mapper
# ---------------------------------------------------------------------------
def transform_design(data: dict, deep_data: dict | None = None) -> dict:
    """Flatten design JSON into a ChromaDB-ready record.

    Args:
        data: Raw design detail dict; must contain an int-coercible ``id``.
        deep_data: Optional deep-parsed dict for the same design (used for
            ``stage_count``); ignored when not a dict.

    Returns:
        Dict with ``id`` (stable ``"design:NNNNNN"`` string), ``document``
        (searchable prose built from the design's fields), and ``metadata``
        (flat dict with all ``None`` values dropped).

    Raises:
        KeyError: if ``data`` has no ``id`` key.
        ValueError: if ``data['id']`` cannot be converted to ``int``.
    """
    chroma_id = f"{PREFIX}:{int(data['id']):06d}"

    # Build searchable document text
    parts = []
    title = data.get('title') or data.get('name')
    mfr = data.get('manufacturer')
    fmt = data.get('format')
    summary = f"Rocket Design: {title}"
    if mfr:
        summary += f" (Manufacturer: {mfr})"
    # Fix: previously emitted "in None format." whenever 'format' was absent.
    if fmt:
        summary += f" in {fmt} format."
    else:
        summary += "."
    parts.append(summary)

    designer_obj = data.get("designer")
    if isinstance(designer_obj, dict) and designer_obj.get("name"):
        parts.append(f"Designed by {designer_obj['name']}.")
    if data.get("comments"):
        parts.append(f"Comments: {data['comments']}")

    # Stability metrics (center of gravity / center of pressure / margin)
    metrics = []
    cg = data.get("cg")
    if isinstance(cg, dict) and cg.get("location_in"):
        metrics.append(f"CG: {cg['location_in']} in")
    cp = data.get("cp")
    if isinstance(cp, dict) and cp.get("location_in"):
        metrics.append(f"CP: {cp['location_in']} in")
    if data.get("margin"):
        # Fix: only append the status when present, so the metric no longer
        # carries a trailing space when 'margin_status' is missing.
        margin_text = f"Margin: {data['margin']}"
        status = data.get("margin_status")
        if status:
            margin_text += f" {status}"
        metrics.append(margin_text)
    if metrics:
        parts.append("Metrics: " + ", ".join(metrics) + ".")

    # Parts list (relationship to component records).
    # NOTE(review): assumes data['parts'] is a list of strings — confirm upstream.
    if data.get("parts"):
        parts.append(f"Includes components: {', '.join(data['parts'])}.")

    document = " ".join(parts)

    # Flatten metadata; None values are dropped below so ChromaDB metadata
    # stays scalar-only.
    metadata = {
        "id": data["id"],
        "format": fmt,
        "manufacturer": mfr,
        "designer_name": designer_obj.get("name") if isinstance(designer_obj, dict) else None,
        "stage_count": deep_data.get("stage_count") if isinstance(deep_data, dict) else None,
        "url": data.get("url")
    }
    metadata = {k: v for k, v in metadata.items() if v is not None}

    return {
        "id": chroma_id,
        "document": document,
        "metadata": metadata
    }
# ---------------------------------------------------------------------------
# Main
# ---------------------------------------------------------------------------
def main():
    """Build data/designs.jsonl from the sharded design detail JSON sources.

    Walks every shard directory under SOURCE_DIR, merges in any deep-parsed
    data found in DEEP_PARSED_DIR, and writes one transformed record per line.
    Per-file failures are logged and skipped so one bad file cannot abort the
    whole build.
    """
    if not SOURCE_DIR.exists():
        log.error(f"Source directory {SOURCE_DIR} not found.")
        return
    OUTPUT_FILE.parent.mkdir(parents=True, exist_ok=True)
    written = 0
    with OUTPUT_FILE.open("w", encoding="utf-8") as sink:
        shard_dirs = (d for d in sorted(SOURCE_DIR.iterdir()) if d.is_dir())
        for shard in shard_dirs:
            for detail_path in sorted(shard.glob("*.json")):
                try:
                    raw = json.loads(detail_path.read_text(encoding="utf-8"))
                    # Deep-parsed companion file is optional, keyed by zero-padded id.
                    enriched = None
                    candidate = DEEP_PARSED_DIR / f"{int(raw['id']):06d}.json"
                    if candidate.exists():
                        enriched = json.loads(candidate.read_text(encoding="utf-8"))
                    record = transform_design(raw, enriched)
                    sink.write(json.dumps(record, ensure_ascii=False) + "\n")
                    written += 1
                except Exception as e:
                    # Best-effort build: report and continue with the next file.
                    log.error(f"Error processing {detail_path}: {e}")
    log.info(f"Successfully built {written} records in {OUTPUT_FILE}")


if __name__ == "__main__":
    main()