File size: 4,246 Bytes
08b40b0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
#!/usr/bin/env python3
import json
import logging
from pathlib import Path
from typing import Optional

# ---------------------------------------------------------------------------
# Config
# ---------------------------------------------------------------------------

# Repository root: this script lives three directory levels below it.
ROOT = Path(__file__).parent.parent.parent
# Input: per-design raw JSON files, grouped into shard subdirectories.
SOURCE_DIR = ROOT / "source" / "designs" / "detail"
# Optional enrichment: deep-parsed design files named by zero-padded id.
DEEP_PARSED_DIR = ROOT / "source" / "designs" / "files" / "json"
# Output: one JSON record per line (JSONL).
OUTPUT_FILE = ROOT / "data" / "designs.jsonl"
# Namespace prefix for generated record ids, e.g. "design:000123".
PREFIX = "design"

# Plain-message logging; this is a CLI build script.
logging.basicConfig(level=logging.INFO, format="%(message)s")
log = logging.getLogger(__name__)

# ---------------------------------------------------------------------------
# Mapper
# ---------------------------------------------------------------------------

def transform_design(data: dict, deep_data: Optional[dict] = None, *,
                     prefix: Optional[str] = None) -> dict:
    """Flatten a raw design record into a ChromaDB-ready record.

    Args:
        data: Raw design JSON. Must contain an integer-coercible "id";
            all other keys are optional.
        deep_data: Optional deep-parsed JSON for the same design; only
            its "stage_count" key is read.
        prefix: Namespace prefix for the generated id. Defaults to the
            module-level ``PREFIX`` ("design").

    Returns:
        Dict with keys "id" (e.g. "design:000123"), "document"
        (searchable text), and "metadata" (flat dict, None values removed).

    Raises:
        KeyError: If ``data`` has no "id" key.
        ValueError: If "id" cannot be coerced to int.
    """
    if prefix is None:
        prefix = PREFIX

    chroma_id = f"{prefix}:{int(data['id']):06d}"

    # Build searchable document text
    parts = []
    title = data.get('title') or data.get('name')
    mfr = data.get('manufacturer')
    fmt = data.get('format')

    summary = f"Rocket Design: {title}"
    if mfr:
        summary += f" (Manufacturer: {mfr})"
    # Fix: only mention the format when one is present; the old code
    # emitted "in None format." for records without a format.
    if fmt:
        summary += f" in {fmt} format."
    else:
        summary += "."
    parts.append(summary)

    designer_obj = data.get("designer")
    if isinstance(designer_obj, dict) and designer_obj.get("name"):
        parts.append(f"Designed by {designer_obj['name']}.")

    if data.get("comments"):
        parts.append(f"Comments: {data['comments']}")

    # Stability metrics
    metrics = []
    cg = data.get("cg")
    if isinstance(cg, dict) and cg.get("location_in"):
        metrics.append(f"CG: {cg['location_in']} in")

    cp = data.get("cp")
    if isinstance(cp, dict) and cp.get("location_in"):
        metrics.append(f"CP: {cp['location_in']} in")

    if data.get("margin"):
        # Fix: join margin + status so a missing status does not leave
        # a trailing space in the document text.
        margin_bits = [str(data['margin'])]
        status = data.get('margin_status')
        if status:
            margin_bits.append(str(status))
        metrics.append("Margin: " + " ".join(margin_bits))

    if metrics:
        parts.append("Metrics: " + ", ".join(metrics) + ".")

    # Parts List (Relationship) — coerce entries to str defensively.
    if data.get("parts"):
        parts.append(f"Includes components: {', '.join(str(p) for p in data['parts'])}.")

    document = " ".join(parts)

    # Flatten metadata; None values are stripped below so the output
    # contains only populated fields.
    metadata = {
        "id": data["id"],
        "format": fmt,
        "manufacturer": mfr,
        "designer_name": designer_obj.get("name") if isinstance(designer_obj, dict) else None,
        "stage_count": deep_data.get("stage_count") if isinstance(deep_data, dict) else None,
        "url": data.get("url")
    }

    metadata = {k: v for k, v in metadata.items() if v is not None}

    return {
        "id": chroma_id,
        "document": document,
        "metadata": metadata
    }

# ---------------------------------------------------------------------------
# Main
# ---------------------------------------------------------------------------

def main():
    """Walk the sharded source tree and emit one JSONL record per design."""
    if not SOURCE_DIR.exists():
        log.error(f"Source directory {SOURCE_DIR} not found.")
        return

    OUTPUT_FILE.parent.mkdir(parents=True, exist_ok=True)

    written = 0
    with OUTPUT_FILE.open("w", encoding="utf-8") as out:
        shard_dirs = [d for d in sorted(SOURCE_DIR.iterdir()) if d.is_dir()]
        for shard_dir in shard_dirs:
            for file_path in sorted(shard_dir.glob("*.json")):
                try:
                    raw_data = json.loads(file_path.read_text(encoding="utf-8"))

                    # Attach deep-parsed data when a matching file exists.
                    deep_data = None
                    deep_path = DEEP_PARSED_DIR / f"{int(raw_data['id']):06d}.json"
                    if deep_path.exists():
                        deep_data = json.loads(deep_path.read_text(encoding="utf-8"))

                    record = transform_design(raw_data, deep_data)
                    out.write(json.dumps(record, ensure_ascii=False) + "\n")
                    written += 1
                except Exception as e:
                    # Best-effort build: log the bad file and keep going.
                    log.error(f"Error processing {file_path}: {e}")

    log.info(f"Successfully built {written} records in {OUTPUT_FILE}")

if __name__ == "__main__":
    main()