File size: 3,421 Bytes
08b40b0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
#!/usr/bin/env python3
import json
import logging
from pathlib import Path

# ---------------------------------------------------------------------------
# Config
# ---------------------------------------------------------------------------

# Repo root: three directory levels above this script — TODO confirm layout.
ROOT = Path(__file__).parent.parent.parent
# Input: per-shard subdirectories of raw product detail JSON files.
SOURCE_DIR = ROOT / "source" / "products" / "detail"
# Output: one JSON record per line (JSONL), consumed downstream by ChromaDB loading.
OUTPUT_FILE = ROOT / "data" / "products.jsonl"
# Namespace prefix for generated record ids (e.g. "product:000042").
PREFIX = "product"

# Message-only log format (no timestamps/levels) for terminal output.
logging.basicConfig(level=logging.INFO, format="%(message)s")
log = logging.getLogger(__name__)

# ---------------------------------------------------------------------------
# Mapper
# ---------------------------------------------------------------------------

def transform_product(data: dict) -> dict:
    """Flatten product JSON into a ChromaDB-ready format.

    Args:
        data: Raw product record. Must contain an ``"id"`` coercible to int;
            all other keys are optional.

    Returns:
        Dict with keys ``"id"`` (namespaced, zero-padded), ``"document"``
        (searchable prose built from title/manufacturer/specs), and
        ``"metadata"`` (flat dict with all ``None`` values dropped —
        Chroma metadata must not contain nulls).

    Raises:
        KeyError: if ``"id"`` is missing.
        ValueError: if ``"id"`` is not coercible to int.
    """
    chroma_id = f"{PREFIX}:{int(data['id']):06d}"

    # Build searchable document text.
    parts = []
    title = data.get("title") or data.get("name")
    # Guard against "manufacturer": null in the source JSON — a plain
    # .get("manufacturer", {}) would return None and crash on .get("name").
    mfr = (data.get("manufacturer") or {}).get("name")

    summary = f"Product: {title}"
    if mfr:
        summary += f" manufactured by {mfr}"
    summary += "."
    parts.append(summary)

    # Normalize specs to a dict so lookups below never need a None check.
    specs = data.get("specs") or {}
    # (json key, human label, unit suffix) — order defines sentence order.
    spec_fields = (
        ("diameter_in", "diameter", " in"),
        ("length_in", "length", " in"),
        ("power_class", "power class", ""),
        ("skill_level", "skill level", ""),
        ("recovery", "recovery", ""),
    )
    spec_parts = [
        f"{label}: {specs[key]}{unit}"
        for key, label, unit in spec_fields
        if specs.get(key)
    ]
    if spec_parts:
        parts.append("Specifications: " + ", ".join(spec_parts) + ".")

    document = " ".join(parts)

    # Flatten metadata; nulls removed below.
    metadata = {
        "id": data["id"],
        "name": data.get("name"),
        "type": data.get("type"),
        "manufacturer_name": mfr,
        "power_class": specs.get("power_class"),
        "skill_level": specs.get("skill_level"),
        "url": data.get("url"),
    }
    metadata = {k: v for k, v in metadata.items() if v is not None}

    return {
        "id": chroma_id,
        "document": document,
        "metadata": metadata,
    }

# ---------------------------------------------------------------------------
# Main
# ---------------------------------------------------------------------------

def main() -> None:
    """Walk SOURCE_DIR's shard subdirectories and build OUTPUT_FILE (JSONL).

    Each ``*.json`` file under a shard directory becomes one line in the
    output. A file that fails to parse or transform is logged (with
    traceback) and skipped so one bad record cannot abort the whole build.
    """
    if not SOURCE_DIR.exists():
        # Lazy %-args: let logging do the formatting.
        log.error("Source directory %s not found.", SOURCE_DIR)
        return

    OUTPUT_FILE.parent.mkdir(parents=True, exist_ok=True)

    count = 0
    failures = 0
    with OUTPUT_FILE.open("w", encoding="utf-8") as out:
        for shard_dir in sorted(SOURCE_DIR.iterdir()):
            if not shard_dir.is_dir():
                continue
            for file_path in sorted(shard_dir.glob("*.json")):
                try:
                    raw_data = json.loads(file_path.read_text(encoding="utf-8"))
                    processed = transform_product(raw_data)
                    out.write(json.dumps(processed, ensure_ascii=False) + "\n")
                    count += 1
                except Exception:
                    failures += 1
                    # log.exception records the full traceback, not just str(e).
                    log.exception("Error processing %s", file_path)

    log.info("Successfully built %d records in %s", count, OUTPUT_FILE)
    if failures:
        log.warning("Skipped %d file(s) due to errors.", failures)

# Script entry point: run the build only when executed directly, not on import.
if __name__ == "__main__":
    main()