| |
| import json |
| import logging |
| from pathlib import Path |
|
|
| |
| |
| |
|
|
# Repository root: three levels above this script's own location.
ROOT = Path(__file__).parent.parent.parent
# Directory tree holding the per-shard product detail JSON files.
SOURCE_DIR = ROOT / "source" / "products" / "detail"
# Destination JSONL file of ChromaDB-ready records (one JSON object per line).
OUTPUT_FILE = ROOT / "data" / "products.jsonl"
# Namespace prefix used when building ChromaDB record ids.
PREFIX = "product"


# Bare-message log format: this is a CLI-style build script, so timestamps
# and logger names would only add noise.
logging.basicConfig(level=logging.INFO, format="%(message)s")
log = logging.getLogger(__name__)
|
|
| |
| |
| |
|
|
def transform_product(data: dict) -> dict:
    """Flatten product JSON into a ChromaDB-ready format.

    Builds a single-record dict with:
      - ``id``: ``"<PREFIX>:<zero-padded numeric id>"``
      - ``document``: a one-paragraph natural-language summary (title,
        manufacturer, and any available specifications)
      - ``metadata``: a flat dict of filterable fields, with ``None``
        values dropped (ChromaDB metadata may not contain nulls)

    Raises:
        KeyError / ValueError: if ``data`` has no (numeric) ``id``.
    """
    chroma_id = f"{PREFIX}:{int(data['id']):06d}"

    parts = []
    title = data.get("title") or data.get("name")
    # `or {}` guards against an explicit `"manufacturer": null` in the JSON:
    # .get("manufacturer", {}) only applies its default when the key is
    # absent, and would hand None to the chained .get, raising AttributeError.
    mfr = (data.get("manufacturer") or {}).get("name")

    summary = f"Product: {title}"
    if mfr:
        summary += f" manufactured by {mfr}"
    summary += "."
    parts.append(summary)

    specs = data.get("specs")
    if specs:
        # (key, template) table instead of a chain of one-line ifs; output
        # strings are identical to the original hand-written versions.
        spec_fields = (
            ("diameter_in", "diameter: {} in"),
            ("length_in", "length: {} in"),
            ("power_class", "power class: {}"),
            ("skill_level", "skill level: {}"),
            ("recovery", "recovery: {}"),
        )
        spec_parts = [
            tmpl.format(specs[key])
            for key, tmpl in spec_fields
            if specs.get(key)
        ]
        if spec_parts:
            parts.append("Specifications: " + ", ".join(spec_parts) + ".")

    document = " ".join(parts)

    metadata = {
        "id": data["id"],
        "name": data.get("name"),
        "type": data.get("type"),
        "manufacturer_name": mfr,
        "power_class": specs.get("power_class") if specs else None,
        "skill_level": specs.get("skill_level") if specs else None,
        "url": data.get("url"),
    }
    # Drop nulls: ChromaDB metadata values must be non-None scalars.
    metadata = {k: v for k, v in metadata.items() if v is not None}

    return {
        "id": chroma_id,
        "document": document,
        "metadata": metadata,
    }
|
|
| |
| |
| |
|
|
def main():
    """Build the products JSONL file from the per-shard JSON sources."""
    if not SOURCE_DIR.exists():
        log.error(f"Source directory {SOURCE_DIR} not found.")
        return

    OUTPUT_FILE.parent.mkdir(parents=True, exist_ok=True)

    written = 0
    with OUTPUT_FILE.open("w", encoding="utf-8") as sink:
        # Deterministic order: sorted shard dirs, then sorted files per shard.
        shard_dirs = (d for d in sorted(SOURCE_DIR.iterdir()) if d.is_dir())
        for shard in shard_dirs:
            for source_file in sorted(shard.glob("*.json")):
                # Best-effort per file: a bad record is logged and skipped
                # rather than aborting the whole build.
                try:
                    raw = json.loads(source_file.read_text(encoding="utf-8"))
                    record = transform_product(raw)
                    sink.write(json.dumps(record, ensure_ascii=False) + "\n")
                    written += 1
                except Exception as exc:
                    log.error(f"Error processing {source_file}: {exc}")

    log.info(f"Successfully built {written} records in {OUTPUT_FILE}")
|
|
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
|
|