import os
import sys

# Add project root to path to allow imports if running as script
if __name__ == "__main__":
    sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))

import argparse
import json

import numpy as np
from pydantic import TypeAdapter

# from compiler.parser import AbilityParser
from engine.models.card import EnergyCard, LiveCard, MemberCard

# Maps a character's full Japanese name (as it appears in the raw card data)
# to a stable numeric character id. Gaps in the numbering separate groups
# (1-9, 11-19, 21-33, 41-51, 61-69).
CHAR_MAP = {
    "高坂 穂乃果": 1,
    "絢瀬 絵里": 2,
    "南 ことり": 3,
    "園田 海未": 4,
    "星空 凛": 5,
    "西木野 真姫": 6,
    "東條 希": 7,
    "小泉 花陽": 8,
    "矢澤 にこ": 9,
    "高海 千歌": 11,
    "桜内 梨子": 12,
    "松浦 果南": 13,
    "黒澤 ダイヤ": 14,
    "渡辺 曜": 15,
    "津島 善子": 16,
    "国木田 花丸": 17,
    "小原 鞠莉": 18,
    "黒澤 ルビィ": 19,
    "上原 歩夢": 21,
    "中須 かすみ": 22,
    "桜坂 しずく": 23,
    "朝香 果林": 24,
    "宮下 愛": 25,
    "近江 彼方": 26,
    "優木 せつ菜": 27,
    "エマ・ヴェルデ": 28,
    "天王寺 璃奈": 29,
    "三船 栞子": 30,
    "ミア・テイラー": 31,
    "鐘 嵐珠": 32,
    "高咲 侑": 33,
    "澁谷 かのん": 41,
    "唐 可可": 42,
    "嵐 千砂都": 43,
    "平安名 すみれ": 44,
    "葉月 恋": 45,
    "桜小路 きな子": 46,
    "米女 メイ": 47,
    "若菜 四季": 48,
    "鬼塚 夏美": 49,
    "ウィーン・マルガレーテ": 50,
    "鬼塚 冬毬": 51,
    "日野下 花帆": 61,
    "村野 さやか": 62,
    "乙宗 梢": 63,
    "夕霧 綴理": 64,
    "大沢 瑠璃乃": 65,
    "藤島 慈": 66,
    "百生 吟子": 67,
    "徒町 小鈴": 68,
    "安養寺 姫芽": 69,
}


def compile_cards(input_path: str, output_path: str) -> None:
    """Compile raw card JSON into the engine's compiled card database.

    Reads the raw card dump at ``input_path`` (a JSON object keyed by card
    number), parses each card (including rarity variants found in
    ``rare_list``) into typed Pydantic models, and writes a single JSON
    document with ``member_db`` / ``live_db`` / ``energy_db`` sections keyed
    by dense numeric ids (members from 0, lives from 30000, energies from
    40000). Parse failures are collected and written to
    ``compiler_errors.log`` instead of aborting the run.
    """
    print(f"Loading raw cards from {input_path}...")
    with open(input_path, "r", encoding="utf-8") as f:
        raw_data = json.load(f)

    compiled_data = {
        "member_db": {},
        "live_db": {},
        "energy_db": {},
        "meta": {"version": "1.0", "source": input_path},
    }

    # Sort keys so the numeric ids assigned below are deterministic run-to-run.
    sorted_keys = sorted(raw_data.keys())

    m_idx = 0       # member ids start at 0
    l_idx = 30000   # live ids start at 30000
    e_idx = 40000   # energy/other ids start at 40000
    success_count = 0
    errors = []

    # Pre-create adapters once; TypeAdapter construction is not free.
    member_adapter = TypeAdapter(MemberCard)
    live_adapter = TypeAdapter(LiveCard)
    energy_adapter = TypeAdapter(EnergyCard)

    for key in sorted_keys:
        item = raw_data[key]
        ctype = item.get("type", "")
        # print(f"DEBUG: Processing {key} Type: '{ctype}'")

        # Diagnostic logging only; the actual dispatch happens further down
        # on exact equality with "メンバー" / "ライブ".
        if "ライブ" in ctype or "Live" in ctype:
            print(f"FOUND LIVE: {key} Type: '{ctype}'")
        elif (
            "Member" not in ctype
            and "メンバー" not in ctype
            and "Energy" not in ctype
            and "エネルギー" not in ctype
        ):
            print(f"UNKNOWN TYPE: {key} Type: '{ctype}'")

        # Collect variants from rare_list: the base printing plus any
        # alternate-rarity printings that carry their own card_no.
        variants = [{"card_no": key, "name": item.get("name", ""), "data": item}]
        if "rare_list" in item and isinstance(item["rare_list"], list):
            for r in item["rare_list"]:
                v_no = r.get("card_no")
                if v_no and v_no != key:
                    print(f"DEBUG: Found variant {v_no} in rare_list of {key}")
                    # Create a variant that inherits base data but overrides metadata
                    v_item = item.copy()
                    v_item.update(r)
                    variants.append(
                        {
                            "card_no": v_no,
                            "name": r.get("name", item.get("name", "")),
                            "data": v_item,
                        }
                    )

        for v in variants:
            v_key = v["card_no"]
            v_data = v["data"]
            try:
                if ctype == "メンバー":
                    m_card = parse_member(m_idx, v_key, v_data)
                    compiled_item = member_adapter.dump_python(m_card, mode="json")
                    compiled_data["member_db"][str(m_idx)] = compiled_item
                    m_idx += 1
                elif ctype == "ライブ":
                    l_card = parse_live(l_idx, v_key, v_data)
                    compiled_data["live_db"][str(l_idx)] = live_adapter.dump_python(
                        l_card, mode="json"
                    )
                    l_idx += 1
                else:
                    # Treat everything else (Energy, etc.) as basic cards to
                    # preserve IDs for decks
                    e_card = parse_energy(e_idx, v_key, v_data)
                    compiled_data["energy_db"][str(e_idx)] = energy_adapter.dump_python(
                        e_card, mode="json"
                    )
                    e_idx += 1
                success_count += 1
            except Exception as e:
                errors.append(f"Error parsing card {v_key}: {e}")

    print(f"Compilation complete. Processed {success_count} cards.")
    if errors:
        print(f"Encountered {len(errors)} errors. See compiler_errors.log for details.")
        with open("compiler_errors.log", "w", encoding="utf-8") as f_err:
            for err_msg in errors:
                f_err.write(f"- {err_msg}\n")

    # Write output
    print(f"Writing compiled data to {output_path}...")
    with open(output_path, "w", encoding="utf-8") as f:
        json.dump(compiled_data, f, ensure_ascii=False, indent=2)
    print("Done.")


def _resolve_img_path(data: dict) -> str:
    """Resolve a card's image path under the flattened ``cards_webp/`` folder.

    Prefers the local ``_img`` path, falling back to the remote ``img`` URL;
    in either case only the basename is kept and a ``.png`` extension is
    rewritten to ``.webp``. Returns ``""`` when neither key is present.
    """

    def _webp_name(path: str) -> str:
        # Flattened images keep only the basename, converted from .png to .webp.
        filename = os.path.basename(path)
        if filename.lower().endswith(".png"):
            filename = filename[:-4] + ".webp"
        return filename

    # Use cards_webp as the flattened source.
    # BUG FIX: the computed filename was previously dropped and a constant
    # placeholder string was returned; now the real filename is used.
    img_path = str(data.get("_img", ""))
    if img_path:
        return f"cards_webp/{_webp_name(img_path)}"

    raw_url = str(data.get("img", ""))
    if raw_url:
        return f"cards_webp/{_webp_name(raw_url)}"

    return raw_url  # "" — no image information available


# NOTE: imported mid-file, after the sys.path setup above has run.
from compiler.parser_v2 import AbilityParserV2

# Initialize parser globally (shared by all parse_* helpers below).
_v2_parser = AbilityParserV2()

# Load manual overrides: hand-written pseudocode keyed by card_no that takes
# precedence over the raw data's ability text.
MANUAL_OVERRIDES_PATH = "data/manual_pseudocode.json"
_manual_overrides = {}
if os.path.exists(MANUAL_OVERRIDES_PATH):
    print(f"Loading manual overrides from {MANUAL_OVERRIDES_PATH}")
    with open(MANUAL_OVERRIDES_PATH, "r", encoding="utf-8") as f:
        _manual_overrides = json.load(f)


def parse_member(card_id: int, card_no: str, data: dict) -> MemberCard:
    """Build a MemberCard from one raw card dict.

    Ability text is resolved in priority order: manual override pseudocode >
    raw ``pseudocode`` > raw ``ability``; it is then parsed and compiled to
    bytecode (compile failures are logged but non-fatal).
    """
    spec = data.get("special_heart", {})

    # Use manual override if present
    override_data = _manual_overrides.get(card_no, {})
    if "pseudocode" in override_data:
        raw_ability = str(override_data["pseudocode"])
    else:
        raw_ability = str(data.get("pseudocode", data.get("ability", "")))

    abilities = _v2_parser.parse(raw_ability)
    for ab in abilities:
        try:
            ab.bytecode = ab.compile()
        except Exception as e:
            print(f"Warning: Failed to compile bytecode for {card_no} ability: {e}")

    return MemberCard(
        card_id=card_id,
        card_no=card_no,
        name=str(data.get("name", "Unknown")),
        cost=data.get("cost", 0),
        hearts=parse_hearts(data.get("base_heart", {})),
        blade_hearts=parse_blade_hearts(data.get("blade_heart", {})),
        blades=data.get("blade", 0),
        groups=data.get("series", ""),  # Validator will handle string -> List[Group]
        units=data.get("unit", ""),  # Validator will handle string -> List[Unit]
        abilities=abilities,
        img_path=_resolve_img_path(data),
        ability_text=raw_ability,
        original_text=str(data.get("ability", "")),
        volume_icons=spec.get("score", 0),
        draw_icons=spec.get("draw", 0),
        char_id=CHAR_MAP.get(str(data.get("name", "")), 0),
        faq=data.get("faq", []),
    )


def parse_live(card_id: int, card_no: str, data: dict) -> LiveCard:
    """Build a LiveCard from one raw card dict.

    Same ability-resolution rules as :func:`parse_member`.
    """
    spec = data.get("special_heart", {})

    # Use manual override if present; prioritize 'pseudocode' over 'ability'.
    override_data = _manual_overrides.get(card_no, {})
    raw_ability = str(
        override_data.get("pseudocode", data.get("pseudocode", data.get("ability", "")))
    )

    abilities = _v2_parser.parse(raw_ability)
    for ab in abilities:
        try:
            ab.bytecode = ab.compile()
        except Exception as e:
            print(f"Warning: Failed to compile bytecode for {card_no} ability: {e}")

    return LiveCard(
        card_id=card_id,
        card_no=card_no,
        name=str(data.get("name", "Unknown")),
        score=data.get("score", 0),
        required_hearts=parse_live_reqs(data.get("need_heart", {})),
        abilities=abilities,
        groups=data.get("series", ""),
        units=data.get("unit", ""),
        img_path=_resolve_img_path(data),
        ability_text=raw_ability,
        original_text=str(data.get("ability", "")),
        volume_icons=spec.get("score", 0),
        draw_icons=spec.get("draw", 0),
        blade_hearts=parse_blade_hearts(data.get("blade_heart", {})),
        faq=data.get("faq", []),
    )


def parse_energy(card_id: int, card_no: str, data: dict) -> EnergyCard:
    """Build a basic EnergyCard (also used for any unrecognized card type)."""
    return EnergyCard(
        card_id=card_id,
        card_no=card_no,
        name=str(data.get("name", "Energy")),
        img_path=_resolve_img_path(data),
        ability_text=str(data.get("ability", "")),
        original_text=str(data.get("ability", "")),
        rare=str(data.get("rare", "N")),
    )


def parse_hearts(heart_dict: dict) -> np.ndarray:
    """Convert a raw heart dict into a 7-slot count vector.

    Slots 0-5 hold colored hearts ``heart1``..``heart6``; slot 6 holds the
    ANY/STAR count (``heart0``, ``common``, ``any``, or ``star`` keys).
    Unparseable values are skipped.
    """
    hearts = np.zeros(7, dtype=np.int32)
    if not heart_dict:
        return hearts
    for k, v in heart_dict.items():
        if k.startswith("heart"):
            try:
                num_str = k.replace("heart", "")
                if num_str == "0":
                    # Handle heart0 as ANY/STAR
                    hearts[6] = int(v)
                    continue
                idx = int(num_str) - 1
                if 0 <= idx < 6:
                    hearts[idx] = int(v)
            except ValueError:
                pass
        elif k in ["common", "any", "star"]:
            hearts[6] = int(v)
    return hearts


def parse_blade_hearts(heart_dict: dict) -> np.ndarray:
    """Convert a raw blade-heart dict into a 7-slot count vector.

    Slots 0-5 hold ``b_heart1``..``b_heart6``; slot 6 holds ``b_all``.
    """
    hearts = np.zeros(7, dtype=np.int32)
    if not heart_dict:
        return hearts
    for k, v in heart_dict.items():
        if k == "b_all":
            hearts[6] = int(v)
        elif k.startswith("b_heart"):
            try:
                idx = int(k.replace("b_heart", "")) - 1
                if 0 <= idx < 6:
                    hearts[idx] = int(v)
            except ValueError:
                pass
    return hearts


def parse_live_reqs(req_dict: dict) -> np.ndarray:
    """Parse a live card's heart requirements.

    Delegates to :func:`parse_hearts`, which handles all 7 slots correctly.
    """
    return parse_hearts(req_dict)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--input", default="data/cards.json", help="Path to raw cards.json")
    parser.add_argument("--output", default="data/cards_compiled.json", help="Output path")
    args = parser.parse_args()

    # Resolve paths relative to cwd if needed, or assume running from root
    compile_cards(args.input, args.output)

    # Copy to both data/ and engine/data/ for compatibility with all scripts
    import shutil

    root_data_path = os.path.join(os.getcwd(), "data", "cards_compiled.json")
    engine_data_path = os.path.join(os.getcwd(), "engine", "data", "cards_compiled.json")

    # Sync to root data/
    if os.path.abspath(args.output) != os.path.abspath(root_data_path):
        try:
            shutil.copy(args.output, root_data_path)
            print(f"Copied compiled data to {root_data_path}")
        except Exception as e:
            print(f"Warning: Failed to copy to root data directory: {e}")

    # Sync to engine/data/ to keep paths consistent
    try:
        os.makedirs(os.path.dirname(engine_data_path), exist_ok=True)
        shutil.copy(root_data_path, engine_data_path)
        print(f"Synced compiled data to {engine_data_path}")
    except Exception as e:
        print(f"Warning: Failed to sync to engine/data directory: {e}")