Upload src/data/zoo_canonical_names.py with huggingface_hub
Browse files- src/data/zoo_canonical_names.py +1043 -0
src/data/zoo_canonical_names.py
ADDED
|
@@ -0,0 +1,1043 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Canonical joint name mappings for Truebones Zoo animal skeletons (73 species).
|
| 2 |
+
|
| 3 |
+
Rule-based + lookup mapper that converts diverse Truebones naming conventions
|
| 4 |
+
(Bip01_, BN_, jt_, Japanese romaji, Sabrecat_, NPC_, game-engine names) into
|
| 5 |
+
standardised lowercase English anatomical labels.
|
| 6 |
+
|
| 7 |
+
Generated: 2026-03-18
|
| 8 |
+
Dataset: truebones_zoo (73 species, 1193 unique raw joint names)
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from __future__ import annotations
|
| 12 |
+
|
| 13 |
+
import re
|
| 14 |
+
from typing import Callable
|
| 15 |
+
|
| 16 |
+
# ---------------------------------------------------------------------------
|
| 17 |
+
# Pattern 4: Japanese romaji -> English lookup
|
| 18 |
+
# ---------------------------------------------------------------------------
|
| 19 |
+
|
| 20 |
+
# Romaji -> English lookup used by the Japanese-named rigs (Pattern 4:
# Alligator, Pirrana, Tukan).  Keys are the raw joint-name tokens exactly
# as they appear in the data, so misspellings present in the source rigs
# are deliberately kept as keys.
JAPANESE_MAP: dict[str, str] = {
    # Torso / core
    "koshi": "pelvis",
    "kosi": "pelvis",  # alternate romanisation
    "hara": "abdomen",
    "mune": "chest",
    "kubi": "neck",
    "atama": "head",
    "kao": "face",
    "ago": "jaw",
    # Limbs (bare form, without L_/R_ prefix; side is added by the handler)
    "kata": "shoulder",
    "hiji": "elbow",
    "te": "hand",
    "momo": "thigh",
    "hiza": "knee",
    "ashi": "foot",
    # Tail
    "sippo": "tail",  # numbered variants (sippo01, ...) handled by regex
    # Fish-specific (Pirrana / Tukan)
    "obire": "tail fin",
    "obireA": "tail fin upper",
    "obireB": "tail fin lower",
    "sebire": "dorsal fin",
    "harabireR": "right pectoral fin",
    "harabireL": "left pectoral fin",
    "munabireR": "right pectoral fin upper",
    "munabireL": "left pectoral fin upper",
    "eraR": "right gill",
    "eraL": "left gill",
    "shiribire": "anal fin",
    "shiribireA": "anal fin upper",
    "shirihireB": "anal fin lower",  # note: typo in original data
    "shippoA": "tail upper",
    "shippoB": "tail lower",
    # Misc
    "o": "tail base",
}
|
| 58 |
+
|
| 59 |
+
# ---------------------------------------------------------------------------
|
| 60 |
+
# Lookup tables for non-regex special cases
|
| 61 |
+
# ---------------------------------------------------------------------------
|
| 62 |
+
|
| 63 |
+
# Exact-match lookup for joint names that do not follow any prefix
# convention (roots, rig helpers, and plain sided limb names).  Keys are
# case-sensitive raw names; values are the canonical lowercase labels.
_STANDALONE_MAP: dict[str, str] = {
    # Root / global joints
    "Hips": "hips",
    "locator": "root",
    "locator2": "root",
    "Trajectory": "trajectory",
    "Head": "head",
    "Spine": "spine",
    "Handle": "handle",
    "Saddle": "saddle",
    "MESH": "mesh",
    "N_ALL": "root all",
    "MagicEffectsNode": "effects node",
    "EyesBlue": "right eye",
    "EyesBlue_2": "left eye",
    "ElkJaw": "jaw",
    "C_ctrl": "center control",
    "BN_P": "pelvis",
    "BN_Shell": "shell",
    "BN_Down": "lower body",
    "BN_Downbody": "lower body",
    # Standalone sided names (standard biped-style labels)
    "LeftArm": "left arm",
    "RightArm": "right arm",
    "LeftForeArm": "left forearm",
    "RightForeArm": "right forearm",
    "LeftHand": "left hand",
    "RightHand": "right hand",
    "LeftFoot": "left foot",
    "RightFoot": "right foot",
    "LeftLeg": "left leg",
    "RightLeg": "right leg",
    "LeftUpLeg": "left upper leg",
    "RightUpLeg": "right upper leg",
}
|
| 98 |
+
|
| 99 |
+
# Body part name normalisation for Bip01_ / BN_ / jt_ suffixes
|
| 100 |
+
# Body-part token normalisation shared by the Bip01_ / BN_ / jt_ handlers.
# Keys are lowercase raw tokens (including typos found in the data);
# values are the canonical English labels.
_BODY_PART_MAP: dict[str, str] = {
    # Core
    "pelvis": "pelvis",
    "spine": "spine",
    "neck": "neck",
    "head": "head",
    "jaw": "jaw",
    "ribcage": "ribcage",
    # Upper limb
    "clavicle": "clavicle",
    "upperarm": "upper arm",
    "forearm": "forearm",
    "hand": "hand",
    "finger": "finger",
    "thumb": "thumb",
    "wrist": "wrist",
    "palm": "palm",
    # Lower limb
    "thigh": "thigh",
    "calf": "calf",
    "horselink": "pastern",  # extra joint in digitigrade legs
    "foot": "foot",
    "toe": "toe",
    "ankle": "ankle",
    "knee": "knee",
    "hip": "hip",
    # Tail
    "tail": "tail",
    "tai": "tail",  # BN_Tai variants
    # Face / head
    "ear": "ear",
    "eye": "eye",
    "eyeball": "eyeball",
    "eyebrow": "eyebrow",
    "eyelid": "eyelid",
    "nose": "nose",
    "mouth": "mouth",
    "lip": "lip",
    "beard": "beard",
    "chin": "chin",
    "tongue": "tongue",
    "thouge": "tongue",  # typo in original data
    "tone": "tongue",  # BN_Tone (Anaconda typo for tongue)
    "mascara": "eyelash",
    # Appendages
    "wing": "wing",
    "feeler": "feeler",
    "feelers": "feeler",
    "clip": "claw",  # BN_Clip = pincers
    "claw": "claw",
    "pincers": "pincer",
    "tentacles": "tentacle",
    "piers": "pincer",  # BN_Piers = pincers (typo)
    "pliers": "pincer",  # BN_Pliers (typo variant)
    # Fur / mane / hair
    "fur": "fur",
    "mane": "mane",
    "hair": "hair",
    "ponytail": "ponytail",
    "ponitail": "ponytail",  # typo in original data
    # Armour / tack
    "halter": "halter",
    "reins": "reins",
    "shall": "shell plate",  # BN_Shall (shell plates)
    # Anatomy
    "shoulder": "shoulder",
    "elbow": "elbow",
    # Dinosaur-specific
    "cog": "center of gravity",
    "spline": "spine",  # BN_Spline = spine segments
    # Body
    "body": "body",
    "dorsal": "dorsal",
    "leg": "leg",
    "arm": "arm",
    "collarbone": "collarbone",
    "fang": "fang",
    # Index / middle / ring / pinky
    "index": "index",
    "middle": "middle",
    "ring": "ring",
    "pinky": "pinky",
}
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
# ---------------------------------------------------------------------------
|
| 186 |
+
# Helper: CamelCase splitter
|
| 187 |
+
# ---------------------------------------------------------------------------
|
| 188 |
+
|
| 189 |
+
_CAMEL_RE = re.compile(r"(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])")
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
def _camel_to_words(s: str) -> str:
|
| 193 |
+
"""Split CamelCase into lowercase space-separated words."""
|
| 194 |
+
return _CAMEL_RE.sub(" ", s).lower()
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
# ---------------------------------------------------------------------------
|
| 198 |
+
# Helper: side detection
|
| 199 |
+
# ---------------------------------------------------------------------------
|
| 200 |
+
|
| 201 |
+
def _detect_side(name: str) -> tuple[str, str]:
|
| 202 |
+
"""Return (side_prefix, name_without_side).
|
| 203 |
+
|
| 204 |
+
side_prefix is "left ", "right ", "center ", or "".
|
| 205 |
+
"""
|
| 206 |
+
# _L, _R, _C suffixes (jt_ style)
|
| 207 |
+
m = re.match(r"^(.+?)_(L|R|C)$", name)
|
| 208 |
+
if m:
|
| 209 |
+
sides = {"L": "left ", "R": "right ", "C": "center "}
|
| 210 |
+
return sides[m.group(2)], m.group(1)
|
| 211 |
+
|
| 212 |
+
# L_ or R_ prefix
|
| 213 |
+
m = re.match(r"^(L|R)_(.+)$", name)
|
| 214 |
+
if m:
|
| 215 |
+
sides = {"L": "left ", "R": "right "}
|
| 216 |
+
return sides[m.group(1)], m.group(2)
|
| 217 |
+
|
| 218 |
+
# Embedded _L_ or _R_ or _C_
|
| 219 |
+
m = re.match(r"^(.+?)_(L|R|C)_(.+)$", name)
|
| 220 |
+
if m:
|
| 221 |
+
sides = {"L": "left ", "R": "right ", "C": "center "}
|
| 222 |
+
return sides[m.group(2)], m.group(1) + "_" + m.group(3)
|
| 223 |
+
|
| 224 |
+
return "", name
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
# ---------------------------------------------------------------------------
|
| 228 |
+
# Helper: trailing number extractor
|
| 229 |
+
# ---------------------------------------------------------------------------
|
| 230 |
+
|
| 231 |
+
def _extract_trailing_number(s: str) -> tuple[str, str]:
|
| 232 |
+
"""Split 'Foo02' -> ('Foo', ' 2') or 'Foo' -> ('Foo', '')."""
|
| 233 |
+
m = re.match(r"^(.*?)(\d+)$", s)
|
| 234 |
+
if m:
|
| 235 |
+
base = m.group(1).rstrip("_")
|
| 236 |
+
num = str(int(m.group(2))) # strip leading zeros
|
| 237 |
+
return base, f" {num}"
|
| 238 |
+
return s, ""
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
# ---------------------------------------------------------------------------
|
| 242 |
+
# PREFIX_RULES: list of (regex, handler) applied in priority order
|
| 243 |
+
# ---------------------------------------------------------------------------
|
| 244 |
+
|
| 245 |
+
def _handle_bip01(raw: str) -> str | None:
    """Pattern 1: Bip01_ prefix (most common, ~1900 occurrences).

    Returns the canonical lowercase name, or ``None`` when *raw* does not
    start with (optionally underscore-prefixed) ``Bip01_``.  The special
    cases below are checked in a deliberate priority order: compound
    names and Xtra/Ponytail bones must be recognised BEFORE the generic
    trailing-number extraction would mangle them.
    """
    m = re.match(r"^_?Bip01_(.+)$", raw)
    if not m:
        return None
    rest = m.group(1).lstrip("_")  # strip stray leading underscores

    # Detect Nub suffix (leaf joint marker); re-applied as " end" below.
    is_nub = False
    if rest.endswith("Nub"):
        is_nub = True
        rest = rest[:-3]

    # Detect side (_L/_R/_C suffix, L_/R_ prefix, or embedded _L_/_R_).
    side, rest = _detect_side(rest)

    # Handle trailing L/R side markers without underscore (SpineL, SpineR).
    # Only treated as a side marker when the stem is a known body part,
    # so names that merely end in a capital L/R are left alone.
    if not side:
        side_suffix_m = re.match(r"^(.+?)(L|R)$", rest)
        if side_suffix_m:
            candidate = side_suffix_m.group(1).lower()
            if candidate in _BODY_PART_MAP or candidate.rstrip("_") in _BODY_PART_MAP:
                side = "left " if side_suffix_m.group(2) == "L" else "right "
                rest = side_suffix_m.group(1)

    # Special compound names (checked BEFORE number extraction so
    # Spine0_Tail matches correctly).  Keys include source-data typos.
    rest_lower_full = rest.lower().rstrip("_")
    compound_map = {
        "horselink": "pastern",
        "head_jaw": "jaw",
        "head1_jaw": "jaw",
        "head_muzzle": "muzzle",
        "head_brain": "brain",
        "head2_eyeleds": "eyelid",
        "head11_tungecontroler": "tongue controller",
        "ponytail3_tunge": "tongue",
        "xtra_spine": "extra spine",
        "xtra_neck": "extra neck",
        "arm_nub": "arm end",
        "leg_mid_nub": "mid leg end",
        "leg_rear_nub": "rear leg end",
    }
    if rest_lower_full in compound_map:
        base = compound_map[rest_lower_full]
        result = f"{side}{base}"
        # Avoid "... end end" for names that already map to an "end" label.
        if is_nub and "end" not in result:
            result += " end"
        return result.strip()

    # Compound: Spine0_Tail, Spine0_Tail1, etc. -> "tail base [N]".
    spine_tail_m = re.match(
        r"^Spine\d+_Tail(\d*)$", rest, re.IGNORECASE
    )
    if spine_tail_m:
        num = spine_tail_m.group(1)
        suffix = f" {int(num)}" if num else ""
        result = f"{side}tail base{suffix}"
        if is_nub:
            result += " end"
        return result.strip()

    # Compound: Spine1_LWing, Spine1_RWing -> wing attachment point.
    spine_wing_m = re.match(
        r"^Spine\d+_(L|R)Wing$", rest, re.IGNORECASE
    )
    if spine_wing_m:
        wing_side = "left " if spine_wing_m.group(1) == "L" else "right "
        return f"{wing_side}wing root"

    # Compound with position qualifier: Calf_Mid, Calf_Rear, Thigh_Mid,
    # Foot_Mid, etc. -> "mid calf", "rear calf", ...
    qual_m = re.match(r"^(\w+?)_(Mid|Rear)$", rest, re.IGNORECASE)
    if qual_m:
        part = qual_m.group(1).lower()
        qualifier = qual_m.group(2).lower()
        canonical_part = _BODY_PART_MAP.get(part, part)
        result = f"{side}{qualifier} {canonical_part}"
        if is_nub:
            result += " end"
        return result.strip()

    # Compound segment index: Foot_1, Foot_2, FootNub_1, Thigh_1,
    # Thigh1_1, etc.  Restricted to limb parts so sided/numbered names
    # like Ear_01 or Ponytail_L01 fall through to their own handlers.
    _SEGMENT_PARTS = {
        "foot", "calf", "thigh", "toe", "finger",
        "thigh1", "calf1", "foot1",
    }
    part_sub_m = re.match(r"^(\w+?)(\d*)_(\d+)$", rest)
    if part_sub_m:
        part_raw = part_sub_m.group(1).lower()
        if part_raw in _SEGMENT_PARTS:
            group_num = part_sub_m.group(2)
            sub_num = str(int(part_sub_m.group(3)))
            canonical_part = _BODY_PART_MAP.get(part_raw, part_raw)
            group_str = f" {int(group_num)}" if group_num else ""
            result = f"{side}{canonical_part}{group_str} segment {sub_num}"
            if is_nub:
                result += " end"
            return result.strip()

    # Handle Xtra (extra bone) -- BEFORE general number extraction.
    rest_lower_pre = rest.lower().rstrip("_")
    if rest_lower_pre.startswith("xtra"):
        # Parse: Xtra[NN][Opp][Nub], Xtra_Spine, Xtra_Neck01, etc.
        # The index is typically 2 digits (01-08); \d{0,2} avoids
        # swallowing sub-numbers (e.g. Xtra0102 = Xtra01 + sub 02).
        xtra_m = re.match(
            r"^Xtra_?(\d{0,2})(?:_?([A-Za-z]+?))?(\d*)$", rest, re.IGNORECASE
        )
        if xtra_m:
            xtra_num = xtra_m.group(1)
            xtra_part = (xtra_m.group(2) or "").lower()
            xtra_num2 = xtra_m.group(3)
            n = f" {int(xtra_num)}" if xtra_num else ""
            n2 = f" {int(xtra_num2)}" if xtra_num2 else ""
            if xtra_part == "opp":
                result = f"extra opposite{n}"
            elif xtra_part == "oppnub":
                # "Nub" was already stripped above; same label as "opp".
                result = f"extra opposite{n}"
            elif xtra_part in _BODY_PART_MAP:
                result = f"extra {_BODY_PART_MAP[xtra_part]}{n2}{n}"
            elif xtra_part:
                result = f"extra {xtra_part}{n2}{n}"
            else:
                result = f"extra{n}"
        else:
            # Unparseable Xtra variant: fall back to a bare label.
            result = "extra"
        result = f"{side}{result}"
        if is_nub:
            result += " end"
        return result.strip()

    # Handle Ponytail with embedded antenna/mandible parts.
    # Must be checked BEFORE general number extraction because the side
    # may already have been extracted from an embedded _L_/_R_ marker.
    rest_lower_check = rest.lower().rstrip("_")
    if rest_lower_check.startswith("ponytail"):
        # Parse: Ponytail[NN][_]?[L|R][_]?PartName[NN]
        pony_m = re.match(
            r"^Ponytail(\d*)[_]?(?:(L|R)[_]?)?([A-Za-z_]*?)(\d*)$",
            rest, re.IGNORECASE
        )
        if pony_m:
            pony_idx = pony_m.group(1)  # ponytail chain index
            pony_side_char = pony_m.group(2)  # L or R
            pony_part = pony_m.group(3).lower().rstrip("_") if pony_m.group(3) else ""
            pony_num = pony_m.group(4)  # trailing number on the part

            # Side embedded in the ponytail name wins over the outer side.
            if pony_side_char:
                eff_side = "left " if pony_side_char.upper() == "L" else "right "
            else:
                eff_side = side

            if pony_part and pony_part not in ("ponytail", ""):
                # Embedded body part: antenna, mandible, etc.
                part = _BODY_PART_MAP.get(pony_part, pony_part)
                p_num = f" {int(pony_num)}" if pony_num else ""
                result = f"{eff_side}{part}{p_num}"
            else:
                # Pure sided ponytail: Ponytail_L01, Ponytail_R03.
                if pony_num:
                    p_num = f" {int(pony_num)}"
                elif pony_idx:
                    p_num = f" {int(pony_idx)}"
                else:
                    p_num = ""
                result = f"{eff_side}ponytail{p_num}"
        else:
            # Simple ponytail with a trailing number.
            _, p_num_suffix = _extract_trailing_number(rest)
            result = f"{side}ponytail{p_num_suffix}"
        if is_nub:
            result += " end"
        return result.strip()

    # Now extract the trailing number for simple names.
    rest_stripped, num_suffix = _extract_trailing_number(rest)
    rest_lower = rest_stripped.lower().rstrip("_")

    # Handle Ear (Bip01_R_Ear_01, Bip01__L_Ear_01).
    if rest_lower.startswith("ear") or rest_lower_check.startswith("ear"):
        # rest may be 'Ear_01' or 'Ear01'.
        ear_m = re.match(r"^Ear[_]?(\d+)$", rest, re.IGNORECASE)
        if ear_m:
            n = str(int(ear_m.group(1)))
            result = f"{side}ear {n}"
        else:
            result = f"{side}ear{num_suffix}"
        if is_nub:
            result += " end"
        return result.strip()

    # Standard body part lookup (the common case).
    part_name = rest_stripped.rstrip("_")
    part_lower = part_name.lower()

    # Try direct lookup (rest is already number-stripped, e.g. "Spine").
    canonical_part = _BODY_PART_MAP.get(part_lower, None)
    if canonical_part is None:
        # Try with an inner trailing number: e.g. "Finger01" where
        # rest_stripped="Finger01" (Nub suffix was the only thing stripped).
        inner_base, inner_num = _extract_trailing_number(part_lower)
        canonical_inner = _BODY_PART_MAP.get(inner_base, None)
        if canonical_inner is not None:
            canonical_part = canonical_inner
            num_suffix = inner_num + num_suffix  # combine inner + outer
        else:
            # CamelCase split: e.g. "UpperArm" -> "upper arm".
            canonical_part = _camel_to_words(part_name).strip()
            # Re-extract a number exposed by the camel split.
            words = canonical_part.split()
            if words and words[-1].isdigit():
                extra_num = f" {int(words[-1])}"
                canonical_part = " ".join(words[:-1])
                num_suffix = extra_num + num_suffix

    result = f"{side}{canonical_part}{num_suffix}"
    if is_nub:
        result += " end"
    return result.strip()
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
def _handle_bn(raw: str) -> str | None:
    """Pattern 2: BN_ / Bn_ prefix (~900 occurrences).

    Returns the canonical lowercase name, or ``None`` when *raw* does not
    carry a ``BN_``/``Bn_`` prefix.
    """
    m = re.match(r"^[Bb][Nn]_(.+)$", raw)
    if not m:
        return None
    rest = m.group(1)

    # Detect and strip the Nub suffix (leaf joint marker).
    is_nub = False
    if rest.endswith("Nub"):
        is_nub = True
        rest = rest[:-3]

    # Handle BN_Bip01_Pelvis style (nested prefix): delegate to the
    # Bip01 handler.  Re-attach the stripped Nub suffix first so the
    # delegate's leaf-joint handling ("... end") is not lost
    # (bug fix: previously BN_Bip01_HeadNub mapped to plain "head").
    if rest.startswith("Bip01_"):
        return _handle_bip01(rest + ("Nub" if is_nub else ""))

    # Handle BN_LWing01 / BN_RWing02 (side letter fused to the part name).
    wing_m = re.match(r"^(L|R)(Wing)(\d+)$", rest)
    if wing_m:
        side = "left " if wing_m.group(1) == "L" else "right "
        num = str(int(wing_m.group(3)))
        return f"{side}wing {num}"

    # Handle BN_LBeard / BN_RBeard / BN_FBeard / BN_LEyeball etc.
    beard_m = re.match(r"^(L|R|F)(Beard|Eyeball)(\d*)$", rest)
    if beard_m:
        side_map = {"L": "left ", "R": "right ", "F": "front "}
        side = side_map[beard_m.group(1)]
        part = beard_m.group(2).lower()
        part = _BODY_PART_MAP.get(part, part)
        num = f" {int(beard_m.group(3))}" if beard_m.group(3) else ""
        return f"{side}{part}{num}"

    # Handle BN_Crab_pincers_L_01 style.
    crab_m = re.match(r"^Crab_pincers_(L|R)_(\d+)$", rest)
    if crab_m:
        side = "left " if crab_m.group(1) == "L" else "right "
        num = str(int(crab_m.group(2)))
        return f"{side}pincer {num}"

    # Handle double-underscore variants: BN__Forearm_L_01, BN__Neck_01.
    if rest.startswith("_"):
        rest = rest.lstrip("_")

    # Side marker (_L/_R suffix, L_/R_ prefix, or embedded _L_/_R_).
    side, rest = _detect_side(rest)

    # Trailing index: BN_Tail03 -> ("Tail", " 3").
    rest, num_suffix = _extract_trailing_number(rest)

    rest_clean = rest.rstrip("_").lower()

    # An inner number may remain after the outer one was taken
    # (e.g. "Toe01_2" -> base "toe01" + outer " 2"): split it off too.
    inner_num_m = re.match(r"^(\w+?)(\d+)$", rest_clean)
    if inner_num_m:
        inner_base = inner_num_m.group(1)
        inner_num = str(int(inner_num_m.group(2)))
        canonical_part = _BODY_PART_MAP.get(inner_base, inner_base)
        result = f"{side}{canonical_part} {inner_num}{num_suffix}"
    else:
        canonical_part = _BODY_PART_MAP.get(rest_clean, None)
        if canonical_part is None:
            # Unknown part: fall back to CamelCase splitting.
            canonical_part = _camel_to_words(rest.rstrip("_")).strip()
        result = f"{side}{canonical_part}{num_suffix}"

    if is_nub:
        result += " end"
    return result.strip()
|
| 541 |
+
|
| 542 |
+
|
| 543 |
+
def _handle_jt(raw: str) -> str | None:
    """Pattern 3: jt_ prefix (Trex, Raptor2/3, ~286 occurrences)."""
    match = re.match(r"^jt_(.+)$", raw)
    if match is None:
        return None

    # Peel off the side marker (_L/_R/_C suffix) and any trailing index.
    body_side, body = _detect_side(match.group(1))
    body, num_suffix = _extract_trailing_number(body)

    # Split CamelCase compounds (ToeMiddle, ClawInner, ...) and
    # canonicalise each word through the shared body-part table.
    mapped = [
        _BODY_PART_MAP.get(word, word)
        for word in _camel_to_words(body.rstrip("_")).strip().split()
    ]
    canonical = " ".join(mapped)

    # "x"-suffixed tails (jt_Tail01x_C) mark a secondary chain.
    if canonical.endswith("x"):
        canonical = canonical[:-1].rstrip() + " secondary"

    return f"{body_side}{canonical}{num_suffix}".strip()
|
| 572 |
+
|
| 573 |
+
|
| 574 |
+
def _handle_japanese(raw: str) -> str | None:
    """Pattern 4: Japanese romaji (Alligator, Pirrana, Tukan)."""
    # Whole name is a known romaji token (covers sided fish fins like
    # "harabireR", whose side is already encoded in the mapped value).
    direct = JAPANESE_MAP.get(raw)
    if direct is not None:
        return direct

    # Sided romaji: L_kata -> left shoulder, R_sippo01 -> right tail 1.
    sided = re.match(r"^(L|R)_(.+)$", raw)
    if sided:
        prefix = "left " if sided.group(1) == "L" else "right "
        remainder = sided.group(2)
        stem, index = _extract_trailing_number(remainder)
        if stem in JAPANESE_MAP:
            return f"{prefix}{JAPANESE_MAP[stem]}{index}"
        if remainder in JAPANESE_MAP:
            return f"{prefix}{JAPANESE_MAP[remainder]}"
        return None

    # Unsided numbered romaji: sippo01 -> tail 1.
    stem, index = _extract_trailing_number(raw)
    if stem in JAPANESE_MAP:
        return f"{JAPANESE_MAP[stem]}{index}"

    return None
|
| 599 |
+
|
| 600 |
+
|
| 601 |
+
def _handle_npc(raw: str) -> str | None:
    """Pattern 5a: NPC_ prefix (SabreToothTiger reskin, ~77 occurrences).

    Parameters
    ----------
    raw : str
        Raw joint name expected to start with ``NPC_``.

    Returns
    -------
    str | None
        Canonical name, or ``None`` when *raw* has no ``NPC_`` prefix.
    """
    m = re.match(r"^NPC_(.+)$", raw)
    if not m:
        return None
    rest = m.group(1)

    # Strip trailing short-code markers like __Head_, __LMag_ (max 6 chars)
    rest = re.sub(r"__\w{1,6}_$", "", rest)

    # Detect side via L/R prefix in subname
    side = ""
    side_m = re.match(r"^(L|R)_?(.+)$", rest)
    if side_m and len(side_m.group(2)) > 1:  # avoid matching single chars
        side = "left " if side_m.group(1) == "L" else "right "
        rest = side_m.group(2)

    # Special: magic node
    if "MagicNode" in rest or "MagicEffects" in rest:
        return f"{side}magic node".strip()

    # Extract trailing number
    rest, num_suffix = _extract_trailing_number(rest)

    # Specific NPC compound names that generic CamelCase splitting
    # would render incorrectly.
    rest_lower = rest.rstrip("_").lower()
    npc_special = {
        "head": "head",
        "jaw": "jaw",
        "nose": "nose",
        "pelvis": "pelvis",
        "ribcage": "ribcage",
        "neckjiggle": "neck jiggle",
        "armjiggle": "arm jiggle",
        "armpalm": "palm",
        "armball": "arm ball",
        "armcollarbone": "collarbone",
        "thighjiggle": "thigh jiggle",
        "legankle": "ankle",
        "legball": "leg ball",
        "upperlip": "upper lip",
        "upperleftlip": "upper left lip",
        "upperrightlip": "upper right lip",
        "lowerfrontlip": "lower front lip",
        "lowerleftlip": "lower left lip",
        "lowerrightlip": "lower right lip",
        "eyebrow": "eyebrow",
        "spine1backjiggle": "spine 1 back jiggle",
        "spine1jiggle": "spine 1 jiggle",
        "spine4jiggle": "spine 4 jiggle",
    }
    if rest_lower in npc_special:
        result = f"{side}{npc_special[rest_lower]}{num_suffix}"
        return result.strip()

    # CamelCase split and map
    words = _camel_to_words(rest.rstrip("_")).strip()

    # Handle embedded side markers: LArm1, RLeg2, etc.
    inner_side_m = re.match(r"^(l|r)(\w+)$", words.replace(" ", ""))
    if inner_side_m and not side:
        inner_s = inner_side_m.group(1)
        side = "left " if inner_s == "l" else "right "
        words = _camel_to_words(rest.rstrip("_")[1:]).strip()

    # Remap every token through the shared body-part vocabulary.
    # (The original loop carried a skip_next flag that was never set to
    # True and an unused enumerate index -- dead code removed.)
    canonical = " ".join(_BODY_PART_MAP.get(t, t) for t in words.split())

    result = f"{side}{canonical}{num_suffix}"
    return result.strip()
+
|
| 681 |
+
def _handle_sabrecat(raw: str) -> str | None:
    """Pattern 5b: Sabrecat_ prefix (SabreToothTiger, ~63 occurrences).

    Parameters
    ----------
    raw : str
        Raw joint name expected to start with ``Sabrecat_``.

    Returns
    -------
    str | None
        Canonical name, or ``None`` when *raw* has no ``Sabrecat_`` prefix.
    """
    m = re.match(r"^Sabrecat_(.+)$", raw)
    if not m:
        return None
    rest = m.group(1)

    # Special: _pelv_ (pelvis) -- check BEFORE stripping short codes
    if rest.lstrip("_").startswith("pelv"):
        return "pelvis"

    # HeadLeftEar_LEar_ / HeadRightEar_REar_: must run BEFORE the generic
    # Head branch below. That branch returns on every path, so in the
    # original these checks were unreachable and such names fell through
    # to the generic fallback ("head left ear" instead of "left ear").
    if rest.startswith("HeadLeft"):
        part = rest[8:].split("_")[0].lower()
        return f"left {part}"
    if rest.startswith("HeadRight"):
        part = rest[9:].split("_")[0].lower()
        return f"right {part}"

    # Handle Head sub-parts BEFORE short-code stripping (the trailing
    # codes carry the actual sub-part info for Head joints)
    # Patterns: Head__LEye_, Head__REye_, Head__RChk_, Head__LChk_,
    #           Head_Head__LChk_, Head_jaw_, Head_LM01_, Head_RM01_,
    #           Head_EyeLid_HELT_, HeadEyeLid__HELB_
    if rest.startswith("Head"):
        sub = rest[4:].strip("_")
        if not sub:
            return "head"
        sub_lower = sub.lower().strip("_")

        # Short-code -> canonical head sub-part lookup.
        head_sabrecat_map = {
            "jaw": "jaw",
            "leye": "left eye",
            "reye": "right eye",
            "lchk": "left cheek",
            "rchk": "right cheek",
            "lm01": "left muzzle 1",
            "rm01": "right muzzle 1",
            "helt": "upper eyelid",
            "helb": "lower eyelid",
        }
        # Extract the short code from the full sub-part, e.g.
        # "Head__LChk_" -> sub = "Head__LChk". The LAST code is the most
        # specific, so try it first, then scan the remaining codes.
        codes = [c for c in sub.split("_") if c]
        if codes:
            last_code = codes[-1].lower()
            if last_code in head_sabrecat_map:
                return head_sabrecat_map[last_code]
            for code in codes:
                cl = code.lower()
                if cl in head_sabrecat_map:
                    return head_sabrecat_map[cl]

        # Generic EyeLid / Jaw names without a recognized short code.
        # (This also covers HeadEyeLid* names, so no separate check for
        # that prefix is needed.)
        if "eyelid" in sub_lower:
            return "eyelid"
        if "jaw" in sub_lower:
            return "jaw"
        # "Head_Head..." -> plain head when nothing meaningful follows.
        if sub_lower.startswith("head"):
            inner = sub_lower[4:].strip("_")
            if not inner:
                return "head"
        # Fallback: treat the first component as a head sub-part name.
        inner = _camel_to_words(sub.split("_")[0])
        return f"head {inner}".strip()

    # Extract side and number from trailing short code before stripping
    # (e.g. _LThi1_ -> side "left ", number 1).
    code_side = ""
    code_num = ""
    code_m = re.search(r"_([LR]?)(\w*?)(\d+)_$", rest)
    if code_m:
        if code_m.group(1):
            code_side = "left " if code_m.group(1) == "L" else "right "
        code_num = f" {int(code_m.group(3))}"

    # Strip trailing short code: _LThi_, _RClf_, _Spn0_ etc.
    rest = re.sub(r"_\w{2,5}_$", "", rest)

    # Detect side via Left/Right in name, with fallback to short code side
    side = ""
    if rest.startswith("Left"):
        side = "left "
        rest = rest[4:]
    elif rest.startswith("Right"):
        side = "right "
        rest = rest[5:]
    elif code_side:
        side = code_side

    # Handle Neck, Spine, Ribcage, Tail, Finger, Toe
    rest, num_suffix = _extract_trailing_number(rest)
    # Use short-code number as fallback if no number in the main part
    if not num_suffix and code_num:
        num_suffix = code_num
    words = _camel_to_words(rest.rstrip("_")).strip()
    canonical = " ".join(_BODY_PART_MAP.get(t, t) for t in words.split())

    result = f"{side}{canonical}{num_suffix}"
    return result.strip()
| 793 |
+
|
| 794 |
+
def _handle_game_engine(raw: str) -> str | None:
    """Pattern 5c: Game-engine names (Spider, etc.)."""

    def _lr(flag: str) -> str:
        # Translate a single L/R capture into the spoken side prefix.
        return "left " if flag == "L" else "right "

    # Spider arms: Arm[LR]Collarbone, Arm[LR]Claw, Arm[LR]_01_
    m = re.match(r"^Arm(L|R)(\w+?)_?$", raw)
    if m:
        stem, num = _extract_trailing_number(m.group(2))
        part = _BODY_PART_MAP.get(stem.lower(), _camel_to_words(stem))
        return f"{_lr(m.group(1))}arm {part}{num}".strip()

    # Spider legs: Leg_[LR]_NN_ where NN encodes leg group and segment.
    m = re.match(r"^Leg_(L|R)_(\d+)_$", raw)
    if m:
        idx = int(m.group(2))
        return f"{_lr(m.group(1))}leg {idx // 10} segment {idx % 10}"

    # Fang[LR]_NN_
    m = re.match(r"^Fang(L|R)_(\d+)_$", raw)
    if m:
        return f"{_lr(m.group(1))}fang {int(m.group(2))}"

    # _[LR]Toe[N]_ or _[LR]Jaw_
    m = re.match(r"^_(L|R)(Toe|Jaw)(\d*)_$", raw)
    if m:
        digits = m.group(3)
        num = f" {int(digits)}" if digits else ""
        return f"{_lr(m.group(1))}{m.group(2).lower()}{num}"

    # Fixed one-off names.
    exact = {"_body_": "body", "NPC_Head__Head_": "head"}
    if raw in exact:
        return exact[raw]

    # Elk* prefix (Raindeer-like)
    m = re.match(r"^Elk(L|R)?(.+)$", raw)
    if m:
        side = _lr(m.group(1)) if m.group(1) else ""
        rest = m.group(2)
        rest_lower = rest.lower().rstrip("_")
        # Compound anatomical names: UpperLip, FrontHoof, bone names, etc.
        elk_parts = {
            "pelvis": "pelvis",
            "ribcage": "ribcage",
            "scull": "skull",
            "scullbase": "skull base",
            "jaw": "jaw",
            "ear": "ear",
            "femur": "femur",
            "tibia": "tibia",
            "humerus": "humerus",
            "radius": "radius",
            "scapula": "scapula",
            "metacarpus": "metacarpus",
            "phalanxprima": "phalanx prima",
            "phalangesmanus": "phalanges manus",
            "largecannon": "cannon bone",
            "fronthoof": "front hoof",
            "rearhoof": "rear hoof",
            "upperlip": "upper lip",
        }
        stem, num = _extract_trailing_number(rest_lower)
        # All elk_parts values are non-empty strings, so `or` chains the
        # two lookups safely.
        part = elk_parts.get(stem) or elk_parts.get(rest_lower)
        if part is None:
            # Unknown part: CamelCase split, then re-split the number.
            part, num = _extract_trailing_number(_camel_to_words(rest).strip())
        return f"{side}{part}{num}".strip()

    # Simple numbered fallbacks: Bone07, Tail03, body01, bare _NN.
    for pattern, label in (
        (r"^Bone(\d+)$", "bone"),
        (r"^Tail(\d+)$", "tail"),
        (r"^body(\d+)$", "body"),
        (r"^_(\d+)$", "joint"),
    ):
        m = re.match(pattern, raw)
        if m:
            return f"{label} {int(m.group(1))}"

    return None
+
|
| 898 |
+
|
| 899 |
+
def _handle_misc(raw: str) -> str | None:
    """Catch-all for remaining patterns."""
    # Rig utility nodes: IK_Chain01, Dummy01_*, ESI1_*, ProjectileNode_*
    if raw.startswith("IK_"):
        return "ik " + _camel_to_words(raw[3:]).strip()
    if raw.startswith("Dummy01_"):
        return "dummy " + _camel_to_words(raw[8:]).strip().rstrip("_")
    if raw.startswith("ESI1_"):
        return _camel_to_words(raw[5:]).strip()
    if raw.startswith("ProjectileNode_"):
        return "projectile " + raw[15:].lower().rstrip("_")

    # Magic-effect nodes, e.g. NPC_L_MagicNode__LMag_
    if "MagicNode" in raw or "MagicEffects" in raw:
        if "_L_" in raw or "NPC_L" in raw:
            side = "left "
        elif "_R_" in raw:
            side = "right "
        else:
            side = ""
        return f"{side}magic node".strip()

    # BN_center
    if raw == "BN_center":
        return "center"

    return None
+
|
| 921 |
+
|
| 922 |
+
# ---------------------------------------------------------------------------
|
| 923 |
+
# PREFIX_RULES: ordered list of (pattern_regex, handler_function)
|
| 924 |
+
# ---------------------------------------------------------------------------
|
| 925 |
+
|
| 926 |
+
# NOTE(review): order matters -- canonicalize_zoo_joint tries these pairs
# top-to-bottom and accepts a rule only when its regex matches AND the
# handler returns a non-None string; on None the scan continues, so the
# broad catch-alls must stay at the bottom.
PREFIX_RULES: list[tuple[str, Callable[[str], str | None]]] = [
    # Highest priority: exact matches (standalone + Japanese)
    # (an unmapped name makes the lambda return None via dict.get,
    # which lets the later rules run)
    (r"^(Hips|locator2?|Trajectory|Head|Spine|Handle|Saddle|MESH|N_ALL|"
     r"MagicEffectsNode|EyesBlue|EyesBlue_2|C_ctrl|BN_P|BN_Shell|BN_Down|"
     r"BN_Downbody|BN_center|Left\w+|Right\w+)$",
     lambda raw: _STANDALONE_MAP.get(raw)),

    # Japanese romaji (check before Bip01 since L_/R_ prefix overlaps)
    (r"^(?:[LR]_)?(?:koshi|kosi|hara|mune|kubi|atama|kao|ago|kata|hiji|te|"
     r"momo|hiza|ashi|sippo|obire|sebire|harabire|munabire|era|shiribire|"
     r"shirihire|shippo|o)\w*$",
     _handle_japanese),

    # Bip01_ prefix (most common)
    (r"^_?Bip01_", _handle_bip01),

    # BN_ / Bn_ prefix
    (r"^[Bb][Nn]_", _handle_bn),

    # jt_ prefix
    (r"^jt_", _handle_jt),

    # NPC_ prefix
    (r"^NPC_", _handle_npc),

    # Sabrecat_ prefix
    (r"^Sabrecat_", _handle_sabrecat),

    # Game-engine patterns (Spider, Elk, Bone, etc.)
    (r"^(?:Arm[LR]|Leg_[LR]|Fang[LR]|_[LR](?:Toe|Jaw)|_body_|_\d+$|"
     r"Elk[LR]?|Bone\d|Tail\d|body\d|NPC_Head__Head_)",
     _handle_game_engine),

    # Miscellaneous catch-all (matches any non-empty name)
    (r".", _handle_misc),
]
|
| 962 |
+
|
| 963 |
+
# Pre-compile regexes
|
| 964 |
+
# Compile every rule pattern once at import time; canonicalize_zoo_joint
# scans this list on every call.
_COMPILED_RULES: list[tuple[re.Pattern, Callable[[str], str | None]]] = [
    (re.compile(rx), fn) for rx, fn in PREFIX_RULES
]
|
| 967 |
+
|
| 968 |
+
|
| 969 |
+
# ---------------------------------------------------------------------------
|
| 970 |
+
# Main API
|
| 971 |
+
# ---------------------------------------------------------------------------
|
| 972 |
+
|
| 973 |
+
def canonicalize_zoo_joint(name: str) -> str:
    """Convert a single Truebones Zoo raw joint name to canonical form.

    Parameters
    ----------
    name : str
        Raw joint name as stored in the skeleton .npz file.

    Returns
    -------
    str
        Lowercase canonical English name. Falls back to lowercased + cleaned
        version of the input if no rule matches.
    """
    # First rule whose regex matches AND whose handler produces a result wins.
    for rx, handler in _COMPILED_RULES:
        if not rx.search(name):
            continue
        mapped = handler(name)
        if mapped is not None:
            # Final cleanup: collapse internal whitespace and trim.
            return re.sub(r"\s+", " ", mapped).strip()

    # No rule fired: lowercase and turn underscore runs into single spaces.
    cleaned = name.strip("_").lower()
    return re.sub(r"[_]+", " ", cleaned).strip()
|
| 1000 |
+
|
| 1001 |
+
|
| 1002 |
+
def get_zoo_canonical_names(joint_names: list[str]) -> list[str]:
    """Map a list of raw Truebones Zoo joint names to canonical form.

    Parameters
    ----------
    joint_names : list[str]
        Raw joint names from a skeleton .npz file.

    Returns
    -------
    list[str]
        Canonical names in the same order as *joint_names*.
    """
    return list(map(canonicalize_zoo_joint, joint_names))
|
| 1016 |
+
|
| 1017 |
+
|
| 1018 |
+
# ---------------------------------------------------------------------------
|
| 1019 |
+
# CLI: quick self-test
|
| 1020 |
+
# ---------------------------------------------------------------------------
|
| 1021 |
+
|
| 1022 |
+
if __name__ == "__main__":
    from pathlib import Path

    import numpy as np

    # Quick self-test: canonicalize the joint names of a few representative
    # skeletons and print the raw -> canonical mapping for eyeballing.
    skel_dir = (Path(__file__).resolve().parents[2]
                / "data" / "processed" / "truebones_zoo" / "skeletons")

    for species in ("Dog", "Cat", "Horse", "Eagle", "Anaconda",
                    "Trex", "Spider", "Ant", "Dragon", "Crab"):
        skel_path = skel_dir / f"{species}.npz"
        if not skel_path.exists():
            print(f"[SKIP] {species}: file not found")
            continue
        archive = np.load(str(skel_path), allow_pickle=True)
        raw_names = [str(n) for n in archive["joint_names"]]
        canonical = get_zoo_canonical_names(raw_names)
        banner = "=" * 60
        print(f"\n{banner}")
        print(f" {species} ({len(raw_names)} joints)")
        print(banner)
        for r, c in zip(raw_names, canonical):
            print(f" {r:45s} -> {c}")
|