# TopoSlots-MotionData — src/data/fbx_reader.py
# (uploaded via huggingface_hub; commit 9b97d00)
"""
Minimal pure-Python FBX binary reader for extracting skeleton animation data.
Parses FBX binary format to extract:
- Node hierarchy (skeleton tree)
- Animation curves (rotation, translation per bone)
- Rest pose (bind pose)
Then converts to our BVH-like internal format for further processing.
Reference: https://code.blender.org/2013/08/fbx-binary-file-format-specification/
"""
import struct
import zlib
import numpy as np
from pathlib import Path
from dataclasses import dataclass, field
from typing import Optional, Any
# ============================================================
# FBX Binary Parser
# ============================================================
@dataclass
class FBXNode:
    """One node of the parsed FBX tree: a name, its decoded property
    values, and any nested child nodes."""
    name: str
    properties: list = field(default_factory=list)
    children: list = field(default_factory=list)

    def find(self, name: str) -> Optional['FBXNode']:
        """Return the first direct child named *name*, or None if absent."""
        matches = self.find_all(name)
        return matches[0] if matches else None

    def find_all(self, name: str) -> list['FBXNode']:
        """Return every direct child named *name* (possibly empty)."""
        return [child for child in self.children if child.name == name]
def read_fbx(filepath: str | Path) -> FBXNode:
    """Read an FBX binary file and return the root node.

    The returned node is a synthetic '__root__' whose children are the
    file's top-level records.

    Raises:
        ValueError: if the file lacks the FBX binary magic or is truncated
            before the 27-byte header ends.
    """
    with open(filepath, 'rb') as f:
        data = f.read()
    # Check magic
    magic = b'Kaydara FBX Binary \x00'
    if not data.startswith(magic):
        raise ValueError("Not a valid FBX binary file")
    if len(data) < 27:
        # Header is magic (20) + 2 marker bytes + uint32 version = 27 bytes;
        # fail with a clear error instead of struct.error on the version read.
        raise ValueError("Truncated FBX header")
    # Version (uint32 little-endian at byte 23)
    version = struct.unpack_from('<I', data, 23)[0]
    # Parse top-level node records, which start right after the header.
    offset = 27
    root = FBXNode(name='__root__')
    # FBX >= 7.5 widens record fields to 64 bits, so the end-of-list
    # sentinel record grows from 13 to 25 zero bytes.
    sentinel_size = 25 if version >= 7500 else 13
    while offset < len(data) - sentinel_size:
        node, offset = _read_node(data, offset, version)
        if node is None:
            break
        root.children.append(node)
    return root
def _read_node(data: bytes, offset: int, version: int) -> tuple[Optional[FBXNode], int]:
    """Read a single FBX node record starting at *offset*.

    Returns (node, new_offset); node is None for the null record that
    terminates a sibling list.
    """
    # FBX >= 7.5 widens the header fields (EndOffset, NumProperties,
    # PropertyListLen) from uint32 to uint64.
    if version >= 7500:
        end_offset, num_props, props_len = struct.unpack_from('<QQQ', data, offset)
        name_len = data[offset + 24]
        name = data[offset + 25:offset + 25 + name_len].decode('ascii', errors='replace')
        offset = offset + 25 + name_len
    else:
        end_offset, num_props, props_len = struct.unpack_from('<III', data, offset)
        name_len = data[offset + 12]
        name = data[offset + 13:offset + 13 + name_len].decode('ascii', errors='replace')
        offset = offset + 13 + name_len
    if end_offset == 0:
        # All-zero header: null record marking the end of a sibling list.
        return None, offset
    # Read properties
    props = []
    props_end = offset + props_len
    for _ in range(num_props):
        prop, offset = _read_property(data, offset)
        props.append(prop)
    # Re-anchor at the declared end of the property block so a property that
    # consumed a different number of bytes than the header declared cannot
    # desynchronise the child-record parsing below.
    offset = props_end
    node = FBXNode(name=name, properties=props)
    # Read children: nested records until the all-zero sentinel record.
    sentinel = b'\x00' * (25 if version >= 7500 else 13)
    while offset < end_offset:
        if data[offset:offset + len(sentinel)] == sentinel:
            offset += len(sentinel)
            break
        child, offset = _read_node(data, offset, version)
        if child is None:
            break
        node.children.append(child)
    # Always resume exactly at the record's declared end.
    offset = max(offset, end_offset)
    return node, offset
def _read_property(data: bytes, offset: int) -> tuple[Any, int]:
"""Read a single FBX property value."""
type_code = chr(data[offset])
offset += 1
if type_code == 'Y': # int16
val = struct.unpack_from('<h', data, offset)[0]
return val, offset + 2
elif type_code == 'C': # bool
val = data[offset] != 0
return val, offset + 1
elif type_code == 'I': # int32
val = struct.unpack_from('<i', data, offset)[0]
return val, offset + 4
elif type_code == 'F': # float32
val = struct.unpack_from('<f', data, offset)[0]
return val, offset + 4
elif type_code == 'D': # float64
val = struct.unpack_from('<d', data, offset)[0]
return val, offset + 8
elif type_code == 'L': # int64
val = struct.unpack_from('<q', data, offset)[0]
return val, offset + 8
elif type_code == 'S': # string
length = struct.unpack_from('<I', data, offset)[0]
val = data[offset + 4:offset + 4 + length].decode('utf-8', errors='replace')
return val, offset + 4 + length
elif type_code == 'R': # raw bytes
length = struct.unpack_from('<I', data, offset)[0]
val = data[offset + 4:offset + 4 + length]
return val, offset + 4 + length
elif type_code in ('f', 'd', 'l', 'i', 'b'):
# Array types
arr_len = struct.unpack_from('<I', data, offset)[0]
encoding = struct.unpack_from('<I', data, offset + 4)[0]
comp_len = struct.unpack_from('<I', data, offset + 8)[0]
offset += 12
raw = data[offset:offset + comp_len]
if encoding == 1:
raw = zlib.decompress(raw)
dtype_map = {'f': '<f4', 'd': '<f8', 'l': '<i8', 'i': '<i4', 'b': 'bool'}
arr = np.frombuffer(raw, dtype=dtype_map[type_code])[:arr_len]
return arr, offset + comp_len
else:
raise ValueError(f"Unknown FBX property type: {type_code}")
# ============================================================
# FBX → Skeleton + Animation extraction
# ============================================================
def extract_skeleton_and_animation(fbx_root: FBXNode) -> dict:
    """
    Extract skeleton hierarchy and animation data from parsed FBX.
    Returns dict with:
    - joint_names: list[str]
    - parent_indices: list[int]  (-1 for root joints, BFS order)
    - rest_offsets: [J, 3]  local translations (rest pose)
    - rotations: [T, J, 3] Euler degrees
    - root_positions: [T, 3]
    - fps: float

    Raises ValueError if the file has no 'Objects' section or no root bone.
    """
    objects = fbx_root.find('Objects')
    if objects is None:
        raise ValueError("No Objects section in FBX")
    # Find all Model nodes (bones/joints).
    # models: model_id -> {'name', 'type', 'translation'}; only bone-like
    # models are kept, meshes/cameras/etc. are skipped.
    models = {}
    for node in objects.children:
        if node.name == 'Model':
            # Model properties are (id, "Name\x00\x01Model", type-string);
            # the display name is everything before the first NUL byte.
            model_id = node.properties[0] if node.properties else None
            model_name = str(node.properties[1]).split('\x00')[0] if len(node.properties) > 1 else ''
            model_type = str(node.properties[2]) if len(node.properties) > 2 else ''
            if 'LimbNode' in model_type or 'Root' in model_type or 'Null' in model_type:
                # Extract local translation from the Properties70 block;
                # defaults to the origin when no 'Lcl Translation' entry exists.
                local_trans = np.zeros(3)
                props70 = node.find('Properties70')
                if props70:
                    for p in props70.children:
                        # 'P' entries look like (name, type, label, flags, x, y, z).
                        if p.name == 'P' and p.properties and str(p.properties[0]) == 'Lcl Translation':
                            local_trans = np.array([
                                float(p.properties[4]),
                                float(p.properties[5]),
                                float(p.properties[6]),
                            ])
                models[model_id] = {
                    'name': model_name,
                    'type': model_type,
                    'translation': local_trans,
                }
    # Find connections to build parent-child hierarchy.
    # 'OO' (object->object) connections are ordered (child_id, parent_id).
    connections = fbx_root.find('Connections')
    parent_map = {}  # child_id → parent_id (bone-to-bone links only)
    if connections:
        for c in connections.children:
            if c.name == 'C' and c.properties and str(c.properties[0]) == 'OO':
                child_id = c.properties[1]
                parent_id = c.properties[2]
                if child_id in models and parent_id in models:
                    parent_map[child_id] = parent_id
    # Build ordered joint list (BFS from roots): a root is any bone
    # that never appears as a child in parent_map.
    roots = [mid for mid in models if mid not in parent_map]
    if not roots:
        raise ValueError("No root bones found")
    joint_names = []
    parent_indices = []  # parent joint index per joint; -1 for roots
    rest_offsets = []    # per-joint local translation, same order
    id_to_idx = {}       # FBX model id → joint index in BFS order
    queue = [(rid, -1) for rid in roots]
    while queue:
        mid, pidx = queue.pop(0)
        idx = len(joint_names)
        id_to_idx[mid] = idx
        joint_names.append(models[mid]['name'])
        parent_indices.append(pidx)
        rest_offsets.append(models[mid]['translation'])
        # Find children (linear scan of parent_map per joint).
        for child_id, par_id in parent_map.items():
            if par_id == mid:
                queue.append((child_id, idx))
    rest_offsets = np.array(rest_offsets, dtype=np.float32)
    # Extract animation curves.
    anim_layers = []  # NOTE(review): collected but never used below
    for node in objects.children:
        if node.name == 'AnimationLayer':
            anim_layers.append(node)
    # Find AnimationCurveNode → Model connections.
    # acn name (e.g. 'T'/'R', or 'Lcl Rotation') identifies which bone
    # property the node animates; curves are attached per channel later.
    anim_curve_nodes = {}
    for node in objects.children:
        if node.name == 'AnimationCurveNode':
            acn_id = node.properties[0] if node.properties else None
            acn_name = str(node.properties[1]).split('\x00')[0] if len(node.properties) > 1 else ''
            anim_curve_nodes[acn_id] = {'name': acn_name, 'curves': {}}
    # Find AnimationCurve data: keyframe times (FBX ticks) and float values.
    anim_curves = {}
    for node in objects.children:
        if node.name == 'AnimationCurve':
            ac_id = node.properties[0] if node.properties else None
            key_time = None
            key_value = None
            for child in node.children:
                if child.name == 'KeyTime' and child.properties:
                    key_time = child.properties[0]
                elif child.name == 'KeyValueFloat' and child.properties:
                    key_value = child.properties[0]
            # Keep only curves with both arrays present.
            if key_time is not None and key_value is not None:
                anim_curves[ac_id] = {
                    'times': np.array(key_time, dtype=np.int64),
                    'values': np.array(key_value, dtype=np.float64),
                }
    # Link curves to bones via connections
    # Connection: AnimationCurve → AnimationCurveNode → Model
    acn_to_model = {}  # acn_id → (model_id, property_name)
    ac_to_acn = {}  # ac_id → (acn_id, channel_idx); NOTE(review): written but never read
    if connections:
        for c in connections.children:
            if c.name == 'C' and len(c.properties) >= 3:
                ctype = str(c.properties[0])
                child_id = c.properties[1]
                parent_id = c.properties[2]
                if ctype == 'OO':
                    if child_id in anim_curve_nodes and parent_id in models:
                        prop_name = anim_curve_nodes[child_id]['name']
                        acn_to_model[child_id] = (parent_id, prop_name)
                    elif child_id in anim_curves and parent_id in anim_curve_nodes:
                        ac_to_acn[child_id] = parent_id
                elif ctype == 'OP':
                    # 'OP' carries the channel label (e.g. 'd|X') in property 3;
                    # only these curves end up in acn['curves'] and get used.
                    if child_id in anim_curves and parent_id in anim_curve_nodes:
                        channel = str(c.properties[3]) if len(c.properties) > 3 else ''
                        ac_to_acn[child_id] = parent_id
                        anim_curve_nodes[parent_id]['curves'][channel] = child_id
    # Determine frame count and FPS from the union of all key times.
    all_times = set()
    for ac_id, ac_data in anim_curves.items():
        for t in ac_data['times']:
            all_times.add(int(t))
    if not all_times:
        # No animation, return rest pose (single frame, zero rotations).
        return {
            'joint_names': joint_names,
            'parent_indices': parent_indices,
            'rest_offsets': rest_offsets,
            'rotations': np.zeros((1, len(joint_names), 3), dtype=np.float32),
            'root_positions': rest_offsets[0:1].copy(),
            'fps': 30.0,
        }
    sorted_times = sorted(all_times)
    fbx_ticks_per_sec = 46186158000  # FBX KTime ticks per second
    if len(sorted_times) > 1:
        # FPS inferred from the spacing of the first two key times only;
        # assumes uniformly sampled keys — TODO confirm for sparse clips.
        dt = sorted_times[1] - sorted_times[0]
        fps = fbx_ticks_per_sec / dt if dt > 0 else 30.0
    else:
        fps = 30.0
    T = len(sorted_times)
    time_to_frame = {t: i for i, t in enumerate(sorted_times)}
    # Build rotation and position arrays. Frames a curve never keys keep
    # rotation 0 / rest translation (no interpolation or hold is applied).
    J = len(joint_names)
    rotations = np.zeros((T, J, 3), dtype=np.float64)
    positions = np.tile(rest_offsets, (T, 1, 1)).astype(np.float64)
    for acn_id, acn_data in anim_curve_nodes.items():
        if acn_id not in acn_to_model:
            continue
        model_id, prop_name = acn_to_model[acn_id]
        if model_id not in id_to_idx:
            continue
        j = id_to_idx[model_id]
        for channel_key, ac_id in acn_data['curves'].items():
            if ac_id not in anim_curves:
                continue
            ac_data = anim_curves[ac_id]
            # Determine axis from the channel label ('d|X' etc., case-insensitive).
            axis = -1
            ck = channel_key.lower()
            if 'x' in ck or ck == 'd|x':
                axis = 0
            elif 'y' in ck or ck == 'd|y':
                axis = 1
            elif 'z' in ck or ck == 'd|z':
                axis = 2
            if axis < 0:
                continue
            # Fill in values at the frames matching each key time.
            for t_val, v_val in zip(ac_data['times'], ac_data['values']):
                t_int = int(t_val)
                if t_int in time_to_frame:
                    f = time_to_frame[t_int]
                    if 'rotation' in prop_name.lower() or 'Lcl Rotation' in prop_name:
                        rotations[f, j, axis] = v_val
                    elif 'translation' in prop_name.lower() or 'Lcl Translation' in prop_name:
                        positions[f, j, axis] = v_val
    # Only the first (root) joint's animated translation is exported.
    root_positions = positions[:, 0, :]
    return {
        'joint_names': joint_names,
        'parent_indices': parent_indices,
        'rest_offsets': rest_offsets,
        'rotations': rotations.astype(np.float32),
        'root_positions': root_positions.astype(np.float32),
        'fps': float(fps),
    }
def fbx_to_bvh_data(filepath: str | Path) -> dict:
    """
    High-level: read FBX file and return data compatible with our BVH pipeline.
    """
    return extract_skeleton_and_animation(read_fbx(filepath))