Yzy00518 committed on
Commit
6ca9edf
·
1 Parent(s): b275b5c

Upload src/app/process_data.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. src/app/process_data.py +76 -0
src/app/process_data.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import torch
3
+ from scipy.spatial.transform import Rotation as R
4
+
5
+ import sys
6
+ import os
7
+ sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
8
+ from utils.constants import SELECTED_JOINT28
9
+
10
+ local_smplx_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..', 'deps/smplx'))
11
+ sys.path.insert(0, local_smplx_path)
12
+ import smplx
13
+
14
def get_smplx_model(bs, smplx_pth):
    """Build a male SMPL-X body model and switch it to eval mode.

    Args:
        bs: batch size the model's forward pass will be called with.
        smplx_pth: path to the SMPL-X model directory (``.npz`` files).

    Returns:
        An ``smplx`` model instance (``model_type='smplx'``, gender male)
        with training-mode behaviour disabled via ``eval()``.
    """
    model = smplx.create(
        model_path=smplx_pth,
        model_type='smplx',
        gender='male',
        ext='npz',
        batch_size=bs,
    )
    model.eval()
    return model
22
+
23
def get_a_sample(mo_data, motion_len=6, SEQLEN=16, smplx_pth=None):
    """Cut a motion clip into ``motion_len`` windows of ``SEQLEN`` frames,
    canonicalize each window, and return SMPL-X joint positions.

    Each window covers ``SEQLEN*2`` raw frames downsampled by 2 (every other
    frame). Windows start every ``SEQLEN*2 - 1`` raw frames, so consecutive
    windows share one boundary frame. Each window is canonicalized so its
    first frame sits at the horizontal origin with zero yaw.

    Args:
        mo_data: mapping with ``'transl'`` (L, 3), ``'global_orient'`` (L, 3)
            axis-angle, and ``'body_pose'`` (L, 63) axis-angle (21 joints x 3).
        motion_len: number of windows to extract.
        SEQLEN: frames per window after 2x temporal downsampling.
        smplx_pth: path to the SMPL-X model files, forwarded to
            ``get_smplx_model``.

    Returns:
        A tensor of shape ``(motion_len, SEQLEN, 28, 3)`` with the selected
        28 joints per frame, or ``None`` when the clip is too short.
    """
    SEQLENTIMES2 = SEQLEN * 2  # raw frames per window before downsampling

    transl = mo_data['transl']                # (L, 3) root translation
    global_orient = mo_data['global_orient']  # (L, 3) axis-angle root rotation
    body_pose = mo_data['body_pose']          # (L, 63) axis-angle body pose
    length = transl.shape[0]

    # The last window starts at (SEQLENTIMES2-1)*(motion_len-1) and spans
    # SEQLENTIMES2 raw frames, so the clip must contain at least
    # (SEQLENTIMES2-1)*motion_len + 1 frames. The previous guard used
    # (SEQLENTIMES2-2)*motion_len, which admitted clips up to motion_len
    # frames too short; the ragged final window then crashed np.stack
    # instead of returning None.
    if length < (SEQLENTIMES2 - 1) * motion_len + 1:
        return None

    # motion_len window start indices, one every SEQLENTIMES2-1 raw frames.
    indices = np.arange(0, (SEQLENTIMES2 - 1) * motion_len, SEQLENTIMES2 - 1)

    transl_all = []
    global_orient_all = []
    body_pose_all = []

    for idx in indices:
        # Take every 2nd frame -> exactly SEQLEN frames per window.
        transl_i = transl[idx:idx + SEQLENTIMES2:2]
        global_orient_i = global_orient[idx:idx + SEQLENTIMES2:2]
        body_pose_i = body_pose[idx:idx + SEQLENTIMES2:2]

        # Move the first frame to the horizontal origin, keeping height (y).
        transl_i = transl_i - np.array([transl_i[0, 0], 0., transl_i[0, 2]])
        # Cancel the first frame's yaw (the 'y' component in 'zxy' ordering)
        # by rotating the whole window with its inverse.
        first_frame_euler = R.from_rotvec(global_orient_i[0]).as_euler('zxy')
        first_frame_euler = np.array([0, 0, -first_frame_euler[2]])
        first_frame_matrix = R.from_euler('zxy', first_frame_euler).as_matrix()
        global_orient_i = (
            R.from_matrix(first_frame_matrix) * R.from_rotvec(global_orient_i)
        ).as_rotvec()
        transl_i = transl_i @ first_frame_matrix.T

        transl_all.append(transl_i)
        global_orient_all.append(global_orient_i)
        body_pose_all.append(body_pose_i)

    # Flatten windows into one batch of motion_len*SEQLEN frames.
    transl_all = np.stack(transl_all).reshape(-1, 3)
    global_orient_all = np.stack(global_orient_all).reshape(-1, 3)
    body_pose_all = np.stack(body_pose_all).reshape(-1, 63)

    batch_size = motion_len * SEQLEN
    assert batch_size == transl_all.shape[0]
    smpl_model = get_smplx_model(batch_size, smplx_pth=smplx_pth)

    with torch.no_grad():
        joints = smpl_model(
            body_pose=torch.tensor(body_pose_all, dtype=torch.float32),
            global_orient=torch.tensor(global_orient_all, dtype=torch.float32),
            transl=torch.tensor(transl_all, dtype=torch.float32),
        ).joints[:, SELECTED_JOINT28]

    return joints.reshape(motion_len, SEQLEN, 28, 3)  # (motion_len, SEQLEN, 28, 3)