File size: 2,864 Bytes
05aeae1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
import os
import numpy as np
import argparse
from scripts._obj import ObjHandle
from multiprocessing import Pool, cpu_count


def process_file(args_tuple):
    """Convert one ARKit blendshape-coefficient sequence into a vertex sequence.

    Args:
        args_tuple: tuple of (input_file, bs, vert_neutrals, output_dir) —
            packed into one tuple so it can be mapped over a multiprocessing
            pool.
            input_file: path to a .npy file holding an ARKit coefficient
                sequence; assumed shape (num_frames, num_blendshapes) — TODO
                confirm against the files produced upstream.
            bs: blendshape basis array; assumed shape
                (num_blendshapes, num_vertex_coords) — confirm against the
                saved PCA model.
            vert_neutrals: mapping from template key to flattened neutral
                vertices of shape (1, num_vertex_coords).
            output_dir: folder the resulting .npy is written into.

    Returns:
        Path of the written output .npy file (same basename as the input).
    """
    input_file, bs, vert_neutrals, output_dir = args_tuple

    arkit_seq = np.load(input_file)
    print(f"Processing: {input_file}")

    # The template key is the second '_'-separated token of the filename.
    # Fall back to the extension-less stem when there is no underscore
    # (the previous version raised IndexError on such names); the dict
    # lookup below then degrades gracefully to 'default'.
    name_parts = os.path.basename(input_file).split('_')
    if len(name_parts) > 1:
        template_key = name_parts[1]
    else:
        template_key = os.path.splitext(name_parts[0])[0]

    # (num_frames, num_bs) @ (num_bs, num_coords): equivalent to stacking
    # bs.T @ frame for every frame, without the per-frame Python loop.
    vert_seq = arkit_seq @ bs

    # Prefer the exact template key, then 'default', then any entry at all.
    vert_neutral = vert_neutrals.get(template_key, vert_neutrals.get('default', next(iter(vert_neutrals.values()))))
    vert_seq = vert_seq + vert_neutral

    print(f"Output: {vert_seq.shape}")

    output_file = os.path.join(output_dir, os.path.basename(input_file))
    np.save(output_file, vert_seq)

    return output_file


if __name__ == '__main__':

    # CLI: input/output locations, the blendshape model, and how many
    # worker processes to fan the conversion out over.
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', '-i', type=str, default=os.path.join('..', 'face_ict'), help='Path to arkit npy file or folder')
    parser.add_argument('--templates', '-t', type=str, default=os.path.join('..', 'face_ict_templates'), help='Path to template obj file or folder')
    parser.add_argument('--output', '-o', type=str, default=os.path.join('..', 'face_vertices'), help='Folder path to store output npy sequence file')
    parser.add_argument('--bs', '-p', type=str, default='blendshape_ict.npy', help='Path to PCA model')
    parser.add_argument('--workers', '-w', type=int, default=cpu_count(), help='Number of parallel workers')
    args = parser.parse_args()

    os.makedirs(args.output, exist_ok=True)
    bs = np.load(args.bs)

    # Neutral template vertices, flattened to (1, num_coords): one entry
    # per .obj when a folder is given, otherwise a single 'default' entry.
    vert_neutrals = {}
    if os.path.isdir(args.templates):
        obj_names = [name for name in sorted(os.listdir(args.templates)) if name.endswith('.obj')]
        for obj_name in obj_names:
            handle = ObjHandle(os.path.join(args.templates, obj_name), triangulate=True)
            vert_neutrals[obj_name[:-4]] = handle.vertices.reshape(-1)[None]
    else:
        handle = ObjHandle(args.templates, triangulate=True)
        vert_neutrals['default'] = handle.vertices.reshape(-1)[None]

    # Gather the .npy sequences to convert: a whole folder or one file.
    if os.path.isdir(args.input):
        input_files = [os.path.join(args.input, name) for name in sorted(os.listdir(args.input)) if name.endswith('.npy')]
    else:
        input_files = [args.input]

    print(f"Processing {len(input_files)} files using {args.workers} workers...")

    # One task tuple per input; bs and the neutrals dict are pickled to
    # every worker along with the file path.
    tasks = [(path, bs, vert_neutrals, args.output) for path in input_files]

    # Fan the conversion out across the worker pool.
    with Pool(args.workers) as pool:
        results = pool.map(process_file, tasks)

    print(f"Done! Processed {len(results)} files.")