File size: 2,705 Bytes
fb6f9ba
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
import os
import argparse
import json
import numpy as np
import pprint
import time
import multiprocessing as mp
from functools import partial

from plyfile import PlyData
from tqdm import tqdm
import torch

# Module-level accumulator: scan_id -> 4x4 axis-alignment matrix (nested lists),
# filled by process_per_scan and dumped to JSON in main().
# NOTE(review): a plain global cannot be shared across multiprocessing workers;
# mp/partial are imported but the current pipeline runs serially, so this works.
dict_align_matrix ={}

def process_per_scan(scan_id, scan_dir):
    """Load one ScanNet scan, axis-align its point cloud, and record the
    alignment matrix in the module-level ``dict_align_matrix``.

    Args:
        scan_id (str): scan folder name, e.g. 'scene0000_00'.
        scan_dir (str): directory containing the per-scan folders.

    Raises:
        ValueError: if the aligned coordinates contain NaNs.
    """
    global dict_align_matrix

    # Load point clouds with colors from the decimated mesh.
    with open(os.path.join(scan_dir, scan_id, '%s_vh_clean_2.ply' % (scan_id)), 'rb') as f:
        plydata = PlyData.read(f)  # elements: vertex, face
    points = np.array([list(x) for x in plydata.elements[0]])  # rows: [x, y, z, r, g, b, alpha]
    coords = np.ascontiguousarray(points[:, :3])
    colors = np.ascontiguousarray(points[:, 3:6])  # currently unused; kept for the TODO below
    # # TODO: normalize the coords and colors
    # coords = coords - coords.mean(0)
    # colors = colors / 127.5 - 1

    # Parse the 4x4 axis-alignment matrix from the scan's metadata file;
    # fall back to identity when no 'axisAlignment' line is present.
    align_matrix = np.eye(4)
    with open(os.path.join(scan_dir, scan_id, '%s.txt' % (scan_id)), 'r') as f:
        for line in f:
            if line.startswith('axisAlignment'):
                align_matrix = np.array([float(x) for x in line.strip().split()[-16:]]).astype(np.float32).reshape(4, 4)
                break

    # Apply the alignment in homogeneous coordinates, then drop the w column.
    pts = np.ones((coords.shape[0], 4), dtype=coords.dtype)
    pts[:, 0:3] = coords
    coords = np.dot(pts, align_matrix.transpose())[:, :3]  # Nx3 aligned coordinates (original comment said Nx4)
    dict_align_matrix[scan_id] = align_matrix.tolist()
    # BUG FIX: the original used `assert`, which is stripped under `python -O`;
    # validate explicitly so the NaN check always runs.
    if np.isnan(coords).any():
        raise ValueError('NaN coordinates after axis alignment for scan %s' % scan_id)
    

def parse_args():
    """Parse and echo command-line arguments for the preprocessing script.

    Returns:
        argparse.Namespace: parsed arguments (scannet_dir, num_workers,
        apply_global_alignment).
    """
    parser = argparse.ArgumentParser()

    parser.add_argument('--scannet_dir', required=True, type=str,
                        help='the path to the downloaded ScanNet scans')

    # Optional arguments.
    parser.add_argument('--num_workers', default=-1, type=int,
                        help='the number of processes, -1 means use the available max')
    # BUG FIX: the original combined default=True with action='store_true', so the
    # flag was a no-op (the value could never be False). BooleanOptionalAction keeps
    # the default and the existing flag, and adds --no-apply_global_alignment to
    # actually disable it. (Requires Python 3.9+.)
    parser.add_argument('--apply_global_alignment', default=True,
                        action=argparse.BooleanOptionalAction,
                        help='rotate/translate entire scan globally to aligned it with other scans')
    args = parser.parse_args()

    # Echo the parsed configuration for reproducibility in logs.
    args_string = pprint.pformat(vars(args))
    print(args_string)

    return args


def main():
    """Process every scan in the train/val split and dump the per-scan
    axis-alignment matrices to ``scannet_align_matrix.json``.

    NOTE(review): only the 'scans' split is processed; 'scans_test' was
    deliberately disabled in the original (test scans ship without
    axisAlignment metadata — TODO confirm).
    """
    args = parse_args()

    for split in ['scans']:
        scannet_dir = os.path.join(args.scannet_dir, split)

        scan_ids = sorted(os.listdir(scannet_dir))
        print(split, '%d scans' % (len(scan_ids)))

        for scan_id in tqdm(scan_ids):
            process_per_scan(scan_id=scan_id, scan_dir=scannet_dir)

    # BUG FIX: the original passed a bare open(...) to json.dump, leaking the
    # file handle; a context manager guarantees the file is flushed and closed.
    # (The read-only use of the global needs no `global` declaration.)
    with open("scannet_align_matrix.json", "w") as f:
        json.dump(dict_align_matrix, f)


if __name__ == '__main__':
    main()