File size: 6,903 Bytes
663494c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
import copy
from typing import Dict, Optional

import numpy as np

import utils.calibration as calib_utils

################## box in the object's own coordinate


def create_bb_points(size: np.ndarray) -> np.ndarray:
    """Create the 8 corners of a bounding box in the object coordinate.

    Note: the object's location is assumed to be the box center,
    not the bottom center.

    Args:
        size: (3, ), length, width and height, obtained from
        carla_utils.vec2np(obj.bounding_box.extent)

    Returns:
        corners: 8 x 4 homogeneous points in the object coordinate

    Carla use left-handed coordinate, world coordinate:
        +x -> front
        +y -> right
        +z -> up
        4 -------- 5
       /|         /|
      7 -------- 6 .
      | |        | |
      . 0 -------- 1
      |/         |/
      3 -------- 2
    e.g., if take points of 0, 1, 2, 3, it will return the box in bottom
    if take points of 0, 3, 4, 7, it will return the box on the left
    """

    # half-extents along length (x), width (y), height (z)
    half_extents = np.asarray(size, dtype=float) / 2.0

    # sign of each half-extent per corner, in the ordering of the
    # diagram above (rows 0-3 bottom face, rows 4-7 top face)
    signs = np.array(
        [
            [+1, -1, -1],
            [+1, +1, -1],
            [-1, +1, -1],
            [-1, -1, -1],
            [+1, -1, +1],
            [+1, +1, +1],
            [-1, +1, +1],
            [-1, -1, +1],
        ],
        dtype=float,
    )

    # homogeneous coordinates: xyz scaled by half-extents, w fixed to 1
    corners = np.ones((8, 4))
    corners[:, :3] = signs * half_extents
    return corners


def box_in_obj_to_world(box_in_object: np.ndarray, transform: np.ndarray) -> np.ndarray:
    """Convert object boxes in its own coordinate to world coordinate

    Args:
        box_in_object: 8 x 4 homogeneous corners in the object coordinate
        transform: 4 x 4, object to world transformation

    Returns:
        box_in_world: 8 x 4 homogeneous corners in the world coordinate
    """

    box_in_world = np.dot(transform, np.transpose(box_in_object)).transpose()  #  8 x 4
    return box_in_world


################## box in the world coordinate


def get_box_in_world(
    location: np.ndarray, rotation: np.ndarray, size: np.ndarray
) -> np.ndarray:
    """Return the 8 corners of a 3D bounding box in the world coordinate"""

    # object -> world transformation from the object's pose
    obj2world: np.ndarray = calib_utils.create_transform(location, rotation)  # 4 x 4

    # corners in the object's own coordinate, then moved into the world
    corners_in_obj: np.ndarray = create_bb_points(size)  # 8 x 4
    return box_in_obj_to_world(corners_in_obj, obj2world)  # 8 x 4


################## box in the ego vehicle's coordinate


def box_in_world_to_ego(
    box_in_world: np.ndarray, ego_transform: np.ndarray
) -> np.ndarray:
    """Convert bbox in the world coordinate to the ego coordinate

    Args:
        box_in_world: (8, 4), 8 corners in the world coordinate
        ego_transform: (4, 4), transformation from ego to the world coordinate

    Returns:
        box_in_ego: (8, 4), 8 corners in the ego coordinate
    """

    # invert ego->world to obtain the world->ego mapping
    world2ego = np.linalg.inv(ego_transform)  # 4 x 4

    # work on a shallow copy so the caller's array is untouched
    corners = copy.copy(box_in_world)  # 8 x 4

    # transform all corners at once: (4x4 @ 4x8).T -> 8 x 4
    return np.transpose(np.dot(world2ego, corners.T))


################## box in the LiDAR's coordinate


def box_in_ego_to_lidar(box_in_ego: np.ndarray, ego2lidar: Optional[np.ndarray] = None) -> np.ndarray:
    """Convert bbox in the ego coordinate to the lidar coordinate

    Arguments
        box_in_ego: 8 x 4 homogeneous corners in the ego coordinate
        ego2lidar: 4 x 4 ego->lidar transformation; when omitted it is
            derived by inverting the calibration's lidar->ego transform
    """

    # fall back to the calibration default when no transform is given
    if ego2lidar is None:
        ego2lidar = np.linalg.inv(calib_utils.lidar_to_ego_transform())  # 4 x 4

    # copy first so the caller's array is never modified
    corners = copy.copy(box_in_ego)  # 8 x 4

    # map every corner into the lidar frame in one matrix product
    return np.transpose(np.dot(ego2lidar, np.transpose(corners)))  # 8 x 4


def box_in_lidar_to_ego(box_in_lidar: np.ndarray, lidar2ego: Optional[np.ndarray] = None) -> np.ndarray:
    """Convert bbox in the lidar coordinate to the ego coordinate

    Arguments
        box_in_lidar: 8 x 4 homogeneous corners in the lidar coordinate
        lidar2ego: 4 x 4 lidar->ego transformation; must be supplied by
            the caller

    Returns:
        box_in_ego: 8 x 4 homogeneous corners in the ego coordinate

    Raises:
        ValueError: if ``lidar2ego`` is not provided
    """

    # a real exception instead of the previous `assert False` debug stub:
    # asserts are stripped under `python -O`, so validation must raise
    if lidar2ego is None:
        raise ValueError('No lidar2ego is provided!!!')

    # avoid in-place operation
    box_in_lidar = copy.copy(box_in_lidar)  # 8 x 4

    # apply transformation
    box_in_ego = np.dot(lidar2ego, np.transpose(box_in_lidar)).transpose()  # 8 x 4

    return box_in_ego


################## box in the camera/image's coordinate


def box_in_ego_to_camera(box_in_ego: np.ndarray, cam_param: Dict) -> np.ndarray:
    """Convert bbox in the ego coordinate to the camera coordinate

    Args:
        box_in_ego: (8, 4), 8 corners in the ego coordinate
        cam_param: camera parameter dict; may carry a precomputed (4, 4)
            'ego2cam' transform, otherwise one is derived from calibration

    Returns:
        box_in_camera: (8, 4), 8 corners in the camera coordinate
    """

    # avoid in-place operation
    box_in_ego = copy.copy(box_in_ego)  # 8 x 4

    # use the precomputed transform when available, otherwise invert
    # the calibration's camera->ego transform
    if 'ego2cam' in cam_param:
        ego2cam: np.ndarray = cam_param['ego2cam']  # fixed typo: np.narray
    else:
        ego2cam = np.linalg.inv(
            calib_utils.camera_to_ego_transform(cam_param)
        )  # 4 x 4

    # apply transformation
    box_in_camera = np.dot(ego2cam, np.transpose(box_in_ego)).transpose()  # 8 x 4

    return box_in_camera


def box_projection_to_image(
    box_in_camera: np.ndarray, 
    intrinsics: np.ndarray, 
    front_dist_threshold: float = 1.5,
    left_hand_system: bool = True,
) -> Optional[np.ndarray]:
    """Project 3D box in camera coordinate to 2D box in image coordinate

    Args:
        box_in_camera: (8, 4), 3D box in the camera coordinate
        intrinsics: (4, 4) camera intrinsic matrix
        front_dist_threshold: used to filter out bad project due to too close
        left_hand_system: when True, re-order the axes from Carla's
            left-handed camera frame to the right-handed convention used
            by datasets such as KITTI before projecting

    Returns:
        box_2d_in_image: (8, 2), 2D box in the image coordinate, or None
        when the box fails the front-of-camera depth check
    """

    box_in_camera = copy.copy(box_in_camera).transpose()  # 4 x 8

    # camera rotation correction, from left to right hand coordinate system
    # change to camera coordinate as in other datasets such as KITTI
    if left_hand_system:
        box_in_camera = np.vstack(
            (
                box_in_camera[1, :],
                -box_in_camera[2, :],
                box_in_camera[0, :],
                box_in_camera[3, :],
            )
        )  # 4 x 8

    # apply projection and normalization of the homogeneous coordinate
    box_2d_in_image = (intrinsics @ box_in_camera).T  # 8 x 4
    box_2d_in_image[:, 0] = box_2d_in_image[:, 0] / box_2d_in_image[:, 2]
    box_2d_in_image[:, 1] = box_2d_in_image[:, 1] / box_2d_in_image[:, 2]

    # check if the box is in front of camera
    # NOTE(review): `any` keeps the box when at least one corner is beyond
    # the threshold — confirm `all` is not intended for partially-behind boxes
    if np.any(box_2d_in_image[:, 2] > front_dist_threshold):
        box_2d_in_image = np.array(box_2d_in_image)[:, :2]
        return box_2d_in_image  # 8 x 2
    else:
        return None