|
|
import copy |
|
|
import math |
|
|
|
|
|
|
|
|
import pickle |
|
|
from typing import Dict, Tuple |
|
|
|
|
|
import numpy as np |
|
|
|
|
|
np.set_printoptions(precision=2, suppress=True) |
|
|
|
|
|
|
|
|
def create_transform(location: np.ndarray, rotation: np.ndarray) -> np.ndarray:
    """Create 4x4 transformation matrix from location and rotation

    Args:
        location: (3, ) xyz in the world coordinate (Carla left-handed coords)
        rotation: (3, ) pitch, yaw, roll in degrees

    Returns:
        transform: 4 x 4, the transformation from object to world coordinate
    """
    pitch, yaw, roll = rotation
    c_y = np.cos(np.radians(yaw))
    s_y = np.sin(np.radians(yaw))
    c_r = np.cos(np.radians(roll))
    s_r = np.sin(np.radians(roll))
    c_p = np.cos(np.radians(pitch))
    s_p = np.sin(np.radians(pitch))

    # Plain ndarray instead of the deprecated np.matrix; matches the
    # annotated return type and downstream np.dot compositions.
    transform = np.identity(4)

    # Translation part
    transform[0, 3] = location[0]
    transform[1, 3] = location[1]
    transform[2, 3] = location[2]

    # Rotation part: roll * pitch * yaw composition (Carla convention)
    transform[0, 0] = c_p * c_y
    transform[0, 1] = c_y * s_p * s_r - s_y * c_r
    transform[0, 2] = -c_y * s_p * c_r - s_y * s_r
    transform[1, 0] = s_y * c_p
    transform[1, 1] = s_y * s_p * s_r + c_y * c_r
    transform[1, 2] = -s_y * s_p * c_r + c_y * s_r
    transform[2, 0] = s_p
    transform[2, 1] = -c_p * s_r
    transform[2, 2] = c_p * c_r

    return transform
|
|
|
|
|
|
|
|
def transform_matrix_to_vector(transform: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
    """Recover location and rotation from a 4x4 transformation matrix.

    Pitch and roll are assumed to be zero, so the 3x3 rotation block is
        [c_y, -s_y, 0]
        [s_y,  c_y, 0]
        [  0,    0, 1]
    and only yaw is recovered.

    Returns:
        (location, rotation): (3, ) translation and (3, ) pitch/yaw/roll
        with pitch and roll fixed at 0, yaw in degrees
    """
    location: np.ndarray = transform[:3, 3]

    # sin(yaw) and cos(yaw) read straight off the rotation block
    yaw_rad: float = math.atan2(transform[1, 0], transform[0, 0])
    rotation = np.array([0, np.rad2deg(yaw_rad), 0])

    return (location, rotation)
|
|
|
|
|
|
|
|
def get_transform_A2C(A2B: np.ndarray, B2C: np.ndarray):
    """Compose 4x4 transformation matrices: A -> B followed by B -> C.

    Mathematically identical to (A2B^T @ B2C^T)^T, i.e. B2C @ A2B.
    """
    return np.dot(B2C, A2B)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def gps_offset(carla_version: str) -> Tuple[np.ndarray, np.ndarray]:
    """GPS sensor parameters (mean and scale) for a Carla version.

    Args:
        carla_version: "0.9.10" or "0.9.9"

    Returns:
        (mean, scale): (2, ) lat/lon offset and (2, ) meters-per-degree scale

    Raises:
        ValueError: if the Carla version is not supported
    """
    if carla_version == "0.9.10":
        mean = np.array([0.0, 0.0])
        scale = np.array([111324.60662786, 111319.490945])
    elif carla_version == "0.9.9":
        mean = np.array([49.0, 8.0])
        scale = np.array([111324.60662786, 73032.1570362])
    else:
        # Previously an unknown version fell through to UnboundLocalError;
        # fail with an explicit, actionable message instead.
        raise ValueError(f"Unsupported Carla version: {carla_version!r}")

    return (mean, scale)
|
|
|
|
|
|
|
|
def gps_normalize(gps: np.ndarray, carla_version="0.9.10") -> np.ndarray:
    """Normalize GPS (latitude & longitude) to the typical x,y scale.

    Args:
        gps: (3, ) raw GPS reading; only the first two entries are rescaled
        carla_version: simulator version, selects the mean/scale constants

    Returns:
        A copy of *gps* with the first two components normalized.
    """
    normalized: np.ndarray = copy.copy(gps)
    mean, scale = gps_offset(carla_version)
    normalized[:2] = (normalized[:2] - mean) * scale

    return normalized
|
|
|
|
|
|
|
|
def gps_to_world(gps: np.ndarray) -> np.ndarray:
    """Convert normalized GPS coordinate to world coordinate.

    Args:
        gps: (3, ) normalized GPS coordinate

    Returns:
        A copy with (x, y) rotated to (y, -x); the third entry is untouched.
    """
    world = copy.copy(gps)
    world[0], world[1] = gps[1], -gps[0]

    return world
|
|
|
|
|
|
|
|
def world_to_gps(gps_world: np.ndarray) -> np.ndarray:
    """Convert world coordinate to normalized GPS coordinate.

    Inverse of gps_to_world.

    Args:
        gps_world: (3, ) world coordinate

    Returns:
        A copy with (x, y) rotated to (-y, x); the third entry is untouched.
    """
    gps = copy.copy(gps_world)
    gps[0], gps[1] = -gps_world[1], gps_world[0]

    return gps
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def accelerometer_to_world(accelerometer: np.ndarray) -> np.ndarray:
    """Convert accelerometer from the IMU coords. to world coords.

    Mutates *accelerometer* in place: standard gravity is removed from
    the z component, then the same array is returned.
    """
    standard_gravity = 9.80665  # m/s^2
    accelerometer[2] = accelerometer[2] - standard_gravity

    return accelerometer
|
|
|
|
|
|
|
|
def gyroscope_to_world(gyroscope: np.ndarray) -> np.ndarray:
    """Convert gyroscope from the IMU coords. to world coords.

    Mutates *gyroscope* in place: the z-axis reading is moved into the
    y-axis slot and the z component is zeroed, then the same array is
    returned.
    """
    gyroscope[1], gyroscope[2] = gyroscope[2], 0.0

    return gyroscope
|
|
|
|
|
|
|
|
def compass_to_rotation(compass: float) -> np.ndarray:
    """Convert the raw compass reading from the sensor to yaw in world coord.

    The compass shares the GPS coordinate frame, so a 90-degree rotation
    is applied; the reading is in radians and is converted to degrees,
    then wrapped into (-180, 180].

    Returns:
        (3, ) pitch/yaw/roll with pitch and roll fixed at 0
    """
    yaw_deg = math.degrees(compass - np.pi / 2.0)

    # Wrap into the (-180, 180] range; a single correction suffices
    if yaw_deg > 180:
        yaw_deg -= 360
    elif yaw_deg < -180:
        yaw_deg += 360

    return np.array([0, yaw_deg, 0])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_camera_intrinsics(cam_param: Dict, dim: int = 4) -> np.ndarray:
    """Returns the camera intrinsics matrix.

    Args:
        cam_param: include width/height/fov of a camera
        dim: 3 or 4, 4 leads to producing matrix in the homogeneous coordinate

    Returns:
        intrinsics: shape of (3, 3) or (4, 4)
    """
    width = cam_param["width"]
    height = cam_param["height"]
    # Pinhole focal length from the horizontal field of view
    focal = width / (2.0 * np.tan(cam_param["fov"] * np.pi / 360.0))

    intrinsics = np.identity(dim)
    intrinsics[0, 0] = intrinsics[1, 1] = focal
    intrinsics[0, 2] = width / 2.0
    intrinsics[1, 2] = height / 2.0

    return intrinsics
|
|
|
|
|
|
|
|
def get_camera_param() -> Dict[str, Dict]:
    """Returns our camera configuration, fixed.

    Four cameras: three wide (fov 100) facing left/front/right, and one
    telephoto (fov 45) pitched slightly up. Positions are in the ego frame.
    """

    def make(fov, yaw, pitch, x, y, z):
        # Shared image size; per-camera fov, orientation, and mount point
        return {
            "width": 400,
            "height": 300,
            "fov": fov,
            "yaw": yaw,
            "pitch": pitch,
            "x": x,
            "y": y,
            "z": z,
        }

    return {
        "left": make(100, -100.0, 0.0, 1.3, -0.2, 2.3),
        "front": make(100, 0.0, 0.0, 1.3, 0.0, 2.3),
        "right": make(100, 100.0, 0.0, 1.3, 0.2, 2.3),
        "tele": make(45, 0.0, 5, 1.5, 0.0, 2.4),
    }
|
|
|
|
|
|
|
|
def camera_to_ego_transform(cam_param: Dict[str, float]) -> np.ndarray:
    """Get camera to ego 4 x 4 transformation matrix.

    The rotation is composed as pitch_rot @ yaw_rot (no roll). Note the
    pitch angle is negated before building the rotation.

    Args:
        cam_param: includes yaw/pitch (degrees) and x/y/z mount position

    Returns:
        4 x 4 camera-to-ego transformation matrix
    """
    yaw_rad: float = math.radians(cam_param["yaw"])
    pitch_rad: float = math.radians(-cam_param["pitch"])
    c_y, s_y = math.cos(yaw_rad), math.sin(yaw_rad)
    c_p, s_p = math.cos(pitch_rad), math.sin(pitch_rad)

    # Rotation about z (yaw), then about y (pitch)
    yaw_rot = np.array(
        [
            [c_y, -s_y, 0],
            [s_y, c_y, 0],
            [0, 0, 1],
        ]
    )
    pitch_rot = np.array(
        [
            [c_p, 0, s_p],
            [0, 1, 0],
            [-s_p, 0, c_p],
        ]
    )

    transform = np.eye(4)
    transform[:3, :3] = np.matmul(pitch_rot, yaw_rot)
    transform[0, 3] = cam_param["x"]
    transform[1, 3] = cam_param["y"]
    transform[2, 3] = cam_param["z"]

    return transform
|
|
|
|
|
|
|
|
def camera_to_lidar_transform(cam_param: Dict[str, float]) -> np.ndarray:
    """Get camera to lidar 4 x 4 transformation matrix."""

    camera2ego: np.ndarray = camera_to_ego_transform(cam_param)
    ego2lidar: np.ndarray = ego_to_lidar_transform()

    # Compose: camera -> ego first, then ego -> lidar
    # (identical to the (A^T B^T)^T transpose formulation)
    return np.dot(ego2lidar, camera2ego)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_lidar_param() -> Dict[str, float]:
    """Returns our lidar mounting configuration, fixed.

    Yaw is in degrees; x/y/z is the mount position in the ego frame.
    """
    return {
        "yaw": -90.0,
        "x": 1.3,
        "y": 0.0,
        "z": 2.5,
    }
|
|
|
|
|
|
|
|
def lidar_to_ego_transform() -> np.ndarray:
    """Get lidar to ego 4 x 4 transformation matrix.

    Rotation is yaw-only (about z); translation is the lidar mount point.
    """
    params: Dict[str, float] = get_lidar_param()

    yaw_rad = math.radians(params["yaw"])
    c_y, s_y = math.cos(yaw_rad), math.sin(yaw_rad)

    transform = np.eye(4)
    transform[:3, :3] = np.array(
        [
            [c_y, -s_y, 0],
            [s_y, c_y, 0],
            [0, 0, 1],
        ]
    )
    transform[:3, 3] = [params["x"], params["y"], params["z"]]

    return transform
|
|
|
|
|
|
|
|
def lidar_to_camera_transform(cam_param: Dict[str, float]) -> np.ndarray:
    """Get lidar to camera 4 x 4 transformation matrix.

    Computed as the inverse of the camera-to-lidar transform.
    """
    camera2lidar: np.ndarray = camera_to_lidar_transform(cam_param)
    return np.linalg.inv(camera2lidar)
|
|
|
|
|
|
|
|
def lidar_to_image_transform(cam_param: Dict[str, float]) -> np.ndarray:
    """Get lidar to image (after projection) 4 x 4 transformation matrix."""

    lidar2camera: np.ndarray = lidar_to_camera_transform(cam_param)
    intrinsics: np.ndarray = get_camera_intrinsics(cam_param, dim=4)

    # Project: lidar -> camera first, then apply the pinhole intrinsics
    # (identical to the (A^T B^T)^T transpose formulation)
    return np.dot(intrinsics, lidar2camera)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def load_ego2world_transform(ego_file: str) -> np.ndarray:
    """Load pickled ego data and build the ego-to-world transform.

    Args:
        ego_file: path to a pickle holding "location", "rotation", "size"

    Returns:
        4 x 4 ego-to-world transformation matrix
    """
    # NOTE(review): pickle.load must only be used on trusted files —
    # presumably these are generated by our own data pipeline; verify.
    with open(ego_file, "rb") as fp:
        ego_data: Dict = pickle.load(fp)

    # Drop the reference point from the bounding-box center to the ground
    location: np.ndarray = ego_data["location"]
    location[2] -= ego_data["size"][2] / 2

    return create_transform(location, ego_data["rotation"])
|
|
|
|
|
|
|
|
def ego_to_lidar_transform() -> np.ndarray:
    """Get ego to lidar 4 x 4 transformation matrix.

    Computed as the inverse of the lidar-to-ego transform.
    """
    lidar2ego: np.ndarray = lidar_to_ego_transform()
    return np.linalg.inv(lidar2ego)
|
|
|
|
|
|
|
|
def ego_to_camera_transform(cam_param: Dict[str, float]) -> np.ndarray:
    """Get ego to camera 4 x 4 transformation matrix.

    Computed as the inverse of the camera-to-ego transform.
    """
    camera2ego: np.ndarray = camera_to_ego_transform(cam_param)
    return np.linalg.inv(camera2ego)
|
|
|