diff --git a/extracted_data/config.json b/extracted_data/config.json new file mode 100644 index 0000000000000000000000000000000000000000..7e2533d57e876f71ea55459ed32dd8c111e20d0b --- /dev/null +++ b/extracted_data/config.json @@ -0,0 +1,119 @@ +{ + "model": { + "type": "PointPillars", + "voxel_size": [ + 0.2, + 0.2, + 8.0 + ], + "pc_range": [ + -51.2, + -51.2, + -5.0, + 51.2, + 51.2, + 3.0 + ], + "num_input_features": 5 + }, + "voxel_generator": { + "range": [ + -51.2, + -51.2, + -5.0, + 51.2, + 51.2, + 3.0 + ], + "voxel_size": [ + 0.2, + 0.2, + 8.0 + ], + "max_points_in_voxel": 20, + "max_voxel_num": [ + 30000, + 60000 + ] + }, + "test_cfg": { + "post_center_limit_range": [ + -61.2, + -61.2, + -10.0, + 61.2, + 61.2, + 10.0 + ], + "max_per_img": 500, + "nms": { + "nms_pre_max_size": 1000, + "nms_post_max_size": 83, + "nms_iou_threshold": 0.2 + }, + "score_threshold": 0.1, + "pc_range": [ + -51.2, + -51.2 + ], + "out_size_factor": 4, + "voxel_size": [ + 0.2, + 0.2 + ] + }, + "class_names": [ + "car", + "truck", + "construction_vehicle", + "bus", + "trailer", + "barrier", + "motorcycle", + "bicycle", + "pedestrian", + "traffic_cone" + ], + "tasks": [ + { + "num_class": 1, + "class_names": [ + "car" + ] + }, + { + "num_class": 2, + "class_names": [ + "truck", + "construction_vehicle" + ] + }, + { + "num_class": 2, + "class_names": [ + "bus", + "trailer" + ] + }, + { + "num_class": 1, + "class_names": [ + "barrier" + ] + }, + { + "num_class": 2, + "class_names": [ + "motorcycle", + "bicycle" + ] + }, + { + "num_class": 2, + "class_names": [ + "pedestrian", + "traffic_cone" + ] + } + ] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151603547590.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151603547590.json new file mode 100644 index 0000000000000000000000000000000000000000..4f4023ad9a5b737bdd0ab08cc89518d22950b5f9 --- /dev/null +++ 
b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151603547590.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151603547590", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604048025.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604048025.json new file mode 100644 index 0000000000000000000000000000000000000000..3535a14e75b70ed9f4002d78d9f31cc3aff64815 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604048025.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604048025", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604547893.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604547893.json new file mode 100644 index 0000000000000000000000000000000000000000..01aa8742fe15d5e3c7589760bd9e512957c6ce3b --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604547893.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604547893", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605047769.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605047769.json new file mode 100644 index 0000000000000000000000000000000000000000..c04e483e3c9522c0f25bed9739d7fdd673d914cf --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605047769.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605047769", + "gt_boxes": [], + "gt_names": [] +} \ No newline at 
end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605548192.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605548192.json new file mode 100644 index 0000000000000000000000000000000000000000..c472b5b536f98c8568007776ec1c487cfee99966 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605548192.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605548192", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606048630.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606048630.json new file mode 100644 index 0000000000000000000000000000000000000000..ca7577ecef2834e37b49fb8b5104ad557b715bc9 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606048630.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606048630", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606549066.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606549066.json new file mode 100644 index 0000000000000000000000000000000000000000..e3019196e2b736e52b0328e8735161239958d55f --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606549066.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606549066", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607048933.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607048933.json new file mode 100644 index 
0000000000000000000000000000000000000000..023ab73f3afa0b18fbef2840b9072928d2943c36 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607048933.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607048933", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607548824.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607548824.json new file mode 100644 index 0000000000000000000000000000000000000000..ecc51429168f661c3798f713116c754c8f92d19e --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607548824.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607548824", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608048151.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608048151.json new file mode 100644 index 0000000000000000000000000000000000000000..9868b7afdfda33a8e09a4936cc76a5b20c7e33ef --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608048151.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608048151", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608548020.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608548020.json new file mode 100644 index 0000000000000000000000000000000000000000..3d663890cc929226c1638b901f49598def642877 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608548020.json @@ -0,0 +1,5 @@ +{ + "token": 
"n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608548020", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609047890.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609047890.json new file mode 100644 index 0000000000000000000000000000000000000000..02c571e0434f9400c4bf9414a7fdfa00e94fffeb --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609047890.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609047890", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609547766.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609547766.json new file mode 100644 index 0000000000000000000000000000000000000000..f594fb295e5c3b5a47b1d93f1159411965b41c5b --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609547766.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609547766", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609947025.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609947025.json new file mode 100644 index 0000000000000000000000000000000000000000..7f6cc54d5b9cc747dd4fa9f21481c7424b4e57b4 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609947025.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609947025", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610446899.json 
b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610446899.json new file mode 100644 index 0000000000000000000000000000000000000000..566a441fedf91d11239a73781cbf8a4ce06c67fa --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610446899.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610446899", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610946785.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610946785.json new file mode 100644 index 0000000000000000000000000000000000000000..16f9bbcb53bfea9731cc46078764eef82b9fc469 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610946785.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610946785", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611446636.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611446636.json new file mode 100644 index 0000000000000000000000000000000000000000..30e73151d0705f89a42badd8c3fc2202d8ddba24 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611446636.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611446636", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611896734.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611896734.json new file mode 100644 index 0000000000000000000000000000000000000000..49cc930f5ba56e71898efcdbd8fcee76c0b5bf9c --- /dev/null +++ 
b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611896734.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611896734", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.json new file mode 100644 index 0000000000000000000000000000000000000000..503ba8195c720e5ec17c573081512e92ae00006b --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612897588.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612897588.json new file mode 100644 index 0000000000000000000000000000000000000000..48e489fbe9b57f3a5dd413daefae18a2ad63126b --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612897588.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612897588", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613398020.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613398020.json new file mode 100644 index 0000000000000000000000000000000000000000..bbffa9988ee7b75047c1e6e87233b9abd1f510b2 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613398020.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613398020", + "gt_boxes": [], + "gt_names": [] +} \ No newline at 
end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613948222.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613948222.json new file mode 100644 index 0000000000000000000000000000000000000000..436614e14387d8c66332c7ccf0cb3c4d474f57d9 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613948222.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613948222", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614450164.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614450164.json new file mode 100644 index 0000000000000000000000000000000000000000..ae01e8cec293c3ca70d57817cc12462206a18538 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614450164.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614450164", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614948536.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614948536.json new file mode 100644 index 0000000000000000000000000000000000000000..f62e5d553043ba55ccb154d0a7fb53d1336a1dea --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614948536.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614948536", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615448397.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615448397.json new file mode 100644 index 
0000000000000000000000000000000000000000..7fb1f41d3d7fe3a93cdf95b0fcf656f0b089ff7b --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615448397.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615448397", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615947733.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615947733.json new file mode 100644 index 0000000000000000000000000000000000000000..590176aa11d3eb6fc915088e1b591a04d4619021 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615947733.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615947733", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616447606.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616447606.json new file mode 100644 index 0000000000000000000000000000000000000000..7333bf30788888d571d7eb7425f3d8928d16d27b --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616447606.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616447606", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616947490.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616947490.json new file mode 100644 index 0000000000000000000000000000000000000000..eed839bc2bf97b92d61c2461cc60618299b94a97 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616947490.json @@ -0,0 +1,5 @@ +{ + "token": 
"n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616947490", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617397050.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617397050.json new file mode 100644 index 0000000000000000000000000000000000000000..93f22fe26225172c4a1263525a147511945d025c --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617397050.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617397050", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617947237.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617947237.json new file mode 100644 index 0000000000000000000000000000000000000000..44ec48ccb8e3309dae1942c6d4159adf6c6d487f --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617947237.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617947237", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618447134.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618447134.json new file mode 100644 index 0000000000000000000000000000000000000000..dec048900c273b55002ca5d1a412a9c9e1b784c8 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618447134.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618447134", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618946993.json 
b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618946993.json new file mode 100644 index 0000000000000000000000000000000000000000..19fa2c192a909352c08814aeb37d1007d943c1d3 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618946993.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618946993", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619446866.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619446866.json new file mode 100644 index 0000000000000000000000000000000000000000..29086ac168c03e160098517965d12b578aad5792 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619446866.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619446866", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619947313.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619947313.json new file mode 100644 index 0000000000000000000000000000000000000000..1077fc17843ac887d668131d97e40e65ad92ef8b --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619947313.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619947313", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151620447745.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151620447745.json new file mode 100644 index 0000000000000000000000000000000000000000..ebfcdeaf6427bd20bced4db87bcbd175b772ba7f --- /dev/null +++ 
b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151620447745.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151620447745", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621047706.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621047706.json new file mode 100644 index 0000000000000000000000000000000000000000..87b123ac25d483f04376f516c885e4a83197b5be --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621047706.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621047706", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621448049.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621448049.json new file mode 100644 index 0000000000000000000000000000000000000000..9072280a5c23691ad3a94c2e07ddf3c794785eee --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621448049.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621448049", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621947928.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621947928.json new file mode 100644 index 0000000000000000000000000000000000000000..657dc0f4f71cdf4de44046532eef21b3e92e13be --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621947928.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621947928", + "gt_boxes": [], + "gt_names": [] +} \ No newline at 
end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622448916.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622448916.json new file mode 100644 index 0000000000000000000000000000000000000000..2259f3ae0ed714b306d7f88aeff4aeba6195cc18 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622448916.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622448916", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622948227.json b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622948227.json new file mode 100644 index 0000000000000000000000000000000000000000..8f10f05171616a4eeee6241f113716a365a9d365 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622948227.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622948227", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092150099.json b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092150099.json new file mode 100644 index 0000000000000000000000000000000000000000..13b3a40b87bb14bc6cca7aceb4962bcb25ee5fca --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092150099.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092150099", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092700299.json b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092700299.json new file mode 100644 index 
0000000000000000000000000000000000000000..c23586325d0bb70ee2102db036cae59bc3f02287 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092700299.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092700299", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093200183.json b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093200183.json new file mode 100644 index 0000000000000000000000000000000000000000..8789993ace3b49dd7102d510d72c28758e636b31 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093200183.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093200183", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093650312.json b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093650312.json new file mode 100644 index 0000000000000000000000000000000000000000..1ce346621b421d05c6d1fe8402fcef391b20a7c9 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093650312.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093650312", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094150195.json b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094150195.json new file mode 100644 index 0000000000000000000000000000000000000000..f27623b57cfac066d895ee78713e7e9c4160c95d --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094150195.json @@ -0,0 +1,5 @@ +{ + "token": 
"n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094150195", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094550004.json b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094550004.json new file mode 100644 index 0000000000000000000000000000000000000000..de23b4d8bf8f79630a8175ea557f03b24157de79 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094550004.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094550004", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094949783.json b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094949783.json new file mode 100644 index 0000000000000000000000000000000000000000..a95940a2136275590b6d4b5b4f247ba04552b06d --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094949783.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094949783", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095449675.json b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095449675.json new file mode 100644 index 0000000000000000000000000000000000000000..32054ab84a904c8860fa8d92348261044417b305 --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095449675.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095449675", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095949554.json 
b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095949554.json new file mode 100644 index 0000000000000000000000000000000000000000..e04aff4fb950648b00e19b011092e469e26698de --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095949554.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095949554", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385096399680.json b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385096399680.json new file mode 100644 index 0000000000000000000000000000000000000000..4877dc3955098fcd2e7c3d358524915415db34fb --- /dev/null +++ b/extracted_data/gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385096399680.json @@ -0,0 +1,5 @@ +{ + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385096399680", + "gt_boxes": [], + "gt_names": [] +} \ No newline at end of file diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151603547590.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151603547590.bin new file mode 100644 index 0000000000000000000000000000000000000000..3f4f746af01b21efeea8a8c32d6e0cf97bda2e21 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151603547590.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0f1a99c9062003d77e7bcb1ade1a6f5ecbcb6215a746f5ef9bb4fcdf115c4d9 +size 695040 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604048025.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604048025.bin new file mode 100644 index 0000000000000000000000000000000000000000..454c16dc138a747e36b30bba3a32eab0d8669a91 --- /dev/null +++ 
b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604048025.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e49a1107ebb60940d9da466a55f69d2f61b3f5c066f80b635c710ff30bc30cc3 +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604547893.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604547893.bin new file mode 100644 index 0000000000000000000000000000000000000000..d89612b263bed10a305ce7c9839aac3befc468d6 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604547893.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ea12ba5717bc5992f6af149b0fc94942e06019bb84f7f9bc786ef1587a6b20e +size 695040 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605047769.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605047769.bin new file mode 100644 index 0000000000000000000000000000000000000000..bf550ab38a29a44de60d4a2c3008ed5ef8560f69 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605047769.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f9d23460db5b733709a755ca3991da0a69fcf1120ce87c870a2fbb4e33ca8ddc +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605548192.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605548192.bin new file mode 100644 index 0000000000000000000000000000000000000000..5db57259b2bf63b7443094769f3a701875a0aef9 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605548192.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0c33a84aead83198601e47e827ca7bdb380098fbe55002b641d14afd1a389136 +size 695680 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606048630.bin 
b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606048630.bin new file mode 100644 index 0000000000000000000000000000000000000000..23b60dc38ca958e1cff8df81d7d10ad52de7c1ca --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606048630.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0212e4d03201da5794061d02c312d173c00d4dfcf0456efaf16e2bfdeae2485e +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606549066.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606549066.bin new file mode 100644 index 0000000000000000000000000000000000000000..01967daac51f0969d26cdade9f0fa20bc2e5daba --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606549066.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aafec8ba3b963290426201b1f39b1769c3ebb61a4939156725d5fa8ed8a8c006 +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607048933.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607048933.bin new file mode 100644 index 0000000000000000000000000000000000000000..3762aa6ef082bb2b10de5d6413bd9053103c04ff --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607048933.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:affb50546b1dab85be1486cfb872113420e403cb458c88129d39a10af6334e37 +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607548824.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607548824.bin new file mode 100644 index 0000000000000000000000000000000000000000..fddbdf9579277d1430c26bfcc83d8040b8c7b823 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607548824.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:6e87f0a50e55355a4f08a61c08bfd4f117f378447e1a75c47cc972f0c019b516 +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608048151.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608048151.bin new file mode 100644 index 0000000000000000000000000000000000000000..c60a52a6e78178788c79327e49966e7a6fff4e71 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608048151.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:20043a2361a8d9320456c7ff6d65fadb6813edb0d6adda283a5cfa4beab4a6c0 +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608548020.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608548020.bin new file mode 100644 index 0000000000000000000000000000000000000000..3ac987a371b2aa2b199410b2e0f4adbdfa263d09 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608548020.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f079c67ee764efed7c0d05744c9c2e7ff3d0e649fffdaae990ea0bae7e6829e4 +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609047890.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609047890.bin new file mode 100644 index 0000000000000000000000000000000000000000..12d0661aef179330bbb1707b1bcca67b6f449432 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609047890.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7cf3d8b46ef152cdca8e8c04cf8bf334308007553cc8a73b4358164c3b4ca831 +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609547766.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609547766.bin new file mode 100644 index 
0000000000000000000000000000000000000000..28d0ed46ece37c3193fc4396fc749eb494c69fb6 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609547766.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f3518600b85d6f82379ccfc63c2a17bf5c8db52ef34ecc5ef61891552d2954a2 +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609947025.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609947025.bin new file mode 100644 index 0000000000000000000000000000000000000000..975155a268adb940f1f5abb54c8de7a0df11dcaa --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609947025.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2386940ba9f7e1b85fb34c3b59a432c7815940b359db27ef60e511f76435642e +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610446899.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610446899.bin new file mode 100644 index 0000000000000000000000000000000000000000..fd9d20759fd41ffa594b34ceae8a77c6bd09617a --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610446899.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9c59ae797417a0413ef0afde8612971bcf9c0f3660455f76397001bd31833804 +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610946785.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610946785.bin new file mode 100644 index 0000000000000000000000000000000000000000..92573af1752634d40420eb72f7805e13f289c8c8 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610946785.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a84155e0419e3912d2b0896799e99b44b579412f63782e12151e774eb8545e9d +size 693760 diff --git 
a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611446636.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611446636.bin new file mode 100644 index 0000000000000000000000000000000000000000..65422744e092d7d4af7d43d46f3e3b913fe60aa1 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611446636.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c5d057fc5ebda77a09b6beb560c488fc71c0a95c41bcd16360bb9359174f1281 +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611896734.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611896734.bin new file mode 100644 index 0000000000000000000000000000000000000000..a1eb6dfca318012cecbcc4df1bf0211dca7f5f3f --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611896734.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:784f5b1c5130e013a26c34bcd8a4ee8f8287dfc794ce2093a7eca94cebb1d40d +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.bin new file mode 100644 index 0000000000000000000000000000000000000000..71da166a64b95c787ed239bc20a99872451006f0 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:849689fe7d44c40a1a9aecef6f0fe96b49e19d576ab5c5ebe84783d79299454e +size 695680 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612897588.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612897588.bin new file mode 100644 index 0000000000000000000000000000000000000000..d68fc03d8d60afa6f6c2dae55bd480177552b96e --- /dev/null +++ 
b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612897588.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:24e38645c5751dafc5657cc1baf706faa607314b98d91bb6d5cf1ce53713acf1 +size 695680 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613398020.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613398020.bin new file mode 100644 index 0000000000000000000000000000000000000000..23db39a168aa09907c63f3960bb02c46eadc5288 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613398020.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e55c60421cbd09f255651e59e48edc8d9ad77eb1191f33c5aac1b63d2fecb9d +size 695040 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613948222.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613948222.bin new file mode 100644 index 0000000000000000000000000000000000000000..0109bd5abb3f67a0964d08724ab46e465c0915a8 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613948222.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a26bf6af2ec25be6702e443615e2488dd19c1ac9c67da169e62f9b61dbccfd0 +size 695040 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614450164.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614450164.bin new file mode 100644 index 0000000000000000000000000000000000000000..a4145d1cdb13364f4c3b8bd012962b32e7d97f40 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614450164.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bdf85c6023015af80339ff6e77216a2d9f50b1a8b66395d60abde1079ed4c4b6 +size 695040 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614948536.bin 
b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614948536.bin new file mode 100644 index 0000000000000000000000000000000000000000..ff5e888266d002123cd3c22517ee34b00ba2707f --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614948536.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:10068637818695cdc9a5572ac91cee8ce1cbf46ecc0aeac13829b56695c0492d +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615448397.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615448397.bin new file mode 100644 index 0000000000000000000000000000000000000000..963ef67dbf885849b0fede497b706883cb66ffa4 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615448397.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:baeecce3e8434ab2a644ad3009a8c565e26d7e84c8d5e21cb33d0ab2da075c67 +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615947733.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615947733.bin new file mode 100644 index 0000000000000000000000000000000000000000..614e7fe28f8820292c56ed53a0ef5a7fe877c9d0 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615947733.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a36d7bebfcaed3f1f5ee3d8394ecc390554f71af8940a344784376a78f207882 +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616447606.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616447606.bin new file mode 100644 index 0000000000000000000000000000000000000000..29544cc401c6bf4bf8c01c3c12bbe220828d7828 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616447606.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:7a758aa0affd2be226f1d855443eeffd64be5941e8b3a5f4a34e23092c3d0b51 +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616947490.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616947490.bin new file mode 100644 index 0000000000000000000000000000000000000000..cc9b65e13b8fbeb62e8129c77b230c067818cc3b --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616947490.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:00a7028da8dea14dc9c7aa0925aae95423b1d4d4f14c4d22b12a33f0107ebf16 +size 695040 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617397050.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617397050.bin new file mode 100644 index 0000000000000000000000000000000000000000..8c52e428b373cffd5d594ac4f5be320888a8ceea --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617397050.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9ea22c73b7f6e60b7f699f135b04e24362081c1610dc7ef2f7f697c006099a81 +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617947237.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617947237.bin new file mode 100644 index 0000000000000000000000000000000000000000..9f8d16b1956810d4442c5a0ec8db8e8fa4f5de3a --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617947237.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:45de7efb89aefec76fc1b2fe21793274a0bdf96d791888aba5332a0641e47f81 +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618447134.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618447134.bin new file mode 100644 index 
0000000000000000000000000000000000000000..0129dc156c6c27c6b14d7e6c3d959d8b14fda404 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618447134.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:43211760236dff9cd489dda42481caaf76ada59998bbabb526c27d08b4c0192a +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618946993.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618946993.bin new file mode 100644 index 0000000000000000000000000000000000000000..4d622fd05856b1ff33908bce8589d403e575f4ee --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618946993.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e0511ebe3052357d5b6a2d712b3de227e1fef60c5dc04e6a1da57b1328f55176 +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619446866.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619446866.bin new file mode 100644 index 0000000000000000000000000000000000000000..0fa598c72e86afa649d39b02aae6e3fb66b06404 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619446866.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9a6f65f3c8fdae412d003cfdb446dacac7135900527675ceb68d4a2b445c694e +size 695040 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619947313.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619947313.bin new file mode 100644 index 0000000000000000000000000000000000000000..c74711bf7c06db2cf7a16680bd0a12bcaf7cedaf --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619947313.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2a8955de9e176d26954d0e5dde376788d99f0bdcb038becea5d16098a4c6d28 +size 695680 diff --git 
a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151620447745.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151620447745.bin new file mode 100644 index 0000000000000000000000000000000000000000..a30f0089cc0cfab1dd16340201cf3c9c6ead0c75 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151620447745.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6ad7448837c2c6b250021e52016e8132ceb36aecd404522bce7f9081f8e5af16 +size 693760 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621047706.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621047706.bin new file mode 100644 index 0000000000000000000000000000000000000000..c3d8fa066a825f54d9bcbf390e0b873e810124c3 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621047706.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6473b31bea554973641834fced0ba1f6af138e90fff7a30d9b618ffd3060b33f +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621448049.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621448049.bin new file mode 100644 index 0000000000000000000000000000000000000000..c0b93671aa4b14474cd47da78ceea63618520543 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621448049.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ea60023d2bbf01a0e19ef052bbfcf7cf0ae398fe932e5c1c3e76aaa8687d49be +size 695040 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621947928.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621947928.bin new file mode 100644 index 0000000000000000000000000000000000000000..81c4302ba3dd235eb796b53f72efcd92b1bb8880 --- /dev/null +++ 
b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621947928.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fe903c6eee2805bcb3b24659acca34e8e3f402934516c659eb7cefd2e3b901fe +size 694400 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622448916.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622448916.bin new file mode 100644 index 0000000000000000000000000000000000000000..8e392e0edc0b3ce2a98718ccac09d6e767d37077 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622448916.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:50c1fb831798bf4ea4e2170719cbf9cba1bcdee4779826c67b22741927a01192 +size 695040 diff --git a/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622948227.bin b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622948227.bin new file mode 100644 index 0000000000000000000000000000000000000000..4608243d85807953e1921adec2c3c58622fda3b5 --- /dev/null +++ b/extracted_data/points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622948227.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f1d1913e51fe89bbabde7bd018a3dc4033ec3496709bc442fb3e5f16ca6e0afa +size 694400 diff --git a/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092150099.bin b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092150099.bin new file mode 100644 index 0000000000000000000000000000000000000000..fc00fdb2099559487776b04de29f0a41feb318f4 --- /dev/null +++ b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092150099.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3bba5aa8d214d24a7a80c83321bf655c7a2cb57cc74abe2d8890bcca984a5e5a +size 687360 diff --git a/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092700299.bin 
b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092700299.bin new file mode 100644 index 0000000000000000000000000000000000000000..22b0ffc82272d3888ed5acb406723e9df5bc62b0 --- /dev/null +++ b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092700299.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d721de9355a0b7bbe598ad17f816c6584d8e8b2d55710604e5e8cfb70cbbb13 +size 694400 diff --git a/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093200183.bin b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093200183.bin new file mode 100644 index 0000000000000000000000000000000000000000..91b3bf11ba5734575b03a0c742b0c6d8f7b32e28 --- /dev/null +++ b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093200183.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7fadaabd87ab2bc0616c742cada4dae5c3e9a62a3ed0e7d6bb90a40b3243963c +size 695040 diff --git a/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093650312.bin b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093650312.bin new file mode 100644 index 0000000000000000000000000000000000000000..eb23ff6484f848e5c891828ef9121c7429519409 --- /dev/null +++ b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093650312.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c0fb4d38f2ded9b93fe70bd9193ebc7370652d6f9b85bc7bbb7a4d6d9137c09 +size 694400 diff --git a/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094150195.bin b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094150195.bin new file mode 100644 index 0000000000000000000000000000000000000000..9cb4f470ebd137fa6eff5dacb41dd6f96efb3cad --- /dev/null +++ b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094150195.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:aecbc42333a642eae68a3e3eb748d24466c26683f27f2bbf99b061cd16be3307 +size 693760 diff --git a/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094550004.bin b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094550004.bin new file mode 100644 index 0000000000000000000000000000000000000000..f945617a466ea54621a107cbcffd3feb56e8ab5c --- /dev/null +++ b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094550004.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0d5ca2957f19f76b18fa7d8b0cda1b47cd4f7934d67ca3db73b9ccd89f30a3d4 +size 693760 diff --git a/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094949783.bin b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094949783.bin new file mode 100644 index 0000000000000000000000000000000000000000..5c7d3e6a82daa50bf16ca17d9a34bf70be4bdbab --- /dev/null +++ b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094949783.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:32512921afc134c10fb723f486f874275666564c2aaccab2895dfc42c88cca62 +size 694400 diff --git a/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095449675.bin b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095449675.bin new file mode 100644 index 0000000000000000000000000000000000000000..a458311ca56c2197dde47252d38f6e3775720b4a --- /dev/null +++ b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095449675.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d6acb4c41f095a0c1ef852999bb265b352145a24a75b83e14a8398e95538230 +size 694400 diff --git a/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095949554.bin b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095949554.bin new file mode 100644 index 
0000000000000000000000000000000000000000..f4f3a68ef7820053d6d61d3d71277df2db5382ff --- /dev/null +++ b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095949554.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ddeef538918974b4615b614c899f6a3cc782a031ce8e5e4020a7546aea8decc1 +size 694400 diff --git a/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385096399680.bin b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385096399680.bin new file mode 100644 index 0000000000000000000000000000000000000000..904adcfb5b278735000cb52d3cebd25f92bda788 --- /dev/null +++ b/extracted_data/points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385096399680.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:36decf7bef52f5ce969f5838f0c93ab0ae16a62d1b5ef8c59d4ad06a40784140 +size 694400 diff --git a/extracted_data/sample_index.json b/extracted_data/sample_index.json new file mode 100644 index 0000000000000000000000000000000000000000..71db56e214b033434749b4857379c86a034b4507 --- /dev/null +++ b/extracted_data/sample_index.json @@ -0,0 +1,305 @@ +{ + "samples": [ + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151603547590", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151603547590.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151603547590.json", + "num_points": 34752 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604048025", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604048025.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604048025.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604547893", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604547893.bin", + "gt_path": 
"gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151604547893.json", + "num_points": 34752 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605047769", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605047769.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605047769.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605548192", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605548192.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151605548192.json", + "num_points": 34784 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606048630", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606048630.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606048630.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606549066", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606549066.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151606549066.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607048933", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607048933.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607048933.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607548824", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607548824.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151607548824.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608048151", + "points_path": 
"points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608048151.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608048151.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608548020", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608548020.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151608548020.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609047890", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609047890.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609047890.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609547766", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609547766.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609547766.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609947025", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609947025.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151609947025.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610446899", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610446899.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610446899.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610946785", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610946785.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151610946785.json", + "num_points": 34688 + }, + { + "token": 
"n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611446636", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611446636.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611446636.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611896734", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611896734.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151611896734.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612397179.json", + "num_points": 34784 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612897588", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612897588.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151612897588.json", + "num_points": 34784 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613398020", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613398020.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613398020.json", + "num_points": 34752 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613948222", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613948222.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151613948222.json", + "num_points": 34752 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614450164", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614450164.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614450164.json", + "num_points": 
34752 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614948536", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614948536.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151614948536.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615448397", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615448397.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615448397.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615947733", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615947733.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151615947733.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616447606", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616447606.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616447606.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616947490", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616947490.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151616947490.json", + "num_points": 34752 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617397050", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617397050.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617397050.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617947237", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617947237.bin", + "gt_path": 
"gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151617947237.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618447134", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618447134.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618447134.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618946993", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618946993.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151618946993.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619446866", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619446866.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619446866.json", + "num_points": 34752 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619947313", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619947313.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151619947313.json", + "num_points": 34784 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151620447745", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151620447745.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151620447745.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621047706", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621047706.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621047706.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621448049", + "points_path": 
"points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621448049.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621448049.json", + "num_points": 34752 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621947928", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621947928.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151621947928.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622448916", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622448916.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622448916.json", + "num_points": 34752 + }, + { + "token": "n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622948227", + "points_path": "points/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622948227.bin", + "gt_path": "gt_annotations/n008-2018-08-01-15-16-36-0400__LIDAR_TOP__1533151622948227.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092150099", + "points_path": "points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092150099.bin", + "gt_path": "gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092150099.json", + "num_points": 34368 + }, + { + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092700299", + "points_path": "points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092700299.bin", + "gt_path": "gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385092700299.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093200183", + "points_path": "points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093200183.bin", + "gt_path": "gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093200183.json", + "num_points": 34752 + }, + { + "token": 
"n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093650312", + "points_path": "points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093650312.bin", + "gt_path": "gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385093650312.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094150195", + "points_path": "points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094150195.bin", + "gt_path": "gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094150195.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094550004", + "points_path": "points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094550004.bin", + "gt_path": "gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094550004.json", + "num_points": 34688 + }, + { + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094949783", + "points_path": "points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094949783.bin", + "gt_path": "gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385094949783.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095449675", + "points_path": "points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095449675.bin", + "gt_path": "gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095449675.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095949554", + "points_path": "points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095949554.bin", + "gt_path": "gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385095949554.json", + "num_points": 34720 + }, + { + "token": "n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385096399680", + "points_path": "points/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385096399680.bin", + "gt_path": "gt_annotations/n008-2018-08-27-11-48-51-0400__LIDAR_TOP__1535385096399680.json", + "num_points": 
34720 + } + ], + "total_samples": 50 +} \ No newline at end of file diff --git a/inference_axmodel.py b/inference_axmodel.py new file mode 100644 index 0000000000000000000000000000000000000000..e374e8a24b1c4d4421fedf7020f5026a5db11c87 --- /dev/null +++ b/inference_axmodel.py @@ -0,0 +1,841 @@
#!/usr/bin/env python3
"""
CenterPoint AXEngine Inference Demo

Usage:
    python inference_axmodel.py ./centerpoint.axmodel ./extracted_data/config.json ./extracted_data \
        --output-dir ./inference_results_ax --num-samples 10
"""

import argparse
import json
import os
import os.path as osp
import numpy as np
from tqdm import tqdm
import numba

# axengine is only present on the target (Axera) runtime.  Degrade to None so
# the module can still be imported elsewhere; main() checks for this and exits.
try:
    import axengine as axe
except ImportError:
    print("Warning: axengine not available. Install it to use AXEngine inference.")
    axe = None


def parse_args():
    """Parse command-line arguments for the AXEngine inference demo."""
    parser = argparse.ArgumentParser(description='CenterPoint AXEngine Inference')
    parser.add_argument('axmodel', help='AXModel path')
    parser.add_argument('config_json', help='JSON config file path')
    parser.add_argument('data_dir', help='extracted data directory')
    parser.add_argument('--output-dir', default='./inference_results_ax', help='output directory')
    parser.add_argument('--score-thr', type=float, default=0.1, help='score threshold')
    parser.add_argument('--num-samples', type=int, default=None, help='number of samples to process')
    parser.add_argument('--visualize', action='store_true', help='save visualization images and video')
    parser.add_argument('--fps', type=int, default=10, help='video fps')
    return parser.parse_args()


def load_axmodel(axmodel_path):
    """Load AXModel and return an axengine InferenceSession.

    Raises:
        RuntimeError: if axengine failed to import at module load time.
    """
    if axe is None:
        raise RuntimeError("axengine is not installed")

    providers = ['AxEngineExecutionProvider']
    session = axe.InferenceSession(axmodel_path, providers=providers)
    return session


def load_config(config_path):
    """Load configuration from JSON file"""
    with open(config_path, 'r') as f:
        config = json.load(f)
    return config


def 
load_sample_index(data_dir): + """Load sample index""" + index_path = osp.join(data_dir, 'sample_index.json') + with open(index_path, 'r') as f: + sample_index = json.load(f) + return sample_index + + +def load_points(data_dir, points_path): + """Load point cloud data from binary file""" + full_path = osp.join(data_dir, points_path) + points = np.fromfile(full_path, dtype=np.float32).reshape(-1, 5) + return points + + +def load_gt(data_dir, gt_path): + """Load ground truth annotations""" + full_path = osp.join(data_dir, gt_path) + with open(full_path, 'r') as f: + gt = json.load(f) + return gt + + +@numba.jit(nopython=True) +def _points_to_voxel_kernel( + points, + voxel_size, + coors_range, + num_points_per_voxel, + coor_to_voxelidx, + voxels, + coors, + max_points=20, + max_voxels=30000, +): + """Voxelization kernel using numba for acceleration""" + N = points.shape[0] + ndim = 3 + ndim_minus_1 = ndim - 1 + grid_size = (coors_range[3:] - coors_range[:3]) / voxel_size + grid_size = np.round(grid_size, 0, grid_size).astype(np.int32) + coor = np.zeros(shape=(3,), dtype=np.int32) + voxel_num = 0 + failed = False + + for i in range(N): + failed = False + for j in range(ndim): + c = np.floor((points[i, j] - coors_range[j]) / voxel_size[j]) + if c < 0 or c >= grid_size[j]: + failed = True + break + coor[ndim_minus_1 - j] = c + if failed: + continue + voxelidx = coor_to_voxelidx[coor[0], coor[1], coor[2]] + if voxelidx == -1: + voxelidx = voxel_num + if voxel_num >= max_voxels: + continue + voxel_num += 1 + coor_to_voxelidx[coor[0], coor[1], coor[2]] = voxelidx + coors[voxelidx] = coor + num = num_points_per_voxel[voxelidx] + if num < max_points: + voxels[voxelidx, num] = points[i] + num_points_per_voxel[voxelidx] += 1 + return voxel_num + + +def points_to_voxel(points, voxel_size, coors_range, max_points=20, max_voxels=30000): + """Convert point cloud to voxels + + Args: + points: [N, 5] float32 array (x, y, z, intensity, time_lag) + voxel_size: [3] voxel size (x, y, z) 
+ coors_range: [6] point cloud range (xmin, ymin, zmin, xmax, ymax, zmax) + max_points: max points per voxel + max_voxels: max number of voxels + + Returns: + voxels: [M, max_points, 5] voxel features + coors: [M, 3] voxel coordinates (z, y, x) + num_points_per_voxel: [M] number of points in each voxel + """ + if not isinstance(voxel_size, np.ndarray): + voxel_size = np.array(voxel_size, dtype=np.float32) + if not isinstance(coors_range, np.ndarray): + coors_range = np.array(coors_range, dtype=np.float32) + + voxelmap_shape = (coors_range[3:] - coors_range[:3]) / voxel_size + voxelmap_shape = tuple(np.round(voxelmap_shape).astype(np.int32).tolist()) + voxelmap_shape = voxelmap_shape[::-1] # reverse to (z, y, x) + + num_points_per_voxel = np.zeros(shape=(max_voxels,), dtype=np.int32) + coor_to_voxelidx = -np.ones(shape=voxelmap_shape, dtype=np.int32) + voxels = np.zeros(shape=(max_voxels, max_points, points.shape[-1]), dtype=np.float32) + coors = np.zeros(shape=(max_voxels, 3), dtype=np.int32) + + voxel_num = _points_to_voxel_kernel( + points.astype(np.float32), + voxel_size, + coors_range, + num_points_per_voxel, + coor_to_voxelidx, + voxels, + coors, + max_points, + max_voxels, + ) + + coors = coors[:voxel_num] + voxels = voxels[:voxel_num] + num_points_per_voxel = num_points_per_voxel[:voxel_num] + + return voxels, coors, num_points_per_voxel + + +def preprocess_pointpillars(points, config): + """Preprocess point cloud for PointPillars model""" + voxel_cfg = config['voxel_generator'] + voxel_size = np.array(voxel_cfg['voxel_size'], dtype=np.float32) + pc_range = np.array(voxel_cfg['range'], dtype=np.float32) + max_points = voxel_cfg['max_points_in_voxel'] + max_voxels = voxel_cfg['max_voxel_num'][1] if isinstance(voxel_cfg['max_voxel_num'], list) else voxel_cfg['max_voxel_num'] + + # Voxelization + voxels, coors, num_points = points_to_voxel( + points, voxel_size, pc_range, max_points, max_voxels + ) + + return voxels, coors, num_points + + 
@numba.jit(nopython=True)
def _create_pillars_input_kernel(voxels, coors, num_points, features, indices,
                                 voxel_size, pc_range, bev_w, num_voxels):
    """Numba-accelerated kernel for pillar feature computation.

    Writes the 10-channel pillar features and the flattened BEV scatter
    indices in place.  Channels 0-4 are the raw point values, 5-7 are
    offsets from the pillar's point centroid, 8-9 offsets from the pillar's
    grid-cell center.
    """
    for i in range(num_voxels):
        n_points = num_points[i]
        if n_points == 0:
            continue

        voxel = voxels[i]
        coor = coors[i]

        # Compute pillar center (mean of the points in this pillar)
        x_sum = 0.0
        y_sum = 0.0
        z_sum = 0.0
        for j in range(n_points):
            x_sum += voxel[j, 0]
            y_sum += voxel[j, 1]
            z_sum += voxel[j, 2]
        x_center = x_sum / n_points
        y_center = y_sum / n_points
        z_center = z_sum / n_points

        # Compute pillar position: center of the grid cell (coor is z, y, x)
        x_pillar = coor[2] * voxel_size[0] + pc_range[0] + voxel_size[0] / 2
        y_pillar = coor[1] * voxel_size[1] + pc_range[1] + voxel_size[1] / 2

        # Fill features
        for j in range(n_points):
            features[0, i, j] = voxel[j, 0]  # x
            features[1, i, j] = voxel[j, 1]  # y
            features[2, i, j] = voxel[j, 2]  # z
            features[3, i, j] = voxel[j, 3]  # intensity
            features[4, i, j] = voxel[j, 4]  # time_lag
            features[5, i, j] = voxel[j, 0] - x_center  # x_c
            features[6, i, j] = voxel[j, 1] - y_center  # y_c
            features[7, i, j] = voxel[j, 2] - z_center  # z_c
            features[8, i, j] = voxel[j, 0] - x_pillar  # x_p
            features[9, i, j] = voxel[j, 1] - y_pillar  # y_p

        # Compute flattened BEV index (row-major: y * width + x)
        indices[i, 1] = coor[1] * bev_w + coor[2]


def create_pillars_input(voxels, coors, num_points, config, max_pillars=30000):
    """Create input tensors for the PointPillars AXModel (numba-accelerated)

    The model expects:
    - features: [1, 10, max_pillars, max_points_per_pillar]
    - indices: [1, max_pillars, 2]

    Pillars beyond ``max_pillars`` are dropped; unused slots keep the -1
    "invalid" scatter index so the model can skip them.
    """
    voxel_cfg = config['voxel_generator']
    voxel_size = np.array(voxel_cfg['voxel_size'], dtype=np.float32)
    pc_range = np.array(voxel_cfg['range'], dtype=np.float32)
    max_points_per_pillar = voxel_cfg['max_points_in_voxel']

    num_voxels = voxels.shape[0]

    # Pad or truncate to max_pillars
    if num_voxels > max_pillars:
        voxels = voxels[:max_pillars]
        coors = coors[:max_pillars]
        num_points = num_points[:max_pillars]
        num_voxels = max_pillars

    # Initialize tensors
    features = np.zeros((10, max_pillars, max_points_per_pillar), dtype=np.float32)
    indices = np.zeros((max_pillars, 2), dtype=np.int32)
    indices[:, 0] = 0   # batch index
    indices[:, 1] = -1  # invalid index marker

    # BEV grid size (width in cells along x)
    bev_w = int((pc_range[3] - pc_range[0]) / voxel_size[0])

    # Call numba kernel
    _create_pillars_input_kernel(
        voxels, coors, num_points, features, indices,
        voxel_size, pc_range, bev_w, num_voxels
    )

    # Add batch dimension
    features = features[np.newaxis, ...]  # [1, 10, max_pillars, max_points_per_pillar]
    indices = indices[np.newaxis, ...]    # [1, max_pillars, 2]

    return features, indices


def decode_bbox(reg, height, dim, rot, vel, score, cls, config, task_idx):
    """Decode detection outputs to 3D bounding boxes.

    NOTE(review): this duplicates the per-task decoding done inline in
    postprocess() below; keep the two in sync if either changes.

    Returns:
        (boxes [K, 9], scores [K], labels [K]) — boxes are
        [x, y, z, w, l, h, theta, vx, vy]; empty arrays if nothing passes
        the config score threshold.
    """
    test_cfg = config['test_cfg']
    voxel_size = test_cfg['voxel_size']
    pc_range = test_cfg['pc_range']
    out_size_factor = test_cfg['out_size_factor']
    score_threshold = test_cfg['score_threshold']

    H, W = score.shape

    # Create grid
    xs = np.arange(W, dtype=np.float32)
    ys = np.arange(H, dtype=np.float32)
    xs, ys = np.meshgrid(xs, ys)

    # Decode center: feature-map cell + sub-cell offset, scaled to meters
    xs = (xs + reg[..., 0]) * out_size_factor * voxel_size[0] + pc_range[0]
    ys = (ys + reg[..., 1]) * out_size_factor * voxel_size[1] + pc_range[1]
    zs = height[..., 0]

    # Decode rotation from (sin, cos) channels
    theta = np.arctan2(rot[..., 0], rot[..., 1])

    # Get class offset for this task (start of the task's class range in the
    # global 10-class list; tasks have 1/2/2/1/2/2 classes)
    class_offset = [0, 1, 3, 5, 6, 8][task_idx]

    # Filter by score
    mask = score > score_threshold

    if not np.any(mask):
        return np.zeros((0, 9), dtype=np.float32), np.zeros((0,)), np.zeros((0,), dtype=np.int32)

    # Extract valid predictions
    xs = xs[mask]
    ys = ys[mask]
    zs = zs[mask]
    dims = dim[mask]
    theta = theta[mask]
    vels = vel[mask]
    scores = score[mask]
    labels = cls[mask] + class_offset

    # Construct boxes: [x, y, z, w, l, h, theta, vx, vy]
    boxes = np.stack([
        xs, ys, zs,
        dims[:, 2],  # w
        dims[:, 0],  # l
        dims[:, 1],  # h
        theta,
        vels[:, 0],  # vx
        vels[:, 1],  # vy
    ], axis=-1)

    return boxes.astype(np.float32), scores.astype(np.float32), labels.astype(np.int32)


@numba.jit(nopython=True)
def _nms_bev_kernel(boxes, scores, nms_threshold, max_output=500):
    """Numba-accelerated NMS kernel.

    Axis-aligned IoU in the BEV plane (box rotation is ignored).
    Returns indices of kept boxes, highest score first.
    """
    n = len(boxes)
    if n == 0:
        return np.zeros(0, dtype=np.int64)

    # Sort by score descending
    order = np.argsort(-scores)

    # Pre-compute box corners (x extent uses l, y extent uses w)
    x1 = boxes[:, 0] - boxes[:, 4] / 2  # x - l/2
    y1 = boxes[:, 1] - boxes[:, 3] / 2  # y - w/2
    x2 = boxes[:, 0] + boxes[:, 4] / 2  # x + l/2
    y2 = boxes[:, 1] + boxes[:, 3] / 2  # y + w/2
    areas = boxes[:, 3] * boxes[:, 4]   # w * l

    suppressed = np.zeros(n, dtype=np.int32)
    keep = np.zeros(max_output, dtype=np.int64)
    num_keep = 0

    for _i in range(n):
        i = order[_i]
        if suppressed[i] == 1:
            continue

        keep[num_keep] = i
        num_keep += 1
        if num_keep >= max_output:
            break

        # Compute IoU with remaining boxes
        for _j in range(_i + 1, n):
            j = order[_j]
            if suppressed[j] == 1:
                continue

            # Compute intersection
            ix1 = max(x1[i], x1[j])
            iy1 = max(y1[i], y1[j])
            ix2 = min(x2[i], x2[j])
            iy2 = min(y2[i], y2[j])

            iw = max(0.0, ix2 - ix1)
            ih = max(0.0, iy2 - iy1)
            inter = iw * ih

            # Compute IoU (epsilon guards zero-area boxes)
            union = areas[i] + areas[j] - inter
            iou = inter / max(union, 1e-6)

            if iou > nms_threshold:
                suppressed[j] = 1

    return keep[:num_keep]


def nms_bev(boxes, scores, labels, nms_threshold=0.2):
    """Aligned BEV NMS (numba-accelerated).

    NOTE(review): `labels` is accepted but unused — suppression is
    class-agnostic across all tasks; confirm this is intended.
    """
    if len(boxes) == 0:
        return np.array([], dtype=np.int64)
    return _nms_bev_kernel(boxes, scores, nms_threshold)


def postprocess(outputs, config, score_thr=0.1):
    """Postprocess model outputs

    CenterPoint model output structure (42 outputs total, 7 per task, 6 tasks):
    Per task output order:
    - reg: [1, 2, 128, 128] - registration offset
    - height: [1, 1, 128, 128] - height
    - dim: [1, 3, 128, 128] - dimensions (l, h, w)
    - rot: [1, 2, 128, 128] - rotation (sin, cos)
    - vel: [1, 2, 128, 128] - velocity
    - score: [1, 128, 128] - confidence (after sigmoid)
    - cls: [1, 128, 128] - class index (after argmax)

    Decodes each task's heatmap, merges all tasks, then applies BEV NMS,
    the caller's score threshold and the max_per_img cap.
    """
    tasks = config['tasks']
    num_tasks = len(tasks)   # 6 tasks
    outputs_per_task = 7     # reg, height, dim, rot, vel, score, cls

    test_cfg = config['test_cfg']
    voxel_size = test_cfg['voxel_size']
    pc_range = test_cfg['pc_range']
    out_size_factor = test_cfg['out_size_factor']
    score_threshold = test_cfg['score_threshold']

    all_boxes = []
    all_scores = []
    all_labels = []

    # Class offset for each task (start of its class range in the global list)
    class_offsets = [0, 1, 3, 5, 6, 8]

    for task_idx in range(num_tasks):
        base_idx = task_idx * outputs_per_task

        reg = outputs[base_idx + 0][0]     # [2, H, W]
        height = outputs[base_idx + 1][0]  # [1, H, W]
        dim = outputs[base_idx + 2][0]     # [3, H, W]
        rot = outputs[base_idx + 3][0]     # [2, H, W]
        vel = outputs[base_idx + 4][0]     # [2, H, W]
        score = outputs[base_idx + 5][0]   # [H, W]
        cls = outputs[base_idx + 6][0]     # [H, W]

        H, W = score.shape

        xs = np.arange(W, dtype=np.float32)
        ys = np.arange(H, dtype=np.float32)
        xs, ys = np.meshgrid(xs, ys)

        # cell index + sub-cell offset, scaled back to metric coordinates
        center_x = (xs + reg[0]) * out_size_factor * voxel_size[0] + pc_range[0]
        center_y = (ys + reg[1]) * out_size_factor * voxel_size[1] + pc_range[1]
        center_z = height[0]

        dim_l = dim[0]  # length
        dim_h = dim[1]  # height
        dim_w = dim[2]  # width

        theta = np.arctan2(rot[0], rot[1])
        vel_x = vel[0]
        vel_y = vel[1]

        mask = score > score_threshold

        if not np.any(mask):
            continue

        class_offset = class_offsets[task_idx]

        # box layout: [x, y, z, w, l, h, theta, vx, vy]
        boxes = np.stack([
            center_x[mask], center_y[mask], center_z[mask],
            dim_w[mask], dim_l[mask], dim_h[mask],
            theta[mask], vel_x[mask], vel_y[mask],
        ], axis=-1).astype(np.float32)

        scores_task = score[mask].astype(np.float32)
        labels_task = (cls[mask] + class_offset).astype(np.int32)

        if len(boxes) > 0:
            all_boxes.append(boxes)
            all_scores.append(scores_task)
            all_labels.append(labels_task)

    if len(all_boxes) == 0:
        return np.zeros((0, 9), dtype=np.float32), np.zeros((0,)), np.zeros((0,), dtype=np.int32)

    boxes = np.concatenate(all_boxes, axis=0)
    scores = np.concatenate(all_scores, axis=0)
    labels = np.concatenate(all_labels, axis=0)

    # class-agnostic BEV NMS over the merged detections
    nms_cfg = config['test_cfg']['nms']
    keep = nms_bev(boxes, scores, labels, nms_cfg['nms_iou_threshold'])

    boxes = boxes[keep]
    scores = scores[keep]
    labels = labels[keep]

    # second, caller-supplied threshold on top of the config one
    mask = scores > score_thr
    boxes = boxes[mask]
    scores = scores[mask]
    labels = labels[mask]

    # cap the number of detections per frame
    max_per_img = config['test_cfg']['max_per_img']
    if len(boxes) > max_per_img:
        topk_indices = np.argsort(-scores)[:max_per_img]
        boxes = boxes[topk_indices]
        scores = scores[topk_indices]
        labels = labels[topk_indices]

    return boxes, scores, labels


# Global class list; a detection's integer label indexes into this.
CLASS_NAMES = [
    'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
    'barrier', 'motorcycle', 'bicycle', 'pedestrian', 'traffic_cone'
]

# BGR colors (OpenCV channel order) keyed by class index
CLASS_COLORS_BGR = {
    0: (255, 0, 0),     # car - blue
    1: (0, 165, 255),   # truck - orange
    2: (0, 0, 255),     # construction_vehicle - red
    3: (0, 255, 255),   # bus - yellow
    4: (128, 0, 128),   # trailer - purple
    5: (255, 255, 0),   # barrier - cyan
    6: (0, 0, 255),     # motorcycle - red
    7: (0, 255, 0),     # bicycle - green
    8: (255, 0, 255),   # pedestrian - magenta
    9: (0, 255, 255),   # traffic_cone - yellow
}


def visualize_bev(points, boxes, scores, labels, config, save_path,
                  frame_idx=0, eval_range=35, conf_th=0.5):
    """Fast BEV visualization using OpenCV (50-100x faster than matplotlib)"""
    try:
        import cv2
    except ImportError:
        print("opencv-python not available, skipping visualization")
        return None

    # Image size and scale (pixels per meter; ego vehicle at image center)
    img_size = 800
    scale = img_size / (2 * eval_range)
    center = img_size // 2

    # Create black background
    img = 
np.zeros((img_size, img_size, 3), dtype=np.uint8)

    # Filter points within range
    mask = (np.abs(points[:, 0]) < eval_range) & (np.abs(points[:, 1]) < eval_range)
    pts = points[mask, :3]

    # Remove close points (ego-vehicle returns near the sensor)
    close_mask = (np.abs(pts[:, 0]) < 3) & (np.abs(pts[:, 1]) < 3)
    pts = pts[~close_mask]

    # Calculate distances for coloring (viridis-like: purple->cyan->yellow)
    dists = np.sqrt(pts[:, 0]**2 + pts[:, 1]**2)
    norm_dists = np.minimum(1.0, dists / eval_range)

    # Convert to image coordinates and draw points (y axis flipped for image space)
    px = (center + pts[:, 0] * scale).astype(np.int32)
    py = (center - pts[:, 1] * scale).astype(np.int32)

    # Filter valid points (within image bounds)
    valid = (px >= 0) & (px < img_size) & (py >= 0) & (py < img_size)
    px, py, norm_dists = px[valid], py[valid], norm_dists[valid]

    # Viridis-like colormap using vectorized operations (two linear segments)
    t = norm_dists
    r = np.where(t < 0.5, 68 + t * 2 * (49 - 68), 49 + (t - 0.5) * 2 * (253 - 49))
    g = np.where(t < 0.5, 1 + t * 2 * (104 - 1), 104 + (t - 0.5) * 2 * (231 - 104))
    b = np.where(t < 0.5, 84 + t * 2 * (142 - 84), 142 + (t - 0.5) * 2 * (37 - 142))

    # Draw all points at once (fancy indexing instead of per-point circles)
    img[py, px, 0] = b.astype(np.uint8)
    img[py, px, 1] = g.astype(np.uint8)
    img[py, px, 2] = r.astype(np.uint8)

    # Count detections above the display confidence threshold
    num_detections = sum(1 for s in scores if s >= conf_th)

    # Draw detection boxes with class-specific shapes
    for box, score, label in zip(boxes, scores, labels):
        if score < conf_th:
            continue

        x, y, z, w, l, h, theta, vx, vy = box
        label_int = int(label)

        # Get color for this class
        color = CLASS_COLORS_BGR.get(label_int, (255, 255, 255))

        # Convert center to image coordinates
        cx = int(center + x * scale)
        cy = int(center - y * scale)

        # Apply angle transformation (same as demo_utils)
        vis_theta = -theta - np.pi / 2
        cos_t, sin_t = np.cos(vis_theta), np.sin(vis_theta)

        # Different shapes based on class
        if label_int == 8:  # pedestrian - circle
            radius = max(3, int(max(w, l) * scale / 2))
            cv2.circle(img, (cx, cy), radius, color, 2)
            # Draw heading line
            head_x = int(cx + radius * cos_t)
            head_y = int(cy - radius * sin_t)
            cv2.line(img, (cx, cy), (head_x, head_y), color, 2)

        elif label_int == 9:  # traffic_cone - small triangle
            # NOTE: local `pts` shadows the point-cloud array above; the
            # point cloud is already drawn, so this is harmless here.
            size = max(4, int(max(w, l) * scale))
            pts = np.array([
                [cx, cy - size],               # top
                [cx - size//2, cy + size//2],  # bottom left
                [cx + size//2, cy + size//2],  # bottom right
            ], dtype=np.int32)
            cv2.fillPoly(img, [pts], color)

        elif label_int == 5:  # barrier - thin rectangle
            # Box corners (thin barrier: half-width)
            corners = np.array([
                [l/2, w/4], [l/2, -w/4], [-l/2, -w/4], [-l/2, w/4]
            ])
            rot_corners = np.zeros_like(corners)
            rot_corners[:, 0] = corners[:, 0] * cos_t - corners[:, 1] * sin_t + x
            rot_corners[:, 1] = corners[:, 0] * sin_t + corners[:, 1] * cos_t + y
            corners_img = np.zeros((4, 2), dtype=np.int32)
            corners_img[:, 0] = (center + rot_corners[:, 0] * scale).astype(np.int32)
            corners_img[:, 1] = (center - rot_corners[:, 1] * scale).astype(np.int32)
            cv2.fillPoly(img, [corners_img], color)

        elif label_int in [6, 7]:  # motorcycle, bicycle - small box with direction
            # Smaller box for bikes
            corners = np.array([
                [l/2, w/2], [l/2, -w/2], [-l/2, -w/2], [-l/2, w/2]
            ])
            rot_corners = np.zeros_like(corners)
            rot_corners[:, 0] = corners[:, 0] * cos_t - corners[:, 1] * sin_t + x
            rot_corners[:, 1] = corners[:, 0] * sin_t + corners[:, 1] * cos_t + y
            corners_img = np.zeros((4, 2), dtype=np.int32)
            corners_img[:, 0] = (center + rot_corners[:, 0] * scale).astype(np.int32)
            corners_img[:, 1] = (center - rot_corners[:, 1] * scale).astype(np.int32)
            cv2.polylines(img, [corners_img], True, color, 2)
            # Draw prominent heading arrow
            front_mid = ((corners_img[0] + corners_img[1]) // 2).astype(np.int32)
            cv2.arrowedLine(img, (cx, cy), tuple(front_mid), color, 2, tipLength=0.4)

        else:  # car, truck, bus, trailer, construction_vehicle - standard box
            # Box corners
            corners = np.array([
                [l/2, w/2], [l/2, -w/2], [-l/2, -w/2], [-l/2, w/2]
            ])
            rot_corners = np.zeros_like(corners)
            rot_corners[:, 0] = corners[:, 0] * cos_t - corners[:, 1] * sin_t + x
            rot_corners[:, 1] = corners[:, 0] * sin_t + corners[:, 1] * cos_t + y
            corners_img = np.zeros((4, 2), dtype=np.int32)
            corners_img[:, 0] = (center + rot_corners[:, 0] * scale).astype(np.int32)
            corners_img[:, 1] = (center - rot_corners[:, 1] * scale).astype(np.int32)
            cv2.polylines(img, [corners_img], True, color, 2)
            # Draw front indicator line
            front_mid = ((corners_img[0] + corners_img[1]) // 2).astype(np.int32)
            cv2.line(img, (cx, cy), tuple(front_mid), color, 2)

    # Draw frame info (white text)
    cv2.putText(img, f'Frame: {frame_idx}', (10, 25),
                cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)
    cv2.putText(img, f'Detections: {num_detections}', (10, 50),
                cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)

    # Draw legend
    legend_y = 80
    for cls_id, cls_name in enumerate(CLASS_NAMES):
        color = CLASS_COLORS_BGR.get(cls_id, (255, 255, 255))
        cv2.rectangle(img, (10, legend_y), (25, legend_y + 12), color, -1)
        cv2.putText(img, cls_name, (30, legend_y + 10),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1)
        legend_y += 18

    # Save image
    cv2.imwrite(save_path, img)
    return True


def create_video_from_images(image_dir, output_video_path, fps=10):
    """Create video from images in a directory

    Args:
        image_dir: directory containing images
        output_video_path: output video file path
        fps: frames per second
    """
    try:
        import cv2
    except ImportError:
        print("opencv-python not available, cannot create video")
        return

    # Get all image files sorted by name
    image_files = sorted([f for f in os.listdir(image_dir)
                          if f.endswith(('.png', '.jpg', '.jpeg'))])

    if len(image_files) == 0:
        print(f"No images found in {image_dir}")
        return

    # Read first image to get dimensions
    first_img = cv2.imread(osp.join(image_dir, image_files[0]))
    if first_img is None:
        print(f"Cannot read first image: {image_files[0]}")
        return

    height, width = first_img.shape[:2]

    # Limit video size for better compatibility
    max_width, max_height = 1920, 1080
    if width > max_width or height > max_height:
        scale = min(max_width / width, max_height / height)
        width, height = int(width * scale), int(height * scale)

    # Create video writer
    fourcc = cv2.VideoWriter_fourcc(*'mp4v')
    video_writer = cv2.VideoWriter(output_video_path, fourcc, fps, (width, height))

    if not video_writer.isOpened():
        # Try alternative codec (and switch container to .avi for XVID)
        fourcc = cv2.VideoWriter_fourcc(*'XVID')
        output_video_path = output_video_path.replace('.mp4', '.avi')
        video_writer = cv2.VideoWriter(output_video_path, fourcc, fps, (width, height))

    for img_file in tqdm(image_files, desc="Creating video"):
        img_path = osp.join(image_dir, img_file)
        img = cv2.imread(img_path)
        if img is not None:
            # resize any frame that doesn't match the writer's dimensions
            if img.shape[:2] != (height, width):
                img = cv2.resize(img, (width, height))
            video_writer.write(img)

    video_writer.release()


def run_inference(session, points, config):
    """Run inference on a single point cloud.

    Pipeline: voxelize -> build pillar features/indices -> run the AXEngine
    session -> decode/NMS in postprocess().  Returns (boxes, scores, labels).
    """
    # Preprocess
    voxels, coors, num_points = preprocess_pointpillars(points, config)

    # Create model input
    features, indices = create_pillars_input(voxels, coors, num_points, config)

    # Get input names
    input_names = [inp.name for inp in session.get_inputs()]

    # Build feed dict based on exact input names; fall back to matching
    # 'indices' as a substring, else assume the features tensor.
    feed_dict = {}
    for name in input_names:
        if name == 'input.1':
            feed_dict[name] = features.astype(np.float32)
        elif name == 'indices_input':
            feed_dict[name] = indices.astype(np.int32)
        elif 'indices' in name.lower():
            feed_dict[name] = indices.astype(np.int32)
        else:
            feed_dict[name] = features.astype(np.float32)

    # Run inference
    outputs = session.run(None, feed_dict)

    # Postprocess
    boxes, scores, labels = postprocess(outputs, config)

    return boxes, scores, labels


def 
main():
    """Entry point: run AXEngine inference over the extracted samples,
    dump results.json and optionally per-frame BEV images + a video."""
    args = parse_args()

    if axe is None:
        print("Error: axengine is not installed. Please install it first.")
        return

    # Load config and model
    config = load_config(args.config_json)
    session = load_axmodel(args.axmodel)

    # Load sample index
    sample_index = load_sample_index(args.data_dir)
    samples = sample_index['samples']

    if args.num_samples is not None:
        samples = samples[:args.num_samples]

    print(f"Processing {len(samples)} samples...")

    # Create output directory
    os.makedirs(args.output_dir, exist_ok=True)

    # Create images directory for visualization
    images_dir = osp.join(args.output_dir, 'images')
    if args.visualize:
        os.makedirs(images_dir, exist_ok=True)

    # Results storage
    all_results = []

    # Process each sample
    for idx, sample in enumerate(tqdm(samples, desc="Inference")):
        token = sample['token']

        # Load point cloud
        points = load_points(args.data_dir, sample['points_path'])

        # Run inference
        boxes, scores, labels = run_inference(session, points, config)

        # Store results (lists so the record is JSON-serializable)
        result = {
            'token': token,
            'boxes': boxes.tolist(),
            'scores': scores.tolist(),
            'labels': labels.tolist(),
            'num_detections': len(boxes),
        }
        all_results.append(result)

        # Visualize if requested
        if args.visualize:
            vis_path = osp.join(images_dir, f'frame_{idx:06d}.png')
            visualize_bev(points, boxes, scores, labels, config, vis_path, frame_idx=idx, conf_th=args.score_thr)

    # Save results
    results_path = osp.join(args.output_dir, 'results.json')
    with open(results_path, 'w') as f:
        json.dump(all_results, f, indent=2)

    # Create video from images
    if args.visualize:
        video_path = osp.join(args.output_dir, 'centerpoint_detection_axmodel.mp4')
        create_video_from_images(images_dir, video_path, fps=args.fps)

    # Print summary
    total_detections = sum(r['num_detections'] for r in all_results)
    print(f"Done! {len(samples)} frames, {total_detections} detections, saved to {args.output_dir}")


if __name__ == '__main__':
    main()
diff --git a/inference_onnx.py b/inference_onnx.py new file mode 100644 index 0000000000000000000000000000000000000000..a36ef89d74f9d2a080f16bfff0320c1baa9a6b51 --- /dev/null +++ b/inference_onnx.py @@ -0,0 +1,924 @@
#!/usr/bin/env python3
"""
CenterPoint ONNX Runtime Inference Demo.

Usage:
    python inference_onnx.py path/to/onnx_model/pointpillars.onnx path/to/extracted_data/config.json path/to/extracted_data \
        --output-dir path/to/inference_results --num-samples 50
"""

import argparse
import json
import os
import os.path as osp
import numpy as np
import onnxruntime as ort
from tqdm import tqdm
import numba


def parse_args():
    """Parse command-line arguments for the ONNX Runtime inference demo."""
    parser = argparse.ArgumentParser(description='CenterPoint ONNX Inference')
    parser.add_argument('onnx_model', help='ONNX model path')
    parser.add_argument('config_json', help='JSON config file path')
    parser.add_argument('data_dir', help='extracted data directory')
    parser.add_argument('--output-dir', default='./inference_results', help='output directory')
    parser.add_argument('--score-thr', type=float, default=0.1, help='score threshold')
    parser.add_argument('--device', default='cuda:0', help='device for ONNX inference')
    parser.add_argument('--num-samples', type=int, default=None, help='number of samples to process')
    parser.add_argument('--visualize', action='store_true', help='save visualization images and video')
    parser.add_argument('--fps', type=int, default=10, help='video fps')
    return parser.parse_args()


def load_onnx_model(onnx_path, device='cuda:0'):
    """Load ONNX model"""
    available_providers = ort.get_available_providers()

    # Prefer CUDA when requested AND actually available; otherwise CPU.
    providers = []
    if 'cuda' in device.lower() and 'CUDAExecutionProvider' in available_providers:
        providers = ['CUDAExecutionProvider', 'CPUExecutionProvider']
    else:
        providers = ['CPUExecutionProvider']

    sess_options = 
ort.SessionOptions() + sess_options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_BASIC + + session = ort.InferenceSession(onnx_path, sess_options=sess_options, providers=providers) + print(f"Loaded ONNX model from {onnx_path}") + print(f"Using providers: {session.get_providers()}") + return session + + +def load_config(config_path): + """Load configuration from JSON file""" + with open(config_path, 'r') as f: + config = json.load(f) + return config + + +def load_sample_index(data_dir): + """Load sample index""" + index_path = osp.join(data_dir, 'sample_index.json') + with open(index_path, 'r') as f: + sample_index = json.load(f) + return sample_index + + +def load_points(data_dir, points_path): + """Load point cloud data from binary file""" + full_path = osp.join(data_dir, points_path) + points = np.fromfile(full_path, dtype=np.float32).reshape(-1, 5) + return points + + +def load_gt(data_dir, gt_path): + """Load ground truth annotations""" + full_path = osp.join(data_dir, gt_path) + with open(full_path, 'r') as f: + gt = json.load(f) + return gt + + +@numba.jit(nopython=True) +def _points_to_voxel_kernel( + points, + voxel_size, + coors_range, + num_points_per_voxel, + coor_to_voxelidx, + voxels, + coors, + max_points=20, + max_voxels=30000, +): + """Voxelization kernel using numba for acceleration""" + N = points.shape[0] + ndim = 3 + ndim_minus_1 = ndim - 1 + grid_size = (coors_range[3:] - coors_range[:3]) / voxel_size + grid_size = np.round(grid_size, 0, grid_size).astype(np.int32) + coor = np.zeros(shape=(3,), dtype=np.int32) + voxel_num = 0 + failed = False + + for i in range(N): + failed = False + for j in range(ndim): + c = np.floor((points[i, j] - coors_range[j]) / voxel_size[j]) + if c < 0 or c >= grid_size[j]: + failed = True + break + coor[ndim_minus_1 - j] = c + if failed: + continue + voxelidx = coor_to_voxelidx[coor[0], coor[1], coor[2]] + if voxelidx == -1: + voxelidx = voxel_num + if voxel_num >= max_voxels: + continue + 
def points_to_voxel(points, voxel_size, coors_range, max_points=20, max_voxels=30000):
    """Voxelize a point cloud.

    Args:
        points: [N, 5] float32 array (x, y, z, intensity, time_lag)
        voxel_size: [3] voxel size (x, y, z)
        coors_range: [6] point cloud range (xmin, ymin, zmin, xmax, ymax, zmax)
        max_points: max points per voxel
        max_voxels: max number of voxels

    Returns:
        voxels: [M, max_points, 5] voxel features
        coors: [M, 3] voxel coordinates (z, y, x)
        num_points_per_voxel: [M] number of points in each voxel
    """
    voxel_size = np.asarray(voxel_size, dtype=np.float32)
    coors_range = np.asarray(coors_range, dtype=np.float32)

    # Grid shape along (x, y, z), then reversed to (z, y, x) for indexing.
    grid_shape = (coors_range[3:] - coors_range[:3]) / voxel_size
    grid_shape = tuple(np.round(grid_shape).astype(np.int32).tolist())[::-1]

    point_counts = np.zeros(shape=(max_voxels,), dtype=np.int32)
    lookup = -np.ones(shape=grid_shape, dtype=np.int32)  # -1 == empty cell
    voxel_buf = np.zeros(shape=(max_voxels, max_points, points.shape[-1]), dtype=np.float32)
    coord_buf = np.zeros(shape=(max_voxels, 3), dtype=np.int32)

    n_filled = _points_to_voxel_kernel(
        points.astype(np.float32),
        voxel_size,
        coors_range,
        point_counts,
        lookup,
        voxel_buf,
        coord_buf,
        max_points,
        max_voxels,
    )

    # Trim the buffers down to the voxels that were actually populated.
    return voxel_buf[:n_filled], coord_buf[:n_filled], point_counts[:n_filled]


def preprocess_pointpillars(points, config):
    """Voxelize a raw point cloud according to the exported voxel_generator config.

    Returns the (voxels, coors, num_points) triple consumed by
    create_pillars_input.
    """
    voxel_cfg = config['voxel_generator']
    vsize = np.array(voxel_cfg['voxel_size'], dtype=np.float32)
    prange = np.array(voxel_cfg['range'], dtype=np.float32)
    per_voxel_cap = voxel_cfg['max_points_in_voxel']
    # max_voxel_num may be a [train, test] pair; the test-time budget is index 1.
    raw_cap = voxel_cfg['max_voxel_num']
    voxel_cap = raw_cap[1] if isinstance(raw_cap, list) else raw_cap

    return points_to_voxel(points, vsize, prange, per_voxel_cap, voxel_cap)


@numba.jit(nopython=True)
def _create_pillars_input_kernel(voxels, coors, num_points, features, indices,
                                 voxel_size, pc_range, bev_w, num_voxels):
    """Fill the 10-channel pillar feature tensor and BEV scatter indices in place."""
    for v in range(num_voxels):
        count = num_points[v]
        if count == 0:
            continue

        pillar = voxels[v]
        cell = coors[v]

        # Mean of the points inside the pillar (cluster center).
        sx = 0.0
        sy = 0.0
        sz = 0.0
        for p in range(count):
            sx += pillar[p, 0]
            sy += pillar[p, 1]
            sz += pillar[p, 2]
        mx = sx / count
        my = sy / count
        mz = sz / count

        # Geometric center of the pillar's grid cell.
        px = cell[2] * voxel_size[0] + pc_range[0] + voxel_size[0] / 2
        py = cell[1] * voxel_size[1] + pc_range[1] + voxel_size[1] / 2

        for p in range(count):
            features[0, v, p] = pillar[p, 0]           # x
            features[1, v, p] = pillar[p, 1]           # y
            features[2, v, p] = pillar[p, 2]           # z
            features[3, v, p] = pillar[p, 3]           # intensity
            features[4, v, p] = pillar[p, 4]           # time_lag
            features[5, v, p] = pillar[p, 0] - mx      # offset from cluster center
            features[6, v, p] = pillar[p, 1] - my
            features[7, v, p] = pillar[p, 2] - mz
            features[8, v, p] = pillar[p, 0] - px      # offset from pillar center
            features[9, v, p] = pillar[p, 1] - py

        # Flattened BEV index used by the model's scatter step.
        indices[v, 1] = cell[1] * bev_w + cell[2]


def create_pillars_input(voxels, coors, num_points, config, max_pillars=30000):
    """Build the fixed-size ONNX input tensors for PointPillars.

    The model expects:
        - features: [1, 10, max_pillars, max_points_per_pillar]
        - indices:  [1, max_pillars, 2] (int64; column 1 == -1 marks padding)
    """
    voxel_cfg = config['voxel_generator']
    vsize = np.array(voxel_cfg['voxel_size'], dtype=np.float32)
    prange = np.array(voxel_cfg['range'], dtype=np.float32)
    per_pillar_cap = voxel_cfg['max_points_in_voxel']

    n_pillars = voxels.shape[0]
    if n_pillars > max_pillars:
        # Truncate to the model's fixed pillar budget.
        voxels = voxels[:max_pillars]
        coors = coors[:max_pillars]
        num_points = num_points[:max_pillars]
        n_pillars = max_pillars

    features = np.zeros((10, max_pillars, per_pillar_cap), dtype=np.float32)
    indices = np.zeros((max_pillars, 2), dtype=np.int64)  # ONNX needs int64
    indices[:, 0] = 0   # batch index
    indices[:, 1] = -1  # invalid/padding marker

    bev_w = int((prange[3] - prange[0]) / vsize[0])

    _create_pillars_input_kernel(
        voxels, coors, num_points, features, indices,
        vsize, prange, bev_w, n_pillars
    )

    # Prepend the batch dimension expected by the model.
    return features[np.newaxis, ...], indices[np.newaxis, ...]
def decode_bbox(reg, height, dim, rot, vel, score, cls, config, task_idx):
    """Decode channels-last detection head outputs into 3D bounding boxes.

    Args:
        reg: [H, W, 2] center offset within the output cell
        height: [H, W, 1] box center height
        dim: [H, W, 3] dimensions (l, h, w)
        rot: [H, W, 2] rotation (sin, cos)
        vel: [H, W, 2] velocity
        score: [H, W] confidence score (post-sigmoid)
        cls: [H, W] per-cell class index
        config: configuration dict (reads test_cfg)
        task_idx: task index, selects the global class-label offset

    Returns:
        boxes: [N, 9] float32 (x, y, z, w, l, h, theta, vx, vy)
        scores: [N] float32
        labels: [N] int32
    """
    test_cfg = config['test_cfg']
    voxel_size = test_cfg['voxel_size']
    pc_range = test_cfg['pc_range']
    out_size_factor = test_cfg['out_size_factor']
    score_threshold = test_cfg['score_threshold']

    H, W = score.shape

    # Per-cell coordinate grid in feature-map units.
    xs = np.arange(W, dtype=np.float32)
    ys = np.arange(H, dtype=np.float32)
    xs, ys = np.meshgrid(xs, ys)

    # Cell index + sub-cell offset -> metric coordinates.
    xs = (xs + reg[..., 0]) * out_size_factor * voxel_size[0] + pc_range[0]
    ys = (ys + reg[..., 1]) * out_size_factor * voxel_size[1] + pc_range[1]
    zs = height[..., 0]

    # rot stores (sin, cos).
    theta = np.arctan2(rot[..., 0], rot[..., 1])

    # Offset from per-task class index to the global 10-class label space.
    class_offset = [0, 1, 3, 5, 6, 8][task_idx]

    mask = score > score_threshold

    if not np.any(mask):
        # Fix: keep dtypes consistent with the non-empty path
        # (scores were previously float64 here, float32 otherwise).
        return (np.zeros((0, 9), dtype=np.float32),
                np.zeros((0,), dtype=np.float32),
                np.zeros((0,), dtype=np.int32))

    xs = xs[mask]
    ys = ys[mask]
    zs = zs[mask]
    dims = dim[mask]      # [N, 3] (l, h, w)
    theta = theta[mask]
    vels = vel[mask]      # [N, 2]
    scores = score[mask]
    labels = cls[mask] + class_offset

    # Reorder dims (l, h, w) -> box layout (w, l, h).
    boxes = np.stack([
        xs, ys, zs,
        dims[:, 2],  # w
        dims[:, 0],  # l
        dims[:, 1],  # h
        theta,
        vels[:, 0],  # vx
        vels[:, 1],  # vy
    ], axis=-1)

    return boxes.astype(np.float32), scores.astype(np.float32), labels.astype(np.int32)
@numba.jit(nopython=True)
def _nms_bev_kernel(boxes, scores, nms_threshold, max_output=500):
    """Numba-accelerated axis-aligned BEV NMS.

    Boxes are treated as axis-aligned rectangles of extent (l, w) centered at
    (x, y); rotation is deliberately ignored (fast approximation).
    Returns indices of the kept boxes, highest score first.
    """
    n = len(boxes)
    if n == 0:
        return np.zeros(0, dtype=np.int64)

    # Process boxes in descending-score order.
    order = np.argsort(-scores)

    # Axis-aligned corners: x extent from length (col 4), y extent from width (col 3).
    x1 = boxes[:, 0] - boxes[:, 4] / 2
    y1 = boxes[:, 1] - boxes[:, 3] / 2
    x2 = boxes[:, 0] + boxes[:, 4] / 2
    y2 = boxes[:, 1] + boxes[:, 3] / 2
    areas = boxes[:, 3] * boxes[:, 4]

    suppressed = np.zeros(n, dtype=np.int32)
    keep = np.zeros(max_output, dtype=np.int64)
    num_keep = 0

    for _i in range(n):
        i = order[_i]
        if suppressed[i] == 1:
            continue

        keep[num_keep] = i
        num_keep += 1
        if num_keep >= max_output:
            break

        for _j in range(_i + 1, n):
            j = order[_j]
            if suppressed[j] == 1:
                continue

            ix1 = max(x1[i], x1[j])
            iy1 = max(y1[i], y1[j])
            ix2 = min(x2[i], x2[j])
            iy2 = min(y2[i], y2[j])

            iw = max(0.0, ix2 - ix1)
            ih = max(0.0, iy2 - iy1)
            inter = iw * ih

            union = areas[i] + areas[j] - inter
            iou = inter / max(union, 1e-6)  # guard against zero-area boxes

            if iou > nms_threshold:
                suppressed[j] = 1

    return keep[:num_keep]


def nms_bev(boxes, scores, labels, nms_threshold=0.2):
    """Aligned BEV NMS wrapper (labels kept for interface compatibility, unused)."""
    if len(boxes) == 0:
        return np.array([], dtype=np.int64)
    return _nms_bev_kernel(boxes, scores, nms_threshold)


def postprocess(outputs, config, score_thr=0.1):
    """Decode, merge, NMS and threshold raw CenterPoint head outputs.

    Expected output structure: 42 tensors, 7 per task over 6 tasks, ordered
    (reg, height, dim, rot, vel, score, cls) per task; score is post-sigmoid
    and cls is a post-argmax per-task class index.

    Args:
        outputs: model outputs (list of 42 tensors)
        config: configuration dict
        score_thr: final score threshold applied after NMS

    Returns:
        boxes: [N, 9] (x, y, z, w, l, h, theta, vx, vy)
        scores: [N]
        labels: [N]
    """
    tasks = config['tasks']
    num_tasks = len(tasks)       # 6 tasks
    outputs_per_task = 7         # reg, height, dim, rot, vel, score, cls

    test_cfg = config['test_cfg']
    voxel_size = test_cfg['voxel_size']
    pc_range = test_cfg['pc_range']
    out_size_factor = test_cfg['out_size_factor']
    score_threshold = test_cfg['score_threshold']

    all_boxes = []
    all_scores = []
    all_labels = []

    # Offset from per-task class index to global label space
    # (car | truck/constr | bus/trailer | barrier | moto/bicycle | ped/cone).
    class_offsets = [0, 1, 3, 5, 6, 8]

    for task_idx in range(num_tasks):
        base_idx = task_idx * outputs_per_task

        # Strip the batch dim; all heads are channels-first here.
        reg = outputs[base_idx + 0][0]     # [2, H, W]
        height = outputs[base_idx + 1][0]  # [1, H, W]
        dim = outputs[base_idx + 2][0]     # [3, H, W]
        rot = outputs[base_idx + 3][0]     # [2, H, W]
        vel = outputs[base_idx + 4][0]     # [2, H, W]
        score = outputs[base_idx + 5][0]   # [H, W] - post-sigmoid
        cls = outputs[base_idx + 6][0]     # [H, W] - class index

        H, W = score.shape

        xs = np.arange(W, dtype=np.float32)
        ys = np.arange(H, dtype=np.float32)
        xs, ys = np.meshgrid(xs, ys)

        # Cell index + sub-cell offset -> metric center position.
        center_x = (xs + reg[0]) * out_size_factor * voxel_size[0] + pc_range[0]
        center_y = (ys + reg[1]) * out_size_factor * voxel_size[1] + pc_range[1]
        center_z = height[0]

        # dim channel order is (l, h, w).
        dim_l = dim[0]
        dim_h = dim[1]
        dim_w = dim[2]

        # rot stores (sin, cos).
        theta = np.arctan2(rot[0], rot[1])

        vel_x = vel[0]
        vel_y = vel[1]

        mask = score > score_threshold

        if not np.any(mask):
            continue

        class_offset = class_offsets[task_idx]

        boxes = np.stack([
            center_x[mask],
            center_y[mask],
            center_z[mask],
            dim_w[mask],   # w
            dim_l[mask],   # l
            dim_h[mask],   # h
            theta[mask],
            vel_x[mask],
            vel_y[mask],
        ], axis=-1).astype(np.float32)

        scores_task = score[mask].astype(np.float32)
        labels_task = (cls[mask] + class_offset).astype(np.int32)

        if len(boxes) > 0:
            all_boxes.append(boxes)
            all_scores.append(scores_task)
            all_labels.append(labels_task)

    if len(all_boxes) == 0:
        # Fix: scores dtype now matches the non-empty path (was float64).
        return (np.zeros((0, 9), dtype=np.float32),
                np.zeros((0,), dtype=np.float32),
                np.zeros((0,), dtype=np.int32))

    boxes = np.concatenate(all_boxes, axis=0)
    scores = np.concatenate(all_scores, axis=0)
    labels = np.concatenate(all_labels, axis=0)

    # Class-agnostic aligned NMS across all tasks.
    nms_cfg = config['test_cfg']['nms']
    keep = nms_bev(boxes, scores, labels, nms_cfg['nms_iou_threshold'])

    boxes = boxes[keep]
    scores = scores[keep]
    labels = labels[keep]

    # Final (possibly stricter) score cut.
    mask = scores > score_thr
    boxes = boxes[mask]
    scores = scores[mask]
    labels = labels[mask]

    # Cap the number of detections per frame.
    max_per_img = config['test_cfg']['max_per_img']
    if len(boxes) > max_per_img:
        topk_indices = np.argsort(-scores)[:max_per_img]
        boxes = boxes[topk_indices]
        scores = scores[topk_indices]
        labels = labels[topk_indices]

    return boxes, scores, labels
# Global 10-class label space shared with the exported config's class_names.
CLASS_NAMES = [
    'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
    'barrier', 'motorcycle', 'bicycle', 'pedestrian', 'traffic_cone'
]

# Per-class drawing colors in OpenCV's BGR channel order.
CLASS_COLORS_BGR = {
    0: (255, 0, 0),     # car - blue
    1: (0, 165, 255),   # truck - orange
    2: (0, 0, 255),     # construction_vehicle - red
    3: (0, 255, 255),   # bus - yellow
    4: (128, 0, 128),   # trailer - purple
    5: (255, 255, 0),   # barrier - cyan
    6: (0, 0, 255),     # motorcycle - red
    7: (0, 255, 0),     # bicycle - green
    8: (255, 0, 255),   # pedestrian - magenta
    9: (0, 255, 255),   # traffic_cone - yellow
}


def visualize_bev(points, boxes, scores, labels, config, save_path,
                  frame_idx=0, eval_range=35, conf_th=0.5):
    """Render one bird's-eye-view frame with OpenCV and write it to save_path.

    Draws distance-colored LiDAR points plus class-shaped detection markers
    (boxes, circles, triangles), a frame/detection header and a class legend.
    Returns True on success, None when opencv-python is unavailable.
    """
    try:
        import cv2
    except ImportError:
        print("opencv-python not available, skipping visualization")
        return None

    # Fixed-size square canvas; `scale` maps meters to pixels.
    img_size = 800
    scale = img_size / (2 * eval_range)
    center = img_size // 2
    canvas = np.zeros((img_size, img_size, 3), dtype=np.uint8)

    # Keep points inside the evaluation square, dropping the ego-vehicle blob.
    in_range = (np.abs(points[:, 0]) < eval_range) & (np.abs(points[:, 1]) < eval_range)
    xyz = points[in_range, :3]
    near_ego = (np.abs(xyz[:, 0]) < 3) & (np.abs(xyz[:, 1]) < 3)
    xyz = xyz[~near_ego]

    # Normalized range used as the colormap parameter.
    radial = np.sqrt(xyz[:, 0] ** 2 + xyz[:, 1] ** 2)
    t = np.minimum(1.0, radial / eval_range)

    # Metric -> pixel coordinates (image y axis points down).
    col_u = (center + xyz[:, 0] * scale).astype(np.int32)
    row_v = (center - xyz[:, 1] * scale).astype(np.int32)
    on_canvas = (col_u >= 0) & (col_u < img_size) & (row_v >= 0) & (row_v < img_size)
    col_u, row_v, t = col_u[on_canvas], row_v[on_canvas], t[on_canvas]

    # Two-segment linear approximation of a viridis-style colormap.
    r = np.where(t < 0.5, 68 + t * 2 * (49 - 68), 49 + (t - 0.5) * 2 * (253 - 49))
    g = np.where(t < 0.5, 1 + t * 2 * (104 - 1), 104 + (t - 0.5) * 2 * (231 - 104))
    b = np.where(t < 0.5, 84 + t * 2 * (142 - 84), 142 + (t - 0.5) * 2 * (37 - 142))
    canvas[row_v, col_u, 0] = b.astype(np.uint8)
    canvas[row_v, col_u, 1] = g.astype(np.uint8)
    canvas[row_v, col_u, 2] = r.astype(np.uint8)

    num_detections = sum(1 for s in scores if s >= conf_th)

    for box, score, label in zip(boxes, scores, labels):
        if score < conf_th:
            continue

        x, y, z, w, l, h, theta, vx, vy = box
        label_int = int(label)
        color = CLASS_COLORS_BGR.get(label_int, (255, 255, 255))

        cx = int(center + x * scale)
        cy = int(center - y * scale)

        # Map the box heading into the visualization frame.
        vis_theta = -theta - np.pi / 2
        cos_t, sin_t = np.cos(vis_theta), np.sin(vis_theta)

        if label_int == 8:
            # Pedestrian: circle + short heading tick.
            radius = max(3, int(max(w, l) * scale / 2))
            cv2.circle(canvas, (cx, cy), radius, color, 2)
            head_x = int(cx + radius * cos_t)
            head_y = int(cy - radius * sin_t)
            cv2.line(canvas, (cx, cy), (head_x, head_y), color, 2)

        elif label_int == 9:
            # Traffic cone: small filled triangle.
            size = max(4, int(max(w, l) * scale))
            tri = np.array([
                [cx, cy - size],
                [cx - size // 2, cy + size // 2],
                [cx + size // 2, cy + size // 2],
            ], dtype=np.int32)
            cv2.fillPoly(canvas, [tri], color)

        elif label_int == 5:
            # Barrier: filled rectangle at half width.
            local = np.array([
                [l / 2, w / 4], [l / 2, -w / 4], [-l / 2, -w / 4], [-l / 2, w / 4]
            ])
            world = np.zeros_like(local)
            world[:, 0] = local[:, 0] * cos_t - local[:, 1] * sin_t + x
            world[:, 1] = local[:, 0] * sin_t + local[:, 1] * cos_t + y
            quad = np.zeros((4, 2), dtype=np.int32)
            quad[:, 0] = (center + world[:, 0] * scale).astype(np.int32)
            quad[:, 1] = (center - world[:, 1] * scale).astype(np.int32)
            cv2.fillPoly(canvas, [quad], color)

        elif label_int in [6, 7]:
            # Motorcycle / bicycle: outlined box with a bold heading arrow.
            local = np.array([
                [l / 2, w / 2], [l / 2, -w / 2], [-l / 2, -w / 2], [-l / 2, w / 2]
            ])
            world = np.zeros_like(local)
            world[:, 0] = local[:, 0] * cos_t - local[:, 1] * sin_t + x
            world[:, 1] = local[:, 0] * sin_t + local[:, 1] * cos_t + y
            quad = np.zeros((4, 2), dtype=np.int32)
            quad[:, 0] = (center + world[:, 0] * scale).astype(np.int32)
            quad[:, 1] = (center - world[:, 1] * scale).astype(np.int32)
            cv2.polylines(canvas, [quad], True, color, 2)
            front_mid = ((quad[0] + quad[1]) // 2).astype(np.int32)
            cv2.arrowedLine(canvas, (cx, cy), tuple(front_mid), color, 2, tipLength=0.4)

        else:
            # Vehicles: outlined box with a center-to-front heading line.
            local = np.array([
                [l / 2, w / 2], [l / 2, -w / 2], [-l / 2, -w / 2], [-l / 2, w / 2]
            ])
            world = np.zeros_like(local)
            world[:, 0] = local[:, 0] * cos_t - local[:, 1] * sin_t + x
            world[:, 1] = local[:, 0] * sin_t + local[:, 1] * cos_t + y
            quad = np.zeros((4, 2), dtype=np.int32)
            quad[:, 0] = (center + world[:, 0] * scale).astype(np.int32)
            quad[:, 1] = (center - world[:, 1] * scale).astype(np.int32)
            cv2.polylines(canvas, [quad], True, color, 2)
            front_mid = ((quad[0] + quad[1]) // 2).astype(np.int32)
            cv2.line(canvas, (cx, cy), tuple(front_mid), color, 2)

    # Header: frame index and detection count.
    cv2.putText(canvas, f'Frame: {frame_idx}', (10, 25),
                cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)
    cv2.putText(canvas, f'Detections: {num_detections}', (10, 50),
                cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255), 2)

    # Legend: one color swatch + name per class.
    legend_y = 80
    for cls_id, cls_name in enumerate(CLASS_NAMES):
        color = CLASS_COLORS_BGR.get(cls_id, (255, 255, 255))
        cv2.rectangle(canvas, (10, legend_y), (25, legend_y + 12), color, -1)
        cv2.putText(canvas, cls_name, (30, legend_y + 10),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.4, (255, 255, 255), 1)
        legend_y += 18

    cv2.imwrite(save_path, canvas)
    return True
def create_video_from_images(image_dir, output_video_path, fps=10):
    """Create a video from the images in a directory (sorted by filename).

    Args:
        image_dir: directory containing images
        output_video_path: output video file path
        fps: frames per second

    Falls back from mp4v to XVID/.avi if the first codec is unavailable;
    bails out with a message if no writer can be opened at all.
    """
    try:
        import cv2
    except ImportError:
        print("opencv-python not available, cannot create video")
        return

    # Collect frames in name order (frame_000000.png, ...).
    image_files = sorted([f for f in os.listdir(image_dir)
                          if f.endswith(('.png', '.jpg', '.jpeg'))])

    if len(image_files) == 0:
        print(f"No images found in {image_dir}")
        return

    # Probe the first frame for dimensions.
    first_img = cv2.imread(osp.join(image_dir, image_files[0]))
    if first_img is None:
        print(f"Cannot read first image: {image_files[0]}")
        return

    height, width = first_img.shape[:2]

    # Cap the output at 1080p for player compatibility.
    max_width, max_height = 1920, 1080
    if width > max_width or height > max_height:
        scale = min(max_width / width, max_height / height)
        width, height = int(width * scale), int(height * scale)

    fourcc = cv2.VideoWriter_fourcc(*'mp4v')
    video_writer = cv2.VideoWriter(output_video_path, fourcc, fps, (width, height))

    if not video_writer.isOpened():
        # Fall back to the more widely available XVID codec.
        fourcc = cv2.VideoWriter_fourcc(*'XVID')
        output_video_path = output_video_path.replace('.mp4', '.avi')
        video_writer = cv2.VideoWriter(output_video_path, fourcc, fps, (width, height))

    if not video_writer.isOpened():
        # Fix: previously a failed fallback silently produced an empty file.
        print(f"Cannot open video writer for {output_video_path}")
        return

    print(f"Creating video: {output_video_path}")

    for img_file in tqdm(image_files, desc="Creating video"):
        img_path = osp.join(image_dir, img_file)
        img = cv2.imread(img_path)
        if img is not None:
            if img.shape[:2] != (height, width):
                img = cv2.resize(img, (width, height))
            video_writer.write(img)

    video_writer.release()
    print(f"Video saved to {output_video_path}")


def run_inference(session, points, config, score_thr=0.1):
    """Run the full pipeline (voxelize, pillarize, ONNX forward, decode) on one frame.

    Args:
        session: ONNX Runtime session
        points: [N, 5] point cloud
        config: configuration dict
        score_thr: final post-NMS score threshold (default matches previous
            hard-wired behavior)

    Returns:
        boxes: [M, 9] detected boxes
        scores: [M] detection scores
        labels: [M] class labels
    """
    # Preprocess
    voxels, coors, num_points = preprocess_pointpillars(points, config)

    # Create model input
    features, indices = create_pillars_input(voxels, coors, num_points, config)

    input_names = [inp.name for inp in session.get_inputs()]

    # Build feed dict based on exact input names.
    # Model expects: input.1 (features) and indices_input (indices);
    # the name-sniffing fallback keeps this robust to re-exported models.
    feed_dict = {}
    for name in input_names:
        if name == 'input.1':
            feed_dict[name] = features.astype(np.float32)
        elif name == 'indices_input':
            feed_dict[name] = indices.astype(np.int64)
        elif 'indices' in name.lower():
            feed_dict[name] = indices.astype(np.int64)
        else:
            feed_dict[name] = features.astype(np.float32)

    outputs = session.run(None, feed_dict)

    boxes, scores, labels = postprocess(outputs, config, score_thr)

    return boxes, scores, labels
def main():
    """CLI entry point: load model + data, run per-frame inference, save results."""
    args = parse_args()

    # Load config
    config = load_config(args.config_json)
    print(f"Loaded config from {args.config_json}")

    # Load ONNX model
    session = load_onnx_model(args.onnx_model, args.device)

    # Print model info
    print("\nModel inputs:")
    for inp in session.get_inputs():
        print(f"  {inp.name}: {inp.shape} ({inp.type})")
    print("\nModel outputs:")
    for out in session.get_outputs():
        print(f"  {out.name}: {out.shape}")

    # Load sample index
    sample_index = load_sample_index(args.data_dir)
    samples = sample_index['samples']

    # Limit samples if specified
    if args.num_samples is not None:
        samples = samples[:args.num_samples]

    print(f"\nProcessing {len(samples)} samples...")

    # Create output directory
    os.makedirs(args.output_dir, exist_ok=True)

    # Create images directory for visualization
    images_dir = osp.join(args.output_dir, 'images')
    if args.visualize:
        os.makedirs(images_dir, exist_ok=True)

    # Results storage
    all_results = []

    # Process each sample
    for idx, sample in enumerate(tqdm(samples, desc="Inference")):
        token = sample['token']

        # Load point cloud
        points = load_points(args.data_dir, sample['points_path'])

        # Run inference
        boxes, scores, labels = run_inference(session, points, config)

        # Store results
        result = {
            'token': token,
            'boxes': boxes.tolist(),
            'scores': scores.tolist(),
            'labels': labels.tolist(),
            'num_detections': len(boxes),
        }
        all_results.append(result)

        # Visualize if requested.
        # Fix: honor the --score-thr CLI flag (it was parsed but never used
        # here; the sibling axmodel script already passes conf_th=args.score_thr).
        if args.visualize:
            vis_path = osp.join(images_dir, f'frame_{idx:06d}.png')
            visualize_bev(points, boxes, scores, labels, config, vis_path,
                          frame_idx=idx, conf_th=args.score_thr)

    # Save results
    results_path = osp.join(args.output_dir, 'results.json')
    with open(results_path, 'w') as f:
        json.dump(all_results, f, indent=2)

    print(f"\nResults saved to {results_path}")

    # Create video from images
    if args.visualize:
        video_path = osp.join(args.output_dir, 'centerpoint_detection_onnx.mp4')
        create_video_from_images(images_dir, video_path, fps=args.fps)

    # Print summary
    total_detections = sum(r['num_detections'] for r in all_results)
    print(f"Total detections: {total_detections}")
    # Fix: guard against an empty sample list (ZeroDivisionError before).
    if samples:
        print(f"Average detections per sample: {total_detections / len(samples):.1f}")


if __name__ == '__main__':
    main()