Datasets:

ArXiv:
zzuxzt committed on
Commit
4e02fa8
·
verified ·
1 Parent(s): 4c5dc64

Upload folder using huggingface_hub

Browse files
Files changed (35) hide show
  1. LICENSE +21 -0
  2. README.md +570 -3
  3. dataset/README.md +2 -0
  4. demos/1.lobby_s3net_segmentation.gif +3 -0
  5. demos/2.lobby_semantic_mapping.gif +3 -0
  6. demos/3.lobby_semantic_navigation.gif +3 -0
  7. demos/labelme_demo.gif +3 -0
  8. demos/semantic_lidar_data_2000.png +3 -0
  9. salsa/automatic_labeling/draw_semantic_label_sample.py +210 -0
  10. salsa/automatic_labeling/semi_automated_labeling_framework.py +461 -0
  11. salsa/manually_labeling/.labelmerc +116 -0
  12. salsa/manually_labeling/dataset_collection.py +161 -0
  13. salsa/manually_labeling/generateTrainDevSet.py +85 -0
  14. salsa/manually_labeling/labelme_example.svg +0 -0
  15. salsa/manually_labeling/labelme_output/img.png +3 -0
  16. salsa/manually_labeling/labelme_output/label.png +3 -0
  17. salsa/manually_labeling/labelme_output/label_names.txt +10 -0
  18. salsa/manually_labeling/labelme_output/label_viz.png +3 -0
  19. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/CMakeLists.txt +53 -0
  20. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/LICENSE +27 -0
  21. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/README.md +84 -0
  22. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/images/line_extraction.gif +3 -0
  23. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/include/laser_line_extraction/line.h +67 -0
  24. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/include/laser_line_extraction/line_extraction.h +62 -0
  25. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/include/laser_line_extraction/line_extraction_ros.h +52 -0
  26. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/include/laser_line_extraction/utilities.h +65 -0
  27. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/launch/debug.launch +8 -0
  28. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/launch/example.launch +29 -0
  29. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/msg/LineSegment.msg +5 -0
  30. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/msg/LineSegmentList.msg +2 -0
  31. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/package.xml +25 -0
  32. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/src/line.cpp +304 -0
  33. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/src/line_extraction.cpp +363 -0
  34. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/src/line_extraction_node.cpp +32 -0
  35. salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/src/line_extraction_ros.cpp +220 -0
LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2024 Temple Robotics and Artificial Intelligence Lab
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
README.md CHANGED
@@ -1,3 +1,570 @@
1
- ---
2
- license: mit
3
- ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Semantic2D: Enabling Semantic Scene Understanding with 2D Lidar Alone
2
+
3
+ Implementation code for our paper ["Semantic2D: Enabling Semantic Scene Understanding with 2D Lidar Alone"](https://arxiv.org/pdf/2409.09899).
4
+ Video demos can be found at [multimedia demonstrations](https://youtu.be/P1Hsvj6WUSY).
5
+ The Semantic2D dataset can be found and downloaded at: https://doi.org/10.5281/zenodo.18350696.
6
+
7
+ ---
8
+
9
+ ## Related Resources
10
+
11
+ - **Dataset Download:** https://doi.org/10.5281/zenodo.18350696
12
+ - **SALSA (Dataset and Labeling Framework):** https://github.com/TempleRAIL/semantic2d
13
+ - **S³-Net (Stochastic Semantic Segmentation):** https://github.com/TempleRAIL/s3_net
14
+ - **Semantic CNN Navigation:** https://github.com/TempleRAIL/semantic_cnn_nav
15
+
16
+ ## Demos
17
+
18
+ **S³-Net Segmentation**
19
+ ![S³-Net Segmentation](demos/1.lobby_s3net_segmentation.gif)
20
+
21
+ **Semantic Mapping**
22
+ ![Semantic Mapping](demos/2.lobby_semantic_mapping.gif)
23
+
24
+ **Semantic Navigation**
25
+ ![Semantic Navigation](demos/3.lobby_semantic_navigation.gif)
26
+
27
+ ## Table of Contents
28
+
29
+ 1. [Requirements](#requirements)
30
+ 2. [Installation](#installation)
31
+ 3. [Dataset Description](#semantic2d-dataset-description)
32
+ 4. [Semi-Automatic Labeling (SALSA)](#semi-automatic-labeling-usage)
33
+ - [Step 1: Data Collection](#step-1-data-collection)
34
+ - [Step 2: Manual Labeling](#step-2-manual-labeling)
35
+ - [Step 3: Automatic Labeling](#step-3-automatic-labeling)
36
+ 5. [Customizing for Different LiDAR Sensors](#customizing-for-different-lidar-sensors)
37
+ 6. [Visualization](#visualization)
38
+ 7. [Citation](#citation)
39
+
40
+ ---
41
+
42
+ ## Requirements
43
+
44
+ - Ubuntu 20.04
45
+ - ROS Noetic
46
+ - Python 3.8
47
+ - Labelme
48
+ - scikit-learn
49
+ - tqdm
50
+ - PyTorch
51
+ - NumPy
52
+ - Pillow
53
+
54
+ ---
55
+
56
+ ## Installation
57
+
58
+ ```bash
59
+ # Clone the repository
60
+ git clone https://github.com/TempleRAIL/semantic2d.git
61
+ cd semantic2d
62
+
63
+ # Install Python dependencies
64
+ pip install labelme scikit-learn tqdm torch numpy pillow
65
+
66
+ # Install LabelMe configuration with pre-defined semantic classes
67
+ cp salsa/manually_labeling/.labelmerc ~/.labelmerc
68
+ ```
69
+
70
+ ---
71
+
72
+ ## Semantic2D Dataset Description
73
+
74
+ The dataset contains the following data types:
75
+
76
+ | Folder | Description | Shape |
77
+ |--------|-------------|-------|
78
+ | `scans_lidar/` | 2D LiDAR range data | (N,) array |
79
+ | `intensities_lidar/` | 2D LiDAR intensity data | (N,) array |
80
+ | `line_segments/` | Extracted line segments | List of [x1,y1,x2,y2] |
81
+ | `positions/` | Robot position in map frame | (3,) array [x, y, yaw] |
82
+ | `velocities/` | Robot velocity commands | (2,) array [Vx, Wz] |
83
+ | `semantic_label/` | Point-wise semantic labels | (N,) array |
84
+
85
+ ### Semantic Classes
86
+
87
+ | ID | Class | Color (RGB) |
88
+ |----|-------|-------------|
89
+ | 0 | Other/Background | - |
90
+ | 1 | Chair | (109, 0, 156) |
91
+ | 2 | Door | (0, 46, 221) |
92
+ | 3 | Elevator | (0, 164, 187) |
93
+ | 4 | Person | (204, 204, 204) |
94
+ | 5 | Pillar | (0, 155, 18) |
95
+ | 6 | Sofa | (0, 225, 0) |
96
+ | 7 | Table | (203, 249, 0) |
97
+ | 8 | Trash bin | (255, 173, 0) |
98
+ | 9 | Wall | (227, 0, 0) |
99
+
100
+ ---
101
+
102
+ ## Semi-Automatic Labeling Usage
103
+
104
+ SALSA (Semi-Automatic Labeling framework for Semantic Annotation) consists of three steps:
105
+
106
+ ---
107
+
108
+ ### Step 1: Data Collection
109
+
110
+ Collect and save data from a rosbag file. **Prerequisites:** You should have already created an environment map using a mapping package (e.g., `gmapping`) and collected raw rosbag data.
111
+
112
+ #### 1.1 Configure Data Collection
113
+
114
+ Edit `salsa/manually_labeling/dataset_collection.py`:
115
+
116
+ ```python
117
+ ################ CUSTOMIZATION REQUIRED ################
118
+
119
+ # Number of LiDAR points (must match your sensor)
120
+ POINTS = 1081 # Hokuyo: 1081, WLR-716: 811, RPLIDAR-S2: 1972
121
+
122
+ # Output directory for collected data
123
+ DATA_PATH = "~/semantic2d_data/2024-04-11-15-24-29"
124
+ ```
125
+
126
+ #### 1.2 Configure Line Extraction Launch File
127
+
128
+ The `laser_line_extraction` package extracts line features from LiDAR scans for ICP alignment. **You must configure it for your LiDAR sensor.**
129
+
130
+ Edit `salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/launch/example.launch`:
131
+
132
+ ```xml
133
+ <launch>
134
+ <node name="line_extractor" pkg="laser_line_extraction" type="line_extraction_node">
135
+ <!--################ CUSTOMIZATION REQUIRED ################-->
136
+
137
+ <!-- LiDAR frame ID (from your URDF or tf tree) -->
138
+ <param name="~frame_id" value="rear_laser" />
139
+
140
+ <!-- LiDAR scan topic name -->
141
+ <param name="~scan_topic" value="scan" />
142
+
143
+ <!-- Sensor range parameters (must match your LiDAR) -->
144
+ <param name="~min_range" value="0.6" />
145
+ <param name="~max_range" value="60.0" />
146
+
147
+ <!--################ Usually no changes needed below ################-->
148
+ <param name="~frequency" value="30.0" />
149
+ <param name="~publish_markers" value="false" />
150
+ <param name="~bearing_std_dev" value="1e-5" />
151
+ <param name="~range_std_dev" value="0.02" />
152
+ <param name="~least_sq_angle_thresh" value="0.0001" />
153
+ <param name="~least_sq_radius_thresh" value="0.0001" />
154
+ <param name="~max_line_gap" value="1.0" />
155
+ <param name="~min_line_length" value="0.4" />
156
+ <param name="~min_split_dist" value="0.04" />
157
+ <param name="~outlier_dist" value="0.06" />
158
+ <param name="~min_line_points" value="15" />
159
+ </node>
160
+ </launch>
161
+ ```
162
+
163
+ **Key parameters to change:**
164
+
165
+ | Parameter | Description | Example Values |
166
+ |-----------|-------------|----------------|
167
+ | `frame_id` | TF frame of your LiDAR | `laser`, `base_scan`, `rear_laser`, `rplidar_link` |
168
+ | `scan_topic` | ROS topic for LiDAR scans | `scan`, `/scan`, `/rplidar/scan` |
169
+ | `min_range` | Minimum valid range (m) | Hokuyo: 0.1, WLR-716: 0.15, RPLIDAR: 0.2 |
170
+ | `max_range` | Maximum valid range (m) | Hokuyo: 60.0, WLR-716: 25.0, RPLIDAR: 30.0 |
171
+
172
+ **How to find your frame_id:**
173
+ ```bash
174
+ # Method 1: From rostopic
175
+ rostopic echo /scan --noarr -n 1 | grep frame_id
176
+
177
+ # Method 2: From tf tree
178
+ rosrun tf view_frames # Creates frames.pdf
179
+ ```
180
+
181
+ #### 1.3 Configure ROS Topics for Data Collection
182
+
183
+ The default ROS topic subscriptions in `dataset_collection.py` are:
184
+
185
+ | Topic | Message Type | Description |
186
+ |-------|--------------|-------------|
187
+ | `scan` | `sensor_msgs/LaserScan` | LiDAR scan data |
188
+ | `line_segments` | `laser_line_extraction/LineSegmentList` | Line features |
189
+ | `bluetooth_teleop/cmd_vel` | `geometry_msgs/Twist` | Velocity commands |
190
+ | `robot_pose` | `geometry_msgs/PoseStamped` | Robot pose |
191
+
192
+ **To customize for your robot**, modify the subscribers in `dataset_collection.py`:
193
+
194
+ ```python
195
+ # Example: For Hokuyo UTM-30LX-EW lidar
196
+ self.scan_sub = rospy.Subscriber("/scan", LaserScan, self.scan_callback)
197
+ self.dwa_cmd_sub = rospy.Subscriber('/cmd_vel', Twist, self.dwa_cmd_callback)
198
+ self.robot_pose_pub = rospy.Subscriber('/amcl_pose', PoseWithCovarianceStamped, self.robot_pose_callback)
199
+
200
+ # Example: For custom robot with namespaced topics
201
+ self.scan_sub = rospy.Subscriber("/my_robot/laser/scan", LaserScan, self.scan_callback)
202
+ ```
203
+
204
+ #### 1.4 Run Data Collection
205
+
206
+ ```bash
207
+ # Terminal 1: Start ROS master
208
+ roscore
209
+
210
+ # Terminal 2: Compile and launch line extraction
211
+ cd salsa/manually_labeling/semantic_data_collection_ws
212
+ catkin_make
213
+ source devel/setup.bash
214
+ roslaunch laser_line_extraction example.launch
215
+
216
+ # Terminal 3: Start data collection
217
+ cd salsa/manually_labeling
218
+ python dataset_collection.py
219
+
220
+ # Terminal 4: Play rosbag
221
+ rosbag play your_data.bag
222
+ ```
223
+
224
+ #### 1.5 Generate Train/Dev/Test Splits
225
+
226
+ After data collection, generate index files (`train.txt`, `dev.txt`, `test.txt`) that define the dataset splits.
227
+
228
+ **Configure** `salsa/manually_labeling/generateTrainDevSet.py`:
229
+
230
+ ```python
231
+ ################ CUSTOMIZATION REQUIRED ################
232
+ # The path of your dataset folder:
233
+ train_folder = '~/semantic2d_data/2024-04-11-15-24-29'
234
+
235
+ # Split percentages (must sum to 1.0)
236
+ TRAIN_RATIO = 0.70 # 70% for training
237
+ DEV_RATIO = 0.10 # 10% for validation/development
238
+ TEST_RATIO = 0.20 # 20% for testing
239
+ ########################################################
240
+ ```
241
+
242
+ **Run the script:**
243
+
244
+ ```bash
245
+ cd salsa/manually_labeling
246
+ python generateTrainDevSet.py
247
+ ```
248
+
249
+ **Example output:**
250
+ ```
251
+ Dataset folder: /home/user/semantic2d_data/2024-04-11-15-24-29
252
+ Total samples: 20427
253
+ Split ratios: Train=70%, Dev=10%, Test=20%
254
+ Split sizes: Train=14298, Dev=2042, Test=4087
255
+
256
+ Generated split files:
257
+ - /home/user/.../train.txt
258
+ - /home/user/.../dev.txt
259
+ - /home/user/.../test.txt
260
+ Done!
261
+ ```
262
+
263
+ The script automatically:
264
+ - Counts total samples in the `positions/` folder
265
+ - Calculates split sizes based on the defined ratios
266
+ - Shuffles data randomly before splitting
267
+ - Generates the three `.txt` files with sample filenames
268
+
269
+ ---
270
+
271
+ ### Step 2: Manual Labeling
272
+
273
+ Use LabelMe to manually label the environment map with semantic classes.
274
+
275
+ #### 2.1 Run LabelMe
276
+
277
+ ```bash
278
+ labelme
279
+
280
+ # Optionally, use RViz to visualize RGB images while labeling:
281
+ roscore
282
+ rosbag play your_data.bag
283
+ rviz # Add Image display for camera topic
284
+ ```
285
+
286
+ #### 2.2 Labeling Process
287
+
288
+ 1. **Open** your occupancy grid map image (`.pgm` or `.png`)
289
+ 2. **Create polygons** around each object (`Ctrl+N`)
290
+ 3. **Select class** from dropdown: Chair, Door, Elevator, Pillar, Sofa, Table, Trash bin, Wall
291
+ 4. **DO NOT label people** - they are automatically detected as dynamic objects
292
+ 5. **Save** as `.json` file
293
+
294
+ **Demo: How to use LabelMe**
295
+ ![Labelme example](./salsa/manually_labeling/labelme_example.svg "labelme_example")
296
+
297
+ ![Labelme demo](./demos/labelme_demo.gif "How to use LabelMe for semantic labeling")
298
+
299
+
300
+ **Keyboard shortcuts:**
301
+
302
+ | Shortcut | Action |
303
+ |----------|--------|
304
+ | `Ctrl+N` | Create new polygon |
305
+ | `Ctrl+S` | Save annotation |
306
+ | `Ctrl+Z` | Undo |
307
+ | `Delete` | Delete selected polygon |
308
+ | `Ctrl+E` | Edit label |
309
+ | `D` | Next image |
310
+ | `A` | Previous image |
311
+
312
+ #### 2.3 Export Labeled Map
313
+
314
+ ```bash
315
+ # Export to label images
316
+ labelme_export_json your_map.json -o labelme_output
317
+
318
+ # For older labelme versions:
319
+ # labelme_json_to_dataset your_map.json -o labelme_output
320
+ ```
321
+
322
+ **Output structure** (see [labelme_output](./salsa/manually_labeling/labelme_output)):
323
+
324
+ ```
325
+ labelme_output/
326
+ ├── img.png # Original map image
327
+ ├── label.png # Semantic label image (class IDs)
328
+ ├── label_viz.png # Colored visualization
329
+ └── label_names.txt # Class name list
330
+ ```
331
+
332
+ ---
333
+
334
+ ### Step 3: Automatic Labeling
335
+
336
+ Use ICP-based scan matching to automatically transfer labels from the map to each LiDAR scan.
337
+
338
+ #### 3.1 Configure Automatic Labeling
339
+
340
+ Edit `salsa/automatic_labeling/semi_automated_labeling_framework.py`:
341
+
342
+ ```python
343
+ ################ CUSTOMIZATION REQUIRED ################
344
+
345
+ # Dataset paths
346
+ DATASET_ODIR = "/home/user/semantic2d_data/2024-04-04-12-16-41"
347
+ DATASET_NAME = "train" # Options: train, dev, test
348
+
349
+ # Map parameters (from your_map.yaml file)
350
+ MAP_ORIGIN = np.array([-21.200000, -34.800000, 0.000000]) # [x, y, theta]
351
+ MAP_RESOLUTION = 0.025000 # meters per pixel
352
+
353
+ # Labeled map paths (from Step 2)
354
+ MAP_LABEL_PATH = '../manually_labeling/labelme_output/label.png'
355
+ MAP_PATH = '../manually_labeling/labelme_output/img.png'
356
+
357
+ # LiDAR sensor parameters (see Customization section below)
358
+ POINTS = 1081
359
+ AGNLE_MIN = -2.356194496154785 # -135 degrees in radians
360
+ AGNLE_MAX = 2.356194496154785 # +135 degrees in radians
361
+ RANGE_MAX = 60.0
362
+
363
+ # URDF: LiDAR to base_link transformation
364
+ JOINT_XYZ = [-0.12, 0.0, 0.0] # [x, y, z] translation
365
+ JOINT_RPY = [0.0, 0.0, 0.0] # [roll, pitch, yaw] rotation
366
+ ```
367
+
368
+ #### 3.2 Run Automatic Labeling
369
+
370
+ ```bash
371
+ cd salsa/automatic_labeling
372
+ python semi_automated_labeling_framework.py
373
+ ```
374
+
375
+ **What the algorithm does:**
376
+ 1. For each LiDAR scan:
377
+ - Extract line features (stable structures like walls)
378
+ - Use ICP to refine robot pose alignment with the map
379
+ - Project LiDAR points to map coordinates
380
+ - Match each point to semantic labels via pixel lookup
381
+ - Points in free space → labeled as "Person" (dynamic objects)
382
+
383
+ ---
384
+
385
+ ## Customizing for Different LiDAR Sensors
386
+
387
+ ### Supported Sensor Configurations
388
+
389
+ The code includes pre-configured parameters for three sensors:
390
+
391
+ | Parameter | Hokuyo UTM-30LX-EW | WLR-716 | RPLIDAR-S2 |
392
+ |-----------|-------------------|---------|------------|
393
+ | `POINTS` | 1081 | 811 | 1972 |
394
+ | `ANGLE_MIN` (rad) | -2.356 (-135°) | -2.356 (-135°) | -3.142 (-180°) |
395
+ | `ANGLE_MAX` (rad) | 2.356 (+135°) | 2.356 (+135°) | 3.142 (+180°) |
396
+ | `RANGE_MIN` (m) | 0.1 | 0.15 | 0.2 |
397
+ | `RANGE_MAX` (m) | 60.0 | 25.0 | 30.0 |
398
+ | `JOINT_XYZ` | [-0.12, 0, 0] | [0.065, 0, 0.182] | [0.065, 0, 0.11] |
399
+ | `JOINT_RPY` | [0, 0, 0] | [3.14, 0, 0] | [0, 0, 3.14] |
400
+ | `frame_id` (launch) | `rear_laser` | `wlr716_link` | `rplidar_link` |
401
+ | `scan_topic` (launch) | `scan` | `/wj716_base/scan` | `/rplidar_base/scan` |
402
+
403
+ ### How to Configure Your Own Sensor
404
+
405
+ #### Method 1: From ROS Topic
406
+
407
+ ```bash
408
+ # Get sensor parameters from ROS
409
+ rostopic echo /scan --noarr -n 1
410
+
411
+ # Output shows:
412
+ # angle_min: -2.356...
413
+ # angle_max: 2.356...
414
+ # angle_increment: 0.00436...
415
+ # range_max: 60.0
416
+ # ranges: <array with N elements>
417
+ ```
418
+
419
+ #### Method 2: Calculate from Specifications
420
+
421
+ ```python
422
+ import numpy as np
423
+
424
+ # From your sensor datasheet
425
+ fov_degrees = 270 # Field of view
426
+ angular_resolution = 0.25 # Degrees per point
427
+
428
+ # Calculate parameters
429
+ points = int(fov_degrees / angular_resolution) + 1 # = 1081
430
+ angle_min = -np.radians(fov_degrees / 2) # = -2.356
431
+ angle_max = np.radians(fov_degrees / 2) # = +2.356
432
+ ```
433
+
434
+ ### URDF Transformation (JOINT_XYZ, JOINT_RPY)
435
+
436
+ Find the LiDAR mounting position from your robot's URDF file:
437
+
438
+ ```xml
439
+ <!-- In your robot.urdf -->
440
+ <joint name="laser_joint" type="fixed">
441
+ <origin xyz="-0.12 0.0 0.0" rpy="0 0 0"/>
442
+ <parent link="base_link"/>
443
+ <child link="laser_frame"/>
444
+ </joint>
445
+ ```
446
+
447
+ ```python
448
+ # Use these values in the config
449
+ JOINT_XYZ = [-0.12, 0.0, 0.0] # From xyz attribute
450
+ JOINT_RPY = [0.0, 0.0, 0.0] # From rpy attribute
451
+ ```
452
+
453
+ ### Complete Example: Adding a New Sensor
454
+
455
+ In `semi_automated_labeling_framework.py`:
456
+
457
+ ```python
458
+ ################ CUSTOMIZATION REQUIRED ################
459
+
460
+ # Comment out existing configuration
461
+ # # Hokuyo UTM-30LX-EW:
462
+ # POINTS = 1081
463
+ # AGNLE_MIN = -2.356194496154785
464
+ # ...
465
+
466
+ # Add YOUR sensor configuration:
467
+ # SICK TiM561:
468
+ POINTS = 811 # From rostopic echo /scan
469
+ AGNLE_MIN = -2.356 # -135 degrees
470
+ AGNLE_MAX = 2.356 # +135 degrees
471
+ RANGE_MAX = 10.0 # 10 meters
472
+
473
+ # URDF transformation (from robot model)
474
+ JOINT_XYZ = [0.15, 0.0, 0.2] # Mounted 15cm forward, 20cm up
475
+ JOINT_RPY = [0.0, 0.0, 0.0] # No rotation
476
+ ```
477
+
478
+ ### Modifying Data Collection for Your Robot
479
+
480
+ In `dataset_collection.py`:
481
+
482
+ ```python
483
+ ################ CUSTOMIZATION REQUIRED ################
484
+
485
+ # 1. Set number of points for your sensor
486
+ POINTS = 811 # Your sensor's point count
487
+
488
+ # 2. Set output directory
489
+ DATA_PATH = "~/my_robot_data/environment_1"
490
+
491
+ # 3. Modify ROS subscribers for your topics (in __init__):
492
+
493
+ # Original (Jackal robot):
494
+ self.scan_sub = rospy.Subscriber("scan", LaserScan, self.scan_callback)
495
+ self.dwa_cmd_sub = rospy.Subscriber('bluetooth_teleop/cmd_vel', Twist, self.dwa_cmd_callback)
496
+ self.robot_pose_pub = rospy.Subscriber('robot_pose', PoseStamped, self.robot_pose_callback)
497
+
498
+ # For YOUR robot (example):
499
+ self.scan_sub = rospy.Subscriber("/my_robot/scan", LaserScan, self.scan_callback)
500
+ self.dwa_cmd_sub = rospy.Subscriber('/my_robot/cmd_vel', Twist, self.dwa_cmd_callback)
501
+ self.robot_pose_pub = rospy.Subscriber('/amcl_pose', PoseWithCovarianceStamped, self.robot_pose_callback)
502
+ ```
503
+
504
+ ### Quick Reference: Files to Modify
505
+
506
+ | Task | File | Parameters to Change |
507
+ |------|------|---------------------|
508
+ | Line Extraction | `salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/launch/example.launch` | `frame_id`, `scan_topic`, `min_range`, `max_range` |
509
+ | Data Collection | `salsa/manually_labeling/dataset_collection.py` | `POINTS`, `DATA_PATH`, ROS topics |
510
+ | Dataset Splits | `salsa/manually_labeling/generateTrainDevSet.py` | `train_folder`, `TRAIN_RATIO`, `DEV_RATIO`, `TEST_RATIO` |
511
+ | Manual Labeling | `~/.labelmerc` | Label names/colors (optional) |
512
+ | Auto Labeling | `salsa/automatic_labeling/semi_automated_labeling_framework.py` | `DATASET_ODIR`, `MAP_*`, `POINTS`, `ANGLE_*`, `JOINT_*` |
513
+ | Visualization | `salsa/automatic_labeling/draw_semantic_label_sample.py` | `DATASET_ODIR`, `POINTS`, `ANGLE_*` |
514
+
515
+ ### Launch File Configurations for Different Sensors
516
+
517
+ **Hokuyo UTM-30LX-EW (Jackal robot):**
518
+ ```xml
519
+ <param name="~frame_id" value="rear_laser" />
520
+ <param name="~scan_topic" value="scan" />
521
+ <param name="~min_range" value="0.1" />
522
+ <param name="~max_range" value="60.0" />
523
+ ```
524
+
525
+ **WLR-716 (Custom robot):**
526
+ ```xml
527
+ <param name="~frame_id" value="wlr716_link" />
528
+ <param name="~scan_topic" value="/wj716_base/scan" />
529
+ <param name="~min_range" value="0.15" />
530
+ <param name="~max_range" value="25.0" />
531
+ ```
532
+
533
+ **RPLIDAR-S2 (Custom robot):**
534
+ ```xml
535
+ <param name="~frame_id" value="rplidar_link" />
536
+ <param name="~scan_topic" value="/rplidar_base/scan" />
537
+ <param name="~min_range" value="0.2" />
538
+ <param name="~max_range" value="30.0" />
539
+ ```
540
+
541
+ ---
542
+
543
+ ## Visualization
544
+
545
+ Plot the labeled semantic LiDAR data:
546
+
547
+ ```bash
548
+ # Configure the same sensor parameters in draw_semantic_label_sample.py
549
+ cd salsa/automatic_labeling
550
+ python draw_semantic_label_sample.py
551
+ ```
552
+
553
+ ![semantic_lidar_data](demos/semantic_lidar_data_2000.png "semantic_lidar_data_2000")
554
+
555
+ ---
556
+
557
+ ## Citation
558
+
559
+ ```bibtex
560
+ @article{xie2026semantic2d,
561
+ title={Semantic2D: Enabling Semantic Scene Understanding with 2D Lidar Alone},
562
+ author={Xie, Zhanteng and Pan, Yipeng and Zhang, Yinqiang and Pan, Jia and Dames, Philip},
563
+ journal={arXiv preprint arXiv:2409.09899},
564
+ year={2026}
565
+ }
566
+ ```
567
+
568
+
569
+
570
+
dataset/README.md ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ # Semantic2D: Enabling Semantic Scene Understanding with 2D Lidar Alone
2
+ The Semantic2D Dataset and its raw rosbag data can be downloaded at: https://doi.org/10.5281/zenodo.13730200.
demos/1.lobby_s3net_segmentation.gif ADDED

Git LFS Details

  • SHA256: b9d77f9bb9a88f57be8888e934c69cfc0a5b79edc14dd69ddee152c5a6ecc3fc
  • Pointer size: 133 Bytes
  • Size of remote file: 12.4 MB
demos/2.lobby_semantic_mapping.gif ADDED

Git LFS Details

  • SHA256: cf0af6410a4c25971390639ab0bd3466de629af0c2be99abccaa2d353fe12251
  • Pointer size: 132 Bytes
  • Size of remote file: 1.45 MB
demos/3.lobby_semantic_navigation.gif ADDED

Git LFS Details

  • SHA256: e26fb5c2fb52941f4e33c7b4b8e7f116d3e3689d2f9c40d7fe288972b5af48f5
  • Pointer size: 133 Bytes
  • Size of remote file: 13.2 MB
demos/labelme_demo.gif ADDED

Git LFS Details

  • SHA256: 548228d98ac0d2d1eaeec4d7905f212a1b70aa1c5aed2262ca0d6504ab777aa8
  • Pointer size: 133 Bytes
  • Size of remote file: 13 MB
demos/semantic_lidar_data_2000.png ADDED

Git LFS Details

  • SHA256: cd8ea82b4aab9af0f1eaa4764fb403ce3230019f9d52b5385a98694e0b151b3d
  • Pointer size: 130 Bytes
  • Size of remote file: 90.8 kB
salsa/automatic_labeling/draw_semantic_label_sample.py ADDED
@@ -0,0 +1,210 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ #
3
+ # file: $ISIP_EXP/tuh_dpath/exp_0074/scripts/decode.py
4
+ #
5
+ # revision history:
6
+ # 20190925 (TE): first version
7
+ #
8
+ # usage:
9
+ # python decode.py odir mfile data
10
+ #
11
+ # arguments:
12
+ # odir: the directory where the hypotheses will be stored
13
+ # mfile: input model file
14
+ # data: the input data list to be decoded
15
+ #
16
+ # This script decodes data using a simple MLP model.
17
+ #------------------------------------------------------------------------------
18
+
19
+ # import pytorch modules
20
+ #
21
+ import torch
22
+ from tqdm import tqdm
23
+
24
+ # visualize:
25
+ import matplotlib.pyplot as plt
26
+ import numpy as np
27
+ import matplotlib
28
+ matplotlib.style.use('ggplot')
29
+ import sys
30
+ import os
31
+
32
+
33
+ ################ customized parameters #################
34
+ ################ please modify them based on your dataset #################
35
+ DATASET_ODIR = "~/semantic2d_data/2024-04-04-12-16-41" # the directory path of the raw data
36
+ DATASET_NAME = "train" # select the train, dev, and test
37
+ SEMANTIC_MASK_ODIR = "./output"
38
+
39
+ # Hokuyo UTM-30LX-EW:
40
+ POINTS = 1081 # the number of lidar points
41
+ AGNLE_MIN = -2.356194496154785
42
+ AGNLE_MAX = 2.356194496154785
43
+ RANGE_MAX = 60.0
44
+
45
+ # # WLR-716:
46
+ # POINTS = 811 # the number of lidar points
47
+ # AGNLE_MIN = -2.356194496154785
48
+ # AGNLE_MAX = 2.356194496154785
49
+ # RANGE_MAX = 25.0
50
+ # # RPLIDAR-S2:
51
+ # POINTS = 1972 # the number of lidar points
52
+ # AGNLE_MIN = -3.1415927410125732
53
+ # AGNLE_MAX = 3.1415927410125732
54
+ # RANGE_MAX = 16.0
55
+
56
+ ################# read dataset ###################
57
+ NEW_LINE = "\n"
58
+ # for reproducibility, we seed the rng
59
+ #
60
+ class Semantic2DLidarDataset(torch.utils.data.Dataset):
61
+ def __init__(self, img_path, file_name):
62
+ # initialize the data and labels
63
+ # read the names of image data:
64
+ self.scan_file_names = []
65
+ self.intensity_file_names = []
66
+ self.vel_file_names = []
67
+ self.label_file_names = []
68
+ # parameters:
69
+ self.s_max = 30
70
+ self.s_min = 0
71
+ # open train.txt or dev.txt:
72
+ fp_file = open(img_path+'/'+file_name+'.txt', 'r')
73
+
74
+ # for each line of the file:
75
+ for line in fp_file.read().split(NEW_LINE):
76
+ if('.npy' in line):
77
+ self.scan_file_names.append(img_path+'/scans_lidar/'+line)
78
+ self.intensity_file_names.append(img_path+'/intensities_lidar/'+line)
79
+ self.label_file_names.append(img_path+'/semantic_label/'+line)
80
+ # close txt file:
81
+ fp_file.close()
82
+ self.length = len(self.scan_file_names)
83
+
84
+ print("dataset length: ", self.length)
85
+
86
+
87
+ def __len__(self):
88
+ return self.length
89
+
90
+ def __getitem__(self, idx):
91
+ # get the index of start point:
92
+ scan = np.zeros((1, POINTS))
93
+ intensity = np.zeros((1, POINTS))
94
+ label = np.zeros((1, POINTS))
95
+
96
+ # get the scan data:
97
+ scan_name = self.scan_file_names[idx]
98
+ scan = np.load(scan_name)
99
+
100
+ # get the intensity data:
101
+ intensity_name = self.intensity_file_names[idx]
102
+ intensity = np.load(intensity_name)
103
+
104
+ # get the semantic label data:
105
+ label_name = self.label_file_names[idx]
106
+ label = np.load(label_name)
107
+
108
+ # initialize:
109
+ scan[np.isnan(scan)] = 0.
110
+ scan[np.isinf(scan)] = 0.
111
+
112
+ intensity[np.isnan(intensity)] = 0.
113
+ intensity[np.isinf(intensity)] = 0.
114
+
115
+ scan[scan >= 15] = 0.
116
+
117
+ label[np.isnan(label)] = 0.
118
+ label[np.isinf(label)] = 0.
119
+
120
+ # transfer to pytorch tensor:
121
+ scan_tensor = torch.FloatTensor(scan)
122
+ intensity_tensor = torch.FloatTensor(intensity)
123
+ label_tensor = torch.FloatTensor(label)
124
+
125
+ data = {
126
+ 'scan': scan_tensor,
127
+ 'intensity': intensity_tensor,
128
+ 'label': label_tensor,
129
+ }
130
+
131
+ return data
132
+
133
+ #------------------------------------------------------------------------------
134
+ #
135
+ # the main program starts here
136
+ #
137
+ #------------------------------------------------------------------------------
138
+
139
+ # function: main
140
+ #
141
+ # arguments: none
142
+ #
143
+ # return: none
144
+ #
145
+ # This method is the main function.
146
+ #
147
+ if __name__ == '__main__':
148
+ # input parameters:
149
+ dataset_odir = DATASET_ODIR
150
+ dataset_name = DATASET_NAME
151
+ semantic_mask_odir = SEMANTIC_MASK_ODIR
152
+ # create the folder for the semantic label mask:
153
+ if not os.path.exists(semantic_mask_odir):
154
+ os.makedirs(semantic_mask_odir)
155
+
156
+ # read dataset:
157
+ eval_dataset = Semantic2DLidarDataset(dataset_odir, dataset_name)
158
+ eval_dataloader = torch.utils.data.DataLoader(eval_dataset, batch_size=1, num_workers=2, \
159
+ shuffle=False, drop_last=True, pin_memory=True)
160
+
161
+ # for each batch in increments of batch size:
162
+ cnt = 0
163
+ cnt_m = 0
164
+ # get the number of batches (ceiling of train_data/batch_size):
165
+ num_batches = int(len(eval_dataset)/eval_dataloader.batch_size)
166
+ for i, batch in tqdm(enumerate(eval_dataloader), total=num_batches):
167
+ # collect the samples as a batch: 10 timesteps
168
+ if(i % 200 == 0):
169
+ scans = batch['scan']
170
+ scans = scans.detach().cpu().numpy()
171
+ labels = batch['label']
172
+ labels = labels.detach().cpu().numpy()
173
+
174
+ # lidar data:
175
+ r = scans.reshape(POINTS)
176
+ theta = np.linspace(AGNLE_MIN, AGNLE_MAX, num=POINTS, endpoint='true')
177
+
178
+ ## plot semantic label:
179
+ fig = plt.figure(figsize=(12, 12))
180
+ ax = fig.add_subplot(1,1,1, projection='polar', facecolor='seashell')
181
+ smap = labels.reshape(POINTS)
182
+
183
+ # add the background label:
184
+ theta = np.insert(theta, -1, np.pi)
185
+ r = np.insert(r, -1, 1)
186
+ smap = np.insert(smap, -1, 0)
187
+ label_val = np.unique(smap).astype(int)
188
+ print("label_values: ", label_val)
189
+
190
+ colors = smap
191
+ area = 6
192
+ scatter = ax.scatter(theta, r, c=colors, s=area, cmap='nipy_spectral', alpha=0.95, linewidth=10)
193
+ ax.set_xticks(np.linspace(AGNLE_MIN, AGNLE_MAX, 8, endpoint='true'))
194
+ ax.set_thetamin(-135)
195
+ ax.set_thetamax(135)
196
+ ax.set_yticklabels([])
197
+ # produce a legend with the unique colors from the scatter
198
+ classes = ['Other', 'Chair', 'Door', 'Elevator', 'Person', 'Pillar', 'Sofa', 'Table', 'Trash bin', 'Wall']
199
+ plt.xticks(fontsize=16)
200
+ plt.yticks(fontsize=16)
201
+ plt.legend(handles=scatter.legend_elements(num=[j for j in label_val])[0], labels=[classes[j] for j in label_val], bbox_to_anchor=(0.5, -0.08), loc='lower center', fontsize=18)
202
+ ax.grid(False)
203
+ ax.set_theta_offset(np.pi/2)
204
+
205
+ input_img_name = semantic_mask_odir + "/semantic_mask" + str(i)+ ".png"
206
+ plt.savefig(input_img_name, bbox_inches='tight')
207
+ plt.show()
208
+
209
+ print(i)
210
+
salsa/automatic_labeling/semi_automated_labeling_framework.py ADDED
@@ -0,0 +1,461 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ import os
3
+ import numpy as np
4
+ import PIL.Image
5
+ import torch
6
+ from tqdm import tqdm
7
+ from typing import Optional, Tuple, Union
8
+ from sklearn.neighbors import NearestNeighbors
9
+ from scipy.spatial.transform import Rotation
10
+
11
+ ################ customized parameters #################
12
+ ################ please modify them based on your dataset #################
13
+ # dataset:
14
+ DATASET_ODIR = "/home/xzt/data/semantic_lidar_v2/2024-04-04-12-16-41" # the directory path of the raw data
15
+ DATASET_NAME = "train" # select the train, dev, and test
16
+ # map: parameters from the map configuration file
17
+ MAP_ORIGIN = np.array([-21.200000, -34.800000, 0.000000])
18
+ MAP_RESOLUTION = 0.025000
19
+ # labeling:
20
+ MAP_LABEL_PATH = '../manually_labeling/labelme_output/label.png'
21
+ MAP_PATH = '../manually_labeling/labelme_output/img.png'
22
+
23
+ # Hokuyo UTM-30LX-EW:
24
+ POINTS = 1081 # the number of lidar points
25
+ AGNLE_MIN = -2.356194496154785
26
+ AGNLE_MAX = 2.356194496154785
27
+ RANGE_MAX = 60.0
28
+ # urdf: laser_joint
29
+ JOINT_XYZ = [-0.12, 0.0, 0.0]
30
+ JOINT_RPY = [0.0, 0.0, 0.0]
31
+
32
+ # # WLR-716:
33
+ # POINTS = 811 # the number of lidar points
34
+ # AGNLE_MIN = -2.356194496154785
35
+ # AGNLE_MAX = 2.356194496154785
36
+ # RANGE_MAX = 25.0
37
+ # # urdf: laser_joint
38
+ # JOINT_XYZ = [0.065, 0.0, 0.182]
39
+ # JOINT_RPY = [3.1415926, 0.0, 0.0]
40
+ # # RPLIDAR-S2:
41
+ # POINTS = 1972 # the number of lidar points
42
+ # AGNLE_MIN = -3.1415927410125732
43
+ # AGNLE_MAX = 3.1415927410125732
44
+ # RANGE_MAX = 16.0
45
+ # # urdf: laser_joint
46
+ # JOINT_XYZ = [0.065, 0.0, 0.11]
47
+ # JOINT_RPY = [0.0, 0.0, 3.1415926]
48
+
49
+ ################# read dataset ###################
50
+ NEW_LINE = "\n"
51
class Semantic2DLidarDataset(torch.utils.data.Dataset):
    """Dataset of raw 2D lidar samples for the semi-automated labeling pipeline.

    Reads an index file ``<img_path>/<file_name>.txt`` listing ``.npy`` sample
    names, then lazily loads each sample's scan, line-segment, position and
    velocity arrays from the matching sub-directories of ``img_path``.
    """

    def __init__(self, img_path, file_name):
        # per-modality file lists (parallel arrays, indexed together)
        self.scan_file_names = []
        self.line_file_names = []
        self.pos_file_names = []
        self.vel_file_names = []
        # open train.txt / dev.txt / test.txt; `with` guarantees the handle
        # is closed even if parsing raises:
        with open(img_path+'/'+file_name+'.txt', 'r') as fp_file:
            # for each line of the file:
            for line in fp_file.read().split(NEW_LINE):
                if('.npy' in line):
                    self.scan_file_names.append(img_path+'/scans_lidar/'+line)
                    self.line_file_names.append(img_path+'/line_segments/'+line)
                    self.pos_file_names.append(img_path+'/positions/'+line)
                    self.vel_file_names.append(img_path+'/velocities/'+line)
        self.length = len(self.scan_file_names)

        print("dataset length: ", self.length)

    def __len__(self):
        return self.length

    def __getitem__(self, idx):
        """Load sample `idx` and return it as a dict of float tensors."""
        # get the scan data:
        scan_name = self.scan_file_names[idx]
        scan = np.load(scan_name)

        # get the line segments data:
        line_name = self.line_file_names[idx]
        line_segs = np.load(line_name)

        # get the robot position data:
        pos_name = self.pos_file_names[idx]
        position = np.load(pos_name)

        # get the velocity data:
        vel_name = self.vel_file_names[idx]
        vel = np.load(vel_name)

        # sanitize: NaN/Inf and max-range returns are treated as "no return".
        # Use the RANGE_MAX constant instead of a hard-coded 60 so the other
        # lidar configurations listed at the top of this file (25 m, 16 m)
        # keep working.
        scan[np.isnan(scan)] = 0.
        scan[np.isinf(scan)] = 0.
        scan[scan == RANGE_MAX] = 0.

        position[np.isnan(position)] = 0.
        position[np.isinf(position)] = 0.

        vel[np.isnan(vel)] = 0.
        vel[np.isinf(vel)] = 0.

        # transfer to pytorch tensors:
        scan_tensor = torch.FloatTensor(scan)
        line_tensor = torch.FloatTensor(line_segs)
        pose_tensor = torch.FloatTensor(position)
        vel_tensor = torch.FloatTensor(vel)

        data = {
            'scan': scan_tensor,
            'line': line_tensor,
            'position': pose_tensor,
            'velocity': vel_tensor,
        }

        return data
122
+
123
+ ################# ICP algorithm ###################
124
class MapICP:
    """2D Iterative-Closest-Point matching of scan points against fixed map points."""

    def __init__(self, map_pts):
        # map_pts: 2xN numpy.ndarray of points
        self.map_pts = map_pts
        # 1-NN index over the map points for fast correspondence queries
        self.neighbors = NearestNeighbors(n_neighbors=1).fit(self.map_pts.T)

    def setMapPoints(self, pts: np.ndarray) -> None:
        '''
        Initializes a set of points to match against
        Inputs:
            pts: 2xN numpy.ndarray of 2D points
        '''
        assert pts.shape[0] == 2
        self.map_pts = pts
        self.neighbors = NearestNeighbors(n_neighbors=1).fit(pts.T)

    def nearestNeighbor(self, pts: np.ndarray) -> Tuple[np.array, np.array]:
        '''
        Find the nearest (Euclidean) neighbor for each point in pts
        Input:
            pts: 2xN array of points
        Output:
            distances: np.array of Euclidean distances of the nearest neighbor
            indices: np.array of indices of the nearest neighbor
        '''
        assert pts.shape[0] == 2
        distances, indices = self.neighbors.kneighbors(pts.T, return_distance=True)

        return distances.ravel(), indices.ravel()

    def bestFitTransform(self, pts: np.array, map_pts: np.array) -> np.ndarray:
        '''
        Calculates the least-squares best-fit transform that maps pts on to map_pts
        Input:
            pts: 2xN numpy.ndarray of points
            map_pts: 2xN numpy.ndarray of corresponding points
        Returns:
            T: 3x3 homogeneous transformation matrix that maps pts on to map_pts
        '''
        # get number of dimensions
        m = pts.shape[0]
        assert m == 2

        # corresponding map points (renamed so the builtin `map` is not shadowed)
        map_ref = map_pts
        assert pts.shape == map_ref.shape

        # translate points to their centroids
        centroid_pts = np.mean(pts, axis=1)
        centroid_map = np.mean(map_ref, axis=1)
        PTS = pts - centroid_pts.reshape((-1,1))
        MAP = map_ref - centroid_map.reshape((-1,1))

        # rotation via SVD of the cross-covariance matrix (Arun/Kabsch)
        H = MAP @ PTS.T
        U, _, Vt = np.linalg.svd(H)
        R = U @ Vt

        # special reflection case: negate the last ROW of Vt (i.e. the last
        # column of V).  The previous code negated the last COLUMN of Vt,
        # which does not produce the optimal proper rotation.
        if np.linalg.det(R) < 0:
            Vt[-1, :] *= -1
            R = U @ Vt

        # translation
        t = centroid_map - R @ centroid_pts

        # homogeneous transformation
        T = np.identity(m+1)
        T[:m, :m] = R
        T[:m, m] = t

        return T

    def icp(self, pts: np.ndarray, init_pose: Optional[Union[np.array, None]]=None, max_iterations: Optional[int]=20, tolerance: Optional[float]=0.05) -> Tuple[np.ndarray, np.array, int]:
        '''
        The Iterative Closest Point method: finds best-fit transform that maps pts on to the map points
        Input:
            pts: 2xN numpy.ndarray of source points
            init_pose: 3x3 homogeneous transformation
            max_iterations: exit algorithm after max_iterations
            tolerance: convergence criteria on the change of mean NN distance
        Outputs:
            T: final homogeneous transformation that maps pts on to map_pts
            distances: Euclidean distances (errors) of the nearest neighbor
            i: number of iterations run
        '''
        # Get number of dimensions
        m = pts.shape[0]
        assert m == 2

        # Make points homogeneous, copy them to maintain the originals
        src = np.ones((m+1, pts.shape[1]))
        dst = np.ones((m+1, self.map_pts.shape[1]))
        src[:m,:] = np.copy(pts)
        dst[:m,:] = np.copy(self.map_pts)

        # Apply the initial pose estimate
        T = np.eye(3)
        if init_pose is not None:
            src = init_pose @ src
            T = init_pose @ T

        # Run ICP until the mean nearest-neighbor error stops improving
        prev_error = 1e6
        for i in range(max_iterations):
            # find the nearest neighbors between the current source and destination points
            distances, indices = self.nearestNeighbor(src[:m,:])
            map_pts = self.map_pts[:, indices]
            # compute the transformation between the current source and nearest destination points
            T_delta = self.bestFitTransform(src[:m,:], map_pts)

            # update the current source and accumulated transform
            src = T_delta @ src
            T = T_delta @ T

            # check convergence
            mean_error = np.mean(distances)
            if np.abs(prev_error - mean_error) < tolerance:
                break
            prev_error = mean_error

        return T, distances, i
249
+
250
+ ################# map converter: homogeneous transformation ###################
251
class MapConverter:
    """Converts between map-frame poses, labeled-image pixel coordinates, and
    lidar points, using the map origin/resolution from the map config file."""

    # Constructor
    def __init__(self, label_file, origin=np.array([-12.200000, -12.200000, 0.000000]), resolution=0.05):
        # map parameters: origin is [x, y, yaw] of pixel (0,0) in the map frame,
        # resolution is meters per pixel
        self.origin = origin
        self.resolution = resolution
        # homogeneous transformation matrix: pixel frame -> map frame
        self.map_T_pixel = np.array([[np.cos(self.origin[2]), -np.sin(self.origin[2]), self.origin[0]],
                                     [np.sin(self.origin[2]), np.cos(self.origin[2]), self.origin[1]],
                                     [0, 0, 1]
                                     ])
        self.pixel_T_map = np.linalg.inv(self.map_T_pixel)
        # load semantic map (labelme label.png: one integer class id per pixel)
        label_map = np.asarray(PIL.Image.open(label_file))
        self.label_map = label_map.T # transpose 90
        self.x_lim = self.label_map.shape[0]
        self.y_lim = self.label_map.shape[1]
        print(self.x_lim, self.y_lim)

    def coordinate2pose(self, px, py):
        """Convert a pixel coordinate (px, py) to a map-frame pose (x, y) in meters."""
        pixel_pose = np.array([px*self.resolution, py*self.resolution, 1])
        map_pose = np.matmul(self.map_T_pixel, pixel_pose.T)
        x = map_pose[0]
        y = map_pose[1]

        return x, y

    def pose2coordinate(self, x, y):
        """Convert a map-frame pose (x, y) in meters to an integer pixel coordinate (px, py)."""
        map_pose = np.array([x, y, 1])
        pixel_pose = np.matmul(self.pixel_T_map, map_pose.T)
        px = int(pixel_pose[0] / self.resolution)
        py = int(pixel_pose[1] / self.resolution)

        return px, py

    def get_semantic_label(self, x, y):
        """Look up the semantic class id at map pose (x, y) by voting over a
        small pixel window of the label image; returns 4 (person) when no
        labeled static pixel is found near the point."""
        px, py = self.pose2coordinate(x, y)
        py = self.y_lim - py # the y axis is inverse
        label_loc = np.zeros(10) # 10 labels
        # filtering: vote over a pixel window around (px, py)
        # NOTE(review): range(px-2, px+2) covers px-2..px+1, an asymmetric 4x4
        # window — confirm whether a symmetric px-2..px+2 window was intended.
        for i in range(px-2, px+2):
            for j in range(py-2, py+2):
                if(i >= 0 and i < self.x_lim and j >=0 and j < self.y_lim):
                    label = self.label_map[i, j]
                    if(label != 0):
                        label_loc[label] += 1

        if(np.sum(label_loc) == 0): # people
            semantic_label = 4
        else: # static objects
            semantic_label = np.argmax(label_loc)

        return semantic_label

    def transform_lidar_points(self, points_sensor, xyz, rpy, inverse=True):
        """
        Transform 2D lidar points from sensor frame to robot base frame, given URDF translation and RPY.
        Args:
            points_sensor (Nx2 ndarray): Lidar points [x, y] in the sensor frame.
            xyz (3-tuple/list/ndarray): [x, y, z] translation from base to sensor (URDF).
            rpy (3-tuple/list/ndarray): [roll, pitch, yaw] rotation from base to sensor (URDF, radians).
            inverse (bool): If True (default), transforms from sensor to base frame (i.e., applies inverse transform).
                If False, transforms from base to sensor frame.
        Returns:
            Nx2 ndarray: Transformed points in base frame.
        """
        translation = np.asarray(xyz).reshape((3,))
        rpy = np.asarray(rpy).reshape((3,))

        rot = Rotation.from_euler('xyz', rpy) # Roll, Pitch, Yaw

        N = points_sensor.shape[0]
        # Lift the planar points to 3D with z=0 so Rotation can be applied
        points_3d = np.hstack([points_sensor, np.zeros((N, 1))]) # Nx3

        if inverse:
            # Transform from sensor -> base: x_base = R.T @ (x_sensor - t)
            points_shifted = points_3d - translation
            points_base = rot.inv().apply(points_shifted)
        else:
            # Transform from base -> sensor: x_sensor = R @ x_base + t
            points_base = rot.apply(points_3d) + translation

        # project back to 2D (drop z)
        return points_base[:, :2]

    def lidar2map(self, lidar_pos, lidar_scan):
        """Convert a raw range scan to Cartesian points in both the robot base
        frame and the map frame.

        Args:
            lidar_pos: [x, y, yaw] robot pose in the map frame.
            lidar_scan: length-POINTS array of range measurements.
        Returns:
            (lidar_points_in_map, points_laser): Nx2 points in the map frame
            and the raw Nx2 points in the sensor frame.
        """
        # get laser points: polar to cartesian
        points_laser = np.zeros((POINTS, 2))
        angles = np.linspace(AGNLE_MIN, AGNLE_MAX, num=POINTS)
        dis = lidar_scan
        points_laser[:, 0] = dis*np.cos(angles)
        points_laser[:, 1] = dis*np.sin(angles)

        # coordinate transformation: lidar -> footprint (URDF mount offset)
        lidar_points = self.transform_lidar_points(points_sensor=points_laser, xyz=JOINT_XYZ, rpy=JOINT_RPY)

        # coordinate transformation: footprint -> map (2D rigid transform by robot pose)
        lidar_points_in_map = np.zeros((POINTS, 2))
        lidar_points_in_map[:, 0] = lidar_points[:, 0]*np.cos(lidar_pos[2]) - lidar_points[:, 1]*np.sin(lidar_pos[2]) + lidar_pos[0]
        lidar_points_in_map[:, 1] = lidar_points[:, 0]*np.sin(lidar_pos[2]) + lidar_points[:, 1]*np.cos(lidar_pos[2]) + lidar_pos[1]

        return lidar_points_in_map, points_laser
354
+
355
+
356
if __name__ == '__main__':
    # ---- configuration --------------------------------------------------
    dataset_odir = DATASET_ODIR
    dataset_name = DATASET_NAME
    scan_label_odir = dataset_odir + "/" + "semantic_label"
    if not os.path.exists(scan_label_odir):
        os.makedirs(scan_label_odir)

    map_path = MAP_PATH
    map_label_path = MAP_LABEL_PATH

    map_origin = MAP_ORIGIN
    map_resolution = MAP_RESOLUTION

    # initialize semantic scan label buffer (reused for every sample):
    scan_label = np.zeros(POINTS)

    # initialize the map converter: homogeneous transformation
    mc = MapConverter(map_label_path, origin=map_origin, resolution=map_resolution)

    ## extract the valid map points from the map image:
    # load map image:
    map_img = np.asarray(PIL.Image.open(map_path))
    map_img = map_img.T # transpose 90
    print(map_img.shape)
    # get map valid points (occupied pixels have value 0):
    map_idx = np.where(map_img == 0)
    map_idx_x = map_idx[0]
    map_idx_y = map_idx[1]
    map_points = []
    for n in range(len(map_idx_x)):
        px = map_idx_x[n]
        py = map_idx_y[n]
        py = map_img.shape[1] - py # the y axis is inverse
        [x, y] = mc.coordinate2pose(px, py)
        map_points.append([x, y])
    map_pts = np.array(map_points)

    # initialize ICP against the occupied map points:
    icp = MapICP(map_pts.T)

    # load dataset:
    eval_dataset = Semantic2DLidarDataset(dataset_odir, dataset_name)
    eval_dataloader = torch.utils.data.DataLoader(eval_dataset, batch_size=1, \
            shuffle=False, drop_last=True)

    # get the number of batches (floor of len(eval_dataset)/batch_size):
    num_batches = int(len(eval_dataset)/eval_dataloader.batch_size)
    for i, batch in tqdm(enumerate(eval_dataloader), total=num_batches):
        # collect the samples as a batch:
        scan = batch['scan']
        scan = scan.detach().cpu().numpy()
        lines = batch['line']
        lines = lines.detach().cpu().numpy()
        position = batch['position']
        position = position.detach().cpu().numpy()

        # transfer lidar points:
        lidar_pos = position.reshape(3)
        # FIX: removed `lidar_pos[0] += LIDAR_BASE_DIS` — LIDAR_BASE_DIS is
        # not defined anywhere in this file (it raised a NameError), and the
        # lidar-mount offset is now applied via JOINT_XYZ/JOINT_RPY inside
        # MapConverter.transform_lidar_points().
        lidar_scan = scan.reshape(POINTS)
        lidar_points_in_map, lidar_points = mc.lidar2map(lidar_pos, lidar_scan)

        # use line segments (key line features) to remove outliers in the
        # lidar scan before ICP:
        correspondences = []
        for line in lines[0]:
            x1, y1, x2, y2 = line
            # construct line formula: y = ax + b (epsilon avoids division by zero)
            a = (y2 - y1) / (x2 - x1 + 1e-10)
            b = y1 - a*x1
            for n in range(POINTS):
                x, y = lidar_points[n, :]
                if(x >= x1-1 and x <= x2+1 and y >= y1-1 and y <= y2+1): # in the area of the line
                    if(abs(y - (a*x+b)) <= 0.3): # on the line
                        correspondences.append([x, y])

        correspondences = np.array(correspondences)
        correspondences_length = len(correspondences)

        if(correspondences_length > 280): # reliable correspondences
            # coordinate transformation: lidar -> map
            correspondences_in_map = np.zeros((correspondences_length, 2))
            correspondences_in_map[:, 0] = correspondences[:, 0]*np.cos(lidar_pos[2]) - correspondences[:, 1]*np.sin(lidar_pos[2]) + lidar_pos[0]
            correspondences_in_map[:, 1] = correspondences[:, 0]*np.sin(lidar_pos[2]) + correspondences[:, 1]*np.cos(lidar_pos[2]) + lidar_pos[1]

            # use ICP scan matching to correct the lidar pose estimate:
            mapc_T_map, _, _ = icp.icp(correspondences_in_map.T, max_iterations=500, tolerance=1e-6)

            # corrected lidar pose and corrected scan points (homogeneous):
            lidar_pose_corrected = np.matmul(mapc_T_map, np.array([lidar_pos[0], lidar_pos[1], 1]))
            lidar_points_in_map_old = np.concatenate((lidar_points_in_map, np.ones((POINTS, 1))), axis=1)
            point_corrected = np.matmul(mapc_T_map, lidar_points_in_map_old.T)
        else: # no icp correction
            lidar_pose_corrected = lidar_pos
            point_corrected = lidar_points_in_map.T

        # semantic pixel matching: a point coincident with the robot pose
        # means the original range was 0 (no return) -> background label
        for j in range(POINTS):
            if(point_corrected[0, j] == lidar_pose_corrected[0] and point_corrected[1, j] == lidar_pose_corrected[1]): # scan == 0
                scan_label[j] = 0
            else:
                scan_label[j] = mc.get_semantic_label(point_corrected[0, j], point_corrected[1, j])

        # write scan_label in lidar frame into np.array:
        scan_label_name = scan_label_odir + "/" + str(i).zfill(7)
        np.save(scan_label_name, scan_label)
salsa/manually_labeling/.labelmerc ADDED
@@ -0,0 +1,116 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ auto_save: false
2
+ display_label_popup: true
3
+ store_data: true
4
+ keep_prev: false
5
+ keep_prev_scale: false
6
+ keep_prev_brightness: false
7
+ keep_prev_contrast: false
8
+ logger_level: info
9
+
10
+ flags: null
11
+ label_flags: null
12
+ labels: [Chair, Door, Elevator, Person, Pillar, Sofa, Table, Trash bin, Wall] # null
13
+ file_search: null
14
+ sort_labels: false #true
15
+ validate_label: null
16
+
17
+ default_shape_color: [0, 255, 0]
18
+ shape_color: manual #auto # null, 'auto', 'manual'
19
+ shift_auto_shape_color: 0
20
+ label_colors: {Chair:[109, 0, 156], Door:[0, 46, 221], Elevator:[0, 164, 187], Person:[204, 204, 204], Pillar:[0, 155, 18], Sofa:[0, 225, 0], Table:[203, 249, 0], Trash bin:[255, 173, 0], Wall:[227, 0, 0]} #null
21
+
22
+ shape:
23
+ # drawing
24
+ line_color: [0, 255, 0, 128]
25
+ fill_color: [0, 0, 0, 64]
26
+ vertex_fill_color: [0, 255, 0, 255]
27
+ # selecting / hovering
28
+ select_line_color: [255, 255, 255, 255]
29
+ select_fill_color: [0, 255, 0, 155]
30
+ hvertex_fill_color: [255, 255, 255, 255]
31
+ point_size: 8
32
+
33
+ # main
34
+ flag_dock:
35
+ show: true
36
+ closable: true
37
+ movable: true
38
+ floatable: true
39
+ label_dock:
40
+ show: true
41
+ closable: true
42
+ movable: true
43
+ floatable: true
44
+ shape_dock:
45
+ show: true
46
+ closable: true
47
+ movable: true
48
+ floatable: true
49
+ file_dock:
50
+ show: true
51
+ closable: true
52
+ movable: true
53
+ floatable: true
54
+
55
+ # label_dialog
56
+ show_label_text_field: true
57
+ label_completion: startswith
58
+ fit_to_content:
59
+ column: true
60
+ row: false
61
+
62
+ # canvas
63
+ epsilon: 10.0
64
+ canvas:
65
+ fill_drawing: true
66
+ # None: do nothing
67
+ # close: close polygon
68
+ double_click: close
69
+ # The max number of edits we can undo
70
+ num_backups: 10
71
+ # show crosshair
72
+ crosshair:
73
+ polygon: false
74
+ rectangle: true
75
+ circle: false
76
+ line: false
77
+ point: false
78
+ linestrip: false
79
+ ai_polygon: false
80
+
81
+ shortcuts:
82
+ close: Ctrl+W
83
+ open: Ctrl+O
84
+ open_dir: Ctrl+U
85
+ quit: Ctrl+Q
86
+ save: Ctrl+S
87
+ save_as: Ctrl+Shift+S
88
+ save_to: null
89
+ delete_file: Ctrl+Delete
90
+
91
+ open_next: [D, Ctrl+Shift+D]
92
+ open_prev: [A, Ctrl+Shift+A]
93
+
94
+ zoom_in: [Ctrl++, Ctrl+=]
95
+ zoom_out: Ctrl+-
96
+ zoom_to_original: Ctrl+0
97
+ fit_window: Ctrl+F
98
+ fit_width: Ctrl+Shift+F
99
+
100
+ create_polygon: Ctrl+N
101
+ create_rectangle: Ctrl+R
102
+ create_circle: null
103
+ create_line: null
104
+ create_point: null
105
+ create_linestrip: null
106
+ edit_polygon: Ctrl+J
107
+ delete_polygon: Delete
108
+ duplicate_polygon: Ctrl+D
109
+ copy_polygon: Ctrl+C
110
+ paste_polygon: Ctrl+V
111
+ undo: Ctrl+Z
112
+ undo_last_point: Ctrl+Z
113
+ add_point_to_edge: Ctrl+Shift+P
114
+ edit_label: Ctrl+E
115
+ toggle_keep_prev_mode: Ctrl+P
116
+ remove_selected_point: [Meta+H, Backspace]
salsa/manually_labeling/dataset_collection.py ADDED
@@ -0,0 +1,161 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ from random import choice
3
+ import rospy
4
+ import tf
5
+ # custom define messages:
6
+ from geometry_msgs.msg import Point, PoseStamped, Twist, TwistStamped, Pose, PoseArray, PoseWithCovarianceStamped
7
+ from sensor_msgs.msg import Image, LaserScan
8
+ from nav_msgs.msg import Odometry, OccupancyGrid, Path
9
+ from laser_line_extraction.msg import LineSegmentList, LineSegment
10
+ # python:
11
+ import os
12
+ import numpy as np
13
+ import threading
14
+
15
+ ################ customized parameters #################
16
+ ################ please modify them based on your dataset #################
17
+ POINTS = 1081
18
+ DATA_PATH = "~/semantic2d_data/2024-04-11-15-24-29"
19
+
20
class DataCollection:
    """ROS node helper that records synchronized lidar / line-segment / pose /
    velocity samples to .npy files at a fixed rate while a rosbag plays."""

    # Constructor
    def __init__(self):

        # initialize data:
        self.scan_lidar = np.zeros(POINTS)
        self.intensities_lidar = np.zeros(POINTS)
        self.lines = []
        self.vel_cmd = np.zeros(2)
        self.pos = np.zeros(3)
        self.record = True
        # rosbag "heartbeat": scan_callback increments rosbag_cnt; the timer
        # stops recording when the last few readings are all identical
        # (i.e. no new scans are arriving).
        self.rosbag_cnt = 0
        self.rosbag_cnt_reg = np.array([-4, -3, -2, -1])
        self.reg_cnt = 0

        # store directory — expand '~' so os.makedirs does not create a
        # literal './~' directory relative to the current working directory:
        data_odir = os.path.expanduser(DATA_PATH)
        self.scan_lidar_odir = data_odir + "/" + "scans_lidar"
        if not os.path.exists(self.scan_lidar_odir):
            os.makedirs(self.scan_lidar_odir)
        self.intensities_lidar_odir = data_odir + "/" + "intensities_lidar"
        if not os.path.exists(self.intensities_lidar_odir):
            os.makedirs(self.intensities_lidar_odir)
        self.line_odir = data_odir + "/" + "line_segments"
        if not os.path.exists(self.line_odir):
            os.makedirs(self.line_odir)
        self.vel_odir = data_odir + "/" + "velocities"
        if not os.path.exists(self.vel_odir):
            os.makedirs(self.vel_odir)
        self.pos_odir = data_odir + "/" + "positions"
        if not os.path.exists(self.pos_odir):
            os.makedirs(self.pos_odir)

        # timer:
        self.timer = None
        self.rate = 20 # 20 Hz recording rate
        self.idx = 0
        # Lock (currently unused; see commented acquire/release in timer_callback)
        self.lock = threading.Lock()

        # initialize ROS objects:
        self.tf_listener = tf.TransformListener()
        self.scan_sub = rospy.Subscriber("scan", LaserScan, self.scan_callback)
        self.line_sub = rospy.Subscriber("line_segments", LineSegmentList, self.line_segments_callback)
        self.dwa_cmd_sub = rospy.Subscriber('bluetooth_teleop/cmd_vel', Twist, self.dwa_cmd_callback)
        self.robot_pose_pub = rospy.Subscriber('robot_pose', PoseStamped, self.robot_pose_callback)


    # Callback function for the scan measurement subscriber
    def scan_callback(self, laserScan_msg):
        # get the laser scan data (NaN/Inf treated as "no return"):
        scan_data = np.array(laserScan_msg.ranges, dtype=np.float32)
        scan_data[np.isnan(scan_data)] = 0.
        scan_data[np.isinf(scan_data)] = 0.
        self.scan_lidar = scan_data
        # get the laser intensity data:
        intensity_data = np.array(laserScan_msg.intensities, dtype=np.float32)
        intensity_data[np.isnan(intensity_data)] = 0.
        intensity_data[np.isinf(intensity_data)] = 0.
        self.intensities_lidar = intensity_data
        # heartbeat for the end-of-rosbag detection in timer_callback
        self.rosbag_cnt += 1

    # Callback function for the line segments subscriber
    def line_segments_callback(self, lineSeg_msg):
        self.lines = []
        # flatten each segment to [x1, y1, x2, y2]:
        line_segs = lineSeg_msg.line_segments
        for line_seg in line_segs:
            line = [line_seg.start[0], line_seg.start[1], line_seg.end[0], line_seg.end[1]]
            self.lines.append(line)

    # Callback function for the teleop velocity-command subscriber
    def dwa_cmd_callback(self, robot_vel_msg):
        # keep only [linear x, angular z] — the controls of a differential robot
        self.vel_cmd = np.array([robot_vel_msg.linear.x, robot_vel_msg.angular.z])

    # Callback function for the current robot pose subscriber
    def robot_pose_callback(self, robot_pose_msg):
        # convert the quaternion orientation to a yaw angle and store [x, y, yaw]:
        quaternion = [
            robot_pose_msg.pose.orientation.x, robot_pose_msg.pose.orientation.y,
            robot_pose_msg.pose.orientation.z, robot_pose_msg.pose.orientation.w
        ]
        roll, pitch, yaw = tf.transformations.euler_from_quaternion(quaternion)
        self.pos = np.array([robot_pose_msg.pose.position.x, robot_pose_msg.pose.position.y, yaw])

        # start the timer if this is the first pose received
        if self.timer is None:
            self.start()

    # Start the fixed-rate recording timer
    def start(self):
        # initialize timer for the periodic recording update
        self.timer = rospy.Timer(rospy.Duration(1./self.rate), self.timer_callback)

    # Runs every 1/rate seconds: snapshot the latest data and write it to disk
    def timer_callback(self, event):
        # snapshot current data (lock currently disabled):
        #self.lock.acquire()
        scan_ranges = self.scan_lidar
        scan_intensities = self.intensities_lidar
        line_segs = self.lines
        rob_pos = self.pos
        vel_cmd = self.vel_cmd
        #self.lock.release()
        # sliding register of the last 4 heartbeat values; if they are all
        # equal, the rosbag has stopped publishing and we stop recording:
        if(self.reg_cnt > 3):
            self.reg_cnt = 0

        self.rosbag_cnt_reg[self.reg_cnt] = self.rosbag_cnt
        self.reg_cnt += 1
        cnt_unique = np.unique(self.rosbag_cnt_reg)
        # write arrays into npy files only while new scans keep arriving:
        if(self.record and len(cnt_unique)>1):
            # write lidar scan in lidar frame into np.array:
            scan_name = self.scan_lidar_odir + "/" + str(self.idx).zfill(7)
            np.save(scan_name, scan_ranges)
            # write lidar intensity in lidar frame into np.array:
            intensity_name = self.intensities_lidar_odir + "/" + str(self.idx).zfill(7)
            np.save(intensity_name, scan_intensities)
            # write line segments in lidar frame into np.array:
            line_name = self.line_odir + "/" + str(self.idx).zfill(7)
            np.save(line_name, line_segs)
            # write velocities into np.array:
            vel_name = self.vel_odir + "/" + str(self.idx).zfill(7)
            np.save(vel_name, vel_cmd)
            # write robot position into np.array:
            pos_name = self.pos_odir + "/" + str(self.idx).zfill(7)
            np.save(pos_name, rob_pos)

            self.idx += 1
            print("idx: ", self.idx)
        else:
            print("idx: ", self.idx)
155
+
156
if __name__ == '__main__':
    # Start the ROS node and block until shutdown; all recording work happens
    # in the subscriber/timer callbacks registered inside DataCollection.
    rospy.init_node('data_collection')
    data = DataCollection()
    rospy.spin()
160
+
161
+
salsa/manually_labeling/generateTrainDevSet.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ """
3
+ Generate train/dev/test split files for Semantic2D dataset.
4
+ Automatically calculates the number of samples based on percentages.
5
+ """
6
+ import os
7
+ from os import listdir, getcwd
8
+ from os.path import join, expanduser
9
+ import random
10
+
11
if __name__ == '__main__':
    ################ CUSTOMIZATION REQUIRED ################
    # The path of your dataset folder:
    train_folder = '~/semantic2d_data/2024-04-11-15-24-29'

    # Split percentages (must sum to 1.0)
    TRAIN_RATIO = 0.70  # 70% for training
    DEV_RATIO = 0.10    # 10% for validation/development
    TEST_RATIO = 0.20   # 20% for testing
    ########################################################

    # Expand user path (handle ~)
    train_folder = expanduser(train_folder)

    # Verify ratios sum to 1.0
    assert abs(TRAIN_RATIO + DEV_RATIO + TEST_RATIO - 1.0) < 1e-6, \
        f"Ratios must sum to 1.0, got {TRAIN_RATIO + DEV_RATIO + TEST_RATIO}"

    # The index files of datasets:
    train_txt = train_folder + '/train.txt'
    dev_txt = train_folder + '/dev.txt'
    test_txt = train_folder + '/test.txt'

    # Get the list of data files:
    positions_folder = train_folder + '/positions'
    if not os.path.exists(positions_folder):
        print(f"Error: Folder not found: {positions_folder}")
        print("Please check the train_folder path.")
        exit(1)

    train_files = os.listdir(positions_folder)

    # Filter only .npy files
    train_list = [f for f in train_files if f.endswith(".npy")]

    # Sort the list according to the numeric name without extension:
    train_list.sort(key=lambda x: int(x[:-4]))

    # Shuffle the list so the splits are random
    random.shuffle(train_list)

    # Calculate split sizes based on percentages
    total_samples = len(train_list)
    NUM_TRAIN = int(total_samples * TRAIN_RATIO)
    NUM_DEV = int(total_samples * DEV_RATIO)
    NUM_TEST = total_samples - NUM_TRAIN - NUM_DEV  # Remaining samples go to test

    print(f"Dataset folder: {train_folder}")
    print(f"Total samples: {total_samples}")
    print(f"Split ratios: Train={TRAIN_RATIO:.0%}, Dev={DEV_RATIO:.0%}, Test={TEST_RATIO:.0%}")
    print(f"Split sizes: Train={NUM_TRAIN}, Dev={NUM_DEV}, Test={NUM_TEST}")

    # Write each sample name to its split file; `with` guarantees the handles
    # are closed even if a write fails part-way through.
    with open(train_txt, 'w') as train_file, \
         open(dev_txt, 'w') as dev_file, \
         open(test_txt, 'w') as test_file:
        for idx, file_name in enumerate(train_list):
            if idx < NUM_TRAIN:              # train
                train_file.write(file_name + '\n')
            elif idx < NUM_TRAIN + NUM_DEV:  # dev
                dev_file.write(file_name + '\n')
            else:                            # test
                test_file.write(file_name + '\n')

    print(f"\nGenerated split files:")
    print(f"  - {train_txt}")
    print(f"  - {dev_txt}")
    print(f"  - {test_txt}")
    print("Done!")
salsa/manually_labeling/labelme_example.svg ADDED
salsa/manually_labeling/labelme_output/img.png ADDED

Git LFS Details

  • SHA256: 319942943324a63ec2ce1bc15171d139ac332fce6a6e7392199f282af624f2fd
  • Pointer size: 130 Bytes
  • Size of remote file: 74.3 kB
salsa/manually_labeling/labelme_output/label.png ADDED

Git LFS Details

  • SHA256: 59ef900a184a9326d2bdd5954619f1d9a25df5a0e8fd4e1fe0738cc714869356
  • Pointer size: 130 Bytes
  • Size of remote file: 34.1 kB
salsa/manually_labeling/labelme_output/label_names.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ _background_
2
+ Chair
3
+ Door
4
+ Elevator
5
+ People
6
+ Pillar
7
+ Sofa
8
+ Table
9
+ Trash bin
10
+ Wall
salsa/manually_labeling/labelme_output/label_viz.png ADDED

Git LFS Details

  • SHA256: 703ec20b403b97ab3ddfb2835cc637bd64f5a06d0bd91f58e1919f2a4c15d1ac
  • Pointer size: 131 Bytes
  • Size of remote file: 145 kB
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/CMakeLists.txt ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cmake_minimum_required(VERSION 2.8.3)
2
+ project(laser_line_extraction)
3
+
4
+ find_package(catkin REQUIRED COMPONENTS
5
+ cmake_modules
6
+ geometry_msgs
7
+ message_generation
8
+ roscpp
9
+ rospy
10
+ sensor_msgs
11
+ visualization_msgs
12
+ )
13
+
14
+ find_package(Eigen3 REQUIRED)
15
+
16
+ add_message_files(
17
+ FILES
18
+ LineSegment.msg
19
+ LineSegmentList.msg
20
+ )
21
+
22
+ generate_messages(
23
+ DEPENDENCIES
24
+ sensor_msgs
25
+ )
26
+
27
+ catkin_package(
28
+ INCLUDE_DIRS include
29
+ LIBRARIES line line_extraction line_extraction_ros
30
+ CATKIN_DEPENDS geometry_msgs message_runtime roscpp sensor_msgs visualization_msgs
31
+ )
32
+
33
+ add_library(line src/line.cpp)
34
+ target_link_libraries(line ${catkin_LIBRARIES})
35
+
36
+ add_library(line_extraction src/line_extraction.cpp)
37
+ target_link_libraries(line_extraction ${catkin_LIBRARIES})
38
+
39
+ add_library(line_extraction_ros src/line_extraction_ros.cpp)
40
+ target_link_libraries(line_extraction_ros line line_extraction ${catkin_LIBRARIES})
41
+ add_dependencies(line_extraction_ros laser_line_extraction_generate_messages_cpp)
42
+
43
+ add_executable(line_extraction_node src/line_extraction_node.cpp)
44
+ target_link_libraries(line_extraction_node line_extraction_ros ${catkin_LIBRARIES})
45
+
46
+ include_directories(include ${catkin_INCLUDE_DIRS})
47
+ include_directories(include ${EIGEN3_INCLUDE_DIRS})
48
+
49
+ # catkin_add_gtest(${PROJECT_NAME}-test test/test_laser_line_extraction.cpp)
50
+
51
+ install(TARGETS line_extraction_node RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
52
+ install(TARGETS line_extraction_ros line_extraction line ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION})
53
+ install(DIRECTORY include/${PROJECT_NAME}/ DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION})
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/LICENSE ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Copyright (c) 2014, Marc Gallant
2
+ All rights reserved.
3
+
4
+ Redistribution and use in source and binary forms, with or without
5
+ modification, are permitted provided that the following conditions are met:
6
+
7
+ * Redistributions of source code must retain the above copyright notice, this
8
+ list of conditions and the following disclaimer.
9
+
10
+ * Redistributions in binary form must reproduce the above copyright notice,
11
+ this list of conditions and the following disclaimer in the documentation
12
+ and/or other materials provided with the distribution.
13
+
14
+ * Neither the name of the {organization} nor the names of its
15
+ contributors may be used to endorse or promote products derived from
16
+ this software without specific prior written permission.
17
+
18
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
22
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/README.md ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Laser Line Extraction
2
+ Laser Line Extraction is a [Robot Operating System (ROS)](http://www.ros.org) package that extracts line segments from [LaserScan](http://docs.ros.org/api/sensor_msgs/html/msg/LaserScan.html) messages. Created by [Marc Gallant](http://marcgallant.ca), originally for use in the [Mining Systems Laboratory](http://msl.engineering.queensu.ca). Here is what the Laser Line Extraction package looks like in action:
3
+
4
+ ![Laser line extraction](images/line_extraction.gif)
5
+
6
+ In the above image, the white dots are points in a LaserScan message, and the red lines are what is extracted by Laser Line Extraction. This data was collected by driving a robot through Beamish-Munro Hall at Queen's University. A SICK LMS111 laser scanner was mounted to the robot. The extraction algorithm is very configurable; the above image used the parameters configured in the `example.launch` launch file.
7
+
8
+ After applying some filters to remove outlying points, Laser Line Extraction implements a split-and-merge algorithm to determine which points belong to lines. Next, it implements the weighted line fitting algorithm by Pfister *et al.* [1] to find the best fit lines and their respective covariance matrices.
9
+
10
+ ## Usage
11
+ I recommend making a copy of `example.launch` in the launch directory and configuring the parameters until you reach a desirable outcome. The parameters in `example.launch` are a good starting point. Then simply use roslaunch, e.g.,
12
+
13
+ ```
14
+ roslaunch laser_line_extraction example.launch
15
+ ```
16
+
17
+ ## Messages
18
+ Laser Line Extraction has two messages types:
19
+
20
+ #### LineSegment.msg
21
+ ```
22
+ float32 radius
23
+ float32 angle
24
+ float32[4] covariance
25
+ float32[2] start
26
+ float32[2] end
27
+ ```
28
+ `radius` (m) and `angle` (rad) are the polar parameterization of the line segment. `covariance` is the 2x2 covariance matrix of `radius` and `angle` (listed in row-major order). Finally `start` and `end` are the (x, y) coordinates of the start and end of the line segment.
29
+
30
+ #### LineSegmentList.msg
31
+ ```
32
+ Header header
33
+ LineSegment[] line_segments
34
+ ```
35
+ An array of LineSegment.msg with a header.
36
+
37
+ ## Topics
38
+
39
+ Laser Line Extraction subscribes to a single topic and publishes one or two topics.
40
+
41
+ ### Subscribed topics
42
+ - `/scan` ([sensor_msgs/LaserScan](http://docs.ros.org/api/sensor_msgs/html/msg/LaserScan.html))
43
+ - The name of this topic can be configured (see Parameters).
44
+
45
+ ### Published topics
46
+ - `/line_segments` (laser\_line\_extraction/LineSegmentList)
47
+ - A list of line segments extracted from a laser scan.
48
+ - `/line_markers` ([visualization_msgs/Marker](http://docs.ros.org/api/visualization_msgs/html/msg/Marker.html))
49
+ - (optional) Markers so that the extracted lines can be visualized in rviz (see above image). Can be toggled (see Parameters).
50
+
51
+ ## Parameters
52
+ The parameters are listed in alphabetical order.
53
+
54
+ - `bearing_std_dev` (default: 0.001)
55
+ - The standard deviation of bearing uncertainty in the laser scans (rad).
56
+ - `frame_id` (default: "laser")
57
+ - The frame in which the line segments are published.
58
+ - `least_sq_angle_thresh` (default: 0.0001)
59
+ - Change in angle (rad) threshold to stop iterating least squares (`least_sq_radius_thresh` must also be met).
60
+ - `least_sq_radius_thresh` (default: 0.0001)
61
+ - Change in radius (m) threshold to stop iterating least squares (`least_sq_angle_thresh` must also be met).
62
+ - `max_line_gap` (default: 0.4)
63
+ - The maximum distance between two points in the same line (m).
64
+ - `min_line_length` (default: 0.5)
65
+ - Lines shorter than this are not published (m).
66
+ - `min_line_points` (default: 9)
67
+ - Lines with fewer points than this are not published.
68
+ - `min_range` (default: 0.4)
69
+ - Points closer than this are ignored (m).
70
+ - `max_range` (default: 10000.0)
71
+ - Points farther than this are ignored (m).
72
+ - `min_split_dist` (default: 0.05)
73
+ - When performing "split" step of split and merge, a split between two points results when the two points are at least this far apart (m).
74
+ - `outlier_dist` (default: 0.05)
75
+ - Points that are at least this distance from all their neighbours are considered outliers (m).
76
+ - `publish_markers` (default: false)
77
+ - Whether or not markers are published.
78
+ - `range_std_dev` (default: 0.02)
79
+ - The standard deviation of range uncertainty in the laser scans (m).
80
+ - `scan_topic` (default: "scan")
81
+ - The LaserScan topic.
82
+
83
+ ## References
84
+ [1] S. T. Pfister, S. I. Roumeliotis, and J. W. Burdick, "Weighted line fitting algorithms for mobile robot map building and efficient data representation" in Proc. IEEE Intl. Conf. on Robotics and Automation (ICRA), Taipei, Taiwan, 14-19 Sept., 2003.
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/images/line_extraction.gif ADDED

Git LFS Details

  • SHA256: 25cce5cc12833dc1e7e223206695f52648f3be6e16dd7a79fe7d05735754d65b
  • Pointer size: 131 Bytes
  • Size of remote file: 685 kB
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/include/laser_line_extraction/line.h ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #ifndef LINE_EXTRACTION_LINE_H
2
+ #define LINE_EXTRACTION_LINE_H
3
+
4
+ #include <vector>
5
+ #include <boost/array.hpp>
6
+ #include "laser_line_extraction/utilities.h"
7
+
8
+ namespace line_extraction
9
+ {
10
+
11
+ class Line
12
+ {
13
+
14
+ public:
15
+ // Constructor / destructor
16
+ Line(const CachedData&, const RangeData&, const Params&, std::vector<unsigned int>);
17
+ Line(double angle, double radius, const boost::array<double, 4> &covariance,
18
+ const boost::array<double, 2> &start, const boost::array<double, 2> &end,
19
+ const std::vector<unsigned int> &indices);
20
+ ~Line();
21
+ // Get methods for the line parameters
22
+ double getAngle() const;
23
+ const boost::array<double, 4>& getCovariance() const;
24
+ const boost::array<double, 2>& getEnd() const;
25
+ const std::vector<unsigned int>& getIndices() const;
26
+ double getRadius() const;
27
+ const boost::array<double, 2>& getStart() const;
28
+ // Methods for line fitting
29
+ double distToPoint(unsigned int);
30
+ void endpointFit();
31
+ void leastSqFit();
32
+ double length() const;
33
+ unsigned int numPoints() const;
34
+ void projectEndpoints();
35
+
36
+ private:
37
+ std::vector<unsigned int> indices_;
38
+ // Data structures
39
+ CachedData c_data_;
40
+ RangeData r_data_;
41
+ Params params_;
42
+ PointParams p_params_;
43
+ // Point variances used for least squares
44
+ std::vector<double> point_scalar_vars_;
45
+ std::vector<boost::array<double, 4> > point_covs_;
46
+ double p_rr_;
47
+ // Line parameters
48
+ double angle_;
49
+ double radius_;
50
+ boost::array<double, 2> start_;
51
+ boost::array<double, 2> end_;
52
+ boost::array<double, 4> covariance_;
53
+ // Methods
54
+ void angleFromEndpoints();
55
+ void angleFromLeastSq();
56
+ double angleIncrement();
57
+ void calcCovariance();
58
+ void calcPointCovariances();
59
+ void calcPointParameters();
60
+ void calcPointScalarCovariances();
61
+ void radiusFromEndpoints();
62
+ void radiusFromLeastSq();
63
+ }; // class Line
64
+
65
+ } // namespace line_extraction
66
+
67
+ #endif
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/include/laser_line_extraction/line_extraction.h ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #ifndef LINE_EXTRACTION_H
2
+ #define LINE_EXTRACTION_H
3
+
4
+ #include <cmath>
5
+ #include <vector>
6
+ #include <boost/array.hpp>
7
+ #include <Eigen/Dense>
8
+ #include "laser_line_extraction/utilities.h"
9
+ #include "laser_line_extraction/line.h"
10
+
11
+ namespace line_extraction
12
+ {
13
+
14
+ class LineExtraction
15
+ {
16
+
17
+ public:
18
+ // Constructor / destructor
19
+ LineExtraction();
20
+ ~LineExtraction();
21
+ // Run
22
+ void extractLines(std::vector<Line>&);
23
+ // Data setting
24
+ void setCachedData(const std::vector<double>&, const std::vector<double>&,
25
+ const std::vector<double>&, const std::vector<unsigned int>&);
26
+ void setRangeData(const std::vector<double>&);
27
+ // Parameter setting
28
+ void setBearingVariance(double);
29
+ void setRangeVariance(double);
30
+ void setLeastSqAngleThresh(double);
31
+ void setLeastSqRadiusThresh(double);
32
+ void setMaxLineGap(double);
33
+ void setMinLineLength(double);
34
+ void setMinLinePoints(unsigned int);
35
+ void setMinRange(double);
36
+ void setMaxRange(double);
37
+ void setMinSplitDist(double);
38
+ void setOutlierDist(double);
39
+
40
+ private:
41
+ // Data structures
42
+ CachedData c_data_;
43
+ RangeData r_data_;
44
+ Params params_;
45
+ // Indices after filtering
46
+ std::vector<unsigned int> filtered_indices_;
47
+ // Line data
48
+ std::vector<Line> lines_;
49
+ // Methods
50
+ double chiSquared(const Eigen::Vector2d&, const Eigen::Matrix2d&,
51
+ const Eigen::Matrix2d&);
52
+ double distBetweenPoints(unsigned int index_1, unsigned int index_2);
53
+ void filterCloseAndFarPoints();
54
+ void filterOutlierPoints();
55
+ void filterLines();
56
+ void mergeLines();
57
+ void split(const std::vector<unsigned int>&);
58
+ };
59
+
60
+ } // namespace line_extraction
61
+
62
+ #endif
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/include/laser_line_extraction/line_extraction_ros.h ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #ifndef LINE_EXTRACTION_ROS_H
2
+ #define LINE_EXTRACTION_ROS_H
3
+
4
+ #include <vector>
5
+ #include <string>
6
+ #include <ros/ros.h>
7
+ #include <sensor_msgs/LaserScan.h>
8
+ #include <visualization_msgs/Marker.h>
9
+ #include <geometry_msgs/Point.h>
10
+ #include "laser_line_extraction/LineSegment.h"
11
+ #include "laser_line_extraction/LineSegmentList.h"
12
+ #include "laser_line_extraction/line_extraction.h"
13
+ #include "laser_line_extraction/line.h"
14
+
15
+ namespace line_extraction
16
+ {
17
+
18
+ class LineExtractionROS
19
+ {
20
+
21
+ public:
22
+ // Constructor / destructor
23
+ LineExtractionROS(ros::NodeHandle&, ros::NodeHandle&);
24
+ ~LineExtractionROS();
25
+ // Running
26
+ void run();
27
+
28
+ private:
29
+ // ROS
30
+ ros::NodeHandle nh_;
31
+ ros::NodeHandle nh_local_;
32
+ ros::Subscriber scan_subscriber_;
33
+ ros::Publisher line_publisher_;
34
+ ros::Publisher marker_publisher_;
35
+ // Parameters
36
+ std::string frame_id_;
37
+ std::string scan_topic_;
38
+ bool pub_markers_;
39
+ // Line extraction
40
+ LineExtraction line_extraction_;
41
+ bool data_cached_; // true after first scan used to cache data
42
+ // Members
43
+ void loadParameters();
44
+ void populateLineSegListMsg(const std::vector<Line>&, laser_line_extraction::LineSegmentList&);
45
+ void populateMarkerMsg(const std::vector<Line>&, visualization_msgs::Marker&);
46
+ void cacheData(const sensor_msgs::LaserScan::ConstPtr&);
47
+ void laserScanCallback(const sensor_msgs::LaserScan::ConstPtr&);
48
+ };
49
+
50
+ } // namespace line_extraction
51
+
52
+ #endif
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/include/laser_line_extraction/utilities.h ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #ifndef LINE_EXTRACTION_UTILITIES_H
2
+ #define LINE_EXTRACTION_UTILITIES_H
3
+
4
+ #include <vector>
5
+ #include <cmath>
6
+
7
+ namespace line_extraction
8
+ {
9
+
10
+ struct CachedData
11
+ {
12
+ std::vector<unsigned int> indices;
13
+ std::vector<double> bearings;
14
+ std::vector<double> cos_bearings;
15
+ std::vector<double> sin_bearings;
16
+ };
17
+
18
+ struct RangeData
19
+ {
20
+ std::vector<double> ranges;
21
+ std::vector<double> xs;
22
+ std::vector<double> ys;
23
+ };
24
+
25
+ struct Params
26
+ {
27
+ double bearing_var;
28
+ double range_var;
29
+ double least_sq_angle_thresh;
30
+ double least_sq_radius_thresh;
31
+ double max_line_gap;
32
+ double min_line_length;
33
+ double min_range;
34
+ double max_range;
35
+ double min_split_dist;
36
+ double outlier_dist;
37
+ unsigned int min_line_points;
38
+ };
39
+
40
+ struct PointParams
41
+ {
42
+ std::vector<double> a;
43
+ std::vector<double> ap;
44
+ std::vector<double> app;
45
+ std::vector<double> b;
46
+ std::vector<double> bp;
47
+ std::vector<double> bpp;
48
+ std::vector<double> c;
49
+ std::vector<double> s;
50
+ };
51
+
52
+ // Inlining this function will be faster
53
+ // and also get rid of multiple definitions
54
+ // error
55
+ inline double pi_to_pi(double angle)
56
+ {
57
+ angle = fmod(angle, 2 * M_PI);
58
+ if (angle >= M_PI)
59
+ angle -= 2 * M_PI;
60
+ return angle;
61
+ }
62
+
63
+ } // namespace line_extraction
64
+
65
+ #endif
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/launch/debug.launch ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ <launch>
2
+ <node name="line_extractor" pkg="laser_line_extraction" type="line_extraction_node" output="screen" launch-prefix="nemiver">
3
+ <param name="~scan_topic" value="front_lidar/scan" />
4
+ <param name="~frequency" value="50.0" />
5
+ <param name="~publish_markers" value="true" />
6
+ </node>
7
+ </launch>
8
+
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/launch/example.launch ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <launch>
2
+ <node name="line_extractor" pkg="laser_line_extraction" type="line_extraction_node">
3
+ <!--################ CUSTOMIZATION REQUIRED ################-->
4
+
5
+ <!-- LiDAR frame ID (from your URDF or tf tree) -->
6
+ <param name="~frame_id" value="rear_laser" />
7
+
8
+ <!-- LiDAR scan topic name -->
9
+ <param name="~scan_topic" value="scan" />
10
+
11
+ <!-- Sensor range parameters (must match your LiDAR) -->
12
+ <param name="~min_range" value="0.6" />
13
+ <param name="~max_range" value="60.0" />
14
+
15
+ <!--################ Usually no changes needed below ################-->
16
+ <param name="~frequency" value="30.0" />
17
+ <param name="~publish_markers" value="false" />
18
+ <param name="~bearing_std_dev" value="1e-5" />
19
+ <param name="~range_std_dev" value="0.02" />
20
+ <param name="~least_sq_angle_thresh" value="0.0001" />
21
+ <param name="~least_sq_radius_thresh" value="0.0001" />
22
+ <param name="~max_line_gap" value="1.0" />
23
+ <param name="~min_line_length" value="0.4" />
24
+ <param name="~min_split_dist" value="0.04" />
25
+ <param name="~outlier_dist" value="0.06" />
26
+ <param name="~min_line_points" value="15" />
27
+ </node>
28
+ </launch>
29
+
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/msg/LineSegment.msg ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ float32 radius
2
+ float32 angle
3
+ float32[4] covariance
4
+ float32[2] start
5
+ float32[2] end
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/msg/LineSegmentList.msg ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ Header header
2
+ LineSegment[] line_segments
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/package.xml ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <?xml version="1.0"?>
2
+ <package>
3
+ <name>laser_line_extraction</name>
4
+ <version>0.1.0</version>
5
+ <description>
6
+ A ROS package to extract line segments from LaserScan messages.
7
+ </description>
8
+ <maintainer email="m.gallant@queensu.ca">Marc Gallant</maintainer>
9
+ <license>BSD</license>
10
+ <buildtool_depend>catkin</buildtool_depend>
11
+ <build_depend>cmake_modules</build_depend>
12
+ <build_depend>eigen</build_depend>
13
+ <build_depend>geometry_msgs</build_depend>
14
+ <build_depend>message_generation</build_depend>
15
+ <build_depend>roscpp</build_depend>
16
+ <build_depend>rospy</build_depend>
17
+ <build_depend>sensor_msgs</build_depend>
18
+ <build_depend>visualization_msgs</build_depend>
19
+ <run_depend>geometry_msgs</run_depend>
20
+ <run_depend>message_runtime</run_depend>
21
+ <run_depend>roscpp</run_depend>
22
+ <run_depend>rospy</run_depend>
23
+ <run_depend>sensor_msgs</run_depend>
24
+ <run_depend>visualization_msgs</run_depend>
25
+ </package>
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/src/line.cpp ADDED
@@ -0,0 +1,304 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #include "laser_line_extraction/line.h"
2
+
3
+ namespace line_extraction
4
+ {
5
+
6
+ ///////////////////////////////////////////////////////////////////////////////
7
+ // Constructor / destructor
8
+ ///////////////////////////////////////////////////////////////////////////////
9
+ Line::Line(const CachedData &c_data, const RangeData &r_data, const Params &params,
10
+ std::vector<unsigned int> indices):
11
+ c_data_(c_data),
12
+ r_data_(r_data),
13
+ params_(params),
14
+ indices_(indices)
15
+ {
16
+ }
17
+
18
+ Line::Line(double angle, double radius, const boost::array<double, 4> &covariance,
19
+ const boost::array<double, 2> &start, const boost::array<double, 2> &end,
20
+ const std::vector<unsigned int> &indices):
21
+ angle_(angle),
22
+ radius_(radius),
23
+ covariance_(covariance),
24
+ start_(start),
25
+ end_(end),
26
+ indices_(indices)
27
+ {
28
+ }
29
+
30
+ Line::~Line()
31
+ {
32
+ }
33
+
34
+ ///////////////////////////////////////////////////////////////////////////////
35
+ // Get methods for line parameters
36
+ ///////////////////////////////////////////////////////////////////////////////
37
+ double Line::getAngle() const
38
+ {
39
+ return angle_;
40
+ }
41
+
42
+ const boost::array<double, 4>& Line::getCovariance() const
43
+ {
44
+ return covariance_;
45
+ }
46
+
47
+ const boost::array<double, 2>& Line::getEnd() const
48
+ {
49
+ return end_;
50
+ }
51
+
52
+ const std::vector<unsigned int>& Line::getIndices() const
53
+ {
54
+ return indices_;
55
+ }
56
+
57
+ double Line::getRadius() const
58
+ {
59
+ return radius_;
60
+ }
61
+
62
+ const boost::array<double, 2>& Line::getStart() const
63
+ {
64
+ return start_;
65
+ }
66
+
67
+ ///////////////////////////////////////////////////////////////////////////////
68
+ // Utility methods
69
+ ///////////////////////////////////////////////////////////////////////////////
70
+ double Line::distToPoint(unsigned int index)
71
+ {
72
+ double p_rad = sqrt(pow(r_data_.xs[index], 2) + pow(r_data_.ys[index], 2));
73
+ double p_ang = atan2(r_data_.ys[index], r_data_.xs[index]);
74
+ return fabs(p_rad * cos(p_ang - angle_) - radius_);
75
+ }
76
+
77
+ double Line::length() const
78
+ {
79
+ return sqrt(pow(start_[0] - end_[0], 2) + pow(start_[1] - end_[1], 2));
80
+ }
81
+
82
+ unsigned int Line::numPoints() const
83
+ {
84
+ return indices_.size();
85
+ }
86
+
87
+ void Line::projectEndpoints()
88
+ {
89
+ double s = -1.0 / tan(angle_);
90
+ double b = radius_ / sin(angle_);
91
+ double x = start_[0];
92
+ double y = start_[1];
93
+ start_[0] = (s * y + x - s * b) / (pow(s, 2) + 1);
94
+ start_[1] = (pow(s, 2) * y + s * x + b) / (pow(s, 2) + 1);
95
+ x = end_[0];
96
+ y = end_[1];
97
+ end_[0] = (s * y + x - s * b) / (pow(s, 2) + 1);
98
+ end_[1] = (pow(s, 2) * y + s * x + b) / (pow(s, 2) + 1);
99
+ }
100
+
101
+ ///////////////////////////////////////////////////////////////////////////////
102
+ // Methods for endpoint line fitting
103
+ ///////////////////////////////////////////////////////////////////////////////
104
+ void Line::endpointFit()
105
+ {
106
+ start_[0] = r_data_.xs[indices_[0]];
107
+ start_[1] = r_data_.ys[indices_[0]];
108
+ end_[0] = r_data_.xs[indices_.back()];
109
+ end_[1] = r_data_.ys[indices_.back()];
110
+ angleFromEndpoints();
111
+ radiusFromEndpoints();
112
+ }
113
+
114
+ void Line::angleFromEndpoints()
115
+ {
116
+ double slope;
117
+ if (fabs(end_[0] - start_[0]) > 1e-9)
118
+ {
119
+ slope = (end_[1] - start_[1]) / (end_[0] - start_[0]);
120
+ angle_ = pi_to_pi(atan(slope) + M_PI/2);
121
+ }
122
+ else
123
+ {
124
+ angle_ = 0.0;
125
+ }
126
+ }
127
+
128
+ void Line::radiusFromEndpoints()
129
+ {
130
+ radius_ = start_[0] * cos(angle_) + start_[1] * sin(angle_);
131
+ if (radius_ < 0)
132
+ {
133
+ radius_ = -radius_;
134
+ angle_ = pi_to_pi(angle_ + M_PI);
135
+ }
136
+ }
137
+
138
+ ///////////////////////////////////////////////////////////////////////////////
139
+ // Methods for least squares line fitting
140
+ ///////////////////////////////////////////////////////////////////////////////
141
+ void Line::leastSqFit()
142
+ {
143
+ calcPointCovariances();
144
+ double prev_radius = 0.0;
145
+ double prev_angle = 0.0;
146
+ while (fabs(radius_ - prev_radius) > params_.least_sq_radius_thresh ||
147
+ fabs(angle_ - prev_angle) > params_.least_sq_angle_thresh)
148
+ {
149
+ prev_radius = radius_;
150
+ prev_angle = angle_;
151
+ calcPointScalarCovariances();
152
+ radiusFromLeastSq();
153
+ angleFromLeastSq();
154
+ }
155
+ calcCovariance();
156
+ projectEndpoints();
157
+ }
158
+
159
+ void Line::angleFromLeastSq()
160
+ {
161
+ calcPointParameters();
162
+ angle_ += angleIncrement();
163
+ }
164
+
165
+ double Line::angleIncrement()
166
+ {
167
+ const std::vector<double> &a = p_params_.a;
168
+ const std::vector<double> &ap = p_params_.ap;
169
+ const std::vector<double> &app = p_params_.app;
170
+ const std::vector<double> &b = p_params_.b;
171
+ const std::vector<double> &bp = p_params_.bp;
172
+ const std::vector<double> &bpp = p_params_.bpp;
173
+ const std::vector<double> &c = p_params_.c;
174
+ const std::vector<double> &s = p_params_.s;
175
+
176
+ double numerator = 0;
177
+ double denominator = 0;
178
+ for (std::size_t i = 0; i < a.size(); ++i)
179
+ {
180
+ numerator += (b[i] * ap[i] - a[i] * bp[i]) / pow(b[i], 2);
181
+ denominator += ((app[i] * b[i] - a[i] * bpp[i]) * b[i] -
182
+ 2 * (ap[i] * b[i] - a[i] * bp[i]) * bp[i]) / pow(b[i], 3);
183
+ }
184
+ return -(numerator/denominator);
185
+ }
186
+
187
+ void Line::calcCovariance()
188
+ {
189
+ covariance_[0] = p_rr_;
190
+
191
+ const std::vector<double> &a = p_params_.a;
192
+ const std::vector<double> &ap = p_params_.ap;
193
+ const std::vector<double> &app = p_params_.app;
194
+ const std::vector<double> &b = p_params_.b;
195
+ const std::vector<double> &bp = p_params_.bp;
196
+ const std::vector<double> &bpp = p_params_.bpp;
197
+ const std::vector<double> &c = p_params_.c;
198
+ const std::vector<double> &s = p_params_.s;
199
+
200
+ double G = 0;
201
+ double A = 0;
202
+ double B = 0;
203
+ double r, phi;
204
+ for (std::size_t i = 0; i < a.size(); ++i)
205
+ {
206
+ r = r_data_.ranges[indices_[i]]; // range
207
+ phi = c_data_.bearings[indices_[i]]; // bearing
208
+ G += ((app[i] * b[i] - a[i] * bpp[i]) * b[i] - 2 * (ap[i] * b[i] - a[i] * bp[i]) * bp[i]) / pow(b[i], 3);
209
+ A += 2 * r * sin(angle_ - phi) / b[i];
210
+ B += 4 * pow(r, 2) * pow(sin(angle_ - phi), 2) / b[i];
211
+ }
212
+ covariance_[1] = p_rr_ * A / G;
213
+ covariance_[2] = covariance_[1];
214
+ covariance_[3] = pow(1.0 / G, 2) * B;
215
+ }
216
+
217
+ void Line::calcPointCovariances()
218
+ {
219
+ point_covs_.clear();
220
+ double r, phi, var_r, var_phi;
221
+ for (std::vector<unsigned int>::const_iterator cit = indices_.begin(); cit != indices_.end(); ++cit)
222
+ {
223
+ r = r_data_.ranges[*cit]; // range
224
+ phi = c_data_.bearings[*cit]; // bearing
225
+ var_r = params_.range_var; // range variance
226
+ var_phi = params_.bearing_var; // bearing variance
227
+ boost::array<double, 4> Q;
228
+ Q[0] = pow(r, 2) * var_phi * pow(sin(phi), 2) + var_r * pow(cos(phi), 2);
229
+ Q[1] = -pow(r, 2) * var_phi * sin(2 * phi) / 2.0 + var_r * sin(2 * phi) / 2.0;
230
+ Q[2] = Q[1];
231
+ Q[3] = pow(r, 2) * var_phi * pow(cos(phi), 2) + var_r * pow(sin(phi), 2);
232
+ point_covs_.push_back(Q);
233
+ }
234
+ }
235
+
236
+ void Line::calcPointParameters()
237
+ {
238
+ p_params_.a.clear();
239
+ p_params_.ap.clear();
240
+ p_params_.app.clear();
241
+ p_params_.b.clear();
242
+ p_params_.bp.clear();
243
+ p_params_.bpp.clear();
244
+ p_params_.c.clear();
245
+ p_params_.s.clear();
246
+
247
+ double r, phi, var_r, var_phi;
248
+ double a, ap, app, b, bp, bpp, c, s;
249
+ for (std::vector<unsigned int>::const_iterator cit = indices_.begin(); cit != indices_.end(); ++cit)
250
+ {
251
+ r = r_data_.ranges[*cit]; // range
252
+ phi = c_data_.bearings[*cit]; // bearing
253
+ var_r = params_.range_var; // range variance
254
+ var_phi = params_.bearing_var; // bearing variance
255
+ c = cos(angle_ - phi);
256
+ s = sin(angle_ - phi);
257
+ a = pow(r * c - radius_, 2);
258
+ ap = -2 * r * s * (r * c - radius_);
259
+ app = 2 * pow(r, 2) * pow(s, 2) - 2 * r * c * (r * c - radius_);
260
+ b = var_r * pow(c, 2) + var_phi * pow(r, 2) * pow(s, 2);
261
+ bp = 2 * (pow(r, 2) * var_phi - var_r) * c * s;
262
+ bpp = 2 * (pow(r, 2) * var_phi - var_r) * (pow(c, 2) - pow(s, 2));
263
+ p_params_.a.push_back(a);
264
+ p_params_.ap.push_back(ap);
265
+ p_params_.app.push_back(app);
266
+ p_params_.b.push_back(b);
267
+ p_params_.bp.push_back(bp);
268
+ p_params_.bpp.push_back(bpp);
269
+ p_params_.c.push_back(c);
270
+ p_params_.s.push_back(s);
271
+ }
272
+ }
273
+
274
+ void Line::calcPointScalarCovariances()
275
+ {
276
+ point_scalar_vars_.clear();
277
+ double P;
278
+ double inverse_P_sum = 0;
279
+ for (std::vector<boost::array<double, 4> >::const_iterator cit = point_covs_.begin();
280
+ cit != point_covs_.end(); ++cit)
281
+ {
282
+ P = (*cit)[0] * pow(cos(angle_), 2) + 2 * (*cit)[1] * sin(angle_) * cos(angle_) +
283
+ (*cit)[3] * pow(sin(angle_), 2);
284
+ inverse_P_sum += 1.0 / P;
285
+ point_scalar_vars_.push_back(P);
286
+ }
287
+ p_rr_ = 1.0 / inverse_P_sum;
288
+ }
289
+
290
+ void Line::radiusFromLeastSq()
291
+ {
292
+ radius_ = 0;
293
+ double r, phi;
294
+ for (std::vector<unsigned int>::const_iterator cit = indices_.begin(); cit != indices_.end(); ++cit)
295
+ {
296
+ r = r_data_.ranges[*cit]; // range
297
+ phi = c_data_.bearings[*cit]; // bearing
298
+ radius_ += r * cos(angle_ - phi) / point_scalar_vars_[cit - indices_.begin()]; // cit to index
299
+ }
300
+
301
+ radius_ *= p_rr_;
302
+ }
303
+
304
+ } // namespace line_extraction
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/src/line_extraction.cpp ADDED
@@ -0,0 +1,363 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #include "laser_line_extraction/line_extraction.h"
2
+ #include <algorithm>
3
+ #include <Eigen/Dense>
4
+ #include <iostream>
5
+
6
+ namespace line_extraction
7
+ {
8
+
9
+ ///////////////////////////////////////////////////////////////////////////////
10
+ // Constructor / destructor
11
+ ///////////////////////////////////////////////////////////////////////////////
12
// Both are intentionally empty: every member is a value type with its own
// default construction/destruction, and parameters are set afterwards via
// the set* methods.
LineExtraction::LineExtraction()
{
}

LineExtraction::~LineExtraction()
{
}
19
+
20
+ ///////////////////////////////////////////////////////////////////////////////
21
+ // Main run function
22
+ ///////////////////////////////////////////////////////////////////////////////
23
+ void LineExtraction::extractLines(std::vector<Line>& lines)
24
+ {
25
+ // Resets
26
+ filtered_indices_ = c_data_.indices;
27
+ lines_.clear();
28
+
29
+ // Filter indices
30
+ filterCloseAndFarPoints();
31
+ filterOutlierPoints();
32
+
33
+ // Return no lines if not enough points left
34
+ if (filtered_indices_.size() <= std::max(params_.min_line_points, static_cast<unsigned int>(3)))
35
+ {
36
+ return;
37
+ }
38
+
39
+ // Split indices into lines and filter out short and sparse lines
40
+ split(filtered_indices_);
41
+ filterLines();
42
+
43
+ // Fit each line using least squares and merge colinear lines
44
+ for (std::vector<Line>::iterator it = lines_.begin(); it != lines_.end(); ++it)
45
+ {
46
+ it->leastSqFit();
47
+ }
48
+
49
+ // If there is more than one line, check if lines should be merged based on the merging criteria
50
+ if (lines_.size() > 1)
51
+ {
52
+ mergeLines();
53
+ }
54
+
55
+ lines = lines_;
56
+ }
57
+
58
+ ///////////////////////////////////////////////////////////////////////////////
59
+ // Data setting
60
+ ///////////////////////////////////////////////////////////////////////////////
61
// Store the per-beam bearing table and its precomputed sin/cos values,
// along with the full beam index list.  Called once, since the beam
// geometry of a scanner does not change between scans.
void LineExtraction::setCachedData(const std::vector<double>& bearings,
                                   const std::vector<double>& cos_bearings,
                                   const std::vector<double>& sin_bearings,
                                   const std::vector<unsigned int>& indices)
{
  c_data_.bearings = bearings;
  c_data_.cos_bearings = cos_bearings;
  c_data_.sin_bearings = sin_bearings;
  c_data_.indices = indices;
}
71
+
72
+ void LineExtraction::setRangeData(const std::vector<double>& ranges)
73
+ {
74
+ r_data_.ranges = ranges;
75
+ r_data_.xs.clear();
76
+ r_data_.ys.clear();
77
+ for (std::vector<unsigned int>::const_iterator cit = c_data_.indices.begin();
78
+ cit != c_data_.indices.end(); ++cit)
79
+ {
80
+ r_data_.xs.push_back(c_data_.cos_bearings[*cit] * ranges[*cit]);
81
+ r_data_.ys.push_back(c_data_.sin_bearings[*cit] * ranges[*cit]);
82
+ }
83
+ }
84
+
85
+ ///////////////////////////////////////////////////////////////////////////////
86
+ // Parameter setting
87
+ ///////////////////////////////////////////////////////////////////////////////
88
// Each setter stores one tuning value in params_; the extraction pipeline
// reads them on the next extractLines() call.  Note that the variance
// setters expect variances (std-dev squared), not standard deviations.

void LineExtraction::setBearingVariance(double value)
{
  params_.bearing_var = value;
}

void LineExtraction::setRangeVariance(double value)
{
  params_.range_var = value;
}

void LineExtraction::setLeastSqAngleThresh(double value)
{
  params_.least_sq_angle_thresh = value;
}

void LineExtraction::setLeastSqRadiusThresh(double value)
{
  params_.least_sq_radius_thresh = value;
}

// Maximum Euclidean gap between consecutive points within a single line.
void LineExtraction::setMaxLineGap(double value)
{
  params_.max_line_gap = value;
}

// Minimum accepted segment length (shorter candidates are filtered out).
void LineExtraction::setMinLineLength(double value)
{
  params_.min_line_length = value;
}

// Minimum number of supporting measurements per segment.
void LineExtraction::setMinLinePoints(unsigned int value)
{
  params_.min_line_points = value;
}

void LineExtraction::setMinRange(double value)
{
  params_.min_range = value;
}

void LineExtraction::setMaxRange(double value)
{
  params_.max_range = value;
}

// Distance from the fitted line beyond which a point forces a split.
void LineExtraction::setMinSplitDist(double value)
{
  params_.min_split_dist = value;
}

// Range difference versus both neighbours beyond which a point is an
// outlier candidate.
void LineExtraction::setOutlierDist(double value)
{
  params_.outlier_dist = value;
}
142
+
143
+ ///////////////////////////////////////////////////////////////////////////////
144
+ // Utility methods
145
+ ///////////////////////////////////////////////////////////////////////////////
146
+ double LineExtraction::chiSquared(const Eigen::Vector2d &dL, const Eigen::Matrix2d &P_1,
147
+ const Eigen::Matrix2d &P_2)
148
+ {
149
+ return dL.transpose() * (P_1 + P_2).inverse() * dL;
150
+ }
151
+
152
+ double LineExtraction::distBetweenPoints(unsigned int index_1, unsigned int index_2)
153
+ {
154
+ return sqrt(pow(r_data_.xs[index_1] - r_data_.xs[index_2], 2) +
155
+ pow(r_data_.ys[index_1] - r_data_.ys[index_2], 2));
156
+ }
157
+
158
+ ///////////////////////////////////////////////////////////////////////////////
159
+ // Filtering points
160
+ ///////////////////////////////////////////////////////////////////////////////
161
+ void LineExtraction::filterCloseAndFarPoints()
162
+ {
163
+ std::vector<unsigned int> output;
164
+ for (std::vector<unsigned int>::const_iterator cit = filtered_indices_.begin();
165
+ cit != filtered_indices_.end(); ++cit)
166
+ {
167
+ const double& range = r_data_.ranges[*cit];
168
+ if (range >= params_.min_range && range <= params_.max_range)
169
+ {
170
+ output.push_back(*cit);
171
+ }
172
+ }
173
+ filtered_indices_ = output;
174
+ }
175
+
176
// Remove isolated points: a point whose range differs from BOTH of its two
// nearest neighbours by more than outlier_dist, and which additionally lies
// farther than min_split_dist from the line joining those neighbours, is
// dropped from filtered_indices_.
void LineExtraction::filterOutlierPoints()
{
  if (filtered_indices_.size() < 3)
  {
    return; // need a point plus two neighbours to judge anything
  }

  std::vector<unsigned int> output;
  unsigned int p_i, p_j, p_k;
  for (std::size_t i = 0; i < filtered_indices_.size(); ++i)
  {

    // Get two closest neighbours

    p_i = filtered_indices_[i];
    if (i == 0) // first point: use the two points after it
    {
      p_j = filtered_indices_[i + 1];
      p_k = filtered_indices_[i + 2];
    }
    else if (i == filtered_indices_.size() - 1) // last point: the two before it
    {
      p_j = filtered_indices_[i - 1];
      p_k = filtered_indices_[i - 2];
    }
    else // middle points: one neighbour on each side
    {
      p_j = filtered_indices_[i - 1];
      p_k = filtered_indices_[i + 1];
    }

    // Check if point is an outlier: range jumps versus BOTH neighbours

    if (fabs(r_data_.ranges[p_i] - r_data_.ranges[p_j]) > params_.outlier_dist &&
        fabs(r_data_.ranges[p_i] - r_data_.ranges[p_k]) > params_.outlier_dist)
    {
      // Second chance: keep the point if it still lies close to the line
      // connecting its neighbours (it may belong to a steep surface).
      std::vector<unsigned int> line_indices;
      line_indices.push_back(p_j);
      line_indices.push_back(p_k);
      Line line(c_data_, r_data_, params_, line_indices);
      line.endpointFit();
      if (line.distToPoint(p_i) > params_.min_split_dist)
      {
        continue; // point is an outlier
      }
    }

    output.push_back(p_i);
  }

  filtered_indices_ = output;
}
229
+
230
+ ///////////////////////////////////////////////////////////////////////////////
231
+ // Filtering and merging lines
232
+ ///////////////////////////////////////////////////////////////////////////////
233
+ void LineExtraction::filterLines()
234
+ {
235
+ std::vector<Line> output;
236
+ for (std::vector<Line>::const_iterator cit = lines_.begin(); cit != lines_.end(); ++cit)
237
+ {
238
+ if (cit->length() >= params_.min_line_length && cit->numPoints() >= params_.min_line_points)
239
+ {
240
+ output.push_back(*cit);
241
+ }
242
+ }
243
+ lines_ = output;
244
+ }
245
+
246
+ void LineExtraction::mergeLines()
247
+ {
248
+ std::vector<Line> merged_lines;
249
+
250
+ for (std::size_t i = 1; i < lines_.size(); ++i)
251
+ {
252
+ // Get L, P_1, P_2 of consecutive lines
253
+ Eigen::Vector2d L_1(lines_[i-1].getRadius(), lines_[i-1].getAngle());
254
+ Eigen::Vector2d L_2(lines_[i].getRadius(), lines_[i].getAngle());
255
+ Eigen::Matrix2d P_1;
256
+ P_1 << lines_[i-1].getCovariance()[0], lines_[i-1].getCovariance()[1],
257
+ lines_[i-1].getCovariance()[2], lines_[i-1].getCovariance()[3];
258
+ Eigen::Matrix2d P_2;
259
+ P_2 << lines_[i].getCovariance()[0], lines_[i].getCovariance()[1],
260
+ lines_[i].getCovariance()[2], lines_[i].getCovariance()[3];
261
+
262
+ // Merge lines if chi-squared distance is less than 3
263
+ if (chiSquared(L_1 - L_2, P_1, P_2) < 3)
264
+ {
265
+ // Get merged angle, radius, and covariance
266
+ Eigen::Matrix2d P_m = (P_1.inverse() + P_2.inverse()).inverse();
267
+ Eigen::Vector2d L_m = P_m * (P_1.inverse() * L_1 + P_2.inverse() * L_2);
268
+ // Populate new line with these merged parameters
269
+ boost::array<double, 4> cov;
270
+ cov[0] = P_m(0,0);
271
+ cov[1] = P_m(0,1);
272
+ cov[2] = P_m(1,0);
273
+ cov[3] = P_m(1,1);
274
+ std::vector<unsigned int> indices;
275
+ const std::vector<unsigned int> &ind_1 = lines_[i-1].getIndices();
276
+ const std::vector<unsigned int> &ind_2 = lines_[i].getIndices();
277
+ indices.resize(ind_1.size() + ind_2.size());
278
+ indices.insert(indices.end(), ind_1.begin(), ind_1.end());
279
+ indices.insert(indices.end(), ind_2.begin(), ind_2.end());
280
+ Line merged_line(L_m[1], L_m[0], cov, lines_[i-1].getStart(), lines_[i].getEnd(), indices);
281
+ // Project the new endpoints
282
+ merged_line.projectEndpoints();
283
+ lines_[i] = merged_line;
284
+ }
285
+ else
286
+ {
287
+ merged_lines.push_back(lines_[i-1]);
288
+ }
289
+
290
+ if (i == lines_.size() - 1)
291
+ {
292
+ merged_lines.push_back(lines_[i]);
293
+ }
294
+ }
295
+ lines_ = merged_lines;
296
+ }
297
+
298
+ ///////////////////////////////////////////////////////////////////////////////
299
+ // Splitting points into lines
300
+ ///////////////////////////////////////////////////////////////////////////////
301
// Recursive split step of the split-and-merge algorithm: endpoint-fit a
// line through `indices`, then either accept it into lines_ or split the
// index set at the farthest point / largest gap and recurse on both halves.
void LineExtraction::split(const std::vector<unsigned int>& indices)
{
  // Don't split if only a single point (only occurs when orphaned by gap)
  if (indices.size() <= 1)
  {
    return;
  }

  Line line(c_data_, r_data_, params_, indices);
  line.endpointFit();
  double dist_max = 0;
  double gap_max = 0;
  double dist, gap;
  int i_max, i_gap; // NOTE(review): left uninitialized; appears safe only
                    // because each is assigned before the branch that reads
                    // it can be taken (i_max when dist_max >= threshold,
                    // i_gap when gap_max >= max_line_gap > 0) — confirm.

  // Find the farthest point from the fitted line and the largest gap
  // between consecutive interior points (endpoint gaps handled below).
  for (std::size_t i = 1; i < indices.size() - 1; ++i)
  {
    dist = line.distToPoint(indices[i]);
    if (dist > dist_max)
    {
      dist_max = dist;
      i_max = i;
    }
    gap = distBetweenPoints(indices[i], indices[i+1]);
    if (gap > gap_max)
    {
      gap_max = gap;
      i_gap = i;
    }
  }

  // Check for gaps at endpoints
  double gap_start = distBetweenPoints(indices[0], indices[1]);
  if (gap_start > gap_max)
  {
    gap_max = gap_start;
    i_gap = 1;
  }
  double gap_end = distBetweenPoints(indices.rbegin()[1], indices.rbegin()[0]);
  if (gap_end > gap_max)
  {
    gap_max = gap_end;
    i_gap = indices.size() - 1;
  }

  // Accept the line when no point deviates too far and no gap is too wide;
  // otherwise recurse on the two halves.
  if (dist_max < params_.min_split_dist && gap_max < params_.max_line_gap)
  {
    lines_.push_back(line);
  }
  else
  {
    // Split at the max-deviation point when deviation is what failed,
    // otherwise at the largest gap.
    int i_split = dist_max >= params_.min_split_dist ? i_max : i_gap;
    // NOTE(review): these pointer-range constructors take [first, last)
    // half-open ranges, so first_split EXCLUDES indices[i_split - 1] and
    // second_split EXCLUDES the final element (&indices.back() points at
    // the last element, not one past it).  Points are therefore dropped at
    // every split — verify this is intentional before changing it.
    std::vector<unsigned int> first_split(&indices[0], &indices[i_split - 1]);
    std::vector<unsigned int> second_split(&indices[i_split], &indices.back());
    split(first_split);
    split(second_split);
  }

}
362
+
363
+ } // namespace line_extraction
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/src/line_extraction_node.cpp ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #include "laser_line_extraction/line_extraction_ros.h"
2
+ #include <ros/console.h>
3
+
4
+ int main(int argc, char **argv)
5
+ {
6
+
7
+ if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Debug))
8
+ {
9
+ ros::console::notifyLoggerLevelsChanged();
10
+ }
11
+
12
+ ROS_DEBUG("Starting line_extraction_node.");
13
+
14
+ ros::init(argc, argv, "line_extraction_node");
15
+ ros::NodeHandle nh;
16
+ ros::NodeHandle nh_local("~");
17
+ line_extraction::LineExtractionROS line_extractor(nh, nh_local);
18
+
19
+ double frequency;
20
+ nh_local.param<double>("frequency", frequency, 25);
21
+ ROS_DEBUG("Frequency set to %0.1f Hz", frequency);
22
+ ros::Rate rate(frequency);
23
+
24
+ while (ros::ok())
25
+ {
26
+ line_extractor.run();
27
+ ros::spinOnce();
28
+ rate.sleep();
29
+ }
30
+ return 0;
31
+ }
32
+
salsa/manually_labeling/semantic_data_collection_ws/src/laser_line_extraction/src/line_extraction_ros.cpp ADDED
@@ -0,0 +1,220 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #include "laser_line_extraction/line_extraction_ros.h"
2
+ #include <cmath>
3
+ #include <ros/console.h>
4
+
5
+
6
+ namespace line_extraction
7
+ {
8
+
9
+ ///////////////////////////////////////////////////////////////////////////////
10
+ // Constructor / destructor
11
+ ///////////////////////////////////////////////////////////////////////////////
12
// Wire the node together: read parameters, advertise the segment-list
// topic, subscribe to the scan topic, and (optionally) advertise markers.
// data_cached_ starts false so the first scan triggers geometry caching.
LineExtractionROS::LineExtractionROS(ros::NodeHandle& nh, ros::NodeHandle& nh_local):
  nh_(nh),
  nh_local_(nh_local),
  data_cached_(false)
{
  loadParameters(); // must run first: scan_topic_ / pub_markers_ used below
  line_publisher_ = nh_.advertise<laser_line_extraction::LineSegmentList>("line_segments", 1);
  scan_subscriber_ = nh_.subscribe(scan_topic_, 1, &LineExtractionROS::laserScanCallback, this);
  if (pub_markers_)
  {
    marker_publisher_ = nh_.advertise<visualization_msgs::Marker>("line_markers", 1);
  }
}

LineExtractionROS::~LineExtractionROS()
{
}
29
+
30
+ ///////////////////////////////////////////////////////////////////////////////
31
+ // Run
32
+ ///////////////////////////////////////////////////////////////////////////////
33
// One processing cycle: extract lines from the most recently cached scan
// data, publish the segment list, and optionally publish RViz markers.
void LineExtractionROS::run()
{
  // Extract the lines
  std::vector<Line> lines;
  line_extraction_.extractLines(lines);

  // Populate message
  laser_line_extraction::LineSegmentList msg;
  populateLineSegListMsg(lines, msg);

  // Publish the lines
  line_publisher_.publish(msg);

  // Also publish markers if parameter publish_markers is set to true
  if (pub_markers_)
  {
    visualization_msgs::Marker marker_msg;
    populateMarkerMsg(lines, marker_msg);
    marker_publisher_.publish(marker_msg);
  }
}
54
+
55
+ ///////////////////////////////////////////////////////////////////////////////
56
+ // Load ROS parameters
57
+ ///////////////////////////////////////////////////////////////////////////////
58
// Read all node and algorithm parameters from the private (~) namespace,
// push the algorithm parameters into line_extraction_, and echo every
// value at DEBUG level.
void LineExtractionROS::loadParameters()
{

  ROS_DEBUG("*************************************");
  ROS_DEBUG("PARAMETERS:");

  // Parameters used by this node

  std::string frame_id, scan_topic;
  bool pub_markers;

  // Frame the published segments and markers are expressed in.
  nh_local_.param<std::string>("frame_id", frame_id, "laser");
  frame_id_ = frame_id;
  ROS_DEBUG("frame_id: %s", frame_id_.c_str());

  nh_local_.param<std::string>("scan_topic", scan_topic, "scan");
  scan_topic_ = scan_topic;
  ROS_DEBUG("scan_topic: %s", scan_topic_.c_str());

  nh_local_.param<bool>("publish_markers", pub_markers, false);
  pub_markers_ = pub_markers;
  ROS_DEBUG("publish_markers: %s", pub_markers ? "true" : "false");

  // Parameters used by the line extraction algorithm

  double bearing_std_dev, range_std_dev, least_sq_angle_thresh, least_sq_radius_thresh,
    max_line_gap, min_line_length, min_range, max_range, min_split_dist, outlier_dist;
  int min_line_points;

  // Standard deviations are squared here: the extractor works in variances.
  nh_local_.param<double>("bearing_std_dev", bearing_std_dev, 1e-3);
  line_extraction_.setBearingVariance(bearing_std_dev * bearing_std_dev);
  ROS_DEBUG("bearing_std_dev: %f", bearing_std_dev);

  nh_local_.param<double>("range_std_dev", range_std_dev, 0.02);
  line_extraction_.setRangeVariance(range_std_dev * range_std_dev);
  ROS_DEBUG("range_std_dev: %f", range_std_dev);

  nh_local_.param<double>("least_sq_angle_thresh", least_sq_angle_thresh, 1e-4);
  line_extraction_.setLeastSqAngleThresh(least_sq_angle_thresh);
  ROS_DEBUG("least_sq_angle_thresh: %f", least_sq_angle_thresh);

  nh_local_.param<double>("least_sq_radius_thresh", least_sq_radius_thresh, 1e-4);
  line_extraction_.setLeastSqRadiusThresh(least_sq_radius_thresh);
  ROS_DEBUG("least_sq_radius_thresh: %f", least_sq_radius_thresh);

  nh_local_.param<double>("max_line_gap", max_line_gap, 0.4);
  line_extraction_.setMaxLineGap(max_line_gap);
  ROS_DEBUG("max_line_gap: %f", max_line_gap);

  nh_local_.param<double>("min_line_length", min_line_length, 0.5);
  line_extraction_.setMinLineLength(min_line_length);
  ROS_DEBUG("min_line_length: %f", min_line_length);

  nh_local_.param<double>("min_range", min_range, 0.4);
  line_extraction_.setMinRange(min_range);
  ROS_DEBUG("min_range: %f", min_range);

  nh_local_.param<double>("max_range", max_range, 10000.0);
  line_extraction_.setMaxRange(max_range);
  ROS_DEBUG("max_range: %f", max_range);

  nh_local_.param<double>("min_split_dist", min_split_dist, 0.05);
  line_extraction_.setMinSplitDist(min_split_dist);
  ROS_DEBUG("min_split_dist: %f", min_split_dist);

  nh_local_.param<double>("outlier_dist", outlier_dist, 0.05);
  line_extraction_.setOutlierDist(outlier_dist);
  ROS_DEBUG("outlier_dist: %f", outlier_dist);

  nh_local_.param<int>("min_line_points", min_line_points, 9);
  line_extraction_.setMinLinePoints(static_cast<unsigned int>(min_line_points));
  ROS_DEBUG("min_line_points: %d", min_line_points);

  ROS_DEBUG("*************************************");
}
133
+
134
+ ///////////////////////////////////////////////////////////////////////////////
135
+ // Populate messages
136
+ ///////////////////////////////////////////////////////////////////////////////
137
+ void LineExtractionROS::populateLineSegListMsg(const std::vector<Line> &lines,
138
+ laser_line_extraction::LineSegmentList &line_list_msg)
139
+ {
140
+ for (std::vector<Line>::const_iterator cit = lines.begin(); cit != lines.end(); ++cit)
141
+ {
142
+ laser_line_extraction::LineSegment line_msg;
143
+ line_msg.angle = cit->getAngle();
144
+ line_msg.radius = cit->getRadius();
145
+ line_msg.covariance = cit->getCovariance();
146
+ line_msg.start = cit->getStart();
147
+ line_msg.end = cit->getEnd();
148
+ line_list_msg.line_segments.push_back(line_msg);
149
+ }
150
+ line_list_msg.header.frame_id = frame_id_;
151
+ line_list_msg.header.stamp = ros::Time::now();
152
+ }
153
+
154
+ void LineExtractionROS::populateMarkerMsg(const std::vector<Line> &lines,
155
+ visualization_msgs::Marker &marker_msg)
156
+ {
157
+ marker_msg.ns = "line_extraction";
158
+ marker_msg.id = 0;
159
+ marker_msg.type = visualization_msgs::Marker::LINE_LIST;
160
+ marker_msg.scale.x = 0.1;
161
+ marker_msg.color.r = 1.0;
162
+ marker_msg.color.g = 0.0;
163
+ marker_msg.color.b = 0.0;
164
+ marker_msg.color.a = 1.0;
165
+ for (std::vector<Line>::const_iterator cit = lines.begin(); cit != lines.end(); ++cit)
166
+ {
167
+ geometry_msgs::Point p_start;
168
+ p_start.x = cit->getStart()[0];
169
+ p_start.y = cit->getStart()[1];
170
+ p_start.z = 0;
171
+ marker_msg.points.push_back(p_start);
172
+ geometry_msgs::Point p_end;
173
+ p_end.x = cit->getEnd()[0];
174
+ p_end.y = cit->getEnd()[1];
175
+ p_end.z = 0;
176
+ marker_msg.points.push_back(p_end);
177
+ }
178
+ marker_msg.header.frame_id = frame_id_;
179
+ marker_msg.header.stamp = ros::Time::now();
180
+ }
181
+
182
+ ///////////////////////////////////////////////////////////////////////////////
183
+ // Cache data on first LaserScan message received
184
+ ///////////////////////////////////////////////////////////////////////////////
185
+ void LineExtractionROS::cacheData(const sensor_msgs::LaserScan::ConstPtr &scan_msg)
186
+ {
187
+ std::vector<double> bearings, cos_bearings, sin_bearings;
188
+ std::vector<unsigned int> indices;
189
+ const std::size_t num_measurements = std::ceil(
190
+ (scan_msg->angle_max - scan_msg->angle_min) / scan_msg->angle_increment);
191
+ for (std::size_t i = 0; i < num_measurements; ++i)
192
+ {
193
+ const double b = scan_msg->angle_min + i * scan_msg->angle_increment;
194
+ bearings.push_back(b);
195
+ cos_bearings.push_back(cos(b));
196
+ sin_bearings.push_back(sin(b));
197
+ indices.push_back(i);
198
+ }
199
+
200
+ line_extraction_.setCachedData(bearings, cos_bearings, sin_bearings, indices);
201
+ ROS_DEBUG("Data has been cached.");
202
+ }
203
+
204
+ ///////////////////////////////////////////////////////////////////////////////
205
+ // Main LaserScan callback
206
+ ///////////////////////////////////////////////////////////////////////////////
207
+ void LineExtractionROS::laserScanCallback(const sensor_msgs::LaserScan::ConstPtr &scan_msg)
208
+ {
209
+ if (!data_cached_)
210
+ {
211
+ cacheData(scan_msg);
212
+ data_cached_ = true;
213
+ }
214
+
215
+ std::vector<double> scan_ranges_doubles(scan_msg->ranges.begin(), scan_msg->ranges.end());
216
+ line_extraction_.setRangeData(scan_ranges_doubles);
217
+ }
218
+
219
+ } // namespace line_extraction
220
+