GotThatData committed on
Commit
4ebe08e
·
verified ·
1 Parent(s): 459d27c
Files changed (1) hide show
  1. prepare_dataset.py +286 -0
prepare_dataset.py ADDED
@@ -0,0 +1,286 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python3
2
+
3
+ import os
4
+ import json
5
+ from pathlib import Path
6
+ import numpy as np
7
+ import cv2
8
+ from PIL import Image
9
+ import open3d as o3d
10
+ from tqdm import tqdm
11
+ from typing import Dict, List, Any
12
+ import shutil
13
+ import argparse
14
+
15
class FAFODatasetPreprocessor:
    """Preprocessor for FAFO dataset.

    Mirrors the ``data/`` directory layout of *input_dir* into *output_dir*,
    normalizing each record on the way through (sensor JSON, JPEG images,
    PCD point clouds, task-definition JSON), and writes per-category counts
    to ``dataset_info.json`` in the output root.

    Processing is best-effort: a file that fails to load or convert is
    reported and skipped so one bad record does not abort the whole run.
    """

    def __init__(self, input_dir: str, output_dir: str):
        """Store the input/output roots and zero the per-type counters.

        Args:
            input_dir: Directory containing the raw dataset
                (expects a ``data/`` subtree, see ``_create_directories``).
            output_dir: Directory that will receive the processed copy;
                created on demand.
        """
        self.input_dir = Path(input_dir)
        self.output_dir = Path(output_dir)
        # Running counts, incremented once per successfully processed file
        # and persisted to dataset_info.json by _save_metadata().
        self.metadata = {
            'num_samples': 0,
            'data_types': {
                'sensor_data': {'lidar': 0, 'gps': 0, 'imu': 0},
                'image_data': 0,
                '3d_data': 0,
                'task_data': 0
            }
        }

    def prepare_dataset(self):
        """Prepare the complete dataset.

        Creates the output directory tree, processes every data category,
        then writes the collected metadata.
        """
        print("Preparing FAFO dataset...")

        # Create directory structure
        self._create_directories()

        # Process each data type
        self._process_sensor_data()
        self._process_image_data()
        self._process_3d_data()
        self._process_task_data()

        # Save metadata
        self._save_metadata()

        print("Dataset preparation completed!")

    def _create_directories(self):
        """Create the output dataset directory structure (idempotent)."""
        directories = [
            'data/sensor_data/lidar',
            'data/sensor_data/gps',
            'data/sensor_data/imu',
            'data/image_data',
            'data/3d_data',
            'data/task_data'
        ]

        for dir_path in directories:
            (self.output_dir / dir_path).mkdir(parents=True, exist_ok=True)

    def _process_sensor_data(self):
        """Normalize every lidar/gps/imu ``*.json`` file into the output tree."""
        # Dispatch table keeps the per-file loop free of an if/elif chain.
        processors = {
            'lidar': self._process_lidar_data,
            'gps': self._process_gps_data,
            'imu': self._process_imu_data,
        }

        for sensor_type in ('lidar', 'gps', 'imu'):
            print(f"Processing {sensor_type} data...")
            input_dir = self.input_dir / f'data/sensor_data/{sensor_type}'
            output_dir = self.output_dir / f'data/sensor_data/{sensor_type}'

            if not input_dir.exists():
                print(f"Warning: {input_dir} does not exist")
                continue

            # NOTE(review): glob is case-sensitive on most platforms, so
            # '*.JSON' files would be skipped — confirm naming convention.
            for file_path in tqdm(list(input_dir.glob('*.json'))):
                try:
                    # Load and process data
                    with open(file_path, 'r') as f:
                        data = json.load(f)

                    data = processors[sensor_type](data)

                    # Save processed data
                    output_path = output_dir / file_path.name
                    with open(output_path, 'w') as f:
                        json.dump(data, f, indent=2)

                    self.metadata['data_types']['sensor_data'][sensor_type] += 1

                except Exception as e:
                    # Best-effort: report and continue with the next file.
                    print(f"Error processing {file_path}: {e}")

    def _process_image_data(self):
        """Resize/convert every ``*.jpg`` image and save it at JPEG quality 95."""
        print("Processing image data...")
        input_dir = self.input_dir / 'data/image_data'
        output_dir = self.output_dir / 'data/image_data'

        if not input_dir.exists():
            print(f"Warning: {input_dir} does not exist")
            return

        for file_path in tqdm(list(input_dir.glob('*.jpg'))):
            try:
                # Context manager releases the underlying file handle even on
                # failure; the previous bare Image.open() leaked it.
                with Image.open(file_path) as raw:
                    # _process_image returns a new (resized) image, so it is
                    # safe to use after the source file is closed.
                    image = self._process_image(raw)

                # Save processed image
                output_path = output_dir / file_path.name
                image.save(output_path, quality=95)

                self.metadata['data_types']['image_data'] += 1

            except Exception as e:
                print(f"Error processing {file_path}: {e}")

    def _process_3d_data(self):
        """Denoise and downsample every ``*.pcd`` point cloud."""
        print("Processing 3D data...")
        input_dir = self.input_dir / 'data/3d_data'
        output_dir = self.output_dir / 'data/3d_data'

        if not input_dir.exists():
            print(f"Warning: {input_dir} does not exist")
            return

        for file_path in tqdm(list(input_dir.glob('*.pcd'))):
            try:
                # Load and process point cloud (open3d expects str paths)
                pcd = o3d.io.read_point_cloud(str(file_path))

                pcd = self._process_point_cloud(pcd)

                # Save processed point cloud
                output_path = output_dir / file_path.name
                o3d.io.write_point_cloud(str(output_path), pcd)

                self.metadata['data_types']['3d_data'] += 1

            except Exception as e:
                print(f"Error processing {file_path}: {e}")

    def _process_task_data(self):
        """Normalize every task-definition ``*.json`` file into the output tree."""
        print("Processing task data...")
        input_dir = self.input_dir / 'data/task_data'
        output_dir = self.output_dir / 'data/task_data'

        if not input_dir.exists():
            print(f"Warning: {input_dir} does not exist")
            return

        for file_path in tqdm(list(input_dir.glob('*.json'))):
            try:
                # Load and process task data
                with open(file_path, 'r') as f:
                    data = json.load(f)

                data = self._process_task_definition(data)

                # Save processed data
                output_path = output_dir / file_path.name
                with open(output_path, 'w') as f:
                    json.dump(data, f, indent=2)

                self.metadata['data_types']['task_data'] += 1

            except Exception as e:
                print(f"Error processing {file_path}: {e}")

    def _process_lidar_data(self, data: Dict) -> Dict:
        """Normalize one LiDAR record in place and return it.

        Scales ``ranges`` by 1/100 — assumes raw values are centimeters and
        targets meters (TODO confirm with the capture pipeline) — and
        backfills missing required fields with empty lists.
        """
        if 'ranges' in data:
            data['ranges'] = [x / 100.0 for x in data['ranges']]

        # Ensure all required fields exist so downstream readers never KeyError.
        for field in ('timestamp', 'ranges', 'intensities', 'angles'):
            if field not in data:
                data[field] = []

        return data

    def _process_gps_data(self, data: Dict) -> Dict:
        """Backfill missing GPS fields with 0.0 and return the record."""
        for field in ('timestamp', 'latitude', 'longitude', 'altitude'):
            if field not in data:
                data[field] = 0.0

        return data

    def _process_imu_data(self, data: Dict) -> Dict:
        """Backfill missing IMU fields with zero 3-vectors and return the record.

        NOTE(review): 'orientation' also gets a 3-element default here — if it
        is a quaternion elsewhere, that would need 4 components; confirm.
        """
        for field in ('timestamp', 'acceleration', 'angular_velocity', 'orientation'):
            if field not in data:
                data[field] = [0.0, 0.0, 0.0]

        return data

    def _process_image(self, image: "Image.Image") -> "Image.Image":
        """Return *image* resized to 640x480 and converted to RGB.

        Annotations are quoted so defining this class does not require PIL
        to be importable; behavior is unchanged.
        """
        # Resize first, then convert: LANCZOS resampling works on the
        # original mode, and a new image object is always produced.
        image = image.resize((640, 480), Image.Resampling.LANCZOS)

        if image.mode != 'RGB':
            image = image.convert('RGB')

        return image

    def _process_point_cloud(self, pcd: "o3d.geometry.PointCloud") -> "o3d.geometry.PointCloud":
        """Return *pcd* with statistical outliers removed, then voxel-downsampled.

        Parameters (20 neighbors, 2.0 std ratio, 5 cm voxel) follow common
        open3d defaults — presumably tuned for this sensor; confirm.
        """
        pcd, _ = pcd.remove_statistical_outlier(nb_neighbors=20, std_ratio=2.0)

        pcd = pcd.voxel_down_sample(voxel_size=0.05)

        return pcd

    def _process_task_definition(self, data: Dict) -> Dict:
        """Backfill missing task-definition fields with empty dicts and return it."""
        for field in ('task_type', 'parameters', 'annotations'):
            if field not in data:
                data[field] = {}

        return data

    def _save_metadata(self):
        """Write ``dataset_info.json`` to the output root and print a summary."""
        # Total = every processed file across all categories.
        self.metadata['num_samples'] = (
            sum(self.metadata['data_types']['sensor_data'].values())
            + self.metadata['data_types']['image_data']
            + self.metadata['data_types']['3d_data']
            + self.metadata['data_types']['task_data']
        )

        metadata_path = self.output_dir / 'dataset_info.json'
        with open(metadata_path, 'w') as f:
            json.dump(self.metadata, f, indent=2)

        print("Dataset statistics:")
        print(f"Total samples: {self.metadata['num_samples']}")
        print("Data types:")
        for data_type, count in self.metadata['data_types'].items():
            if isinstance(count, dict):
                # sensor_data is a nested dict of per-sensor counts
                for subtype, subcount in count.items():
                    print(f"  - {data_type}/{subtype}: {subcount}")
            else:
                print(f"  - {data_type}: {count}")
def main():
    """Command-line entry point: parse --input_dir/--output_dir and run the preprocessor."""
    arg_parser = argparse.ArgumentParser(description='Prepare FAFO dataset')
    arg_parser.add_argument('--input_dir', type=str, required=True,
                            help='Input directory containing raw data')
    arg_parser.add_argument('--output_dir', type=str, required=True,
                            help='Output directory for processed dataset')
    cli_args = arg_parser.parse_args()

    # Build the preprocessor and run the full pipeline in one shot.
    FAFODatasetPreprocessor(cli_args.input_dir, cli_args.output_dir).prepare_dataset()
# Script entry point: run the CLI only when executed directly, not on import.
if __name__ == '__main__':
    main()