File size: 2,645 Bytes
fdbf9f8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
#!/usr/bin/env python3
"""
Hugging Face Dataset Loader for WAVE BENDER IDE v5.0
This script loads the dataset WITHOUT ArrowInvalid errors.
"""

import json
from datasets import Dataset, DatasetDict
import os

def load_wave_bender_dataset(dataset_path):
    """
    Load WAVE BENDER dataset from extracted directory.

    Each component (telemetry, SLAM, statistics) is loaded into its own
    Dataset with its own schema; keeping the schemas separate is what
    avoids ArrowInvalid errors when the files have different fields.

    Args:
        dataset_path (str): Path to extracted dataset directory

    Returns:
        DatasetDict: Dictionary of datasets (only components whose files
        exist on disk are included)
    """

    # (subdirectory, filename, split key, print label) for every optional
    # component. A label of None means "print without a record count"
    # (matches the original statistics-summary message). Driving the loads
    # from this table replaces six copy-pasted load-and-print stanzas.
    components = [
        ("telemetry", "telemetry.jsonl", "telemetry", "telemetry data"),
        ("slam", "obstacles.json", "slam_obstacles", "SLAM obstacles"),
        ("slam", "detections.json", "slam_detections", "SLAM detections"),
        ("slam", "avoidances.json", "slam_avoidances", "SLAM avoidances"),
        ("statistics", "epochs.json", "training_epochs", "training epochs"),
        ("statistics", "summary.json", "statistics", None),
    ]

    # NOTE: named `loaded` (not `datasets`) so we don't shadow the imported
    # `datasets` package this module depends on.
    loaded = {}

    for subdir, filename, key, label in components:
        file_path = os.path.join(dataset_path, subdir, filename)
        # Missing files are skipped silently — components are optional.
        if not os.path.exists(file_path):
            continue
        loaded[key] = Dataset.from_json(file_path)
        if label is None:
            print("Loaded statistics summary")
        else:
            print(f"Loaded {label}: {len(loaded[key])} records")

    # Create DatasetDict
    dataset_dict = DatasetDict(loaded)

    print(f"\n✅ Dataset loaded successfully with {len(loaded)} components")
    print("✅ No ArrowInvalid errors - all schemas are separate and consistent")

    return dataset_dict

if __name__ == "__main__":
    # Example usage: load from the default extraction directory and show
    # which components were found.
    demo_path = "./extracted_dataset"
    loaded = load_wave_bender_dataset(demo_path)
    print(f"\nDataset structure: {list(loaded.keys())}")