File size: 5,809 Bytes
4340d26 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 |
import torch
import numpy as np
import pandas as pd
from torch.utils.data import Dataset
from tqdm import tqdm
import h5py
class TrajectoryDataset(Dataset):
    """Dataset of fixed-length trajectory windows loaded from HDF5 files.

    Each sample is a ``(hours, latitudes, longitudes)`` triple of arrays of
    length ``traj_length``, produced by sliding a unit-stride window over
    every user's trajectory in every input file. Users with trajectories
    shorter than ``traj_length`` contribute no samples.
    """

    def __init__(self, file_paths, traj_length):
        # Samples are materialized eagerly at construction time; each entry
        # is a (hours, latitudes, longitudes) tuple of equal-length arrays.
        self.samples = []
        self.load_samples(file_paths, traj_length)

    def load_samples(self, file_paths, traj_length):
        """Populate ``self.samples`` with every window of ``traj_length``.

        Args:
            file_paths: Iterable of HDF5 file paths; each file holds one
                group per user with 'latitudes', 'longitudes' and 'hours'
                datasets (assumed equal length — confirmed by slicing below).
            traj_length: Window size in time steps.
        """
        for file_path in tqdm(file_paths, desc="Loading files", unit="file"):
            with h5py.File(file_path, 'r') as h5_file:
                for user_id in h5_file.keys():  # one group per user
                    user_group = h5_file[user_id]
                    latitudes = user_group['latitudes'][:]
                    longitudes = user_group['longitudes'][:]
                    hours = user_group['hours'][:]
                    # A trajectory of length n yields n - traj_length + 1
                    # windows: exactly one when n == traj_length, none when
                    # shorter (negative range is empty). This single loop
                    # replaces the redundant ==/> two-branch version.
                    for j in range(len(latitudes) - traj_length + 1):
                        self.samples.append((
                            hours[j:j + traj_length],
                            latitudes[j:j + traj_length],
                            longitudes[j:j + traj_length],
                        ))

    def __len__(self):
        """Return the total number of windows across all users/files."""
        return len(self.samples)

    def __getitem__(self, idx):
        """Return sample ``idx`` as three float32 tensors (hours, lat, lng)."""
        hours, latitudes, longitudes = self.samples[idx]
        return (
            torch.tensor(hours, dtype=torch.float32),
            torch.tensor(latitudes, dtype=torch.float32),
            torch.tensor(longitudes, dtype=torch.float32),
        )
class PatternDataset:
    """Dataset of full (time, lat, lng) trajectories read via pandas HDFStore,
    e.g. as input patterns for prototype learning."""

    def __init__(self, file_paths):
        # One entry per stored row; each trajectory is a list of
        # (abs_time, lat, lng) tuples.
        self.trajectories = []
        self.load_samples(file_paths)

    def load_samples(self, file_paths):
        """Append one trajectory per row of each store's 'data' frame.

        Args:
            file_paths: Iterable of HDF5 file paths readable by
                ``pd.HDFStore``; each must contain a 'data' frame with
                ABS_TIME / LAT / LNG columns holding per-row sequences.
        """
        for file_path in tqdm(file_paths, desc="Loading files", unit="file"):
            with pd.HDFStore(file_path, 'r') as store:
                data = store['data']
                # Positional access (.iloc) instead of label-based Series
                # indexing, so frames whose index is not a default
                # RangeIndex are still read row by row.
                for i in range(len(data)):
                    abs_time_list = np.array(data['ABS_TIME'].iloc[i])
                    lat_list = np.array(data['LAT'].iloc[i])
                    lng_list = np.array(data['LNG'].iloc[i])
                    self.trajectories.append(
                        list(zip(abs_time_list, lat_list, lng_list))
                    )

    def get_all_trajectories(self):
        """Return the raw (unpadded) list of trajectories."""
        return self.trajectories

    def pad_trajectories(self):
        """Return all trajectories right-padded to a common length.

        Shorter trajectories are extended by repeating their last point.
        An empty dataset yields an empty list instead of raising on
        ``max()`` of an empty sequence.
        """
        if not self.trajectories:
            return []
        max_length = max(len(traj) for traj in self.trajectories)
        padded_samples = []
        for traj in self.trajectories:
            # Zero padding when traj is already max_length long.
            padding = [traj[-1]] * (max_length - len(traj))
            padded_samples.append(traj + padding)
        return padded_samples
class MinMaxScaler:
    """Min-max scaler mapping trajectory features into [0, 1].

    Two modes of operation:
      * per-batch: call ``fit(data)`` to derive bounds from the data itself;
      * global: pass ``global_params_file`` (a JSON file with min/max for
        hours/latitudes/longitudes) so the bounds are fixed at construction
        and ``fit`` becomes a no-op.
    """

    def __init__(self, global_params_file=None):
        # Bounds are torch tensors shaped to broadcast over
        # (batch_size, traj_length, 3) data; None until fitted/loaded.
        self.min_val = None
        self.max_val = None
        self.global_params_file = global_params_file
        self.is_global = global_params_file is not None
        if self.is_global:
            self._load_global_params()

    def _load_global_params(self):
        """Load fixed normalization bounds from the JSON parameter file.

        Expected schema::

            {"hours": {"min": ..., "max": ...},
             "latitudes": {"min": ..., "max": ...},
             "longitudes": {"min": ..., "max": ...}}
        """
        import json  # local import: only needed in global mode

        with open(self.global_params_file, 'r') as f:
            params = json.load(f)
        # Feature order matches the data layout: [hours, latitudes, longitudes].
        feature_keys = ('hours', 'latitudes', 'longitudes')
        min_vals = [params[key]['min'] for key in feature_keys]
        max_vals = [params[key]['max'] for key in feature_keys]
        # Shape (1, 1, 3) broadcasts against (batch_size, traj_length, 3).
        self.min_val = torch.tensor(min_vals, dtype=torch.float32).view(1, 1, 3)
        self.max_val = torch.tensor(max_vals, dtype=torch.float32).view(1, 1, 3)

    def fit(self, data):
        """Derive per-feature bounds from ``data``; no-op in global mode."""
        if not self.is_global:
            self.min_val = data.amin(dim=(0, 1), keepdim=True)
            self.max_val = data.amax(dim=(0, 1), keepdim=True)

    def transform(self, data):
        """Scale ``data`` into the [0, 1] range.

        In global mode, inputs are first clamped to the global bounds so
        out-of-range values cannot produce outputs outside [0, 1].

        Raises:
            ValueError: if the scaler has neither been fitted nor loaded
                global parameters.
        """
        if self.min_val is None or self.max_val is None:
            raise ValueError("Scaler not fitted. Call fit() first.")
        # Move bounds to the same device as the data.
        min_val = self.min_val.to(data.device)
        max_val = self.max_val.to(data.device)
        # Guard constant features: a zero range would divide by zero, so
        # substitute 1 (the numerator is also 0 there, giving output 0,
        # which inverse_transform maps back to the constant min value).
        range_val = max_val - min_val
        range_val = torch.where(range_val == 0, torch.ones_like(range_val), range_val)
        if self.is_global:
            data = torch.clamp(data, min_val, max_val)
        return (data - min_val) / range_val

    def inverse_transform(self, data):
        """Map scaled data back to the original value range.

        Raises:
            ValueError: if the scaler has neither been fitted nor loaded
                global parameters.
        """
        if self.min_val is None or self.max_val is None:
            raise ValueError("Scaler not fitted. Call fit() first.")
        min_val = self.min_val.to(data.device)
        max_val = self.max_val.to(data.device)
        return data * (max_val - min_val) + min_val
|