# ProDiff/dataset/data_util.py — uploaded to the Hugging Face Hub (commit 4340d26).
import torch
import numpy as np
import pandas as pd
from torch.utils.data import Dataset
from tqdm import tqdm
import h5py
class TrajectoryDataset(Dataset):
    """Dataset of fixed-length trajectory windows loaded from HDF5 files.

    Each sample is a tuple ``(hours, latitudes, longitudes)`` where each
    element is a 1-D array of length ``traj_length``, cut from one user's
    trajectory with a stride-1 sliding window.
    """

    def __init__(self, file_paths, traj_length):
        """
        Args:
            file_paths: Iterable of HDF5 file paths. Each file is expected to
                hold one group per user containing 'latitudes', 'longitudes'
                and 'hours' datasets of equal length (assumed — confirm
                against the data-generation code).
            traj_length: Window length; users with fewer points are skipped.
        """
        self.samples = []
        self.load_samples(file_paths, traj_length)

    def load_samples(self, file_paths, traj_length):
        """Populate ``self.samples`` with every length-``traj_length`` window."""
        for file_path in tqdm(file_paths, desc="Loading files", unit="file"):
            with h5py.File(file_path, 'r') as h5_file:
                for user_id in h5_file.keys():  # one group per user
                    user_group = h5_file[user_id]
                    latitudes = user_group['latitudes'][:]
                    longitudes = user_group['longitudes'][:]
                    hours = user_group['hours'][:]
                    # A single ">=" check covers both the sliding-window case
                    # and the exact-length case: range(n - traj_length + 1)
                    # yields exactly one window when n == traj_length, so the
                    # original's separate "elif ==" branch was redundant.
                    n = len(latitudes)
                    if n >= traj_length:
                        for j in range(n - traj_length + 1):
                            self.samples.append((
                                hours[j:j + traj_length],
                                latitudes[j:j + traj_length],
                                longitudes[j:j + traj_length],
                            ))

    def __len__(self):
        return len(self.samples)

    def __getitem__(self, idx):
        """Return one window as three float32 tensors (hours, lat, lng)."""
        hours, latitudes, longitudes = self.samples[idx]
        return (
            torch.tensor(hours, dtype=torch.float32),
            torch.tensor(latitudes, dtype=torch.float32),
            torch.tensor(longitudes, dtype=torch.float32),
        )
class PatternDataset:
    """Dataset of variable-length trajectories loaded via pandas HDFStore.

    Each trajectory is a list of ``(abs_time, lat, lng)`` tuples.
    """

    def __init__(self, file_paths):
        self.trajectories = []
        self.load_samples(file_paths)

    def load_samples(self, file_paths):
        """Read every row of the 'data' table in each store into a trajectory.

        Uses positional access (``.iloc[i]``) rather than label-based
        ``data['COL'][i]``: the original form breaks (KeyError or wrong row)
        whenever the stored DataFrame does not carry a default 0..n-1 index.
        """
        for file_path in tqdm(file_paths, desc="Loading files", unit="file"):
            with pd.HDFStore(file_path, 'r') as store:
                data = store['data']
                for i in range(len(data)):
                    # Each cell is assumed to hold a per-row sequence of
                    # values — TODO confirm against the writer of 'data'.
                    abs_time_list = np.array(data['ABS_TIME'].iloc[i])
                    lat_list = np.array(data['LAT'].iloc[i])
                    lng_list = np.array(data['LNG'].iloc[i])
                    self.trajectories.append(
                        list(zip(abs_time_list, lat_list, lng_list)))

    def get_all_trajectories(self):
        """Return the raw (unpadded) trajectories."""
        return self.trajectories

    def pad_trajectories(self):
        """Right-pad every trajectory to the maximum length.

        Shorter trajectories are extended by repeating their last point.
        Returns an empty list when no trajectories are loaded (the original
        raised ValueError from ``max()`` on an empty sequence).
        """
        if not self.trajectories:
            return []
        max_length = max(len(traj) for traj in self.trajectories)
        padded_samples = []
        for traj in self.trajectories:
            if len(traj) < max_length:
                # Build a new list; do not mutate the stored trajectory.
                traj = traj + [traj[-1]] * (max_length - len(traj))
            padded_samples.append(traj)
        return padded_samples
class MinMaxScaler:
    """Min-Max scaler for trajectory tensors, with optional global parameters.

    When constructed with a JSON parameter file the scaler is "global":
    min/max are loaded once from disk and ``transform`` clamps inputs into
    that range. Otherwise min/max are estimated from data via ``fit``.
    """

    def __init__(self, global_params_file=None):
        self.min_val = None
        self.max_val = None
        self.global_params_file = global_params_file
        self.is_global = global_params_file is not None
        if self.is_global:
            self._load_global_params()

    def _load_global_params(self):
        """Read per-feature min/max from the JSON file into (1, 1, 3) tensors."""
        import json
        with open(self.global_params_file, 'r') as f:
            params = json.load(f)
        # Feature order matches the data layout: [hours, latitudes, longitudes].
        keys = ('hours', 'latitudes', 'longitudes')
        lows = [params[k]['min'] for k in keys]
        highs = [params[k]['max'] for k in keys]
        # Shape (1, 1, 3) broadcasts against (batch_size, traj_length, 3) data.
        self.min_val = torch.tensor(lows, dtype=torch.float32).view(1, 1, 3)
        self.max_val = torch.tensor(highs, dtype=torch.float32).view(1, 1, 3)

    def fit(self, data):
        """Estimate min/max from data; a no-op when global params are loaded."""
        if self.is_global:
            return
        self.min_val = data.amin(dim=(0, 1), keepdim=True)
        self.max_val = data.amax(dim=(0, 1), keepdim=True)

    def transform(self, data):
        """Scale data into the [0, 1] range.

        Raises:
            ValueError: if neither fit() nor global params provided min/max.
        """
        if self.min_val is None or self.max_val is None:
            raise ValueError("Scaler not fitted. Call fit() first.")
        # Keep the stats on the same device as the incoming data.
        lo = self.min_val.to(data.device)
        hi = self.max_val.to(data.device)
        span = hi - lo
        # Degenerate features (max == min) divide by 1 instead of 0.
        span = torch.where(span == 0, torch.ones_like(span), span)
        # Global scalers clamp out-of-range inputs into the known bounds.
        x = torch.clamp(data, lo, hi) if self.is_global else data
        return (x - lo) / span

    def inverse_transform(self, data):
        """Map scaled data back to the original value range.

        Raises:
            ValueError: if neither fit() nor global params provided min/max.
        """
        if self.min_val is None or self.max_val is None:
            raise ValueError("Scaler not fitted. Call fit() first.")
        lo = self.min_val.to(data.device)
        hi = self.max_val.to(data.device)
        return data * (hi - lo) + lo