Wuhuwill committed on
Commit
421eadc
·
verified ·
1 Parent(s): ebb5a61

Upload ProDiff/Experiments/trajectory_a40_temporal_optimized_TKY_temporal_len3_ddpm_20250724-101534/code_snapshot/data_util.py with huggingface_hub

Browse files
ProDiff/Experiments/trajectory_a40_temporal_optimized_TKY_temporal_len3_ddpm_20250724-101534/code_snapshot/data_util.py ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import numpy as np
3
+ import pandas as pd
4
+ from torch.utils.data import Dataset
5
+ from tqdm import tqdm
6
+ import h5py
7
+
8
class TrajectoryDataset(Dataset):
    """Dataset of fixed-length trajectory windows loaded from HDF5 files.

    Each sample is a tuple ``(hours, latitudes, longitudes)`` of length
    ``traj_length``, produced by sliding a window over every user's
    trajectory.
    """

    def __init__(self, file_paths, traj_length):
        """
        Args:
            file_paths: iterable of paths to HDF5 files; each file holds one
                group per user with 'latitudes', 'longitudes' and 'hours'
                datasets (assumed to be aligned, equal-length 1-D arrays —
                TODO confirm against the file writer).
            traj_length: number of consecutive points per sample.
        """
        self.samples = []
        self.load_samples(file_paths, traj_length)

    def load_samples(self, file_paths, traj_length):
        """Populate ``self.samples`` with every length-``traj_length`` window.

        Trajectories shorter than ``traj_length`` contribute no samples.
        """
        for file_path in tqdm(file_paths, desc="Loading files", unit="file"):
            with h5py.File(file_path, 'r') as h5_file:
                for user_id in h5_file.keys():  # one group per user
                    user_group = h5_file[user_id]
                    latitudes = user_group['latitudes'][:]
                    longitudes = user_group['longitudes'][:]
                    hours = user_group['hours'][:]

                    # Slide a window of traj_length over the trajectory.
                    # A single loop replaces the original redundant
                    # `>` / `==` branches: when len == traj_length the
                    # range yields exactly one full-length window, and
                    # when the trajectory is shorter the range is empty.
                    for j in range(len(latitudes) - traj_length + 1):
                        self.samples.append((
                            hours[j:j + traj_length],
                            latitudes[j:j + traj_length],
                            longitudes[j:j + traj_length],
                        ))

    def __len__(self):
        """Return the number of window samples."""
        return len(self.samples)

    def __getitem__(self, idx):
        """Return (hours, latitudes, longitudes) as float32 tensors."""
        hours, latitudes, longitudes = self.samples[idx]
        return (torch.tensor(hours, dtype=torch.float32),
                torch.tensor(latitudes, dtype=torch.float32),
                torch.tensor(longitudes, dtype=torch.float32))
36
+
37
+
38
class PatternDataset:
    """Dataset of variable-length trajectory patterns read via pandas HDFStore.

    Each trajectory is stored as a list of ``(abs_time, lat, lng)`` tuples.
    """

    def __init__(self, file_paths):
        """
        Args:
            file_paths: iterable of HDF5 files readable by pandas.HDFStore,
                each containing a 'data' frame with ABS_TIME / LAT / LNG
                columns whose cells hold per-trajectory sequences
                (assumed — TODO confirm against the writer of these files).
        """
        self.trajectories = []
        self.load_samples(file_paths)

    def load_samples(self, file_paths):
        """Append every row of every file's 'data' frame to self.trajectories."""
        for file_path in tqdm(file_paths, desc="Loading files", unit="file"):
            with pd.HDFStore(file_path, 'r') as store:
                data = store['data']
                for i in range(len(data)):
                    # .iloc gives positional access; the original
                    # `data['ABS_TIME'][i]` is label-based and breaks when
                    # the stored frame's index is not a default RangeIndex
                    # (int-label Series [] indexing is also deprecated in
                    # modern pandas).
                    abs_time_list = np.array(data['ABS_TIME'].iloc[i])
                    lat_list = np.array(data['LAT'].iloc[i])
                    lng_list = np.array(data['LNG'].iloc[i])
                    self.trajectories.append(
                        list(zip(abs_time_list, lat_list, lng_list)))

    def get_all_trajectories(self):
        """Return the raw (unpadded) trajectories."""
        return self.trajectories

    def pad_trajectories(self):
        """Return trajectories right-padded to equal length with their last point.

        Returns:
            A list of trajectories, each of the maximum observed length.
            Returns [] when no trajectories are loaded (the original raised
            ValueError from max() on an empty sequence).
        """
        if not self.trajectories:
            return []
        max_length = max(len(traj) for traj in self.trajectories)
        padded_samples = []
        for traj in self.trajectories:
            pad_count = max_length - len(traj)
            if pad_count:
                # Repeat the final point to reach max_length.
                padded_traj = traj + [traj[-1]] * pad_count
            else:
                padded_traj = traj
            padded_samples.append(padded_traj)
        return padded_samples
73
+
74
+
75
class MinMaxScaler:
    """Per-feature min-max normalizer for 3-D tensors.

    Statistics are reduced over the first two dimensions, so each feature
    along the last dimension is scaled independently into [0, 1].
    """

    def __init__(self):
        # Set by fit(); transform()/inverse_transform() require fit() first.
        self.min_val = None
        self.max_val = None

    def fit(self, data):
        """Record per-feature minima and maxima over dims (0, 1)."""
        self.min_val = torch.amin(data, dim=(0, 1), keepdim=True)
        self.max_val = torch.amax(data, dim=(0, 1), keepdim=True)

    def transform(self, data):
        """Map data into [0, 1] using the fitted per-feature range."""
        value_range = self.max_val - self.min_val
        return (data - self.min_val) / value_range

    def inverse_transform(self, data):
        """Undo transform(): map normalized values back to the original scale."""
        value_range = self.max_val - self.min_val
        return data * value_range + self.min_val
90
+