jordi-ai2 commited on
Commit
dc23af7
·
verified ·
1 Parent(s): edccf28

Create repair_video_paths.py

Browse files
Files changed (1)
  1. repair_video_paths.py +172 -0
repair_video_paths.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ from concurrent.futures import CancelledError, ProcessPoolExecutor, Future, as_completed
3
+ from threading import Semaphore, Lock
4
+ from pathlib import Path
5
+ import glob
6
+ import os
7
+ import traceback
8
+
9
+ import numpy as np
10
+ import h5py
11
+ from tqdm import tqdm
12
+
13
+
14
def get_args():
    """Build and parse the command-line interface.

    Returns:
        argparse.Namespace with ``data_root``, ``num_workers``,
        ``overwrite`` (True unless --no-overwrite is given) and ``dry_run``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("data_root")
    parser.add_argument("--num-workers", default=32, type=int)
    # --no-overwrite flips the stored value, so args.overwrite defaults to True.
    parser.add_argument(
        "--no-overwrite",
        dest="overwrite",
        action="store_false",
        help="Do not overwrite existing valid trajectory mask in data files (skip instead)",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Find valid trajectories but do not write to data files",
    )
    return parser.parse_args()
30
+
31
+
32
def _encode_filename_bytes(filename: str, size: int = 100) -> np.ndarray:
    """Encode *filename* as UTF-8 into a zero-padded uint8 array of length *size*.

    Raises:
        ValueError: if the encoded name does not fit in *size* bytes — a clear
            message instead of the opaque broadcast error the raw slice
            assignment would produce.
    """
    encoded = filename.encode("utf-8")
    if len(encoded) > size:
        raise ValueError(
            f"Filename {filename!r} is {len(encoded)} bytes and does not fit "
            f"in a {size}-byte dataset"
        )
    byte_array = np.zeros(size, dtype=np.uint8)
    byte_array[: len(encoded)] = list(encoded)
    return byte_array


def process_data_file(args, data_file_path: Path):
    """Re-link video filenames for every trajectory in one HDF5 data file.

    For each ``traj_*`` group, the expected RGB video filename — and the
    depth video filename, when that video exists on disk — is written into
    ``obs/sensor_data/<camera_name>`` as a 100-byte zero-padded uint8 dataset.

    Args:
        args: parsed CLI namespace; reads ``args.dry_run`` and ``args.overwrite``.
        data_file_path: path to a ``traj*_<batch_suffix>.h5`` file.

    Returns:
        Tuple ``(n_updated, n_skipped, n_corrupted)`` where ``n_corrupted`` is
        1 when the file could not be opened/read (OSError) and 0 otherwise.

    Raises:
        RuntimeError: on any non-OSError failure, chained to the original cause.
    """
    n_updated = 0
    n_skipped = 0
    try:
        # Open read-only on dry runs so the file is guaranteed untouched.
        with h5py.File(data_file_path, "r" if args.dry_run else "r+") as f:
            for traj_name in f.keys():
                if not traj_name.startswith("traj_"):
                    continue
                traj_idx = int(traj_name.split("_")[-1])
                sensor_data_group = f[traj_name]["obs/sensor_data"]

                # Trajectories that already have sensor-data entries are
                # skipped unless overwriting (the default) is enabled.
                if len(sensor_data_group.keys()) > 0 and not args.overwrite:
                    n_skipped += 1
                    continue

                n_updated += 1
                # NOTE(review): assumes the file name contains at least one
                # underscore (e.g. "traj_<batch_suffix>.h5"); a bare "traj.h5"
                # would raise IndexError here — confirm the upstream naming
                # convention guarantees the underscore.
                batch_suffix = data_file_path.name.split("_", 1)[1][: -len(".h5")]

                for camera_name in f[traj_name]["obs/sensor_param"].keys():
                    # RGB video: required to exist on disk.
                    if camera_name not in sensor_data_group:
                        video_filename = (
                            f"episode_{traj_idx:08d}_{camera_name}_{batch_suffix}.mp4"
                        )
                        video_path = str(data_file_path.parent / video_filename)
                        if not os.path.exists(video_path):
                            # Explicit raise instead of assert: asserts are
                            # stripped under `python -O`. Deliberately NOT
                            # FileNotFoundError — that is an OSError subclass
                            # and would be mis-counted as a corrupted data
                            # file by the handler below.
                            raise ValueError(
                                f"Video path {video_path} does not exist"
                            )
                        byte_array = _encode_filename_bytes(video_filename)
                        if not args.dry_run:
                            sensor_data_group.create_dataset(
                                camera_name, data=byte_array, dtype=np.uint8
                            )

                    # Depth video: optional — only linked when the file exists.
                    depth_camera_name = f"{camera_name}_depth"
                    if depth_camera_name not in sensor_data_group:
                        depth_video_filename = (
                            f"episode_{traj_idx:08d}_{depth_camera_name}_"
                            f"{batch_suffix}.mp4"
                        )
                        depth_video_path = str(
                            data_file_path.parent / depth_video_filename
                        )
                        if os.path.exists(depth_video_path):
                            depth_byte_array = _encode_filename_bytes(
                                depth_video_filename
                            )
                            if not args.dry_run:
                                sensor_data_group.create_dataset(
                                    depth_camera_name,
                                    data=depth_byte_array,
                                    dtype=np.uint8,
                                )
    except OSError:
        # Unreadable/corrupted HDF5 file: report it rather than abort the run.
        return n_updated, n_skipped, 1
    except Exception as e:
        raise RuntimeError(f"Error processing data file {data_file_path}: {e}") from e
    return n_updated, n_skipped, 0
96
+
97
+
98
def main():
    """Find all traj*.h5 files under data_root and repair their video paths.

    Runs either a process pool (``--num-workers > 1``) or a sequential loop,
    aggregating per-file (updated, skipped, corrupted) counts and printing a
    summary at the end.
    """
    args = get_args()

    print("Finding data files...")
    data_files = glob.glob(
        os.path.join(args.data_root, "**", "traj*.h5"), recursive=True
    )
    print(f"Found {len(data_files)} data files")
    total_n_updated = 0
    total_n_skipped = 0
    total_n_corrupted_files = 0
    if args.num_workers > 1:
        # Semaphore bounds the number of queued jobs, which helps when the
        # quantity of data files is very large.
        submit_semaphore = Semaphore(args.num_workers * 4)
        lock = Lock()
        with ProcessPoolExecutor(max_workers=args.num_workers) as executor:
            with tqdm(total=len(data_files), desc="Processing files...") as pbar:

                def on_done(future: Future[tuple[int, int, int]]):
                    """Aggregate a finished future's counts (runs in a pool thread)."""
                    try:
                        n_updated, n_skipped, n_corrupted_files = future.result()
                    except CancelledError:
                        # BUG FIX: must return here. The original fell through
                        # to the aggregation below and hit a NameError on the
                        # unbound count variables; releasing the semaphore
                        # keeps the submit loop from blocking forever.
                        submit_semaphore.release()
                        return
                    except Exception:
                        traceback.print_exc()
                        executor.shutdown(wait=False, cancel_futures=True)
                        # BUG FIX: same fall-through NameError as above.
                        submit_semaphore.release()
                        return

                    with lock:
                        nonlocal total_n_updated, total_n_skipped, total_n_corrupted_files
                        total_n_updated += n_updated
                        total_n_skipped += n_skipped
                        total_n_corrupted_files += n_corrupted_files
                        pbar.set_postfix(
                            n_traj_updated=total_n_updated,
                            n_traj_skipped=total_n_skipped,
                            n_corrupted_files=total_n_corrupted_files,
                        )
                        pbar.update(1)
                    submit_semaphore.release()

                futures: list[Future] = []
                for data_file in data_files:
                    submit_semaphore.acquire()
                    try:
                        future = executor.submit(
                            process_data_file, args, Path(data_file)
                        )
                    except RuntimeError:
                        # Executor was shut down by on_done after a worker
                        # failure; stop submitting. The original worker error
                        # is re-raised by the as_completed loop below.
                        break
                    future.add_done_callback(on_done)
                    futures.append(future)

                # Re-raise the first worker exception, if any.
                for future in as_completed(futures):
                    future.result()

    else:
        for data_file in (pbar := tqdm(data_files)):
            n_updated, n_skipped, n_corrupted_files = process_data_file(
                args, Path(data_file)
            )
            total_n_updated += n_updated
            total_n_skipped += n_skipped
            total_n_corrupted_files += n_corrupted_files
            pbar.set_postfix(
                n_traj_updated=total_n_updated,
                n_traj_skipped=total_n_skipped,
                n_corrupted_files=total_n_corrupted_files,
            )

    dry_run_str = "would be " if args.dry_run else ""
    print(
        f"Finished processing {len(data_files)} data files, "
        f"{total_n_updated} trajectories {dry_run_str}updated, "
        f"{total_n_skipped} trajectories {dry_run_str}skipped, "
        f"found {total_n_corrupted_files} corrupted files"
    )


if __name__ == "__main__":
    main()