| |
| """ |
| Visualization script for collected tactile manipulation data. |
| |
| Displays: |
| 1. Camera images (agentview + eye_in_hand) - synchronized with control freq |
| 2. Real-time tactile force distribution heatmaps (4x4 per finger) |
| 3. Force magnitude time series |
| |
| Can visualize from: |
| - Saved HDF5 data files (offline) |
| - Live collection (real-time) |
| |
| Usage: |
| # Visualize saved data |
| python visualize_data.py --data_file ./tactile_data/precision_grasp_data.hdf5 --episode 0 |
| |
| # Live visualization during collection |
| python visualize_data.py --task precision_grasp --live |
| """ |
|
|
| import os |
| import sys |
| import argparse |
|
|
| import numpy as np |
| import h5py |
|
|
| sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) |
|
|
|
|
def visualize_offline(data_file, episode_idx=0, playback_speed=1.0):
    """
    Visualize a saved episode from a per-episode HDF5 file.

    Shows the two camera streams, a 4x4 tactile heatmap per finger, and a
    force-magnitude time series, replayed at approximately the recorded
    control frequency.

    Args:
        data_file (str): path to a per-episode HDF5 file.
        episode_idx (int): kept for CLI compatibility; the file layout is
            one episode per file, so this value is not used for lookup.
        playback_speed (float): >1 plays faster, <1 plays slower.
    """
    import matplotlib.pyplot as plt
    from matplotlib.gridspec import GridSpec

    with h5py.File(data_file, "r") as f:
        meta = f["metadata"]
        print(f"Task: {meta.attrs['task']}")
        print(f"Robot: {meta.attrs['robot']}, Gripper: {meta.attrs['gripper']}")
        print(f"Tactile sensor: {meta.attrs['tactile_sensor']}")
        # Recorded frequencies drive both the tactile/camera sample ratio
        # and the playback delay (previously hard-coded to 5 and 20 Hz).
        control_freq = float(meta.attrs.get("control_freq", 20.0))
        tactile_freq = float(meta.attrs.get("tactile_freq", control_freq * 5.0))
        print(f"Control freq: {control_freq} Hz, "
              f"Tactile freq: {tactile_freq} Hz")

        # Per-episode file layout: datasets live at the file root.
        ep = f
        print(f"\nEpisode: "
              f"steps={ep.attrs['n_steps']}, "
              f"success={ep.attrs['success']}")

        # Materialize everything with [:] so the file can be closed here.
        agentview = ep["agentview_image"][:] if "agentview_image" in ep else None
        eye_in_hand = ep["eye_in_hand_image"][:] if "eye_in_hand_image" in ep else None
        tactile_left = ep["tactile_left"][:] if "tactile_left" in ep else None
        tactile_right = ep["tactile_right"][:] if "tactile_right" in ep else None
        rewards = ep["rewards"][:] if "rewards" in ep else None

    n_frames = len(agentview) if agentview is not None else 0
    # Tactile samples per camera frame, derived from metadata.
    tactile_ratio = max(1, int(round(tactile_freq / control_freq)))

    if n_frames == 0:
        print("No image data to visualize.")
        return

    # Require BOTH finger arrays before doing tactile math (the old code
    # only checked the left one in two places and would crash otherwise).
    have_tactile = tactile_left is not None and tactile_right is not None

    print(f"Frames: {n_frames}, Tactile samples: {len(tactile_left) if tactile_left is not None else 0}")

    fig = plt.figure(figsize=(16, 10))
    gs = GridSpec(3, 4, figure=fig, hspace=0.35, wspace=0.3)

    # Top two rows: the two camera views side by side.
    ax_agent = fig.add_subplot(gs[0:2, 0:2])
    ax_agent.set_title("AgentView Camera", fontsize=12, fontweight="bold")
    ax_agent.axis("off")

    ax_hand = fig.add_subplot(gs[0:2, 2:4])
    ax_hand.set_title("Eye-in-Hand Camera", fontsize=12, fontweight="bold")
    ax_hand.axis("off")

    # Bottom row: per-finger heatmaps plus the force time series.
    ax_tleft = fig.add_subplot(gs[2, 0])
    ax_tleft.set_title("Left Finger Tactile", fontsize=10, fontweight="bold")

    ax_tright = fig.add_subplot(gs[2, 1])
    ax_tright.set_title("Right Finger Tactile", fontsize=10, fontweight="bold")

    ax_force = fig.add_subplot(gs[2, 2:4])
    ax_force.set_title("Tactile Force Magnitude", fontsize=10, fontweight="bold")
    ax_force.set_xlabel("Step")
    ax_force.set_ylabel("Force (N)")

    if have_tactile:
        # Mean per-pad force magnitude per tactile sample, full episode.
        left_mag_all = np.linalg.norm(tactile_left, axis=-1).mean(axis=(1, 2))
        right_mag_all = np.linalg.norm(tactile_right, axis=-1).mean(axis=(1, 2))
    else:
        left_mag_all = np.zeros(1)
        right_mag_all = np.zeros(1)

    # Shared color scale; the 0.1 floor avoids a degenerate all-zero range.
    vmax = max(left_mag_all.max(), right_mag_all.max(), 0.1)

    img_agent = ax_agent.imshow(agentview[0])
    img_hand = ax_hand.imshow(eye_in_hand[0] if eye_in_hand is not None
                              else np.zeros((256, 256, 3), dtype=np.uint8))

    if have_tactile:
        left_mag = np.linalg.norm(tactile_left[0], axis=-1)
        right_mag = np.linalg.norm(tactile_right[0], axis=-1)
    else:
        left_mag = np.zeros((4, 4))
        right_mag = np.zeros((4, 4))

    hm_left = ax_tleft.imshow(left_mag, cmap="hot", vmin=0, vmax=vmax,
                              interpolation="nearest", aspect="equal")
    hm_right = ax_tright.imshow(right_mag, cmap="hot", vmin=0, vmax=vmax,
                                interpolation="nearest", aspect="equal")
    plt.colorbar(hm_left, ax=ax_tleft, fraction=0.046)
    plt.colorbar(hm_right, ax=ax_tright, fraction=0.046)

    # Per-cell numeric overlays; text color flips on a dark background.
    left_texts = []
    right_texts = []
    for ri in range(4):
        for ci in range(4):
            for mag, texts, ax in ((left_mag, left_texts, ax_tleft),
                                   (right_mag, right_texts, ax_tright)):
                val = mag[ri, ci]
                color = "white" if val > vmax * 0.5 else "black"
                texts.append(ax.text(ci, ri, f"{val:.1f}", ha="center",
                                     va="center", fontsize=6, color=color,
                                     fontweight="bold"))

    for ax in (ax_tleft, ax_tright):
        ax.set_xticks(range(4))
        ax.set_yticks(range(4))
        ax.set_xticklabels([f"c{i}" for i in range(4)], fontsize=7)
        ax.set_yticklabels([f"r{i}" for i in range(4)], fontsize=7)

    line_left, = ax_force.plot([], [], "b-", label="Left finger", linewidth=1)
    line_right, = ax_force.plot([], [], "r-", label="Right finger", linewidth=1)
    ax_force.legend(fontsize=8)
    ax_force.set_xlim(0, len(left_mag_all))
    ax_force.set_ylim(0, vmax * 1.1)
    # Vertical cursor marking the current tactile sample during playback.
    vline = ax_force.axvline(x=0, color="gray", linestyle="--", alpha=0.5)

    # Plot the full series once; only the cursor moves during playback.
    line_left.set_data(range(len(left_mag_all)), left_mag_all)
    line_right.set_data(range(len(right_mag_all)), right_mag_all)

    fig.suptitle("Tactile Manipulation Data Viewer", fontsize=14, fontweight="bold")

    # Replay at the recorded control rate, scaled by playback_speed.
    delay = (1.0 / control_freq) / playback_speed

    plt.ion()
    plt.show()

    try:
        for frame_idx in range(n_frames):
            img_agent.set_data(agentview[frame_idx])
            if eye_in_hand is not None:
                img_hand.set_data(eye_in_hand[frame_idx])

            if have_tactile:
                # Most recent tactile sample belonging to this camera frame.
                t_idx = min(frame_idx * tactile_ratio + tactile_ratio - 1,
                            len(tactile_left) - 1)
                left_mag = np.linalg.norm(tactile_left[t_idx], axis=-1)
                right_mag = np.linalg.norm(tactile_right[t_idx], axis=-1)
                hm_left.set_data(left_mag)
                hm_right.set_data(right_mag)

                for ri in range(4):
                    for ci in range(4):
                        idx = ri * 4 + ci
                        lv = left_mag[ri, ci]
                        left_texts[idx].set_text(f"{lv:.1f}")
                        left_texts[idx].set_color("white" if lv > vmax * 0.5 else "black")
                        rv = right_mag[ri, ci]
                        right_texts[idx].set_text(f"{rv:.1f}")
                        right_texts[idx].set_color("white" if rv > vmax * 0.5 else "black")

                vline.set_xdata([frame_idx * tactile_ratio])

            reward_str = f", Reward: {rewards[frame_idx]:.3f}" if rewards is not None else ""
            fig.suptitle(
                f"Step {frame_idx}/{n_frames}{reward_str}",
                fontsize=14, fontweight="bold"
            )

            fig.canvas.draw_idle()
            fig.canvas.flush_events()
            plt.pause(delay)

    except KeyboardInterrupt:
        # Ctrl-C stops playback but still leaves the final frame on screen.
        pass

    plt.ioff()
    print("\nVisualization complete. Close the window to exit.")
    plt.show()
|
|
|
|
def visualize_live(task_name, n_episodes=1):
    """
    Live visualization during data collection.

    Renders the MuJoCo scene and shows real-time tactile force distributions.

    Args:
        task_name (str): key into TASK_CONFIGS selecting the env and plan.
        n_episodes (int): accepted for API symmetry; this function runs a
            single episode (the value is not read below).
    """
    import matplotlib
    matplotlib.use("TkAgg")  # interactive backend needed for live updates
    import matplotlib.pyplot as plt
    from matplotlib.gridspec import GridSpec

    from tactile_tasks.uskin_sensor import USkinSensor
    from tactile_tasks.motion_planner import MotionPlanner
    from tactile_tasks.collect_data import create_env, TASK_CONFIGS, collect_episode

    config = TASK_CONFIGS[task_name]

    # On-screen renderer enabled so the sim window shows next to the plots.
    env = create_env(task_name, has_renderer=True)
    obs = env.reset()

    # NOTE(review): gripper_prefix/noise_std semantics come from the project
    # USkinSensor class — not visible here, so not documented further.
    tactile = USkinSensor(env.sim, gripper_prefix="gripper0_right_", noise_std=0.02)
    planner = MotionPlanner(env, tactile_sensor=tactile)

    # Figure layout: [left heatmap | right heatmap | force history].
    fig, axes = plt.subplots(1, 3, figsize=(14, 4))

    ax_left = axes[0]
    ax_left.set_title("Left Finger Tactile (4x4)", fontweight="bold")
    hm_left = ax_left.imshow(np.zeros((4, 4)), cmap="hot", vmin=0, vmax=2.0,
                             interpolation="nearest", aspect="equal")
    plt.colorbar(hm_left, ax=ax_left, label="Force (N)")

    ax_right = axes[1]
    ax_right.set_title("Right Finger Tactile (4x4)", fontweight="bold")
    hm_right = ax_right.imshow(np.zeros((4, 4)), cmap="hot", vmin=0, vmax=2.0,
                               interpolation="nearest", aspect="equal")
    plt.colorbar(hm_right, ax=ax_right, label="Force (N)")

    ax_force = axes[2]
    ax_force.set_title("Force History", fontweight="bold")
    ax_force.set_xlabel("Step")
    ax_force.set_ylabel("Avg Force (N)")
    left_history = []   # per-step mean force, left finger
    right_history = []  # per-step mean force, right finger
    line_l, = ax_force.plot([], [], "b-", label="Left")
    line_r, = ax_force.plot([], [], "r-", label="Right")
    ax_force.legend()
    ax_force.set_ylim(0, 3)

    for ax in [ax_left, ax_right]:
        ax.set_xticks(range(4))
        ax.set_yticks(range(4))

    plt.ion()
    plt.tight_layout()
    plt.show()

    # Build the phase list for this task; each phase is (name, init_fn).
    plan_fn = config["plan_fn"]
    phases = plan_fn(planner, env)
    current_phase_idx = 0
    phase_name, phase_init = phases[current_phase_idx]
    phase_init()
    print(f"Phase: {phase_name}")

    step = 0
    try:
        while step < config["horizon"]:
            action, phase_done = planner.get_action()

            # Tactile runs at a multiple of the control rate: sample it
            # FREQ_MULTIPLIER times per control step, before stepping.
            for _ in range(USkinSensor.FREQ_MULTIPLIER):
                tactile_data = tactile.update()

            obs, reward, done, info = env.step(action)

            env.render()

            # Refresh heatmaps from the latest tactile readings.
            mags = tactile.get_force_magnitudes()
            hm_left.set_data(mags["left_finger"])
            hm_right.set_data(mags["right_finger"])

            # Autoscale color range; 0.5 floor keeps a sane scale at rest.
            cur_max = max(mags["left_finger"].max(), mags["right_finger"].max(), 0.5)
            hm_left.set_clim(0, cur_max)
            hm_right.set_clim(0, cur_max)

            # Append to and redraw the force-history traces.
            left_history.append(mags["left_finger"].mean())
            right_history.append(mags["right_finger"].mean())
            line_l.set_data(range(len(left_history)), left_history)
            line_r.set_data(range(len(right_history)), right_history)
            ax_force.set_xlim(0, max(len(left_history), 10))
            ax_force.set_ylim(0, max(max(left_history + [0.5]), max(right_history + [0.5])) * 1.2)

            fig.suptitle(f"Step {step} | Phase: {phase_name} | Reward: {reward:.3f}", fontweight="bold")
            fig.canvas.draw_idle()
            fig.canvas.flush_events()

            step += 1

            # Advance the motion plan when the planner reports phase done.
            if phase_done:
                current_phase_idx += 1
                if current_phase_idx < len(phases):
                    phase_name, phase_init = phases[current_phase_idx]
                    phase_init()
                    print(f"Phase: {phase_name}")
                else:
                    print("All phases complete!")
                    break

            if done:
                break

    except KeyboardInterrupt:
        # Ctrl-C ends the episode early; fall through to cleanup below.
        pass

    print(f"Episode done. Steps: {step}, Success: {env._check_success()}")
    plt.ioff()
    plt.show()
    env.close()
|
|
|
|
def generate_video(data_file, episode_idx=None, output_path=None, fps=20, show_tactile=True):
    """
    Generate MP4 video from a per-episode HDF5 file using ffmpeg.

    Renders one PNG per camera frame (optionally with tactile heatmaps and
    a force time series) into a temp directory, then encodes with libx264.

    Args:
        data_file (str): path to per-episode HDF5 file
        episode_idx: deprecated; only used to suffix the default output name
        output_path (str): output video path (default: auto-generated)
        fps (int): frames per second
        show_tactile (bool): whether to include tactile heatmaps
    """
    import subprocess
    import tempfile
    import shutil

    try:
        import matplotlib
        matplotlib.use("Agg")  # headless backend: we only render to files
        import matplotlib.pyplot as plt
        from matplotlib.gridspec import GridSpec
    except ImportError:
        print("matplotlib is required for video generation")
        return

    # ffmpeg does the actual encoding; bail out early if it is missing.
    if shutil.which("ffmpeg") is None:
        print("ERROR: ffmpeg not found. Install it: sudo apt install ffmpeg")
        return

    with h5py.File(data_file, "r") as f:
        meta = f["metadata"]
        task_name = meta.attrs["task"]
        # Tactile-to-camera ratio derived from the recorded frequencies
        # (previously hard-coded to 5).
        control_freq = float(meta.attrs.get("control_freq", 20.0))
        tactile_freq = float(meta.attrs.get("tactile_freq", control_freq * 5.0))

        # Per-episode file layout: episode attrs/datasets live at the root.
        success = f.attrs.get("success", False)
        n_steps = f.attrs.get("n_steps", 0)

        agentview = f["agentview_image"][:] if "agentview_image" in f else None
        eye_in_hand = f["eye_in_hand_image"][:] if "eye_in_hand_image" in f else None
        tactile_left = f["tactile_left"][:] if "tactile_left" in f else None
        tactile_right = f["tactile_right"][:] if "tactile_right" in f else None
        rewards = f["rewards"][:] if "rewards" in f else None

    if agentview is None or len(agentview) == 0:
        print("No image data to render.")
        return

    n_frames = len(agentview)
    tactile_ratio = max(1, int(round(tactile_freq / control_freq)))

    if output_path is None:
        base = os.path.splitext(data_file)[0]
        # episode_idx is deprecated and defaults to None; the old format
        # string ("{episode_idx:04d}") crashed with TypeError on None.
        suffix = f"_ep{episode_idx:04d}" if episode_idx is not None else ""
        output_path = f"{base}{suffix}.mp4"

    print(f"Generating video: {output_path}")
    print(f" Task: {task_name}, Episode: {episode_idx}, Steps: {n_steps}, Success: {success}")
    print(f" Frames: {n_frames}, FPS: {fps}")

    # Require BOTH finger arrays (the old code only checked the left one,
    # then crashed taking the norm of a missing right array).
    if show_tactile and tactile_left is not None and tactile_right is not None:
        left_mag_all = np.linalg.norm(tactile_left, axis=-1)
        right_mag_all = np.linalg.norm(tactile_right, axis=-1)
        # Shared color scale; 0.1 floor avoids a degenerate all-zero range.
        vmax = max(left_mag_all.max(), right_mag_all.max(), 0.1)
        left_avg = left_mag_all.mean(axis=(1, 2))
        right_avg = right_mag_all.mean(axis=(1, 2))
    else:
        show_tactile = False

    # Layout: cameras on top; tactile panels only when data is present.
    if show_tactile:
        fig = plt.figure(figsize=(12, 8), dpi=100)
        gs = GridSpec(3, 4, figure=fig, hspace=0.3, wspace=0.3)
        ax_agent = fig.add_subplot(gs[0:2, 0:2])
        ax_hand = fig.add_subplot(gs[0:2, 2:4])
        ax_tleft = fig.add_subplot(gs[2, 0])
        ax_tright = fig.add_subplot(gs[2, 1])
        ax_force = fig.add_subplot(gs[2, 2:4])
    else:
        fig, axes = plt.subplots(1, 2, figsize=(10, 5), dpi=100)
        ax_agent, ax_hand = axes

    def _draw_finger(ax, mag, title):
        """Render one 4x4 finger heatmap with per-cell force labels."""
        ax.imshow(mag, cmap="hot", vmin=0, vmax=vmax, interpolation="nearest")
        ax.set_title(title, fontsize=9)
        ax.set_xticks(range(4))
        ax.set_yticks(range(4))
        for ri in range(4):
            for ci in range(4):
                val = mag[ri, ci]
                color = "white" if val > vmax * 0.5 else "black"
                ax.text(ci, ri, f"{val:.1f}", ha="center", va="center",
                        fontsize=6, color=color, fontweight="bold")

    tmpdir = tempfile.mkdtemp(prefix="tactile_video_")

    try:
        for frame_idx in range(n_frames):
            # Reuse one figure across frames; clear axes each iteration.
            if show_tactile:
                for ax in [ax_agent, ax_hand, ax_tleft, ax_tright, ax_force]:
                    ax.clear()
            else:
                ax_agent.clear()
                ax_hand.clear()

            ax_agent.imshow(agentview[frame_idx])
            ax_agent.set_title("AgentView", fontsize=10, fontweight="bold")
            ax_agent.axis("off")

            if eye_in_hand is not None and len(eye_in_hand) > frame_idx:
                ax_hand.imshow(eye_in_hand[frame_idx])
                ax_hand.set_title("Eye-in-Hand", fontsize=10, fontweight="bold")
            ax_hand.axis("off")

            if show_tactile:
                # Latest tactile sample belonging to this camera frame.
                t_idx = min(frame_idx * tactile_ratio + tactile_ratio - 1,
                            len(tactile_left) - 1)
                _draw_finger(ax_tleft, np.linalg.norm(tactile_left[t_idx], axis=-1),
                             "Left Finger")
                _draw_finger(ax_tright, np.linalg.norm(tactile_right[t_idx], axis=-1),
                             "Right Finger")

                # Force history up to (and including) the current sample.
                t_end = min((frame_idx + 1) * tactile_ratio, len(left_avg))
                ax_force.plot(range(t_end), left_avg[:t_end], "b-", label="Left", linewidth=1)
                ax_force.plot(range(t_end), right_avg[:t_end], "r-", label="Right", linewidth=1)
                ax_force.axvline(x=t_end - 1, color="gray", linestyle="--", alpha=0.5)
                ax_force.set_xlim(0, len(left_avg))
                ax_force.set_ylim(0, vmax * 1.1)
                ax_force.set_xlabel("Tactile Sample", fontsize=8)
                ax_force.set_ylabel("Force (N)", fontsize=8)
                ax_force.legend(fontsize=7, loc="upper right")
                ax_force.set_title("Tactile Force", fontsize=9)

            reward_str = f" R={rewards[frame_idx]:.2f}" if rewards is not None else ""
            status = "SUCCESS" if success else "RUNNING"
            fig.suptitle(
                f"{task_name} | Step {frame_idx+1}/{n_frames}{reward_str} | {status}",
                fontsize=12, fontweight="bold"
            )

            frame_path = os.path.join(tmpdir, f"frame_{frame_idx:06d}.png")
            fig.savefig(frame_path, bbox_inches="tight", pad_inches=0.1)

            if frame_idx % 50 == 0:
                print(f" Rendered frame {frame_idx+1}/{n_frames}")

        # All frames written; release the figure before encoding.
        plt.close(fig)

        print(" Encoding video with ffmpeg...")
        ffmpeg_cmd = [
            "ffmpeg", "-y",
            "-framerate", str(fps),
            "-i", os.path.join(tmpdir, "frame_%06d.png"),
            # libx264 with yuv420p needs even dimensions; pad by <=1px.
            "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2",
            "-c:v", "libx264",
            "-pix_fmt", "yuv420p",
            "-crf", "23",
            "-preset", "medium",
            output_path,
        ]
        result = subprocess.run(ffmpeg_cmd, capture_output=True, text=True)
        if result.returncode != 0:
            print(f" ffmpeg error: {result.stderr[:500]}")
        else:
            file_size = os.path.getsize(output_path) / 1024
            print(f" Video saved: {output_path} ({file_size:.0f} KB)")

    finally:
        # Always remove the temp frame directory, even on error/Ctrl-C.
        shutil.rmtree(tmpdir, ignore_errors=True)
|
|
|
|
def generate_all_videos(data_dir, output_dir=None, fps=20, show_tactile=True):
    """Generate one MP4 per HDF5 episode file found in *data_dir*.

    The collection format is one episode per HDF5 file (see generate_video,
    which reads the episode from the file root and ignores episode_idx), so
    each file yields exactly one video named after the file. The previous
    implementation looked for "episode_*" groups inside each file, which do
    not exist in this layout, and therefore produced no videos at all.

    Args:
        data_dir (str): directory containing *.hdf5 episode files.
        output_dir (str): where to write videos (default: data_dir/videos).
        fps (int): frames per second, forwarded to generate_video.
        show_tactile (bool): include tactile panels, forwarded on.
    """
    if output_dir is None:
        output_dir = os.path.join(data_dir, "videos")
    os.makedirs(output_dir, exist_ok=True)

    hdf5_files = sorted(f for f in os.listdir(data_dir) if f.endswith(".hdf5"))
    if not hdf5_files:
        print(f"No HDF5 files found in {data_dir}")
        return

    for hdf5_file in hdf5_files:
        filepath = os.path.join(data_dir, hdf5_file)
        stem = os.path.splitext(hdf5_file)[0]
        output_path = os.path.join(output_dir, f"{stem}.mp4")
        # episode_idx is deprecated in generate_video; pass None explicitly.
        generate_video(filepath, None, output_path, fps, show_tactile)

    print(f"\nAll videos saved to: {output_dir}")
|
|
|
|
def print_data_summary(data_file):
    """Print summary of saved data file.

    Lists the metadata attributes, then every "episode_*" group with its
    success/step attributes and the shape/dtype of each dataset.
    """
    with h5py.File(data_file, "r") as handle:
        print(f"\nData file: {data_file}")
        print(f"{'='*50}")

        # Dump all top-level metadata attributes, if the group exists.
        meta = handle.get("metadata")
        if meta is not None:
            for attr_name in meta.attrs:
                print(f" {attr_name}: {meta.attrs[attr_name]}")

        episode_names = sorted(k for k in handle.keys() if k.startswith("episode_"))
        print(f"\nEpisodes: {len(episode_names)}")

        # Per-episode breakdown: attributes first, then dataset shapes.
        for name in episode_names:
            grp = handle[name]
            print(f"\n {name}:")
            print(f" Success: {grp.attrs.get('success', 'N/A')}")
            print(f" Steps: {grp.attrs.get('n_steps', 'N/A')}")
            for dset_name in grp:
                dset = grp[dset_name]
                print(f" {dset_name}: shape={dset.shape}, dtype={dset.dtype}")
|
|
|
|
def main():
    """CLI entry point: parse arguments and dispatch to the chosen mode."""
    parser = argparse.ArgumentParser(description="Visualize tactile manipulation data")
    parser.add_argument("--data_file", type=str, default=None,
                        help="HDF5 data file to visualize")
    parser.add_argument("--episode", type=int, default=0,
                        help="Episode index to visualize")
    parser.add_argument("--speed", type=float, default=1.0,
                        help="Playback speed multiplier")
    parser.add_argument("--task", type=str, default="precision_grasp",
                        help="Task name for live visualization")
    parser.add_argument("--live", action="store_true",
                        help="Live visualization during collection")
    parser.add_argument("--summary", action="store_true",
                        help="Print data file summary")
    parser.add_argument("--video", action="store_true",
                        help="Generate MP4 video from saved data")
    parser.add_argument("--video_all", type=str, default=None,
                        help="Generate videos for all episodes in data directory")
    parser.add_argument("--output", type=str, default=None,
                        help="Output video file path")
    parser.add_argument("--fps", type=int, default=20,
                        help="Video frames per second")
    parser.add_argument("--no_tactile", action="store_true",
                        help="Exclude tactile data from video")
    args = parser.parse_args()

    # Dispatch by priority: batch video > single video > summary > live >
    # offline playback. Guard clauses replace the original if/elif chain.
    if args.video_all:
        generate_all_videos(args.video_all, fps=args.fps, show_tactile=not args.no_tactile)
        return
    if args.video and args.data_file:
        generate_video(args.data_file, args.episode, args.output, args.fps, not args.no_tactile)
        return
    if args.summary and args.data_file:
        print_data_summary(args.data_file)
        return
    if args.live:
        visualize_live(args.task)
        return
    if args.data_file:
        visualize_offline(args.data_file, args.episode, args.speed)
        return

    # No actionable arguments: show usage examples.
    print("Usage:")
    print(" Offline viz: python visualize_data.py --data_file data.hdf5 --episode 0")
    print(" Generate video: python visualize_data.py --data_file data.hdf5 --episode 0 --video")
    print(" All videos: python visualize_data.py --video_all ./tactile_data/")
    print(" Summary: python visualize_data.py --data_file data.hdf5 --summary")
    print(" Live viz: python visualize_data.py --task precision_grasp --live")
|
|
|
# Script entry point: only run the CLI when executed directly, not imported.
if __name__ == "__main__":
    main()
|
|