| import os |
| import json |
| import numpy as np |
| import matplotlib.pyplot as plt |
| from tqdm import tqdm |
| import time |
|
|
| |
| from action_state.utils import ( |
| CO3DDataLoader, |
| get_camera_center, |
| get_view_direction, |
| get_sequence_geometry |
| ) |
|
|
| |
| |
| |
# --- Pipeline configuration -------------------------------------------------

# Root of the CO3D dataset; per-category data is expected under
# <ROOT_PATH>/data/original/<category>/ (scanned by main()).
ROOT_PATH = "/run/determined/NAS1/public/lixinyuan/interleaved-co3d"

# Category selector: None -> process every category found on disk;
# a string or a list of strings -> process only those categories.
CATEGORY = None

# Trajectory plots for KEPT sequences are written here (one PNG per sequence,
# grouped by category).
IMAGE_OUTPUT_DIR = "./debug/traj/"
# Per-category keep.json lists and the global statistics.json are written here.
JSON_OUTPUT_DIR = "./data/filter_log/"
|
|
def get_pca_axis_ratio(coords):
    """Return the major/minor PCA axis ratio of a set of 2-D points.

    A ratio near 1 means the points spread isotropically (e.g. a circular
    orbit); a large ratio means they lie close to a straight line.

    Args:
        coords: (N, 2) array-like of 2-D coordinates.

    Returns:
        float: sqrt(lambda_max) / sqrt(lambda_min) of the covariance matrix,
        or the sentinel 999.0 when undefined (fewer than 3 points, or a
        degenerate minor axis).
    """
    coords = np.asarray(coords, dtype=float)
    if len(coords) < 3:
        return 999.0
    centered = coords - np.mean(coords, axis=0)
    cov = np.cov(centered.T)
    # eigvalsh is the correct tool for a symmetric covariance matrix: it
    # guarantees real eigenvalues in ascending order, whereas eig may return
    # complex values with tiny imaginary parts, breaking sort/sqrt below.
    eigenvalues = np.linalg.eigvalsh(cov)
    # Clamp tiny negative eigenvalues caused by floating-point round-off.
    eigenvalues = np.clip(eigenvalues, 0.0, None)
    minor, major = eigenvalues[0], eigenvalues[-1]
    if minor < 1e-6:
        return 999.0
    return float(np.sqrt(major) / np.sqrt(minor))
|
|
def analyze_trajectory_robust(seq_data, mean_center):
    """Compute robustness metrics for a camera trajectory in the XZ plane.

    V3 of the analysis: adds sinuosity (path wiggliness) and a strict
    angular-monotonicity check on top of sweep / radius / jump statistics.

    Args:
        seq_data: mapping frame_id -> {'R': (3,3), 'T': (3,)} camera extrinsics.
        mean_center: 3-vector scene center the trajectory is measured against.

    Returns:
        dict with 'valid': True plus the metric values, or
        {'valid': False, 'reason': ...} when there are too few frames.
    """
    ordered_frames = sorted(seq_data.keys())
    if len(ordered_frames) < 10:
        return {'valid': False, 'reason': 'too_few_frames'}

    # Camera centers (C = -R^T T) relative to the scene center, projected
    # onto the horizontal XZ plane.
    centers = [-seq_data[fid]['R'].T @ seq_data[fid]['T'] for fid in ordered_frames]
    xz = np.array([[c[0] - mean_center[0], c[2] - mean_center[2]] for c in centers])

    # Azimuth sweep: unwrap so a full orbit is not folded into [-pi, pi].
    theta = np.unwrap(np.arctan2(xz[:, 1], xz[:, 0]))
    sweep_rad = np.max(theta) - np.min(theta)

    # Monotonicity: fraction of non-trivial angular steps sharing one sign.
    dtheta = np.diff(theta)
    significant = dtheta[np.abs(dtheta) > np.radians(0.5)]
    if len(significant) == 0:
        monotonicity = 0.0
    else:
        monotonicity = max(np.sum(significant > 0), np.sum(significant < 0)) / len(significant)

    radii = np.linalg.norm(xz, axis=1)
    r_lo = np.percentile(radii, 5)
    r_hi = np.percentile(radii, 95)

    # Per-frame displacement magnitudes; a large max/median ratio flags jumps.
    step_lengths = np.linalg.norm(np.diff(xz, axis=0), axis=1)

    # Sinuosity: actual path length vs. the ideal arc at the mean radius.
    arc_length = sweep_rad * np.mean(radii)
    if arc_length > 1e-3:
        sinuosity = np.sum(step_lengths) / (arc_length + 1e-3)
    else:
        sinuosity = 999.0

    return {
        'valid': True,
        'sweep_deg': np.degrees(sweep_rad),
        'monotonicity': monotonicity,
        'axis_ratio': get_pca_axis_ratio(xz),
        'radius_ratio': r_lo / (r_hi + 1e-6),
        'jump_factor': np.max(step_lengths) / (np.median(step_lengths) + 1e-6),
        'sinuosity': sinuosity,
        'num_frames': len(ordered_frames),
    }
|
|
def check_if_sequence_is_good(metrics):
    """Apply the V3 filtering thresholds to trajectory metrics.

    Args:
        metrics: dict produced by analyze_trajectory_robust().

    Returns:
        (bool, str): keep/reject decision and a human-readable reason.
    """
    if not metrics['valid']:
        return False, metrics['reason']

    sweep = metrics['sweep_deg']
    mono = metrics['monotonicity']

    # V3 thresholds.
    min_sweep_deg = 120.0
    min_monotonicity = 0.70
    max_axis_ratio = 6.0
    min_radius_ratio = 0.3
    max_jump_factor = 5.0
    max_sinuosity = 2.0

    # Hard rejections that apply regardless of sweep class.
    if metrics['jump_factor'] > max_jump_factor:
        return False, f"Jump ({metrics['jump_factor']:.1f})"
    if metrics['radius_ratio'] < min_radius_ratio:
        return False, f"Unstable Radius ({metrics['radius_ratio']:.2f})"
    if sweep > 60.0 and metrics['sinuosity'] > max_sinuosity:
        return False, f"Jittery/Wavy ({metrics['sinuosity']:.2f})"

    # Classify by total angular sweep.
    if sweep > 270.0:
        if mono < min_monotonicity:
            return False, f"Messy Loop ({mono:.2f})"
        return True, "Full Loop"
    if sweep > min_sweep_deg:
        if metrics['axis_ratio'] > max_axis_ratio:
            return False, f"Linear ({metrics['axis_ratio']:.1f})"
        if mono < min_monotonicity:
            return False, f"Messy Semi ({mono:.2f})"
        return True, "Semi Loop"
    return False, f"Small Angle ({sweep:.1f})"
|
|
| |
| |
| |
|
|
def plot_sequence_trajectory(loader, sequence_name, output_path, metrics):
    """Render and save a top-down (XZ) plot of a sequence's camera trajectory.

    Points are colour-coded by frame index and centred on the scene center.
    Plotting failures are logged and swallowed so a single bad sequence
    cannot abort the filtering pipeline.

    Args:
        loader: data loader holding the raw per-sequence camera data.
        sequence_name: name of the sequence inside the loader.
        output_path: destination PNG path (parent dirs are created).
        metrics: metrics dict from analyze_trajectory_robust(), shown in the title.
    """
    try:
        ordered_frames = sorted(loader.get_frames(sequence_name))
        raw_seq = loader.seq_data[sequence_name]
        mean_center, _, aligned = get_sequence_geometry(raw_seq, align_to_standard=True)

        centers = np.array([
            get_camera_center(aligned[fid]['R'], aligned[fid]['T'])
            for fid in ordered_frames
        ])

        # Project onto the XZ plane relative to the scene center.
        xs = centers[:, 0] - mean_center[0]
        zs = centers[:, 2] - mean_center[2]

        fig, ax = plt.subplots(1, 1, figsize=(10, 8))
        ax.plot(xs, zs, c='lightgray', alpha=0.5, linestyle='--')
        ax.scatter(xs, zs, c=ordered_frames, cmap='viridis', s=30, zorder=5)
        ax.scatter(0, 0, c='black', marker='X', s=200, label='Center', zorder=10)

        ax.set_title(
            f"Seq: {sequence_name}\n"
            f"Sweep={metrics['sweep_deg']:.0f}°, Mono={metrics['monotonicity']:.2f}, "
            f"Sinuosity={metrics['sinuosity']:.2f}",
            fontsize=12,
        )
        ax.axis('equal')

        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        plt.savefig(output_path, dpi=100, bbox_inches='tight')
        plt.close(fig)
    except Exception as e:
        print(f"Error plotting {sequence_name}: {e}")
|
|
| |
| |
| |
|
|
def process_category(category_name, global_stats):
    """Filter every sequence of one category and record the results.

    Writes <JSON_OUTPUT_DIR>/<category>/keep.json with the accepted sequence
    names, saves a trajectory plot for each kept sequence, and stores the
    per-category statistics into *global_stats* (mutated in place).

    Args:
        category_name: CO3D category directory name.
        global_stats: dict collecting {category_name: stats} across categories.
    """
    print(f"\nProcessing Category: {category_name}")

    try:
        loader = CO3DDataLoader(ROOT_PATH, category_name)
        sequences = loader.get_sequences()
    except Exception as e:
        # A broken category must not abort the whole pipeline.
        print(f"Failed to load category {category_name}: {e}")
        return

    cat_img_dir = os.path.join(IMAGE_OUTPUT_DIR, category_name)
    cat_json_dir = os.path.join(JSON_OUTPUT_DIR, category_name)
    os.makedirs(cat_json_dir, exist_ok=True)

    keep_list = []
    stats = {
        'total': len(sequences),
        'kept': 0,
        'rejected': 0,
        'reasons': {}
    }

    for seq_name in tqdm(sequences, desc=f"Filtering {category_name}", leave=False):
        try:
            seq_data = loader.seq_data[seq_name]
            mean_center, _, aligned_seq_data = get_sequence_geometry(seq_data, align_to_standard=True)
            metrics = analyze_trajectory_robust(aligned_seq_data, mean_center)

            is_good, reason = check_if_sequence_is_good(metrics)

            if is_good:
                keep_list.append(seq_name)
                stats['kept'] += 1
                # Diagnostic plots are saved only for accepted sequences.
                img_path = os.path.join(cat_img_dir, f"{seq_name}.png")
                plot_sequence_trajectory(loader, seq_name, img_path, metrics)
            else:
                stats['rejected'] += 1
                # Aggregate by the reason label, dropping the "(value)" suffix.
                base_reason = reason.split('(')[0].strip()
                stats['reasons'][base_reason] = stats['reasons'].get(base_reason, 0) + 1

        except Exception as e:
            # Best-effort: count the failure and continue with the next sequence.
            print(f"Error processing {seq_name}: {e}")
            stats['rejected'] += 1
            stats['reasons']['Error'] = stats['reasons'].get('Error', 0) + 1

    keep_json_path = os.path.join(cat_json_dir, "keep.json")
    with open(keep_json_path, 'w') as f:
        json.dump(keep_list, f, indent=2)

    global_stats[category_name] = stats

    # Guard against an empty category (the original formula raised
    # ZeroDivisionError when stats['total'] == 0).
    pass_rate = stats['kept'] / stats['total'] * 100 if stats['total'] else 0.0
    print(f" -> Kept: {stats['kept']}/{stats['total']} ({pass_rate:.1f}%)")
    print(f" -> Saved keep list to: {keep_json_path}")
|
|
def main():
    """Run the V3 trajectory filter over all (or selected) CO3D categories."""
    start_time = time.time()
    banner = '=' * 60
    print(f"{banner}")
    print(f"CO3D Trajectory Filtering Pipeline (V3)")
    print(f"Root Path: {ROOT_PATH}")
    print(f"Output Images: {IMAGE_OUTPUT_DIR}")
    print(f"Output JSONs: {JSON_OUTPUT_DIR}")
    print(f"{banner}")

    # Resolve which categories to run: explicit config, or scan the data root.
    if CATEGORY:
        categories_to_process = CATEGORY if isinstance(CATEGORY, list) else [CATEGORY]
    else:
        data_root = os.path.join(ROOT_PATH, 'data', 'original')
        if not os.path.exists(data_root):
            print(f"Error: Data root {data_root} not found.")
            return
        categories_to_process = sorted(
            entry for entry in os.listdir(data_root)
            if os.path.isdir(os.path.join(data_root, entry))
        )

    print(f"Found {len(categories_to_process)} categories to process.")

    # Per-category stats are accumulated here by process_category().
    global_stats = {}
    for cat in tqdm(categories_to_process, desc="Total Progress"):
        process_category(cat, global_stats)

    # Persist the aggregated statistics.
    os.makedirs(JSON_OUTPUT_DIR, exist_ok=True)
    stats_path = os.path.join(JSON_OUTPUT_DIR, "statistics.json")

    total_seqs = sum(s['total'] for s in global_stats.values())
    total_kept = sum(s['kept'] for s in global_stats.values())

    final_report = {
        'summary': {
            'total_categories': len(global_stats),
            'total_sequences': total_seqs,
            'total_kept': total_kept,
            'overall_pass_rate': total_kept / total_seqs if total_seqs > 0 else 0
        },
        'details': global_stats
    }

    with open(stats_path, 'w') as f:
        json.dump(final_report, f, indent=2)

    print(f"\n{banner}")
    print(f"Pipeline Completed in {time.time()-start_time:.1f}s")
    print(f"Global Statistics saved to: {stats_path}")
    print(f"Overall Pass Rate: {final_report['summary']['overall_pass_rate']*100:.1f}%")
    print(f"{banner}")
|
|
| if __name__ == "__main__": |
| main() |
|
|