| |
| """ |
| Train 3D Gaussian Splats for all DX.GL multi-view datasets. |
| |
| Downloads datasets (if needed), trains each with nerfstudio splatfacto, |
| exports the PLY, and converts to .splat for web viewers. |
| |
| Usage: |
| python train_all.py # train all objects |
| python train_all.py --object apple # train specific object |
| python train_all.py --data-dir ./dxgl-datasets # custom dataset location |
| python train_all.py --output ./splats # custom output directory |
| python train_all.py --iterations 30000 # custom iteration count |
| python train_all.py --dry-run # show what would be trained |
| |
| Requires: |
| pip install nerfstudio requests plyfile numpy |
| """ |
|
|
| import argparse |
| import glob |
| import json |
| import os |
| import struct |
| import subprocess |
| import sys |
| import time |
|
|
| try: |
| import numpy as np |
| from plyfile import PlyData |
| except ImportError: |
| print("Please install dependencies: pip install plyfile numpy requests") |
| sys.exit(1) |
|
|
# Manifest checked in next to this script; preferred over the network
# download performed by load_manifest().
MANIFEST_LOCAL = os.path.join(os.path.dirname(os.path.abspath(__file__)), "manifest.json")


# Default ns-train iteration count; overridable via --iterations.
DEFAULT_ITERATIONS = 20000

# Extra splatfacto model flags appended to every ns-train invocation.
TRAIN_PARAMS = [
    "--pipeline.model.sh-degree", "3",
    "--pipeline.model.background-color", "white",
    "--pipeline.model.cull-alpha-thresh", "0.2",
    "--pipeline.model.densify-size-thresh", "0.005",
    "--pipeline.model.use-scale-regularization", "True",
    "--pipeline.model.max-gauss-ratio", "5.0",
]


# Zeroth-order spherical-harmonics basis constant, 1 / (2 * sqrt(pi)).
# Used in ply_to_splat to turn SH DC color coefficients into RGB.
SH_C0 = 0.28209479177387814
|
|
|
|
| def _ply_field(v, *names): |
| """Find the first matching field name in PLY vertex data.""" |
| available = v.data.dtype.names if hasattr(v.data, "dtype") else v.dtype.names |
| for name in names: |
| if name in available: |
| return v[name] |
| raise KeyError(f"No field matching: {names}. Available: {available}") |
|
|
|
|
def ply_to_splat(input_path: str, output_path: str):
    """Convert a nerfstudio Gaussian Splatting PLY to .splat format.

    Each gaussian becomes one 32-byte record: position (3 x float32),
    scale (3 x float32), RGBA color (4 x uint8), and rotation quaternion
    (4 x uint8 in w/x/y/z order, mapped from [-1, 1] to [0, 255]).
    Records are written sorted by descending opacity.

    Returns (num_gaussians, ply_size_mb, splat_size_mb).
    """
    ply = PlyData.read(input_path)
    v = ply["vertex"]
    n = len(v)

    xyz = np.column_stack([v["x"], v["y"], v["z"]]).astype(np.float32)

    # Scales are stored in log space; exponentiate to linear size.
    s0 = _ply_field(v, "f_scale_0", "scale_0", "sx")
    s1 = _ply_field(v, "f_scale_1", "scale_1", "sy")
    s2 = _ply_field(v, "f_scale_2", "scale_2", "sz")
    scales = np.exp(np.column_stack([s0, s1, s2])).astype(np.float32)

    # Opacity is stored as a logit; apply the sigmoid to get [0, 1].
    raw_opacity = _ply_field(v, "opacity", "f_opacity")
    opacity = 1.0 / (1.0 + np.exp(-raw_opacity.astype(np.float64)))

    # Color: values > 10 are assumed to already be 8-bit RGB; otherwise
    # they are SH DC coefficients converted via the order-0 basis SH_C0.
    dc0 = _ply_field(v, "f_dc_0", "f_rest_0", "red")
    dc1 = _ply_field(v, "f_dc_1", "f_rest_1", "green")
    dc2 = _ply_field(v, "f_dc_2", "f_rest_2", "blue")
    if dc0.max() > 10:
        r = np.clip(dc0, 0, 255).astype(np.uint8)
        g = np.clip(dc1, 0, 255).astype(np.uint8)
        b = np.clip(dc2, 0, 255).astype(np.uint8)
    else:
        r = np.clip((0.5 + SH_C0 * dc0) * 255, 0, 255).astype(np.uint8)
        g = np.clip((0.5 + SH_C0 * dc1) * 255, 0, 255).astype(np.uint8)
        b = np.clip((0.5 + SH_C0 * dc2) * 255, 0, 255).astype(np.uint8)
    a = np.clip(opacity * 255, 0, 255).astype(np.uint8)

    # Normalize the quaternion, then quantize each component from
    # [-1, 1] to a uint8 centered on 128. Record order is w, x, y, z.
    qw = _ply_field(v, "rot_0", "qw", "f_rot_0").astype(np.float64)
    qx = _ply_field(v, "rot_1", "qx", "f_rot_1").astype(np.float64)
    qy = _ply_field(v, "rot_2", "qy", "f_rot_2").astype(np.float64)
    qz = _ply_field(v, "rot_3", "qz", "f_rot_3").astype(np.float64)
    norm = np.sqrt(qw * qw + qx * qx + qy * qy + qz * qz)
    rot_wxyz = np.column_stack([qw, qx, qy, qz]) / norm[:, None]
    rot_u8 = np.clip(rot_wxyz * 128 + 128, 0, 255).astype(np.uint8)

    # Most-opaque gaussians first, so streaming viewers show them earliest.
    order = np.argsort(-opacity)

    # Assemble all 32-byte records in one vectorized pass. The previous
    # per-gaussian struct.pack_into loop was the conversion bottleneck;
    # explicit little-endian float32 fields also make the output byte
    # layout independent of the host's native endianness.
    rec = np.empty(n, dtype=[
        ("pos", "<f4", (3,)),
        ("scale", "<f4", (3,)),
        ("rgba", "u1", (4,)),
        ("rot", "u1", (4,)),
    ])
    rec["pos"] = xyz[order]
    rec["scale"] = scales[order]
    rec["rgba"] = np.column_stack([r, g, b, a])[order]
    rec["rot"] = rot_u8[order]
    payload = rec.tobytes()

    with open(output_path, "wb") as f:
        f.write(payload)

    ply_mb = os.path.getsize(input_path) / 1e6
    splat_mb = len(payload) / 1e6
    return n, ply_mb, splat_mb
|
|
|
|
| |
|
|
def find_latest_config(output_base: str, experiment_name: str):
    """Return the newest splatfacto config.yml for an experiment, or None.

    Nerfstudio writes one timestamped run directory per training; the
    lexicographically greatest path corresponds to the most recent run.
    """
    pattern = os.path.join(output_base, experiment_name, "splatfacto", "*", "config.yml")
    return max(glob.glob(pattern), default=None)
|
|
|
|
def train_splatfacto(data_dir: str, experiment_name: str, output_base: str,
                     max_iterations: int):
    """Launch ns-train splatfacto on a single dataset.

    Raises RuntimeError when the trainer exits with a non-zero status.
    """
    cmd = ["ns-train", "splatfacto"]
    cmd.extend(["--data", data_dir])
    cmd.extend(["--output-dir", output_base])
    cmd.extend(["--experiment-name", experiment_name])
    cmd.extend(["--max-num-iterations", str(max_iterations)])
    cmd.extend(TRAIN_PARAMS)
    print(f" Command: {' '.join(cmd)}")
    status = subprocess.run(cmd).returncode
    if status != 0:
        raise RuntimeError(f"ns-train failed with exit code {status}")
|
|
|
|
def export_ply(config_path: str, export_dir: str):
    """Run ns-export gaussian-splat and return the path of the exported PLY.

    Raises RuntimeError when the exporter exits non-zero, and
    FileNotFoundError when it succeeds but no known PLY file appears.
    """
    os.makedirs(export_dir, exist_ok=True)
    cmd = [
        "ns-export", "gaussian-splat",
        "--load-config", config_path,
        "--output-dir", export_dir,
    ]
    print(f" Export command: {' '.join(cmd)}")
    proc = subprocess.run(cmd)
    if proc.returncode != 0:
        raise RuntimeError(f"ns-export failed with exit code {proc.returncode}")

    # Accept either output filename: prefer splat.ply, fall back to
    # point_cloud.ply.
    for filename in ("splat.ply", "point_cloud.ply"):
        candidate = os.path.join(export_dir, filename)
        if os.path.exists(candidate):
            return candidate
    raise FileNotFoundError(f"No PLY found in {export_dir}")
|
|
|
|
| |
|
|
def load_manifest():
    """Load the dataset manifest, preferring a local copy over the network.

    Returns the parsed manifest dict. Exits the process (status 1) when no
    local manifest exists and it cannot be downloaded.
    """
    if os.path.exists(MANIFEST_LOCAL):
        with open(MANIFEST_LOCAL) as f:
            return json.load(f)

    # Only the import is guarded by ImportError; previously any network or
    # HTTP failure also escaped as an unhandled traceback.
    try:
        import requests
    except ImportError:
        print("manifest.json not found locally and requests not installed.")
        sys.exit(1)

    url = "https://huggingface.co/datasets/dxgl/multiview-datasets/resolve/main/manifest.json"
    print(f"Downloading manifest from {url} ...")
    try:
        # timeout= prevents the script from hanging forever on a dead link;
        # RequestException covers connection errors plus the HTTPError that
        # raise_for_status() throws on non-2xx responses.
        resp = requests.get(url, timeout=60)
        resp.raise_for_status()
        return resp.json()
    except requests.exceptions.RequestException as e:
        print(f"Failed to download manifest: {e}")
        sys.exit(1)
|
|
|
|
def main():
    """CLI entry point: train, export, and convert each selected dataset.

    For every manifest object: skip if its .splat already exists, locate
    the extracted dataset, optionally train with splatfacto, export the
    PLY, convert it to .splat, and finally print a per-status summary.
    """
    parser = argparse.ArgumentParser(
        description="Train 3DGS splats for all DX.GL multi-view datasets"
    )
    parser.add_argument("--object", default=None,
                        help="Train only a specific object (by name, case-insensitive)")
    parser.add_argument("--data-dir", default="./dxgl-datasets",
                        help="Directory containing extracted datasets (default: ./dxgl-datasets)")
    parser.add_argument("--output", default="./dxgl-splats",
                        help="Output directory for .ply and .splat files (default: ./dxgl-splats)")
    parser.add_argument("--ns-output", default="./ns-outputs",
                        help="Nerfstudio outputs/checkpoints directory (default: ./ns-outputs)")
    parser.add_argument("--iterations", type=int, default=DEFAULT_ITERATIONS,
                        help=f"Max training iterations (default: {DEFAULT_ITERATIONS})")
    parser.add_argument("--dry-run", action="store_true",
                        help="Show what would be trained without running")
    parser.add_argument("--export-only", action="store_true",
                        help="Skip training, only export/convert from existing ns-outputs")
    args = parser.parse_args()

    manifest = load_manifest()
    objects = manifest["objects"]

    # Optional case-insensitive substring filter on object names.
    if args.object:
        objects = [o for o in objects if args.object.lower() in o["name"].lower()]
        if not objects:
            print(f"No object matching '{args.object}' found in manifest.")
            sys.exit(1)

    os.makedirs(args.output, exist_ok=True)
    os.makedirs(args.ns_output, exist_ok=True)

    results = []       # one status dict per object, consumed by the summary
    total_time = 0     # wall-clock seconds across all processed objects

    for i, obj in enumerate(objects, 1):
        name = obj["name"]
        # Filesystem-friendly identifier derived from the object name.
        slug = name.lower().replace(" ", "_")
        splat_out = os.path.join(args.output, f"{slug}.splat")
        ply_out = os.path.join(args.output, f"{slug}.ply")

        print(f"\n{'='*60}")
        print(f"[{i}/{len(objects)}] {name}")
        print(f"{'='*60}")

        # Resume support: an existing .splat means this object is finished
        # (unless --export-only forces a re-export).
        if os.path.exists(splat_out) and not args.export_only:
            size_mb = os.path.getsize(splat_out) / 1e6
            print(f" ✓ Already done ({size_mb:.1f} MB .splat) — skipping")
            results.append({"name": name, "status": "skipped"})
            continue

        # Locate the dataset; some archives extract into a nested dataset/
        # subdirectory, so check there as well.
        data_dir = os.path.join(args.data_dir, slug)
        transforms = os.path.join(data_dir, "transforms.json")
        if not os.path.exists(transforms):
            nested = os.path.join(data_dir, "dataset", "transforms.json")
            if os.path.exists(nested):
                data_dir = os.path.join(data_dir, "dataset")
            else:
                print(f" ✗ Dataset not found at {data_dir}")
                print(f" Run download_all.py first, or specify --data-dir")
                results.append({"name": name, "status": "missing_data"})
                continue

        if args.dry_run:
            print(f" Would train: {data_dir}")
            print(f" Iterations: {args.iterations}")
            print(f" Output: {splat_out}")
            results.append({"name": name, "status": "dry_run"})
            continue

        t0 = time.time()
        experiment = slug

        # Stage 1: training (skipped with --export-only).
        if not args.export_only:
            print(f" Training splatfacto ({args.iterations} iterations) ...")
            try:
                train_splatfacto(data_dir, experiment, args.ns_output, args.iterations)
            except RuntimeError as e:
                print(f" ✗ Training failed: {e}")
                results.append({"name": name, "status": "train_error", "error": str(e)})
                continue

        # Stage 2: find the newest checkpoint config for this experiment.
        config_path = find_latest_config(args.ns_output, experiment)
        if not config_path:
            print(f" ✗ No config.yml found in {args.ns_output}/{experiment}/")
            results.append({"name": name, "status": "no_config"})
            continue

        print(f" Exporting PLY from {config_path} ...")
        export_dir = os.path.join(args.ns_output, experiment, "export")
        try:
            exported_ply = export_ply(config_path, export_dir)
        except (RuntimeError, FileNotFoundError) as e:
            print(f" ✗ Export failed: {e}")
            results.append({"name": name, "status": "export_error", "error": str(e)})
            continue

        # Keep a copy of the raw PLY next to the .splat output.
        # NOTE(review): this import runs once per loop iteration; it could
        # be hoisted to module level.
        import shutil
        shutil.copy2(exported_ply, ply_out)
        ply_mb = os.path.getsize(ply_out) / 1e6
        print(f" PLY: {ply_mb:.1f} MB → {ply_out}")

        # Stage 3: convert the PLY into the compact web .splat format.
        print(f" Converting to .splat ...")
        try:
            n_gaussians, _, splat_mb = ply_to_splat(ply_out, splat_out)
        except Exception as e:
            print(f" ✗ Conversion failed: {e}")
            results.append({"name": name, "status": "convert_error", "error": str(e)})
            continue

        elapsed = time.time() - t0
        total_time += elapsed
        print(f" ✓ Done: {n_gaussians:,} gaussians, {splat_mb:.1f} MB .splat ({elapsed:.0f}s)")
        results.append({
            "name": name, "status": "done",
            "gaussians": n_gaussians, "ply_mb": round(ply_mb, 1),
            "splat_mb": round(splat_mb, 1), "seconds": round(elapsed),
        })

    # Final summary grouped by outcome.
    print(f"\n{'='*60}")
    print("SUMMARY")
    print(f"{'='*60}")
    done = [r for r in results if r["status"] == "done"]
    skipped = [r for r in results if r["status"] == "skipped"]
    errors = [r for r in results if r["status"] not in ("done", "skipped", "dry_run")]

    if done:
        print(f"\n Trained: {len(done)}")
        for r in done:
            print(f" {r['name']}: {r['gaussians']:,} gaussians, "
                  f"{r['splat_mb']} MB, {r['seconds']}s")
    if skipped:
        print(f"\n Skipped (already done): {len(skipped)}")
    if errors:
        print(f"\n Errors: {len(errors)}")
        for r in errors:
            print(f" {r['name']}: {r['status']} — {r.get('error', '')}")

    if total_time > 0:
        print(f"\n Total training time: {total_time/60:.1f} minutes")
    print(f" Output: {os.path.abspath(args.output)}")


if __name__ == "__main__":
    main()
|
|