Tohru127 committed on
Commit
98d4821
·
verified ·
1 Parent(s): 78f747c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +61 -576
app.py CHANGED
@@ -1,584 +1,69 @@
1
- from __future__ import annotations
2
-
3
- import datetime as dt
4
- import io
5
- import json
6
  import os
7
  import shutil
8
  import subprocess
9
- import textwrap
10
- import uuid
11
- import zipfile
12
- from dataclasses import dataclass
13
  from pathlib import Path
14
- from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple
15
 
16
  import gradio as gr
 
 
17
  from PIL import Image
18
 
19
-
20
- def _run_command(command: List[str], cwd: Optional[Path] = None, env: Optional[Dict[str, str]] = None) -> Tuple[int, str]:
21
- """Execute a shell command and capture combined stdout/stderr."""
22
- process = subprocess.run(
23
- command,
24
- cwd=str(cwd) if cwd else None,
25
- env=env,
26
- stdout=subprocess.PIPE,
27
- stderr=subprocess.STDOUT,
28
- text=True,
29
- )
30
- return process.returncode, process.stdout
31
-
32
-
33
@dataclass
class Backend:
    """A named reconstruction backend registered with ReconstructionRunner.

    The ``runner`` callable receives (workspace, dataset_root, colmap_outputs,
    max_resolution) and returns (artifact_path, log_lines).
    """

    # Display name used as the registry key and UI dropdown entry.
    name: str
    # Human-readable summary shown beneath the backend selector.
    description: str
    # Pipeline entry point invoked by ReconstructionRunner.run().
    runner: Callable[[Path, Path, Optional[Dict[str, Path]], int], Tuple[Path, List[str]]]
38
-
39
-
40
class ReconstructionRunner:
    """Coordinate preprocessing, COLMAP pose estimation, and neural backends.

    Each call to :meth:`run` creates an isolated timestamped workspace under
    ``output_root``, ingests the uploaded images (files or ZIP archives),
    optionally runs COLMAP, and then hands off to the selected backend.
    It returns a log transcript plus, on success, the path to a packaged
    artifact (a ZIP archive).
    """

    def __init__(self, output_root: Optional[Path] = None) -> None:
        # HF3D_OUTPUT_ROOT lets deployments redirect artifacts (e.g. to a
        # persistent volume); /tmp is the safe default on hosted Spaces.
        root = output_root or Path(os.environ.get("HF3D_OUTPUT_ROOT", "/tmp/hf_3d_runs"))
        root.mkdir(parents=True, exist_ok=True)
        self.output_root = root
        self.backends: Dict[str, Backend] = {}
        self._register_default_backends()

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------
    def available_methods(self) -> List[str]:
        """Return registered backend names in registration order."""
        return list(self.backends.keys())

    def describe_backend(self, name: str) -> str:
        """Return the description of backend *name*, or "" if unknown."""
        backend = self.backends.get(name)
        return backend.description if backend else ""

    def run(
        self,
        uploads: Iterable[Any],
        method: str,
        max_resolution: int,
        skip_colmap: bool,
    ) -> Tuple[str, Optional[Path]]:
        """Execute the full pipeline for one user request.

        Returns (log_text, artifact_path). ``artifact_path`` is None on any
        failure; the log explains what went wrong.
        """
        logs: List[str] = []
        # Timezone-aware replacement for the deprecated datetime.utcnow();
        # strftime output is identical.
        timestamp = dt.datetime.now(dt.timezone.utc).strftime("%Y%m%d_%H%M%S")
        workspace = self.output_root / f"run_{timestamp}_{uuid.uuid4().hex[:8]}"
        dataset_root = workspace / "dataset"
        images_dir = dataset_root / "images"
        images_dir.mkdir(parents=True, exist_ok=True)
        logs.append(f"Workspace initialized at {workspace}")

        try:
            ingest_count = self._ingest_uploads(uploads, images_dir, max_resolution)
        except Exception as exc:  # noqa: BLE001 - top-level guard for user feedback
            logs.append(f"[ERROR] Failed to ingest inputs: {exc}")
            return "\n".join(logs), None

        if ingest_count == 0:
            logs.append("[ERROR] No images detected in upload. Provide JPG/PNG files or a ZIP archive.")
            return "\n".join(logs), None

        logs.append(f"Ingested {ingest_count} image(s). Max resolution capped at {max_resolution}px")

        colmap_outputs: Optional[Dict[str, Path]] = None
        if skip_colmap:
            logs.append("Skipping COLMAP as requested. Downstream models must rely on precomputed poses.")
        else:
            try:
                colmap_outputs, colmap_logs = self._run_colmap(images_dir, workspace / "colmap", max_resolution)
                logs.extend(colmap_logs)
            except FileNotFoundError as exc:
                logs.append(
                    textwrap.dedent(
                        f"""
                        [ERROR] Required binary `{exc}` was not found. Ensure COLMAP is installed or set
                        `skip_colmap=True` if you plan to upload precomputed camera poses.
                        """
                    ).strip()
                )
                return "\n".join(logs), None
            except RuntimeError as exc:
                logs.append(str(exc))
                return "\n".join(logs), None

        backend = self.backends.get(method)
        if not backend:
            logs.append(f"[ERROR] Unknown backend '{method}'. Available options: {', '.join(self.available_methods())}")
            return "\n".join(logs), None

        try:
            artifact_path, backend_logs = backend.runner(workspace, dataset_root, colmap_outputs, max_resolution)
            logs.extend(backend_logs)
        except Exception as exc:  # noqa: BLE001 - propagate details to UI
            logs.append(f"[ERROR] Backend '{method}' failed: {exc}")
            return "\n".join(logs), None

        logs.append(f"Artifacts packaged at {artifact_path}")
        return "\n".join(logs), artifact_path

    # ------------------------------------------------------------------
    # Backend registration
    # ------------------------------------------------------------------
    def register_backend(self, backend: Backend) -> None:
        """Register (or replace) a backend keyed by its display name."""
        self.backends[backend.name] = backend

    def _register_default_backends(self) -> None:
        """Install the two built-in backends: Nerfstudio and Gaussian Splatting."""
        self.register_backend(
            Backend(
                name="Nerfstudio (NeRF)",
                description=(
                    "Optimizes a NeRF with the nerfacto recipe, exports a Poisson surface mesh, and packs all outputs "
                    "(config, checkpoints, mesh, transforms.json) into a ZIP archive."
                ),
                runner=self._run_nerfstudio,
            )
        )
        self.register_backend(
            Backend(
                name="3D Gaussian Splatting",
                description=(
                    "Uses the Inria Gaussian Splatting reference implementation initialized from COLMAP cameras. "
                    "Returns the optimized Gaussian point cloud and training logs."
                ),
                runner=self._run_gaussian_splatting,
            )
        )

    # ------------------------------------------------------------------
    # Input ingestion helpers
    # ------------------------------------------------------------------
    def _ingest_uploads(self, uploads: Iterable[Any], images_dir: Path, max_resolution: int) -> int:
        """Copy uploaded images (loose files or ZIP members) into *images_dir*.

        Writes a metadata.json next to the images directory and returns the
        number of images ingested.
        """
        metadata: List[Dict[str, object]] = []
        count = 0
        for item in uploads:
            if not item:
                continue
            src_path = Path(getattr(item, "name", getattr(item, "path", "")))
            if not src_path.exists():
                # Gradio may store temp files in `.name`; fallback to `.path` when available
                if hasattr(item, "path"):
                    src_path = Path(item.path)
                if not src_path.exists():
                    continue

            if zipfile.is_zipfile(src_path):
                with zipfile.ZipFile(src_path, "r") as archive:
                    for member in archive.namelist():
                        lower = member.lower()
                        if lower.endswith((".jpg", ".jpeg", ".png")):
                            data = archive.read(member)
                            # Close each decoded image promptly instead of
                            # leaking the buffer until GC.
                            with Image.open(io.BytesIO(data)) as image:
                                dest = images_dir / Path(member).name
                                self._save_image(image, dest, max_resolution)
                            metadata.append(self._image_metadata(dest, source=str(member)))
                            count += 1
            else:
                # Context manager ensures the source file handle is released.
                with Image.open(src_path) as image:
                    dest = images_dir / src_path.name
                    self._save_image(image, dest, max_resolution)
                metadata.append(self._image_metadata(dest, source=str(src_path.name)))
                count += 1

        if metadata:
            dataset_meta = {
                # Aware "now" stripped back to naive so the serialized form
                # stays `...<microseconds>Z`, matching the previous output.
                "created_at": dt.datetime.now(dt.timezone.utc).replace(tzinfo=None).isoformat() + "Z",
                "max_resolution": max_resolution,
                "images": metadata,
            }
            meta_path = images_dir.parent / "metadata.json"
            meta_path.write_text(json.dumps(dataset_meta, indent=2))
        return count

    @staticmethod
    def _save_image(image: Image.Image, destination: Path, max_resolution: int) -> None:
        """Save *image* as RGB, downscaled so its longest side <= max_resolution."""
        image = image.convert("RGB")
        width, height = image.size
        scale = min(1.0, max_resolution / max(width, height))
        if scale < 1.0:
            new_size = (int(width * scale), int(height * scale))
            image = image.resize(new_size, Image.LANCZOS)
        destination.parent.mkdir(parents=True, exist_ok=True)
        image.save(destination, quality=95)

    @staticmethod
    def _image_metadata(path: Path, source: str) -> Dict[str, object]:
        """Return a metadata record (filename/size/source) for a saved image."""
        with Image.open(path) as image:
            width, height = image.size
        return {
            "filename": path.name,
            "width": width,
            "height": height,
            "source": source,
        }

    def _colmap_gpu_mode(self) -> Tuple[bool, str]:
        """Determine whether COLMAP should use CUDA acceleration.

        Honors the HF3D_COLMAP_USE_GPU override, otherwise probes nvidia-smi.
        Returns (use_gpu, human-readable reason).
        """
        override = os.environ.get("HF3D_COLMAP_USE_GPU")
        if override is not None:
            value = override.strip().lower()
            if value in {"1", "true", "yes", "on"}:
                return True, "Forced on via HF3D_COLMAP_USE_GPU"
            if value in {"0", "false", "no", "off"}:
                return False, "Disabled via HF3D_COLMAP_USE_GPU"

        if shutil.which("nvidia-smi") is None:
            return False, "nvidia-smi not found; assuming CPU-only environment"

        try:
            probe = subprocess.run(
                ["nvidia-smi"],
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                text=True,
                timeout=5,
            )
        except Exception:
            return False, "nvidia-smi probe failed; defaulting to CPU"

        if probe.returncode != 0:
            return False, "nvidia-smi returned non-zero exit code; defaulting to CPU"

        return True, "Detected NVIDIA GPU via nvidia-smi"

    # ------------------------------------------------------------------
    # COLMAP integration
    # ------------------------------------------------------------------
    def _run_colmap(self, images_dir: Path, output_dir: Path, max_resolution: int) -> Tuple[Dict[str, Path], List[str]]:
        """Run the COLMAP SfM stages and undistortion.

        Returns ({"database", "sparse", "dense"} paths, log lines).
        Raises FileNotFoundError("colmap") when the binary is missing and
        RuntimeError when any stage exits non-zero.
        """
        if shutil.which("colmap") is None:
            raise FileNotFoundError("colmap")

        logs: List[str] = ["Running COLMAP reconstruction…"]
        output_dir.mkdir(parents=True, exist_ok=True)
        database_path = output_dir / "database.db"
        sparse_dir = output_dir / "sparse"
        dense_dir = output_dir / "dense"
        sparse_dir.mkdir(exist_ok=True)

        use_gpu, gpu_reason = self._colmap_gpu_mode()
        logs.append(
            f"COLMAP GPU acceleration: {'enabled' if use_gpu else 'disabled'} ({gpu_reason})."
        )
        gpu_flag = "1" if use_gpu else "0"

        commands = [
            (
                "Feature extraction",
                [
                    "colmap",
                    "feature_extractor",
                    "--database_path",
                    str(database_path),
                    "--image_path",
                    str(images_dir),
                    "--SiftExtraction.use_gpu",
                    gpu_flag,
                    "--SiftExtraction.max_image_size",
                    str(max_resolution),
                ],
            ),
            (
                "Exhaustive matcher",
                [
                    "colmap",
                    "exhaustive_matcher",
                    "--database_path",
                    str(database_path),
                    "--SiftMatching.use_gpu",
                    gpu_flag,
                ],
            ),
            (
                "Mapper",
                [
                    "colmap",
                    "mapper",
                    "--database_path",
                    str(database_path),
                    "--image_path",
                    str(images_dir),
                    "--output_path",
                    str(sparse_dir),
                ],
            ),
            (
                "Image undistorter",
                [
                    "colmap",
                    "image_undistorter",
                    "--image_path",
                    str(images_dir),
                    "--input_path",
                    str(sparse_dir / "0"),
                    "--output_path",
                    str(dense_dir),
                    "--output_type",
                    "COLMAP",
                ],
            ),
        ]

        for stage, command in commands:
            logs.append(f"\n$ {' '.join(command)}")
            code, output = _run_command(command)
            logs.append(output)
            if code != 0:
                raise RuntimeError(f"[ERROR] COLMAP stage '{stage}' failed with exit code {code}.")

        outputs = {
            "database": database_path,
            # Mapper writes model 0 as its first (usually only) reconstruction.
            "sparse": sparse_dir / "0",
            "dense": dense_dir,
        }
        logs.append("COLMAP completed successfully.")
        return outputs, logs

    # ------------------------------------------------------------------
    # Backend implementations
    # ------------------------------------------------------------------
    def _run_nerfstudio(
        self,
        workspace: Path,
        dataset_root: Path,
        colmap_outputs: Optional[Dict[str, Path]],
        max_resolution: int,
    ) -> Tuple[Path, List[str]]:
        """Process data, train nerfacto, export a Poisson mesh, and zip results.

        Raises FileNotFoundError("ns-train") if Nerfstudio is not installed,
        RuntimeError if any CLI stage fails.
        """
        if shutil.which("ns-train") is None:
            raise FileNotFoundError("ns-train")

        logs: List[str] = ["Launching Nerfstudio pipeline…"]
        processed_dir = workspace / "nerfstudio" / "processed"
        runs_dir = workspace / "nerfstudio" / "runs"
        export_dir = workspace / "nerfstudio" / "export"
        processed_dir.mkdir(parents=True, exist_ok=True)
        runs_dir.mkdir(parents=True, exist_ok=True)
        export_dir.mkdir(parents=True, exist_ok=True)

        data_source = dataset_root / "images"
        process_cmd = [
            "ns-process-data",
            "images",
            "--data",
            str(data_source),
            "--output-dir",
            str(processed_dir),
            "--max-num-downscales",
            str(max(1, int(max_resolution / 512))),
        ]
        if colmap_outputs:
            # Reuse the poses we already computed instead of re-running COLMAP.
            process_cmd.extend(["--skip-colmap"])
            process_cmd.extend(["--colmap-model-path", str(colmap_outputs["sparse"])])

        logs.append(f"\n$ {' '.join(process_cmd)}")
        code, output = _run_command(process_cmd)
        logs.append(output)
        if code != 0:
            raise RuntimeError("ns-process-data failed. See logs above.")

        train_cmd = [
            "ns-train",
            "nerfacto",
            "--data",
            str(processed_dir),
            "--max-num-iterations",
            "3000",
            "--output-dir",
            str(runs_dir),
            "--viewer.quit-on-train-completion",
            "True",
            "--pipeline.model.depth-importance",
            "0.3",
        ]
        logs.append(f"\n$ {' '.join(train_cmd)}")
        code, output = _run_command(train_cmd)
        logs.append(output)
        if code != 0:
            raise RuntimeError("ns-train failed. Consider reducing iterations or verifying GPU availability.")

        # The newest config.yml (lexicographically last, timestamped paths)
        # belongs to the run we just launched.
        configs = sorted(runs_dir.rglob("config.yml"))
        if not configs:
            raise RuntimeError("Unable to locate Nerfstudio config.yml after training.")
        config_path = configs[-1]

        export_cmd = [
            "ns-export",
            "poisson",
            "--load-config",
            str(config_path),
            "--output-path",
            str(export_dir),
        ]
        logs.append(f"\n$ {' '.join(export_cmd)}")
        code, output = _run_command(export_cmd)
        logs.append(output)
        if code != 0:
            raise RuntimeError("ns-export failed. Check above logs for details.")

        mesh_path = export_dir / "mesh.obj"
        artifact_path = workspace / "nerfstudio_result.zip"
        with zipfile.ZipFile(artifact_path, "w") as archive:
            for path in [mesh_path, export_dir / "mesh.mtl", config_path, processed_dir / "transforms.json"]:
                if path.exists():
                    archive.write(path, arcname=path.relative_to(workspace))
            for ckpt in runs_dir.rglob("*.ckpt"):
                archive.write(ckpt, arcname=ckpt.relative_to(workspace))
        logs.append("Nerfstudio export complete.")
        return artifact_path, logs

    def _run_gaussian_splatting(
        self,
        workspace: Path,
        dataset_root: Path,
        colmap_outputs: Optional[Dict[str, Path]],
        max_resolution: int,
    ) -> Tuple[Path, List[str]]:
        """Convert COLMAP output, train Gaussian Splatting, and zip the PLY.

        Requires the upstream repository checkout (GAUSSIAN_SPLATTING_ROOT or
        ./external/gaussian-splatting) and COLMAP outputs.
        """
        default_repo = Path(__file__).resolve().parent / "external" / "gaussian-splatting"
        repo_root = Path(os.environ.get("GAUSSIAN_SPLATTING_ROOT", default_repo))
        convert_script = repo_root / "convert.py"
        train_script = repo_root / "train.py"
        if not convert_script.exists() or not train_script.exists():
            raise FileNotFoundError(
                "Gaussian Splatting repository not found. Clone it to 'external/gaussian-splatting' "
                "or set GAUSSIAN_SPLATTING_ROOT to point at the upstream project."
            )
        if not colmap_outputs:
            raise RuntimeError("Gaussian Splatting requires COLMAP outputs. Disable 'Skip COLMAP'.")

        logs: List[str] = ["Launching 3D Gaussian Splatting pipeline…"]
        gaussian_root = workspace / "gaussian"
        data_dir = gaussian_root / "data"
        model_dir = gaussian_root / "model"
        gaussian_root.mkdir(parents=True, exist_ok=True)

        convert_cmd = [
            "python3",
            str(convert_script),
            "-s",
            str(colmap_outputs["dense"]),
            "-o",
            str(data_dir),
        ]
        logs.append(f"\n$ {' '.join(convert_cmd)}")
        code, output = _run_command(convert_cmd, cwd=repo_root)
        logs.append(output)
        if code != 0:
            raise RuntimeError("Gaussian Splatting conversion failed. Verify COLMAP dense output.")

        train_cmd = [
            "python3",
            str(train_script),
            "-s",
            str(data_dir),
            "-m",
            str(model_dir),
            "--iterations",
            "7000",
            "--resolution",
            str(max(1, max_resolution // 512)),
        ]
        logs.append(f"\n$ {' '.join(train_cmd)}")
        code, output = _run_command(train_cmd, cwd=repo_root)
        logs.append(output)
        if code != 0:
            raise RuntimeError("Gaussian Splatting training failed. See logs for CUDA-related messages.")

        # Newest PLY (sorted path order) corresponds to the final iteration.
        ply_candidates = sorted(model_dir.rglob("*.ply"))
        if not ply_candidates:
            raise RuntimeError("No PLY point cloud found after Gaussian Splatting training.")
        ply_path = ply_candidates[-1]

        artifact_path = workspace / "gaussian_result.zip"
        with zipfile.ZipFile(artifact_path, "w") as archive:
            archive.write(ply_path, arcname=ply_path.relative_to(workspace))
            for log_file in gaussian_root.rglob("*.log"):
                archive.write(log_file, arcname=log_file.relative_to(workspace))
        logs.append("Gaussian Splatting export complete.")
        return artifact_path, logs
501
-
502
-
503
- # ----------------------------------------------------------------------
504
- # Gradio interface
505
- # ----------------------------------------------------------------------
506
-
507
def build_interface() -> gr.Blocks:
    """Construct the Gradio Blocks UI wired to a ReconstructionRunner.

    Returns the (unlaunched) Blocks app. The artifact root honours
    HF3D_OUTPUT_ROOT when set, otherwise a local ``runs`` folder.
    """
    output_override = os.environ.get("HF3D_OUTPUT_ROOT")
    if output_override:
        output_root = Path(output_override)
    else:
        output_root = Path(__file__).resolve().parent / "runs"
    runner = ReconstructionRunner(output_root=output_root)

    # Fix: derive the dropdown default from the registered backends instead of
    # hard-coding a backend name, so the UI stays consistent with registration.
    methods = runner.available_methods()
    default_backend = methods[0] if methods else None

    with gr.Blocks(title="Sparse Images to 3D Reconstruction") as demo:
        gr.Markdown(
            textwrap.dedent(
                """
                # Sparse Images ➜ 3D Reconstruction

                Upload a folder or ZIP archive of sparse, non-overlapping photographs. The app will run COLMAP to estimate camera
                poses, then optimize either a Nerfstudio NeRF or a 3D Gaussian Splatting model and return a downloadable artifact.
                Expect several minutes of processing time for high-resolution captures.
                """
            )
        )

        with gr.Row():
            uploads = gr.Files(label="Images or ZIP archive", file_types=["image", ".zip"], file_count="multiple")
            method = gr.Dropdown(
                choices=methods,
                value=default_backend,
                label="Reconstruction backend",
            )

        with gr.Row():
            max_resolution = gr.Slider(
                minimum=512,
                maximum=4096,
                step=256,
                value=2048,
                label="Max processing resolution (pixels)",
            )
            skip_colmap = gr.Checkbox(
                value=False,
                label="Skip COLMAP (use existing poses)",
            )

        backend_description = gr.Markdown(runner.describe_backend(default_backend or ""))
        # Keep the description in sync with the selected backend.
        method.change(
            fn=runner.describe_backend,
            inputs=method,
            outputs=backend_description,
        )
        run_button = gr.Button("Start reconstruction", variant="primary")

        logs = gr.Textbox(label="Pipeline log", lines=20)
        artifact = gr.File(label="Download results")

        def _execute(files: List[Any], backend: str, resolution: int, skip: bool) -> Tuple[str, Optional[str]]:
            # Stringify the artifact path for the gr.File component.
            log_text, artifact_path = runner.run(files, backend, resolution, skip)
            if artifact_path is None:
                return log_text, None
            return log_text, str(artifact_path)

        run_button.click(
            fn=_execute,
            inputs=[uploads, method, max_resolution, skip_colmap],
            outputs=[logs, artifact],
        )

    return demo
574
-
575
-
576
def main() -> None:
    """Build the Gradio app and serve it with a single-worker queue."""
    # One concurrent job at a time: reconstructions are resource-hungry.
    app = build_interface()
    host = os.environ.get("GRADIO_SERVER_NAME", "0.0.0.0")
    app.queue(default_concurrency_limit=1).launch(server_name=host)


if __name__ == "__main__":
    main()
 
 
 
 
 
 
1
  import os
2
  import shutil
3
  import subprocess
4
+ import tempfile
5
+ import time
 
 
6
  from pathlib import Path
7
+ from typing import List, Tuple
8
 
9
  import gradio as gr
10
+ import numpy as np
11
+ import open3d as o3d
12
  from PIL import Image
13
 
14
# Resolve all output locations relative to this file so the app behaves the
# same regardless of the process working directory.
APP_DIR = Path(__file__).parent.resolve()
OUT_DIR = APP_DIR / "outputs"
RUNS_DIR = APP_DIR / "runs"
for _required_dir in (OUT_DIR, RUNS_DIR):
    _required_dir.mkdir(exist_ok=True)
19
+
20
+
21
+ def _run(cmd: List[str], cwd: Path, logfile: Path) -> Tuple[int, str]:
22
+ """Run a shell command, tee output to logfile, return (code, text)."""
23
+ proc = subprocess.Popen(cmd, cwd=str(cwd), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
24
+ lines = []
25
+ with logfile.open("a") as f:
26
+ for line in iter(proc.stdout.readline, ""):
27
+ f.write(line)
28
+ lines.append(line)
29
+ proc.wait()
30
+ out = "".join(lines)
31
+ return proc.returncode, out
32
+
33
+
34
def _save_images_to(dirpath: Path, images: List[Image.Image], max_size: int) -> None:
    """Persist *images* under *dirpath* as RGB JPEGs capped at *max_size* px.

    Files are named ``im_000.jpg``, ``im_001.jpg``, … in input order.
    """
    dirpath.mkdir(parents=True, exist_ok=True)
    for index, source in enumerate(images):
        rgb = source.convert("RGB")
        # thumbnail() downsizes in place, preserving aspect ratio; it never
        # upscales smaller images.
        rgb.thumbnail((max_size, max_size))
        rgb.save(dirpath / f"im_{index:03d}.jpg", quality=92)
40
+
41
+
42
+ def _colmap_pipeline(img_dir: Path, work_dir: Path, num_threads: int = 4) -> Path:
43
+ """Run COLMAP SfM+MVS. Returns path to fused point cloud (PLY)."""
44
+ os.environ.setdefault("OMP_NUM_THREADS", str(num_threads))
45
+ db = work_dir / "database.db"
46
+ sparse = work_dir / "sparse"
47
+ dense = work_dir / "dense"
48
+ logs = work_dir / "logs.txt"
49
+
50
+ sparse.mkdir(exist_ok=True)
51
+ dense.mkdir(exist_ok=True)
52
+
53
+ # 1) Feature extraction
54
+ code, _ = _run([
55
+ "colmap", "feature_extractor",
56
+ "--database_path", str(db),
57
+ "--image_path", str(img_dir),
58
+ "--ImageReader.single_camera", "1",
59
+ "--SiftExtraction.max_image_size", "2400",
60
+ "--SiftExtraction.num_threads", str(num_threads),
61
+ ], cwd=work_dir, logfile=logs)
62
+ if code != 0:
63
+ raise RuntimeError("COLMAP feature_extractor failed. See logs.txt")
64
+
65
+ # 2) Exhaustive matching
66
+ code, _ = _run([
67
+ "colmap", "exhaustive_matcher",
68
+ "--database_path", str(db),
69
+ demo.launch()