wop committed on
Commit
d3d9a93
·
verified ·
1 Parent(s): 1ecbfb8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +53 -51
app.py CHANGED
@@ -1,61 +1,63 @@
1
  import os
2
- from depth_anything_3.app.gradio_app import DepthAnything3App
3
- from depth_anything_3.app.modules.model_inference import ModelInference
4
 
5
- # Remove GPU decorator; run normally on CPU
6
- original_run_inference = ModelInference.run_inference
7
 
8
- def cpu_run_inference(self, *args, **kwargs):
9
- """CPU-only inference"""
10
- return original_run_inference(self, *args, **kwargs)
11
 
12
- # Replace original method
13
- ModelInference.run_inference = cpu_run_inference
 
14
 
15
- # Directories
16
- model_dir = os.environ.get("DA3_MODEL_DIR", "depth-anything/DA3NESTED-GIANT-LARGE")
17
- workspace_dir = os.environ.get("DA3_WORKSPACE_DIR", "workspace/gradio")
18
- gallery_dir = os.environ.get("DA3_GALLERY_DIR", "workspace/gallery")
 
 
 
19
 
20
- os.makedirs(workspace_dir, exist_ok=True)
21
- os.makedirs(gallery_dir, exist_ok=True)
22
 
23
- # Global app object for Hugging Face Spaces
24
- app = DepthAnything3App(
25
- model_dir=model_dir,
26
- workspace_dir=workspace_dir,
27
- gallery_dir=gallery_dir
28
- )
 
 
 
 
 
 
 
 
 
 
 
 
29
 
30
- # Optional: pre-cache examples
31
- examples_dir = os.path.join(workspace_dir, "examples")
32
- examples_exist = os.path.exists(examples_dir)
33
- cache_examples_env = os.environ.get("DA3_CACHE_EXAMPLES", "").lower()
34
- if cache_examples_env in ("false", "0", "no"):
35
- cache_examples = False
36
- elif cache_examples_env in ("true", "1", "yes"):
37
- cache_examples = True
38
- else:
39
- cache_examples = examples_exist
40
-
41
- cache_gs_tag = os.environ.get("DA3_CACHE_GS_TAG", "dl3dv")
42
-
43
- if cache_examples:
44
- app.cache_examples(
45
- show_cam=True,
46
- filter_black_bg=False,
47
- filter_white_bg=False,
48
- save_percentage=5.0,
49
- num_max_points=1000,
50
- cache_gs_tag=cache_gs_tag,
51
- gs_trj_mode="smooth",
52
- gs_video_quality="low",
53
- )
54
 
55
  if __name__ == "__main__":
56
- print("🚀 Launching Depth Anything 3 (CPU-only) on Hugging Face Spaces...")
57
- app.launch(
58
- host="0.0.0.0",
59
- port=7860,
60
- share=False
61
- )
 
1
  import os
2
+ import torch
3
+ import gradio as gr
4
 
5
+ from depth_anything_3.model import DepthAnything3
 
6
 
 
 
 
7
 
8
# ---------------------------------------------------------
# CPU-safe DepthAnything3 loader
# ---------------------------------------------------------

def load_model_cpu(model_dir):
    """Load DepthAnything3 from *model_dir*, pin it to CPU, set eval mode."""
    print("🔄 Loading DepthAnything3 model on CPU...")
    net = DepthAnything3.from_pretrained(model_dir, config_name="config.json")
    net.to("cpu")
    net.eval()
    print("✅ Model ready on CPU")
    return net
19
 
 
 
20
 
21
# Checkpoint location; overridable via the DA3_MODEL_DIR environment variable.
MODEL_DIR = os.environ.get("DA3_MODEL_DIR", "depth-anything/DA3NESTED-GIANT-LARGE")
model = load_model_cpu(MODEL_DIR)
23
+
24
+
25
# ---------------------------------------------------------
# CPU-safe inference (single image only)
# ---------------------------------------------------------

def run_depth(img):
    """
    CPU-only depth inference for a single image.

    Returns None when no image is supplied; otherwise the model's depth
    prediction. Batching, multiview, GS, and reconstruction are deliberately
    absent so the Space can boot on CPU.
    """
    if img is None:
        return None

    with torch.no_grad():
        prediction = model.infer_image(img, device="cpu")
    return prediction
41
+
42
+
43
# ---------------------------------------------------------
# Minimal Gradio UI (fast startup)
# ---------------------------------------------------------

title = "Depth Anything 3 — CPU Mode (Safe HF Version)"
description = """
This Hugging Face Space runs **DepthAnything3** in CPU-only mode.
Only single-image depth inference is enabled.
All heavy multiview / GS / reconstruction features were removed so the Space can boot on CPU.
"""

# Single-image in, depth-image out; everything else was stripped for CPU boot.
demo = gr.Interface(
    fn=run_depth,
    title=title,
    description=description,
    inputs=gr.Image(type="pil", label="Upload an image"),
    outputs=gr.Image(label="Predicted Depth"),
)
 
 
 
61
 
62
if __name__ == "__main__":
    # Bind to all interfaces on the standard Hugging Face Spaces port.
    demo.launch(server_name="0.0.0.0", server_port=7860)