Justin Means committed on
Commit
b78b919
·
1 Parent(s): e69e213

Increase GPU timeout to 10 minutes for full quality processing

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -29,7 +29,7 @@ from depth_anything_3.app.modules.model_inference import ModelInference
29
  # Model loading and inference will occur in GPU subprocess, not main process
30
  original_run_inference = ModelInference.run_inference
31
 
32
- @spaces.GPU(duration=120) # Request GPU for up to 120 seconds per inference
33
  def gpu_run_inference(self, *args, **kwargs):
34
  """
35
  GPU-accelerated inference with Spaces decorator.
 
29
  # Model loading and inference will occur in GPU subprocess, not main process
30
  original_run_inference = ModelInference.run_inference
31
 
32
+ @spaces.GPU(duration=600) # Request GPU for up to 10 minutes per inference
33
  def gpu_run_inference(self, *args, **kwargs):
34
  """
35
  GPU-accelerated inference with Spaces decorator.