ghitaben committed on
Commit
244eb20
·
1 Parent(s): de5f46b

add three detection layers

Browse files
Files changed (1) hide show
  1. src/loader.py +14 -2
src/loader.py CHANGED
@@ -170,9 +170,21 @@ def get_text_model(
170
 
171
 
172
  def _is_zerogpu_error(e: Exception) -> bool:
173
- """Return True for errors that indicate ZeroGPU failed to allocate / init a GPU."""
 
 
 
 
 
174
  msg = str(e)
175
- return "No CUDA GPUs are available" in msg or "CUDA" in msg
 
 
 
 
 
 
 
176
 
177
 
178
  def _inference_core(
 
170
 
171
 
172
  def _is_zerogpu_error(e: Exception) -> bool:
173
+ """Return True for errors that indicate ZeroGPU failed to allocate / init a GPU.
174
+
175
+ The spaces package re-wraps the original CUDA RuntimeError as
176
+ RuntimeError('RuntimeError'), so we check for that pattern too.
177
+ """
178
+ import traceback as _tb
179
  msg = str(e)
180
+ if "No CUDA GPUs are available" in msg or "CUDA" in msg:
181
+ return True
182
+ # spaces re-wraps: RuntimeError('RuntimeError')
183
+ if msg == "RuntimeError":
184
+ return True
185
+ # Inspect traceback for ZeroGPU stack frames
186
+ full_tb = "".join(_tb.format_exception(type(e), e, e.__traceback__))
187
+ return "spaces/zero" in full_tb or "device-api.zero" in full_tb
188
 
189
 
190
  def _inference_core(