vibesecurityguy committed on
Commit
60487c0
·
verified ·
1 Parent(s): fdbfcae

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +35 -12
app.py CHANGED
@@ -303,6 +303,14 @@ EXAMPLES_QA = [
303
  # ---------------------------------------------------------------------------
304
  # Inference functions
305
  # ---------------------------------------------------------------------------
 
 
 
 
 
 
 
 
306
 
307
  def _use_hf_model() -> bool:
308
  """Check if we should use the fine-tuned HF model."""
@@ -320,8 +328,8 @@ def classify(description: str, api_key: str) -> str:
320
 
321
  use_hf = _use_hf_model()
322
 
323
- # If user provided an API key, prefer OpenAI (explicit choice)
324
- if api_key.strip():
325
  use_hf = False
326
 
327
  if use_hf:
@@ -330,7 +338,14 @@ def classify(description: str, api_key: str) -> str:
330
  return json.dumps(result, indent=2)
331
  except Exception as e:
332
  logger.error(f"HF model error: {e}")
333
- # Fall through to OpenAI if available
 
 
 
 
 
 
 
334
  key = os.getenv("OPENAI_API_KEY", "")
335
  if not key:
336
  return json.dumps({"error": f"Model inference failed: {str(e)}"}, indent=2)
@@ -356,7 +371,7 @@ def ask(question: str, api_key: str) -> str:
356
 
357
  use_hf = _use_hf_model()
358
 
359
- if api_key.strip():
360
  use_hf = False
361
 
362
  if use_hf:
@@ -364,6 +379,11 @@ def ask(question: str, api_key: str) -> str:
364
  return _ask_gpu(question)
365
  except Exception as e:
366
  logger.error(f"HF model error: {e}")
 
 
 
 
 
367
  key = os.getenv("OPENAI_API_KEY", "")
368
  if not key:
369
  return f"**Error:** Model inference failed: {str(e)}"
@@ -542,14 +562,17 @@ def build_app() -> gr.Blocks:
542
  </div>
543
  """)
544
 
545
- # --- API Key (optional on Spaces) ---
546
- with gr.Group():
547
- api_key = gr.Textbox(
548
- label="OpenAI API Key (Optional)" if IS_SPACES else "OpenAI API Key",
549
- placeholder="sk-... (optional — the fine-tuned model runs for free)" if IS_SPACES else "sk-... (required for classification)",
550
- type="password",
551
- info="Your key is never stored. If provided, GPT-4o will be used instead of the fine-tuned model.",
552
- )
 
 
 
553
 
554
  # --- Main Tabs ---
555
  with gr.Tabs() as tabs:
 
303
  # ---------------------------------------------------------------------------
304
  # Inference functions
305
  # ---------------------------------------------------------------------------
306
+ ZEROGPU_QUEUE_HINT = "No GPU was available after"
307
+
308
+
309
+ def _is_zerogpu_queue_timeout(err: Exception) -> bool:
310
+ """Detect ZeroGPU queue timeout errors from the spaces runtime."""
311
+ return ZEROGPU_QUEUE_HINT in str(err)
312
+
313
+
314
 
315
  def _use_hf_model() -> bool:
316
  """Check if we should use the fine-tuned HF model."""
 
328
 
329
  use_hf = _use_hf_model()
330
 
331
+ # Local-only override: allow OpenAI fallback when running outside Spaces.
332
+ if api_key.strip() and not IS_SPACES:
333
  use_hf = False
334
 
335
  if use_hf:
 
338
  return json.dumps(result, indent=2)
339
  except Exception as e:
340
  logger.error(f"HF model error: {e}")
341
+ if _is_zerogpu_queue_timeout(e):
342
+ return json.dumps(
343
+ {"error": "ZeroGPU queue is full right now. Try again in 1-2 minutes."},
344
+ indent=2,
345
+ )
346
+ if IS_SPACES:
347
+ return json.dumps({"error": f"Model inference failed: {str(e)}"}, indent=2)
348
+ # Local fallback path only.
349
  key = os.getenv("OPENAI_API_KEY", "")
350
  if not key:
351
  return json.dumps({"error": f"Model inference failed: {str(e)}"}, indent=2)
 
371
 
372
  use_hf = _use_hf_model()
373
 
374
+ if api_key.strip() and not IS_SPACES:
375
  use_hf = False
376
 
377
  if use_hf:
 
379
  return _ask_gpu(question)
380
  except Exception as e:
381
  logger.error(f"HF model error: {e}")
382
+ if _is_zerogpu_queue_timeout(e):
383
+ return "**Error:** ZeroGPU queue is full right now. Try again in 1-2 minutes."
384
+ if IS_SPACES:
385
+ return f"**Error:** Model inference failed: {str(e)}"
386
+ # Local fallback path only.
387
  key = os.getenv("OPENAI_API_KEY", "")
388
  if not key:
389
  return f"**Error:** Model inference failed: {str(e)}"
 
562
  </div>
563
  """)
564
 
565
+ # --- API Key (local mode only) ---
566
+ if IS_SPACES:
567
+ api_key = gr.State("")
568
+ else:
569
+ with gr.Group():
570
+ api_key = gr.Textbox(
571
+ label="OpenAI API Key",
572
+ placeholder="sk-... (required for OpenAI fallback)",
573
+ type="password",
574
+ info="Your key is never stored.",
575
+ )
576
 
577
  # --- Main Tabs ---
578
  with gr.Tabs() as tabs: