Spaces:
Running on Zero
Upload app.py with huggingface_hub
Browse files
app.py
CHANGED
|
@@ -311,6 +311,13 @@ def _is_zerogpu_queue_timeout(err: Exception) -> bool:
|
|
| 311 |
return ZEROGPU_QUEUE_HINT in str(err)
|
| 312 |
|
| 313 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 314 |
|
| 315 |
def _use_hf_model() -> bool:
|
| 316 |
"""Check if we should use the fine-tuned HF model."""
|
|
@@ -321,10 +328,20 @@ def _use_hf_model() -> bool:
|
|
| 321 |
return os.getenv("VERIS_USE_HF", "").lower() in ("1", "true", "yes")
|
| 322 |
|
| 323 |
|
| 324 |
-
def classify(description: str, api_key: str) -> str:
|
| 325 |
"""Classify an incident — uses HF model on Spaces, OpenAI otherwise."""
|
| 326 |
if not description.strip():
|
| 327 |
return "Please enter an incident description."
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 328 |
|
| 329 |
use_hf = _use_hf_model()
|
| 330 |
|
|
@@ -364,10 +381,15 @@ def classify(description: str, api_key: str) -> str:
|
|
| 364 |
return json.dumps({"error": str(e)}, indent=2)
|
| 365 |
|
| 366 |
|
| 367 |
-
def ask(question: str, api_key: str) -> str:
|
| 368 |
"""Answer a VERIS question — uses HF model on Spaces, OpenAI otherwise."""
|
| 369 |
if not question.strip():
|
| 370 |
return "*Please enter a question.*"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 371 |
|
| 372 |
use_hf = _use_hf_model()
|
| 373 |
|
|
@@ -547,6 +569,12 @@ def build_app() -> gr.Blocks:
|
|
| 547 |
</span>
|
| 548 |
</div>
|
| 549 |
""")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 550 |
else:
|
| 551 |
gr.HTML("""
|
| 552 |
<div class="model-banner">
|
|
|
|
| 311 |
return ZEROGPU_QUEUE_HINT in str(err)
|
| 312 |
|
| 313 |
|
| 314 |
+
def _spaces_user_logged_in(request: gr.Request | None) -> bool:
|
| 315 |
+
"""True when a Spaces OAuth user is attached to this request."""
|
| 316 |
+
if request is None:
|
| 317 |
+
return False
|
| 318 |
+
return bool(getattr(request, "username", None))
|
| 319 |
+
|
| 320 |
+
|
| 321 |
|
| 322 |
def _use_hf_model() -> bool:
|
| 323 |
"""Check if we should use the fine-tuned HF model."""
|
|
|
|
| 328 |
return os.getenv("VERIS_USE_HF", "").lower() in ("1", "true", "yes")
|
| 329 |
|
| 330 |
|
| 331 |
+
def classify(description: str, api_key: str, request: gr.Request | None = None) -> str:
|
| 332 |
"""Classify an incident — uses HF model on Spaces, OpenAI otherwise."""
|
| 333 |
if not description.strip():
|
| 334 |
return "Please enter an incident description."
|
| 335 |
+
if IS_SPACES and not _spaces_user_logged_in(request):
|
| 336 |
+
return json.dumps(
|
| 337 |
+
{
|
| 338 |
+
"error": (
|
| 339 |
+
"Please sign in with Hugging Face in this app before running inference. "
|
| 340 |
+
"ZeroGPU quota is per logged-in user."
|
| 341 |
+
)
|
| 342 |
+
},
|
| 343 |
+
indent=2,
|
| 344 |
+
)
|
| 345 |
|
| 346 |
use_hf = _use_hf_model()
|
| 347 |
|
|
|
|
| 381 |
return json.dumps({"error": str(e)}, indent=2)
|
| 382 |
|
| 383 |
|
| 384 |
+
def ask(question: str, api_key: str, request: gr.Request | None = None) -> str:
|
| 385 |
"""Answer a VERIS question — uses HF model on Spaces, OpenAI otherwise."""
|
| 386 |
if not question.strip():
|
| 387 |
return "*Please enter a question.*"
|
| 388 |
+
if IS_SPACES and not _spaces_user_logged_in(request):
|
| 389 |
+
return (
|
| 390 |
+
"**Error:** Please sign in with Hugging Face in this app before running inference. "
|
| 391 |
+
"ZeroGPU quota is per logged-in user."
|
| 392 |
+
)
|
| 393 |
|
| 394 |
use_hf = _use_hf_model()
|
| 395 |
|
|
|
|
| 569 |
</span>
|
| 570 |
</div>
|
| 571 |
""")
|
| 572 |
+
with gr.Row():
|
| 573 |
+
gr.Markdown(
|
| 574 |
+
"**Required:** Click Sign in with Hugging Face below so ZeroGPU usage "
|
| 575 |
+
"counts against your account quota."
|
| 576 |
+
)
|
| 577 |
+
gr.LoginButton("Sign in with Hugging Face")
|
| 578 |
else:
|
| 579 |
gr.HTML("""
|
| 580 |
<div class="model-banner">
|