incendies committed on
Commit
a6541eb
·
1 Parent(s): cda4df7

Pass HF token to Client for ZeroGPU quota

Browse files
Files changed (1) hide show
  1. app.py +16 -3
app.py CHANGED
@@ -1,4 +1,5 @@
1
  import base64
 
2
  import tempfile
3
  import urllib.request
4
  import gradio as gr
@@ -10,6 +11,18 @@ from daggr import FnNode, GradioNode, Graph
10
  # These tools are free to use and don't consume credits
11
 
12
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  def _url_to_path(url):
14
  """Download image from URL to a temp file; Daggr needs file paths to display images."""
15
  if not url or not isinstance(url, str) or not url.startswith("http"):
@@ -100,7 +113,7 @@ def run_upscaler(image, model_selection="4xBHI_dat2_real"):
100
  except Exception as e:
101
  raise RuntimeError(f"Upscaler: could not load image from path: {e}") from e
102
  try:
103
- client = Client("Phips/Upscaler")
104
  result = client.predict(image_arg, model_selection, api_name="/upscale_image")
105
  except Exception as e:
106
  raise RuntimeError(f"Upscaler API error: {e}") from e
@@ -140,7 +153,7 @@ def run_z_image_turbo(prompt, height=1024, width=1024, seed=42):
140
  if not prompt or not isinstance(prompt, str) or not prompt.strip():
141
  return None
142
  try:
143
- client = Client("hf-applications/Z-Image-Turbo")
144
  result = client.predict(
145
  prompt=prompt.strip(),
146
  height=float(height),
@@ -190,7 +203,7 @@ def run_flux_klein(
190
  if not prompt or not isinstance(prompt, str) or not prompt.strip():
191
  return None
192
  try:
193
- client = Client("black-forest-labs/FLUX.2-klein-9B")
194
  result = client.predict(
195
  prompt=prompt.strip(),
196
  input_images=[],
 
1
  import base64
2
+ import os
3
  import tempfile
4
  import urllib.request
5
  import gradio as gr
 
11
  # These tools are free to use and don't consume credits
12
 
13
 
14
+ def _hf_token():
15
+ """HF token so ZeroGPU quota is used (not 'unlogged user'). Prefer env, then huggingface_hub."""
16
+ token = os.environ.get("HF_TOKEN")
17
+ if token:
18
+ return token
19
+ try:
20
+ from huggingface_hub import get_token
21
+ return get_token()
22
+ except Exception:
23
+ return None
24
+
25
+
26
  def _url_to_path(url):
27
  """Download image from URL to a temp file; Daggr needs file paths to display images."""
28
  if not url or not isinstance(url, str) or not url.startswith("http"):
 
113
  except Exception as e:
114
  raise RuntimeError(f"Upscaler: could not load image from path: {e}") from e
115
  try:
116
+ client = Client("Phips/Upscaler", hf_token=_hf_token())
117
  result = client.predict(image_arg, model_selection, api_name="/upscale_image")
118
  except Exception as e:
119
  raise RuntimeError(f"Upscaler API error: {e}") from e
 
153
  if not prompt or not isinstance(prompt, str) or not prompt.strip():
154
  return None
155
  try:
156
+ client = Client("hf-applications/Z-Image-Turbo", hf_token=_hf_token())
157
  result = client.predict(
158
  prompt=prompt.strip(),
159
  height=float(height),
 
203
  if not prompt or not isinstance(prompt, str) or not prompt.strip():
204
  return None
205
  try:
206
+ client = Client("black-forest-labs/FLUX.2-klein-9B", hf_token=_hf_token())
207
  result = client.predict(
208
  prompt=prompt.strip(),
209
  input_images=[],