Spaces:
Running
Running
refactor: update Client initialization to use 'token' instead of 'hf_token' for consistency across models
Browse files
app.py
CHANGED
|
@@ -113,7 +113,7 @@ def run_upscaler(image, model_selection="4xBHI_dat2_real"):
|
|
| 113 |
except Exception as e:
|
| 114 |
raise RuntimeError(f"Upscaler: could not load image from path: {e}") from e
|
| 115 |
try:
|
| 116 |
-
client = Client("Phips/Upscaler", hf_token=_hf_token())
|
| 117 |
result = client.predict(image_arg, model_selection, api_name="/upscale_image")
|
| 118 |
except Exception as e:
|
| 119 |
raise RuntimeError(f"Upscaler API error: {e}") from e
|
|
@@ -153,7 +153,7 @@ def run_z_image_turbo(prompt, height=1024, width=1024, seed=42):
|
|
| 153 |
if not prompt or not isinstance(prompt, str) or not prompt.strip():
|
| 154 |
return None
|
| 155 |
try:
|
| 156 |
-
client = Client("hf-applications/Z-Image-Turbo", hf_token=_hf_token())
|
| 157 |
result = client.predict(
|
| 158 |
prompt=prompt.strip(),
|
| 159 |
height=float(height),
|
|
@@ -203,7 +203,7 @@ def run_flux_klein(
|
|
| 203 |
if not prompt or not isinstance(prompt, str) or not prompt.strip():
|
| 204 |
return None
|
| 205 |
try:
|
| 206 |
-
client = Client("black-forest-labs/FLUX.2-klein-9B", hf_token=_hf_token())
|
| 207 |
result = client.predict(
|
| 208 |
prompt=prompt.strip(),
|
| 209 |
input_images=[],
|
|
|
|
| 113 |
except Exception as e:
|
| 114 |
raise RuntimeError(f"Upscaler: could not load image from path: {e}") from e
|
| 115 |
try:
|
| 116 |
+
client = Client("Phips/Upscaler", token=_hf_token())
|
| 117 |
result = client.predict(image_arg, model_selection, api_name="/upscale_image")
|
| 118 |
except Exception as e:
|
| 119 |
raise RuntimeError(f"Upscaler API error: {e}") from e
|
|
|
|
| 153 |
if not prompt or not isinstance(prompt, str) or not prompt.strip():
|
| 154 |
return None
|
| 155 |
try:
|
| 156 |
+
client = Client("hf-applications/Z-Image-Turbo", token=_hf_token())
|
| 157 |
result = client.predict(
|
| 158 |
prompt=prompt.strip(),
|
| 159 |
height=float(height),
|
|
|
|
| 203 |
if not prompt or not isinstance(prompt, str) or not prompt.strip():
|
| 204 |
return None
|
| 205 |
try:
|
| 206 |
+
client = Client("black-forest-labs/FLUX.2-klein-9B", token=_hf_token())
|
| 207 |
result = client.predict(
|
| 208 |
prompt=prompt.strip(),
|
| 209 |
input_images=[],
|