Nymbo committed on
Commit
67577b9
·
verified ·
1 Parent(s): f9d461e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +28 -11
app.py CHANGED
@@ -1,13 +1,17 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
 
3
  import os
4
  import base64
5
  from PIL import Image
6
  import io
7
  from typing import Optional
8
 
9
- ACCESS_TOKEN = os.getenv("HF_TOKEN")
10
- print("Access token loaded.")
 
 
 
11
 
12
  # Function to encode image to base64
13
  def encode_image(image_path):
@@ -78,17 +82,19 @@ def respond(
78
  if oauth_token is not None and getattr(oauth_token, "token", None):
79
  api_token = oauth_token.token
80
  print("Using OAuth token from signed-in user for inference.")
81
- elif ACCESS_TOKEN:
82
- api_token = ACCESS_TOKEN
83
  print("Using server-configured Hugging Face token for inference.")
84
- else:
85
- raise gr.Error(
86
- "No Hugging Face session detected. Please sign in with your Hugging Face account before running the chat."
87
- )
88
 
89
  # Initialize the Inference Client with default HF inference
90
- client = InferenceClient(token=api_token)
91
- print("Hugging Face Inference Client initialized with available token.")
 
 
 
 
 
 
92
 
93
  # Convert seed to None if -1 (meaning random)
94
  if seed == -1:
@@ -215,6 +221,17 @@ def respond(
215
  yield response
216
 
217
  print()
 
 
 
 
 
 
 
 
 
 
 
218
  except Exception as e:
219
  print(f"Error during inference: {e}")
220
  response += f"\nError: {str(e)}"
@@ -539,4 +556,4 @@ print("Gradio interface initialized.")
539
 
540
  if __name__ == "__main__":
541
  print("Launching the demo application.")
542
- demo.launch(show_api=True)
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
+ from huggingface_hub.errors import HfHubHTTPError
4
  import os
5
  import base64
6
  from PIL import Image
7
  import io
8
  from typing import Optional
9
 
10
+ HF_READ_TOKEN = os.getenv("HF_READ_TOKEN")
11
+ if HF_READ_TOKEN:
12
+ print("Default Hugging Face token available from environment.")
13
+ else:
14
+ print("No default Hugging Face token configured; relying on user sign-in.")
15
 
16
  # Function to encode image to base64
17
  def encode_image(image_path):
 
82
  if oauth_token is not None and getattr(oauth_token, "token", None):
83
  api_token = oauth_token.token
84
  print("Using OAuth token from signed-in user for inference.")
85
+ elif HF_READ_TOKEN:
86
+ api_token = HF_READ_TOKEN
87
  print("Using server-configured Hugging Face token for inference.")
 
 
 
 
88
 
89
  # Initialize the Inference Client with default HF inference
90
+ client_kwargs = {}
91
+ if api_token is not None:
92
+ client_kwargs["token"] = api_token
93
+ else:
94
+ print("No Hugging Face token available; attempting anonymous inference (may fail for private models).")
95
+
96
+ client = InferenceClient(**client_kwargs)
97
+ print("Hugging Face Inference Client initialized.")
98
 
99
  # Convert seed to None if -1 (meaning random)
100
  if seed == -1:
 
221
  yield response
222
 
223
  print()
224
+ except HfHubHTTPError as e:
225
+ status = getattr(e.response, "status_code", None)
226
+ if status in (401, 403):
227
+ raise gr.Error(
228
+ "Failed to generate response: {}. Sign in with your Hugging Face account and retry."
229
+ .format(e)
230
+ ) from e
231
+
232
+ print(f"Error during inference: {e}")
233
+ response += f"\nError: {str(e)}"
234
+ yield response
235
  except Exception as e:
236
  print(f"Error during inference: {e}")
237
  response += f"\nError: {str(e)}"
 
556
 
557
  if __name__ == "__main__":
558
  print("Launching the demo application.")
559
+ demo.launch(show_api=True, mcp_server=True)