PeterPinetree committed on
Commit
e4ac69f
·
verified ·
1 Parent(s): 2d37034

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -5
app.py CHANGED
@@ -1,11 +1,14 @@
1
  import gradio as gr
2
  import os
3
  from huggingface_hub import InferenceClient
 
4
  import logging
5
- from typing import List, Tuple, Optional
6
 
7
  logging.basicConfig(level=logging.INFO)
8
- import huggingface_hub
 
 
9
  print("Hugging Face Hub version:", huggingface_hub.__version__)
10
 
11
  if not hf_token:
@@ -13,8 +16,6 @@ if not hf_token:
13
  else:
14
  print("✅ Token loaded successfully.")
15
 
16
- # Load HF token from environment
17
- hf_token = os.environ.get("ZEPHYR_BRIGHTSIDE_TOKEN")
18
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=hf_token)
19
 
20
  MAX_HISTORY_LENGTH = 20
@@ -57,7 +58,9 @@ def respond(message: str, chat_history: List[Tuple[str, str]]) -> Tuple[str, Lis
57
  return "", updated_history
58
 
59
  except Exception as e:
60
- error_msg = f"Oops! Something went wrong. Please try again. (Error: {str(e)})"
 
 
61
  return "", chat_history + [(message, error_msg)]
62
 
63
  def get_avatar_url():
 
1
  import gradio as gr
2
  import os
3
  from huggingface_hub import InferenceClient
4
+ import huggingface_hub
5
  import logging
6
+ from typing import List, Tuple
7
 
8
  logging.basicConfig(level=logging.INFO)
9
+
10
+ # Debugging info
11
+ hf_token = os.environ.get("ZEPHYR_BRIGHTSIDE_TOKEN")
12
  print("Hugging Face Hub version:", huggingface_hub.__version__)
13
 
14
  if not hf_token:
 
16
  else:
17
  print("✅ Token loaded successfully.")
18
 
 
 
19
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta", token=hf_token)
20
 
21
  MAX_HISTORY_LENGTH = 20
 
58
  return "", updated_history
59
 
60
  except Exception as e:
61
+ import traceback
62
+ traceback.print_exc()
63
+ error_msg = f"[ERROR] {type(e).__name__}: {str(e)}"
64
  return "", chat_history + [(message, error_msg)]
65
 
66
  def get_avatar_url():