GueuleDange committed on
Commit
8ea8dc0
·
verified ·
1 Parent(s): 832a6f5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -24
app.py CHANGED
@@ -1,6 +1,9 @@
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
 
 
4
 
5
  def respond(
6
  message,
@@ -9,35 +12,40 @@ def respond(
9
  max_tokens,
10
  temperature,
11
  top_p,
12
- hf_token: gr.OAuthToken,
13
  ):
14
  """
15
  For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
16
  """
17
- client = InferenceClient(token=hf_token.token, model="openai/gpt-oss-20b")
 
 
 
 
 
18
 
19
- messages = [{"role": "system", "content": system_message}]
 
 
20
 
21
- messages.extend(history)
22
 
23
- messages.append({"role": "user", "content": message})
 
 
 
 
 
 
 
 
 
 
24
 
25
- response = ""
26
-
27
- for message in client.chat_completion(
28
- messages,
29
- max_tokens=max_tokens,
30
- stream=True,
31
- temperature=temperature,
32
- top_p=top_p,
33
- ):
34
- choices = message.choices
35
- token = ""
36
- if len(choices) and choices[0].delta.content:
37
- token = choices[0].delta.content
38
-
39
- response += token
40
- yield response
41
 
42
 
43
  """
@@ -61,10 +69,15 @@ chatbot = gr.ChatInterface(
61
  )
62
 
63
  with gr.Blocks() as demo:
64
- with gr.Sidebar():
65
- gr.LoginButton()
 
 
 
 
 
66
  chatbot.render()
67
 
68
 
69
  if __name__ == "__main__":
70
- demo.launch()
 
1
+ import os
2
  import gradio as gr
3
  from huggingface_hub import InferenceClient
4
 
5
# Read the Hugging Face API token from the environment (configured as a
# Space "Secret"). May be None when the secret is missing; respond()
# checks for that and reports it to the user instead of crashing.
HF_TOKEN = os.environ.get("HF_TOKEN")
7
 
8
def respond(
    message,
    # NOTE(review): the next two parameter lines are elided in the diff view;
    # reconstructed from the standard gr.ChatInterface callback signature
    # (the body reads both `history` and `system_message`) — confirm.
    history,
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion from the Hugging Face Inference API.

    Standard ``gr.ChatInterface`` callback: receives the new user
    ``message``, the chat ``history``, the ``system_message`` prompt and
    the sampling controls, and yields the partial response string as
    tokens arrive so Gradio renders the reply incrementally.

    For more information on `huggingface_hub` Inference API support, see:
    https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
    """
    # Fail fast with a user-visible message when the Space secret is missing.
    if not HF_TOKEN:
        yield "❌ Erreur : HF_TOKEN non configuré. Ajoutez-le dans les Secrets de votre Space."
        return

    try:
        client = InferenceClient(token=HF_TOKEN, model="openai/gpt-oss-20b")

        messages = [{"role": "system", "content": system_message}]
        messages.extend(history)
        messages.append({"role": "user", "content": message})

        response = ""
        # Fix: the original iterated with `for message in ...`, shadowing the
        # `message` parameter. A distinct loop name keeps the parameter intact.
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            choices = chunk.choices
            # Some stream events carry no delta content; skip those.
            if choices and choices[0].delta.content:
                response += choices[0].delta.content
            # Yield the accumulated text each event so the UI updates live.
            yield response

    except Exception as e:  # broad by design: surface any API error in the chat UI
        yield f"❌ Erreur : {str(e)}"
 
 
 
 
 
 
 
 
 
 
 
49
 
50
 
51
  """
 
69
  )
70
 
71
with gr.Blocks() as demo:
    # Debug aid: show whether the HF_TOKEN secret is configured.
    token_status = "✅ Oui" if HF_TOKEN else "❌ Non"
    gr.Markdown(f"Token configuré: {token_status}")

    # No LoginButton/sidebar here: authentication uses the HF_TOKEN
    # environment secret directly instead of OAuth.
    chatbot.render()
80
 
81
 
82
# Launch the Gradio app when executed as a script (standard Spaces entry point).
if __name__ == "__main__":
    demo.launch()