sudo-soldier committed on
Commit
b335d43
·
verified ·
1 Parent(s): ab73e6b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -7
app.py CHANGED
@@ -1,7 +1,7 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
- # Initialize Hugging Face model
5
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
6
 
7
  def respond(
@@ -12,20 +12,20 @@ def respond(
12
  temperature,
13
  top_p,
14
  ):
15
- # Initialize messages with the system message (Jesse's identity)
16
  messages = [{"role": "system", "content": system_message}]
17
 
18
- # Add previous chat history to the message stream
19
  for user_msg, assistant_msg in history:
20
  if user_msg:
21
  messages.append({"role": "user", "content": user_msg})
22
  if assistant_msg:
23
  messages.append({"role": "assistant", "content": assistant_msg})
24
 
25
- # Append the latest user message
26
  messages.append({"role": "user", "content": message})
27
 
28
- # Generate and stream the response
29
  response = ""
30
  for message in client.chat_completion(
31
  messages,
@@ -38,7 +38,7 @@ def respond(
38
  response += token
39
  yield response
40
 
41
- # Gradio UI configuration
42
  demo = gr.ChatInterface(
43
  fn=respond,
44
  additional_inputs=[
@@ -61,7 +61,7 @@ demo = gr.ChatInterface(
61
  theme="default"
62
  )
63
 
64
- # Launch the interface
65
  if __name__ == "__main__":
66
  demo.launch()
67
 
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
 
4
+
5
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
6
 
7
  def respond(
 
12
  temperature,
13
  top_p,
14
  ):
15
+
16
  messages = [{"role": "system", "content": system_message}]
17
 
18
+
19
  for user_msg, assistant_msg in history:
20
  if user_msg:
21
  messages.append({"role": "user", "content": user_msg})
22
  if assistant_msg:
23
  messages.append({"role": "assistant", "content": assistant_msg})
24
 
25
+
26
  messages.append({"role": "user", "content": message})
27
 
28
+
29
  response = ""
30
  for message in client.chat_completion(
31
  messages,
 
38
  response += token
39
  yield response
40
 
41
+
42
  demo = gr.ChatInterface(
43
  fn=respond,
44
  additional_inputs=[
 
61
  theme="default"
62
  )
63
 
64
+
65
  if __name__ == "__main__":
66
  demo.launch()
67