Muhammadidrees committed on
Commit
4ee5aad
·
verified ·
1 Parent(s): d653395

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -1,3 +1,6 @@
 
 
 
1
  import os
2
  import gradio as gr
3
  from openai import OpenAI
@@ -13,7 +16,6 @@ def chat_with_model(user_message, history):
13
  if history is None:
14
  history = []
15
 
16
- # prepare messages for the API
17
  messages = [{"role": "system", "content": "You are a helpful assistant."}]
18
  for human, bot in history:
19
  messages.append({"role": "user", "content": human})
@@ -29,7 +31,6 @@ def chat_with_model(user_message, history):
29
 
30
  reply = completion.choices[0].message["content"]
31
 
32
- # MUST be appended as tuple
33
  history.append((user_message, reply))
34
  return history
35
 
@@ -39,7 +40,7 @@ def chat_with_model(user_message, history):
39
 
40
 
41
  with gr.Blocks() as demo:
42
- gr.Markdown("# 🤖 Chat with HuggingFace Router (OpenAI Compatible)")
43
 
44
  chatbot = gr.Chatbot(height=500)
45
  text_input = gr.Textbox(label="Your message")
 
1
+ import asyncio
2
+ asyncio.set_event_loop_policy(asyncio.DefaultEventLoopPolicy()) # <<< FIXES THE ERROR
3
+
4
  import os
5
  import gradio as gr
6
  from openai import OpenAI
 
16
  if history is None:
17
  history = []
18
 
 
19
  messages = [{"role": "system", "content": "You are a helpful assistant."}]
20
  for human, bot in history:
21
  messages.append({"role": "user", "content": human})
 
31
 
32
  reply = completion.choices[0].message["content"]
33
 
 
34
  history.append((user_message, reply))
35
  return history
36
 
 
40
 
41
 
42
  with gr.Blocks() as demo:
43
+ gr.Markdown("# 🤖 Chat with HuggingFace Router (OpenAI API Compatible)")
44
 
45
  chatbot = gr.Chatbot(height=500)
46
  text_input = gr.Textbox(label="Your message")