trinity-tandon committed on
Commit
5893ff1
·
verified ·
1 Parent(s): 88c327d

Created a respond method

Browse files
Files changed (1) hide show
  1. app.py +15 -1
app.py CHANGED
@@ -1,11 +1,25 @@
1
  import gradio as gr
2
  import random
 
 
 
3
 
4
def echo(message, history):
    """Return a random canned answer to any chat message.

    Both *message* and *history* are part of Gradio's chat-function
    signature but are intentionally unused — every reply is drawn
    uniformly at random from a fixed set of answers.
    """
    answers = ("yes", "no", "silly ahh question", "not likely", "ask again", "absolutely", "YASSS")
    return random.choice(answers)
 
 
 
 
 
 
 
 
 
 
 
8
 
9
# Wrap the echo function in a Gradio chat UI and serve the app.
chatbot = gr.ChatInterface(echo, type="messages", title="Chatbot for KWK")
chatbot.launch()
 
1
  import gradio as gr
2
  import random
3
+ from huggingface_hub import InferenceClient
4
# Hosted Inference API client pinned to the Zephyr-7B beta chat model.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
6
 
7
def echo(message, history):
    """Reply with one of several canned answers, chosen at random.

    The *message* and *history* parameters exist only to satisfy
    Gradio's chat-function interface; they do not influence the reply.
    """
    answers = ("yes", "no", "silly ahh question", "not likely", "ask again", "absolutely", "YASSS")
    return random.choice(answers)
11
+
12
+ def respond(message, history):
13
+ messages = [{"role": "system", "content": "You are a friendly chatbot."}]
14
+ if history:
15
+ messages.extend(history)
16
+ messages.append({"role": "user", "content": message})
17
+ response = client.chat_completion(
18
+ messages,
19
+ max_tokens=100
20
+ )
21
+ return response['choices'][0]['message']['content'].strip()
22
 
23
# Build the chat UI around the model-backed respond() and serve it.
chatbot = gr.ChatInterface(respond, type="messages")
chatbot.launch()