trinity-tandon committed on
Commit
83f893c
·
verified ·
1 Parent(s): e5e8903

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -4,10 +4,10 @@ from huggingface_hub import InferenceClient
4
 
5
  client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
6
 
7
- #def echo(message, history):
8
- # choices = ["yes", "no", "silly ahh question", "not likely", "ask again", "absolutely", "YASSS"]
9
- # yes_or_no = random.choice(choices)
10
- # return yes_or_no
11
 
12
  def respond(message, history):
13
  messages = [{"role": "system", "content": "You are a sassy chatbot from the 1800s."}]
@@ -21,7 +21,7 @@ def respond(message, history):
21
  max_tokens = 100,
22
  stream = True,
23
  ):
24
- token = messages.choices[0].delta.content
25
  response += token
26
  yield response
27
 
 
4
 
5
  client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
6
 
7
def echo(message, history):
    """Return one randomly chosen canned reply; ignores *message* and *history*."""
    options = ["yes", "no", "silly ahh question", "not likely", "ask again", "absolutely", "YASSS"]
    # Single uniform draw from the fixed answer pool.
    return random.choice(options)
11
 
12
  def respond(message, history):
13
  messages = [{"role": "system", "content": "You are a sassy chatbot from the 1800s."}]
 
21
  max_tokens = 100,
22
  stream = True,
23
  ):
24
+ token = delta.get("choices", [{}])[0].get("delta", {}).get("content", "")
25
  response += token
26
  yield response
27