mmargg commited on
Commit
9f89ce3
·
verified ·
1 Parent(s): 22fbb3d
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -136,7 +136,7 @@ def respond(message,history):
136
 
137
  response=client.chat_completion(messages, stream=True, max_tokens=100) #capping the response at 100 tokens (max_tokens limits tokens, not words)
138
  for message in client.chat_completion(messages):
139
- token = message.choices[0].delta.content
140
  response+=token
141
  yield response['choices'][0]['message']['content'].strip() #storing value of response in a readable format to display
142
 
 
136
 
137
  response=client.chat_completion(messages, stream=True, max_tokens=100) #capping the response at 100 tokens (max_tokens limits tokens, not words)
138
  for message in client.chat_completion(messages):
139
+ token = message.choices[0].delta.content
140
  response+=token
141
  yield response['choices'][0]['message']['content'].strip() #storing value of response in a readable format to display
142