mmargg committed on
Commit
0343306
·
verified ·
1 Parent(s): ba02dad
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -20,7 +20,7 @@ def respond(message,history):
20
 
21
 response=client.chat_completion(messages, temperature=0.8, stream=True, max_tokens=100) #capping the response length at 100 tokens (note: tokens, not words)
22
 
23
- for message in client.chat_completion():
24
  token=message.choices[0].delta.content
25
  response+=token
26
  yield response['choices'][0]['message']['content'].strip() #storing value of response in a readable format to display
 
20
 
21
 response=client.chat_completion(messages, temperature=0.8, stream=True, max_tokens=100) #capping the response length at 100 tokens (note: tokens, not words)
22
 
23
+ for message in client.chat_completion(messages):
24
  token=message.choices[0].delta.content
25
  response+=token
26
  yield response['choices'][0]['message']['content'].strip() #storing value of response in a readable format to display