mmargg committed on
Commit
e2b1b49
·
verified ·
1 Parent(s): 9040a6b

temp and changed max tokens

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -133,7 +133,7 @@ def respond(message,history):
133
 
134
  messages.append({"role":"user","content": message})
135
 
136
- response=client.chat_completion(messages, max_tokens=100)#capping how many words the LLM is allowed to generate as a respond (100 words)
137
 
138
  return response['choices'][0]['message']['content'].strip() #storing value of response in a readable format to display
139
 
 
133
 
134
  messages.append({"role":"user","content": message})
135
 
136
+ response=client.chat_completion(messages, temperature=0.2, max_tokens=250)# capping how many tokens the LLM is allowed to generate as a response (250 tokens)
137
 
138
  return response['choices'][0]['message']['content'].strip() #storing value of response in a readable format to display
139