skinapi committed on
Commit
5220d79
·
1 Parent(s): a704879

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -4
app.py CHANGED
@@ -23,8 +23,8 @@ def submit_message(user_token, prompt, prompt_template, temperature, max_tokens,
23
  prompt_msg = { "role": "user", "content": prompt }
24
 
25
  try:
26
- response = openai.Completion.create(
27
- engine="davinci",
28
  prompt=system_prompt + history + [prompt_msg],
29
  temperature=temperature,
30
  max_tokens=max_tokens
@@ -47,7 +47,6 @@ def submit_message(user_token, prompt, prompt_template, temperature, max_tokens,
47
 
48
  total_tokens_used_msg = f"Total tokens used: {state['total_tokens']} / 3000" if not user_token else ""
49
  chat_messages = [(history[i]['content'], history[i+1]['content']) for i in range(0, len(history)-1, 2)]
50
- input_visibility
51
 
52
  return {
53
  'chat_messages': chat_messages,
@@ -64,7 +63,6 @@ state = {
64
  @app.route('/chat', methods=['POST'])
65
  def chat():
66
  data = request.json
67
- user_token = data.get('user_token')
68
  prompt = data.get('prompt')
69
  prompt_template = data.get('prompt_template')
70
  temperature = data.get('temperature', 0.5)
 
23
  prompt_msg = { "role": "user", "content": prompt }
24
 
25
  try:
26
+ response = openai.ChatCompletion.create(
27
+ model="gpt-3.5-turbo",
28
  prompt=system_prompt + history + [prompt_msg],
29
  temperature=temperature,
30
  max_tokens=max_tokens
 
47
 
48
  total_tokens_used_msg = f"Total tokens used: {state['total_tokens']} / 3000" if not user_token else ""
49
  chat_messages = [(history[i]['content'], history[i+1]['content']) for i in range(0, len(history)-1, 2)]
 
50
 
51
  return {
52
  'chat_messages': chat_messages,
 
63
  @app.route('/chat', methods=['POST'])
64
  def chat():
65
  data = request.json
 
66
  prompt = data.get('prompt')
67
  prompt_template = data.get('prompt_template')
68
  temperature = data.get('temperature', 0.5)