Thomas Manning committed on
Commit
cd6e68b
·
1 Parent(s): 48217bb

Fix key handling, limit total messages to 20

Browse files
Files changed (2) hide show
  1. .gitignore +1 -0
  2. app.py +7 -4
.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ .history
app.py CHANGED
@@ -14,9 +14,6 @@ def seed_submit(seed, history):
14
 
15
  def bot_update(history, key, alice_system, alice_temp, alice_model, bob_system, bob_temp, bob_model):
16
 
17
- if key:
18
- os.environ['OPENAI_API_KEY'] = key
19
-
20
  while True:
21
  if history[-1][1] is None: # Bob's turn
22
  messages = [
@@ -27,6 +24,7 @@ def bot_update(history, key, alice_system, alice_temp, alice_model, bob_system,
27
  if resp:
28
  messages.append({"role": "assistant", "content": resp.replace(bob_prefix, '', 1)})
29
  resp = openai.ChatCompletion.create(
 
30
  model=bob_model,
31
  messages=messages,
32
  temperature=bob_temp
@@ -43,13 +41,18 @@ def bot_update(history, key, alice_system, alice_temp, alice_model, bob_system,
43
  if resp:
44
  messages.append({"role": "user", "content": resp.replace(bob_prefix, '', 1)})
45
  resp = openai.ChatCompletion.create(
 
46
  model=alice_model,
47
  messages=messages,
48
  temperature=alice_temp
49
  )
50
  alice_response = resp['choices'][0]['message']['content']
51
  history.append([f'{alice_prefix}{alice_response}', None])
52
- yield history
 
 
 
 
53
 
54
 
55
  with gr.Blocks() as demo:
 
14
 
15
  def bot_update(history, key, alice_system, alice_temp, alice_model, bob_system, bob_temp, bob_model):
16
 
 
 
 
17
  while True:
18
  if history[-1][1] is None: # Bob's turn
19
  messages = [
 
24
  if resp:
25
  messages.append({"role": "assistant", "content": resp.replace(bob_prefix, '', 1)})
26
  resp = openai.ChatCompletion.create(
27
+ api_key=key,
28
  model=bob_model,
29
  messages=messages,
30
  temperature=bob_temp
 
41
  if resp:
42
  messages.append({"role": "user", "content": resp.replace(bob_prefix, '', 1)})
43
  resp = openai.ChatCompletion.create(
44
+ api_key=key,
45
  model=alice_model,
46
  messages=messages,
47
  temperature=alice_temp
48
  )
49
  alice_response = resp['choices'][0]['message']['content']
50
  history.append([f'{alice_prefix}{alice_response}', None])
51
+
52
+ if len(messages) >= 10:
53
+ return history
54
+ else:
55
+ yield history
56
 
57
 
58
  with gr.Blocks() as demo: