fix chat format for new Gradio message API
Browse files
app.py
CHANGED
|
@@ -6,13 +6,18 @@ from openai import OpenAI
|
|
| 6 |
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
|
| 7 |
|
| 8 |
def chat_with_openai(message, history):
    """Generate one assistant reply for a gr.ChatInterface chat.

    Parameters
    ----------
    message : str
        The user's newest input.
    history : list[dict]
        Prior turns in Gradio's ``type="messages"`` format, i.e. dicts
        with at least ``"role"`` and ``"content"`` keys.

    Returns
    -------
    dict
        A single ``{"role": "assistant", "content": ...}`` message.
        ChatInterface appends the returned message itself — returning
        ``history + [...]`` would re-append (duplicate) every prior turn.
    """
    # System prompt first, then prior turns, then the new user message.
    messages = [{"role": "system", "content": "You are Neuro, a concise helpful AI assistant."}]
    # Forward only the keys the OpenAI API accepts: Gradio message dicts
    # may carry extra fields (e.g. "metadata") that the endpoint rejects.
    for turn in history:
        messages.append({"role": turn["role"], "content": turn["content"]})
    messages.append({"role": "user", "content": message})

    # call model
    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=messages,
    )
    reply = response.choices[0].message.content
    return {"role": "assistant", "content": reply}
|
| 29 |
|
| 30 |
demo = gr.ChatInterface(
|
| 31 |
fn=chat_with_openai,
|