Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -10,13 +10,14 @@ def predict(inputs, top_p, temperature, openai_api_key, chat_counter, chatbot=[]
|
|
| 10 |
response = make_request(API_URL, openai_api_key, payload)
|
| 11 |
|
| 12 |
# Processar a resposta
|
| 13 |
-
|
| 14 |
|
| 15 |
-
# Atualizar o histórico apenas se houver novos tokens
|
| 16 |
if token_counter > 0:
|
|
|
|
| 17 |
history = new_history
|
| 18 |
|
| 19 |
-
return
|
| 20 |
|
| 21 |
def format_messages(chatbot, inputs, chat_counter):
|
| 22 |
messages = []
|
|
@@ -54,7 +55,7 @@ def process_response(response, history):
|
|
| 54 |
for chunk in response.iter_lines():
|
| 55 |
if chunk:
|
| 56 |
chunk_str = chunk.decode('utf-8').lstrip('data: ')
|
| 57 |
-
if "[DONE]"
|
| 58 |
break
|
| 59 |
try:
|
| 60 |
chunk_json = json.loads(chunk_str)
|
|
|
|
| 10 |
response = make_request(API_URL, openai_api_key, payload)
|
| 11 |
|
| 12 |
# Processar a resposta
|
| 13 |
+
new_chatbot, new_history, token_counter = process_response(response, history)
|
| 14 |
|
| 15 |
+
# Atualizar o chatbot e o histórico apenas se houver novos tokens
|
| 16 |
if token_counter > 0:
|
| 17 |
+
chatbot.extend(new_chatbot)
|
| 18 |
history = new_history
|
| 19 |
|
| 20 |
+
return chatbot, history, chat_counter
|
| 21 |
|
| 22 |
def format_messages(chatbot, inputs, chat_counter):
|
| 23 |
messages = []
|
|
|
|
| 55 |
for chunk in response.iter_lines():
|
| 56 |
if chunk:
|
| 57 |
chunk_str = chunk.decode('utf-8').lstrip('data: ')
|
| 58 |
+
if chunk_str.strip() in ["[DONE]", "}"]: # Adiciona } à verificação
|
| 59 |
break
|
| 60 |
try:
|
| 61 |
chunk_json = json.loads(chunk_str)
|