Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -10,7 +10,14 @@ OPEN_AI_MODEL = "gpt-4-1106-preview"
|
|
| 10 |
|
| 11 |
# thread = gr.State(client.beta.threads.create())
|
| 12 |
|
| 13 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 14 |
def wait_on_run(run, thread):
|
| 15 |
while run.status == "queued" or run.status == "in_progress":
|
| 16 |
run = client.beta.threads.runs.retrieve(
|
|
@@ -101,14 +108,17 @@ def response_evaluation_for_case_tx(thread_id, query, question_text, input, outp
|
|
| 101 |
|
| 102 |
|
| 103 |
def run_chat_in_all_cases(message, history, question_text,input, output, examples, code_written):
|
| 104 |
-
thread = client.beta.threads.create()
|
| 105 |
-
thread_id = thread.id
|
|
|
|
|
|
|
|
|
|
| 106 |
if not message and not code_written:
|
| 107 |
-
ai_message = opening_statement(thread_id, question_text, input, output, examples,
|
| 108 |
if not code_written:
|
| 109 |
-
ai_message = get_response_for_case_t0(thread_id, message, question_text, input, output, examples,
|
| 110 |
else:
|
| 111 |
-
ai_message = response_evaluation_for_case_tx(thread_id, message, question_text, input, output, examples, code_written,
|
| 112 |
print({"question_text":question_text, "input":input, "output":output, "examples":examples,
|
| 113 |
"user_code":code_written, "query":message, "ai_message":ai_message})
|
| 114 |
return ai_message
|
|
|
|
| 10 |
|
| 11 |
# thread = gr.State(client.beta.threads.create())
|
| 12 |
|
| 13 |
+
def initialize_thread():
|
| 14 |
+
session = gr.interface.get_session()
|
| 15 |
+
if "thread_id" not in session:
|
| 16 |
+
thread = client.beta.threads.create()
|
| 17 |
+
session["thread_id"] = thread.id
|
| 18 |
+
session["thread"] = thread
|
| 19 |
+
return session["thread_id"], session["thread"]
|
| 20 |
+
|
| 21 |
def wait_on_run(run, thread):
|
| 22 |
while run.status == "queued" or run.status == "in_progress":
|
| 23 |
run = client.beta.threads.runs.retrieve(
|
|
|
|
| 108 |
|
| 109 |
|
| 110 |
def run_chat_in_all_cases(message, history, question_text,input, output, examples, code_written):
    """Route one chat turn to the appropriate assistant handler.

    Dispatch rules:
      * no message and no code yet  -> opening_statement (greeting/setup turn)
      * no code written (but a message) -> get_response_for_case_t0
      * code written -> response_evaluation_for_case_tx

    Args:
        message: The user's chat message (may be empty on the first turn).
        history: Chat history (unused here; required by the Gradio chat signature).
        question_text, input, output, examples: Problem statement context
            forwarded verbatim to the handler.
        code_written: The user's submitted code, if any.

    Returns:
        The assistant's reply string produced by the selected handler.
    """
    # Reuse one OpenAI thread across turns instead of creating a new thread
    # per message (the previous behavior, left here as history):
    # thread = client.beta.threads.create()
    # thread_id = thread.id
    thread_info = initialize_thread()
    print(thread_info)
    thread_id, thread = thread_info
    if not message and not code_written:
        ai_message = opening_statement(thread_id, question_text, input, output, examples, thread)
    elif not code_written:
        # BUG FIX: this was a plain `if`, so on the opening turn (empty
        # message, no code) the opening statement was computed and then
        # immediately overwritten by get_response_for_case_t0.
        ai_message = get_response_for_case_t0(thread_id, message, question_text, input, output, examples, thread)
    else:
        ai_message = response_evaluation_for_case_tx(thread_id, message, question_text, input, output, examples, code_written, thread)
    print({"question_text":question_text, "input":input, "output":output, "examples":examples,
           "user_code":code_written, "query":message, "ai_message":ai_message})
    return ai_message
|