Commit ·
e9af4ac
1
Parent(s): f3502d2
Added API
Browse files- blubeAPI.py +101 -2
blubeAPI.py
CHANGED
|
@@ -1,5 +1,18 @@
|
|
| 1 |
import gradio as gr
|
| 2 |
import openai
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 3 |
|
| 4 |
def format(data: str):
|
| 5 |
data = data.replace("['", "")
|
|
@@ -9,7 +22,7 @@ def format(data: str):
|
|
| 9 |
data = data.replace("\\\'", "\'")
|
| 10 |
data = data.replace("\\\n", "")
|
| 11 |
return data
|
| 12 |
-
def runbot(
|
| 13 |
iface = gr.Interface(fn=chatbot,
|
| 14 |
inputs=gr.components.Textbox(lines=7, label="Enter your text"),
|
| 15 |
outputs="text",
|
|
@@ -51,4 +64,90 @@ def getAssistant():
|
|
| 51 |
def getPersona():
|
| 52 |
return persona
|
| 53 |
def getThread():
|
| 54 |
-
return thread
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import json
import os
import sys
import time

import gradio as gr
import openai
from gpt_index import SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
from langchain.chat_models import ChatOpenAI
from openai import OpenAI

# SECURITY FIX: a previous revision hard-coded an OpenAI API key on this line
# and committed it to the repository. Never embed secrets in source — the
# leaked key must be revoked. The key is now read from the environment, which
# is where the OpenAI client looks for it by default.
if "OPENAI_API_KEY" not in os.environ:
    raise RuntimeError(
        "OPENAI_API_KEY is not set. Export it in the environment instead of "
        "hard-coding it in source."
    )

# Shared OpenAI client used by the assistant/thread helpers below.
client = OpenAI()
|
| 16 |
|
| 17 |
def format(data: str):
|
| 18 |
data = data.replace("['", "")
|
|
|
|
| 22 |
data = data.replace("\\\'", "\'")
|
| 23 |
data = data.replace("\\\n", "")
|
| 24 |
return data
|
| 25 |
+
def runbot():
|
| 26 |
iface = gr.Interface(fn=chatbot,
|
| 27 |
inputs=gr.components.Textbox(lines=7, label="Enter your text"),
|
| 28 |
outputs="text",
|
|
|
|
| 64 |
def getPersona():
    # Accessor for the module-level `persona` value (defined elsewhere in the
    # file — presumably the instruction string handed to the assistant run;
    # TODO confirm against where `persona` is assigned).
    return persona
def getThread():
    # Accessor for the module-level `thread` object (defined elsewhere in the
    # file; not used by `chatbot`, which creates its own thread per call).
    return thread
|
| 68 |
+
def chatbot(input_text):
    """Send `input_text` to the configured OpenAI assistant and return the reply text.

    Creates a fresh thread, posts the user message, starts a run with the
    stored persona as instructions, polls until the run finishes, then joins
    the thread's messages (all but the last entry) into a cleaned-up string.

    Parameters:
        input_text: the user's prompt, forwarded verbatim to the thread.

    Returns:
        A string of collected message texts, post-processed by `format`.

    Raises:
        RuntimeError: if the run ends in the "failed" or "cancelled" state.
    """
    my_assistant = getAssistant()
    persona = getPersona()

    print("(1/4) Message sent, creating thread...\n")
    # Create a thread for the assistant
    thread = client.beta.threads.create()
    message = client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content=f"{input_text}"
    )

    print("(2/4) Thread created, running thread...\n")

    # Run the thread
    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=my_assistant.id,
        instructions=f"{persona}"
    )
    print("(3/4) Thread active, getting thread...\n")

    # Get the thread
    run = client.beta.threads.runs.retrieve(
        thread_id=thread.id,
        run_id=run.id
    )
    print("(4/4) Thread received, awaiting chat completion\n")

    # Wait for completion. `t` counts ticks since the last slow-run warning,
    # `tt` counts ticks in total.
    t: int = 0
    tt = 0
    errorCode = 0
    while run.status != "completed":
        run = client.beta.threads.runs.retrieve(
            thread_id=thread.id, run_id=run.id
        )
        s = run.status

        if s == "failed":
            print(f"Process failed after {tt} ticks at {run.failed_at} with error stack:\n{run.last_error}\n\n")
            errorCode = 1
            break
        elif s == "cancelled":
            print(f"Process cancelled after {tt} ticks at {run.cancelled_at} with error stack:\n{run.last_error}\n\n")
            errorCode = 2
            # BUGFIX: the original never left the loop on "cancelled", so it
            # polled the API forever (a cancelled run never reaches "completed").
            break
        elif s == "in_progress" or s == "queued":
            t = t + 1
            tt = tt + 1
        else:
            print(s)
        if t > 15:
            print(f"Time taken is over 15 ticks, average time is 11, check issues.\n\nINFO: Status is {s} and the total ticks is {tt}")
            t = 0
        # BUGFIX: the original busy-waited with back-to-back retrieve calls;
        # pause between polls so we don't hammer the API.
        time.sleep(1)

    if errorCode == 1:
        # BUGFIX: the original raised BEFORE sleeping, so the 20-second
        # cooldown promised in the message was dead code. Sleep first.
        time.sleep(20)
        print("\n20 seconds is over: requests are up")
        raise RuntimeError("\n\nThe process finished with an error: Process failed unexpectedly. Check the command prompt for more details. This error is most likely a overuse error, so the program will sleep for 20 seconds to avoid more errors\n\n")
    elif errorCode == 2:
        raise RuntimeError("The process finished with an error: Process cancelled unexpectedly. Check the command prompt for more details.")
    else:
        messages = client.beta.threads.messages.list(
            thread_id=thread.id
        )
        messages = messages.json()
        print(f"Process completed in {tt} ticks at {run.completed_at}")

        # Persist the raw payload for inspection; `with` guarantees the file
        # is closed (the original leaked two open handles).
        with open("message.json", "w") as mesf:
            mesf.write(messages)

        # BUGFIX/idiom: parse the JSON string we already hold instead of
        # re-reading the file we just wrote.
        mess = json.loads(messages)["data"]

        # Collect every message except the last list entry — the original
        # deliberately iterated range(len - 1); presumably that trailing entry
        # is the user's own prompt (TODO confirm ordering of the messages API).
        all_msgs = [m["content"][0]["text"]["value"] for m in mess[:-1]]

        all_msgs = str(all_msgs)
        all_msgs = format(all_msgs)
        return all_msgs
|