# si_support/app.py — Gradio user-support chatbot backed by an OpenAI Assistant.
import os
import gradio as gr
from openai import OpenAI
from langfuse.openai import openai
from langfuse.decorators import observe
# Shared OpenAI client; reads OPENAI_API_KEY from the environment.
# NOTE(review): the `langfuse.openai` import above patches the openai module
# for tracing, but this client is constructed from the plain `OpenAI` import —
# confirm Langfuse tracing actually applies to these calls.
client = OpenAI()
# Id of the pre-configured Assistant to run; raises KeyError if the env var is unset.
assistant_id=os.environ["OPENAI_ASSISTANT_ID"]
def new_state():
    """Create the per-session Gradio state carried through chat callbacks.

    Returns:
        gr.State: wraps a dict with keys "user" (unused placeholder) and
        "thread_id" (the OpenAI Assistants thread id, filled in lazily by
        get_thread_id()).

    Bug fixed: the original seeded the key "thread", but get_thread_id()
    reads state["thread_id"], which raised KeyError on the first message.
    """
    return gr.State({
        "user": None,
        "thread_id": None,
    })
def get_thread_id(streaming_chat_thread_id_state):
    """Return the OpenAI thread id for this session, creating one on first use.

    Args:
        streaming_chat_thread_id_state: per-session state dict (the *value*
            of the gr.State, as delivered to Gradio callbacks); may be
            None/empty on the very first call.

    Returns:
        str: an OpenAI Assistants API thread id. When a new thread is
        created, its id is persisted by mutating the state dict in place.
    """
    # Fall back to a plain dict. The original wrapped this fallback in
    # gr.State(...), but gr.State components do not support item access —
    # only the underlying dict value does.
    if not streaming_chat_thread_id_state:
        streaming_chat_thread_id_state = {"thread_id": None}
    # .get() tolerates state dicts created without a "thread_id" key.
    thread_id = streaming_chat_thread_id_state.get("thread_id")
    if not thread_id:
        # First message in this session: create a fresh Assistants thread
        # and cache its id in the session state.
        thread = client.beta.threads.create()
        thread_id = thread.id
        print(f"new thread_id: {thread_id}")
        streaming_chat_thread_id_state["thread_id"] = thread_id
    else:
        print(f"old thread_id: {thread_id}")
    return thread_id
@observe()
def openai_chat(user_message, thread_id):
    """Post *user_message* to the Assistants thread and stream the reply.

    Yields the accumulated response text after every new token delta, so
    the UI can render a progressively growing assistant message.
    """
    # Append the user's message to the existing conversation thread.
    client.beta.threads.messages.create(
        thread_id=thread_id,
        role="user",
        content=user_message
    )
    # Run the assistant on the thread and surface partial text as it arrives.
    with client.beta.threads.runs.stream(
        thread_id=thread_id,
        assistant_id=assistant_id,
    ) as stream:
        pieces = []
        for chunk in stream.text_deltas:
            pieces.append(chunk)
            yield ''.join(pieces)
def chat(user_message, history, streaming_chat_thread_id_state):
    """Gradio ChatInterface callback: stream the assistant's answer.

    `history` is supplied by ChatInterface but unused here — the
    conversation lives server-side in the OpenAI thread tracked by the
    per-session state dict.
    """
    session_thread = get_thread_id(streaming_chat_thread_id_state)
    yield from openai_chat(user_message, session_thread)
# Per-session state carrying the OpenAI thread id (see get_thread_id()).
# Cleanup: removed the unused module-level `history = []` and a
# commented-out duplicate of this state initialisation.
streaming_chat_thread_id_state = new_state()
additional_inputs = [streaming_chat_thread_id_state]

# Launched at module level (no __main__ guard) so the Space runner,
# which imports app.py, still starts the UI.
gr.ChatInterface(
    chat,
    examples=[
        ["What are the troy rack mounting options?"],
        ["what are the steps for helen"],
        ["what are the main comparison between troy and janus specs"],
    ],
    title="User Support Bot",
    additional_inputs=additional_inputs,
).launch(debug=True)