File size: 1,885 Bytes
a82b493
92e13b2
ea69845
6303609
 
350b367
6303609
 
350b367
 
 
 
 
 
 
 
6303609
 
 
 
 
 
350b367
6303609
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
import os 
import gradio as gr
from openai import OpenAI
from langfuse.openai import openai
from langfuse.decorators import observe

# Shared OpenAI client; picks up OPENAI_API_KEY from the environment by default.
client = OpenAI()
# Assistant to run on every thread; KeyError here fails fast at import if unset.
assistant_id=os.environ["OPENAI_ASSISTANT_ID"]

def new_state():
    """Create the per-session Gradio state holding the OpenAI thread id.

    Returns:
        gr.State wrapping ``{"thread_id": None}`` — the schema that
        ``get_thread_id`` reads and mutates. (The previous keys
        ``"user"``/``"thread"`` were never read anywhere and caused a
        KeyError when ``get_thread_id`` looked up ``"thread_id"``.)
    """
    return gr.State({
        "thread_id": None,
    })


def get_thread_id(streaming_chat_thread_id_state):
    """Return the OpenAI thread id for this chat session, creating one lazily.

    Args:
        streaming_chat_thread_id_state: the session-state dict (Gradio passes
            the *value* of the gr.State, i.e. a plain dict) with key
            ``"thread_id"``; may be falsy on the first call.

    Returns:
        str: an OpenAI Assistants thread id. A new thread is created and
        cached in the state dict when none exists yet.
    """
    if not streaming_chat_thread_id_state:
        # Fall back to a plain dict, NOT gr.State(...): at runtime we hold the
        # state's value, and gr.State objects are not subscriptable, so the
        # old fallback crashed on the lookup below.
        streaming_chat_thread_id_state = {"thread_id": None}
    print("======thread_id======")
    thread_id = streaming_chat_thread_id_state["thread_id"]
    print(thread_id)

    if not thread_id:
        # First message of the session: open a fresh Assistants thread and
        # remember it so follow-up turns reuse the same conversation.
        thread = client.beta.threads.create()
        thread_id = thread.id
        print(f"new thread_id: {thread_id}")
        streaming_chat_thread_id_state["thread_id"] = thread_id
    else:
        print(f"old thread_id: {thread_id}")
    return thread_id

@observe()
def openai_chat(user_message, thread_id):
    """Append the user's message to the thread and stream the assistant reply.

    Args:
        user_message: the text typed by the user.
        thread_id: id of the OpenAI Assistants thread to post to.

    Yields:
        str: the cumulative assistant response so far, once per text delta,
        so the UI can render the reply progressively.
    """
    client.beta.threads.messages.create(
        thread_id=thread_id,
        role="user",
        content=user_message,
    )

    # Run the assistant and forward each growing prefix of its answer.
    pieces = []
    with client.beta.threads.runs.stream(
        thread_id=thread_id,
        assistant_id=assistant_id,
    ) as stream:
        for piece in stream.text_deltas:
            pieces.append(piece)
            yield "".join(pieces)

def chat(user_message, history, streaming_chat_thread_id_state):
    """Gradio ChatInterface callback: resolve the session thread, then stream.

    Args:
        user_message: latest user input from the chat box.
        history: prior (user, bot) turns supplied by Gradio (unused here;
            the Assistants thread already holds the conversation).
        streaming_chat_thread_id_state: session-state dict carrying
            ``"thread_id"`` (the value of the gr.State additional input).

    Yields:
        str: progressively longer assistant responses.
    """
    session_thread = get_thread_id(streaming_chat_thread_id_state)
    yield from openai_chat(user_message, session_thread)

# Module-level wiring for the Gradio app (runs on import).
history = []
# Session state shared across turns; passed to `chat` as an additional input.
streaming_chat_thread_id_state = new_state()
# streaming_chat_thread_id_state = gr.State({ "thread_id": None})
additional_inputs = [streaming_chat_thread_id_state]

# Launch the chat UI; debug=True blocks and prints errors to the console.
gr.ChatInterface(chat, examples=[["What are the troy rack mounting options?"], ["what are the steps for helen"], ["what are the main comparison between troy and janus specs"]], title="User Support Bot", additional_inputs=additional_inputs).launch(debug=True)