File size: 4,072 Bytes
beb9ce6
 
 
 
 
7c67e12
 
beb9ce6
 
 
c3b735b
beb9ce6
 
a7bec97
 
beb9ce6
 
7c67e12
 
 
b14ca36
7c67e12
beb9ce6
 
d80d48d
0f87b5f
f75f3a4
 
 
beb9ce6
7eabdd5
beb9ce6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
12c6795
beb9ce6
12c6795
beb9ce6
 
1b7769d
 
beb9ce6
 
 
2b9ffad
 
b14ca36
beb9ce6
 
 
 
 
 
 
 
 
 
 
 
 
0f87b5f
7c67e12
 
beb9ce6
 
11ea324
2731969
 
13c819a
 
beb9ce6
 
 
 
fbdb327
beb9ce6
 
084fb78
beb9ce6
 
 
 
b14ca36
beb9ce6
 
 
 
7c67e12
 
 
1b7769d
beb9ce6
12c6795
 
 
 
beb9ce6
 
 
a7bec97
beb9ce6
 
 
d80d48d
beb9ce6
 
7c67e12
 
beb9ce6
 
7eabdd5
6b05307
7eabdd5
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
import gradio as gr
import cohere
import os
import re
import uuid
import secrets



# Cohere client built from the environment; COHERE_API_KEY must be set in the
# deployment. client_name tags requests as coming from this Hugging Face app.
cohere_api_key = os.getenv("COHERE_API_KEY")
co = cohere.Client(cohere_api_key, client_name="huggingface-rp")


def trigger_example(example):
    """Run an example prompt to completion and return the final chat pairs.

    Wired to gr.Examples with ``outputs=[chatbot]`` (a single output), so this
    returns only the chat list. The original body called
    ``generate_response(example)`` without the required ``cid``/``token``
    arguments and unpacked the resulting generator into two names, which
    could never succeed.

    Args:
        example: The example prompt text to send.

    Returns:
        The final ``chat`` value (list of (user, bot) tuples) after the
        stream from generate_response is exhausted.
    """
    chat = []
    # generate_response is a generator yielding (chat, history, cid);
    # start a fresh conversation and supply an ad-hoc session token so the
    # token guard passes.
    for chat, _history, _cid in generate_response(example, "", secrets.token_hex(16)):
        pass
    return chat
        
def generate_response(user_message, cid, token, history=None):
    """Stream a Cohere chat completion, yielding Gradio chatbot updates.

    Args:
        user_message: The user's prompt text.
        cid: Conversation id; a fresh UUID is generated when empty or None.
        token: Session token assigned on page load; required to proceed.
        history: Flat list alternating [user, bot, user, bot, ...] turns.
            A new list is created when None (avoids a mutable default).

    Yields:
        (chat, history, cid) tuples, where ``chat`` is a list of
        (user, bot) string pairs formatted for gr.Chatbot.

    Raises:
        gr.Error: When no session token is present.
    """
    if not token:
        raise gr.Error("Vui lòng truy cập website của Cohere để lấy API Key")

    if history is None:
        history = []
    # Fix: the original `if cid == "" or None:` parsed as
    # `(cid == "") or None`, so a None cid was never replaced.
    if not cid:
        cid = str(uuid.uuid4())

    print(f"cid: {cid} prompt:{user_message}")

    history.append(user_message)

    stream = co.chat_stream(
        message=user_message,
        conversation_id=cid,
        model='command-r-plus',
        connectors=[],
        temperature=0.3,
    )

    output = ""

    for idx, response in enumerate(stream):
        if response.event_type == "text-generation":
            output += response.text
        if idx == 0:
            # First event: open the bot turn. The leading space keeps the
            # entry non-empty; .strip() below removes it for display.
            history.append(" " + output)
        else:
            # Subsequent events: overwrite the in-progress bot turn.
            history[-1] = output
        # Pair up [user, bot] turns for gr.Chatbot.
        chat = [
            (history[i].strip(), history[i + 1].strip())
            for i in range(0, len(history) - 1, 2)
        ]
        yield chat, history, cid
    # Fix: dropped the trailing `return chat, history, cid` — in a generator
    # it only sets StopIteration.value, and `chat` would be unbound
    # (NameError) if the stream produced no events.
    

def clear_chat():
    """Reset the UI state: empty chatbot, empty history, new conversation id."""
    fresh_cid = uuid.uuid4()
    return [], [], str(fresh_cid)


# Prompts shown in the gr.Examples panel; clicking one fills the textbox
# (cache_examples=False, so nothing is precomputed).
examples = [
    "What are 8 good questions to get to know a stranger?",
    "Create a list of 10 unusual excuses people might use to get out of a work meeting",
    "Vân vân và mây mây..."
]

custom_css = """
#logo-img {
    border: none !important;
}
#chat-message {
    font-size: 14px;
    min-height: 300px;
}
"""

with gr.Blocks(analytics_enabled=False, css=custom_css) as demo:
    # Per-session state: conversation id and a page-load session token.
    cid = gr.State("")
    token = gr.State(value=None)

    # Header row: logo + description.
    with gr.Row():
        with gr.Column(scale=1):
            gr.Image("https://i.ibb.co/w66DyJz/KTH.jpg", elem_id="logo-img", width=100, height=100, show_label=False, show_share_button=False, show_download_button=False)
        with gr.Column(scale=3):
            gr.Markdown("""C4AI Command R+ sử dụng để nghiên cứu, suy luận rất tốt. Hạn chế dùng trong Tạo bài viết content Facebook hay Quảng cáo.
            <br/><br/>
            """
            )

    with gr.Column():
        with gr.Row():
            chatbot = gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True)

        with gr.Row():
            user_message = gr.Textbox(lines=1, placeholder="Hòi gì đi chứ ...", label="Nhập vào", show_label=False)


        with gr.Row():
            submit_button = gr.Button("Submit")
            clear_button = gr.Button("Xoá đoạn chat")


        # Flat [user, bot, ...] turn list shared with generate_response.
        history = gr.State([])

        user_message.submit(fn=generate_response, inputs=[user_message, cid, token, history], outputs=[chatbot, history, cid], concurrency_limit=32)
        submit_button.click(fn=generate_response, inputs=[user_message, cid, token, history], outputs=[chatbot, history, cid], concurrency_limit=32)

        clear_button.click(fn=clear_chat, inputs=None, outputs=[chatbot, history, cid], concurrency_limit=32)

        # Clear the textbox after each action. Fix: these were
        # `lambda x: ...` with inputs=None — Gradio calls the fn with zero
        # arguments when inputs is None, so every trigger raised TypeError.
        user_message.submit(lambda: gr.update(value=""), None, [user_message], queue=False)
        submit_button.click(lambda: gr.update(value=""), None, [user_message], queue=False)
        clear_button.click(lambda: gr.update(value=""), None, [user_message], queue=False)

        with gr.Row():
            gr.Examples(
                examples=examples,
                inputs=user_message,
                cache_examples=False,
                fn=trigger_example,
                outputs=[chatbot],
                examples_per_page=100
            )

    # Assign a random session token on page load; generate_response
    # refuses to run without it.
    demo.load(lambda: secrets.token_hex(16), None, token)

if __name__ == "__main__":
    try:
        # api_open=False / show_api=False hide the REST API surface;
        # max_size bounds the request queue; share=True exposes a public link.
        demo.queue(api_open=False, max_size=40).launch(show_api=False, share=True)
    except Exception as e:
        # Best-effort boundary: report launch failures instead of crashing.
        print(f"Error: {e}")