File size: 2,236 Bytes
0e16cd8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
from gradio_client import Client
import gradio as gr
import os
import time

# Remote assistant backend. The target Space and the auth token come from the
# environment; Client() contacts the Space at import time, so this module
# fails fast if ASSISTANT_SPACE / HF_TOKEN are missing or invalid.
client = Client(os.getenv('ASSISTANT_SPACE'), hf_token=os.getenv('HF_TOKEN'))

def predict(history):
    """Stream the assistant's reply into the last chat turn.

    Submits the full chat history to the remote Space's ``/get_response``
    endpoint and yields the growing history after each streamed chunk so
    Gradio re-renders the bot message incrementally.

    Args:
        history: Chatbot history as a list of ``[user, bot]`` pairs; the
            last entry's bot slot is overwritten with the streamed reply.

    Yields:
        The mutated ``history`` list after each chunk arrives.
    """
    # NOTE(review): the history is wrapped in a Dataframe-style payload
    # ({"headers": ..., "data": ..., "metadata": ...}) — presumably the
    # remote endpoint declares a Dataframe input; confirm against the Space.
    history[-1][1] = ""
    job = client.submit(history={"headers": ["1"], "data": history, "metadata": None},
                        api_name="/get_response")
    for chunk in job:
        history[-1][1] += chunk
        yield history

def add_message(history, message):
    """Record a user submission (files and/or text) in the chat history.

    Args:
        history: Current chatbot history (list of ``[user, bot]`` pairs),
            or None on the very first message.
        message: MultimodalTextbox value dict with "files" and "text" keys.

    Returns:
        Tuple of (updated history, a cleared, temporarily non-interactive
        MultimodalTextbox) — the box is re-enabled once the bot replies.
    """
    updated = [] if history is None else history
    # Each uploaded file becomes its own turn with an empty bot slot.
    updated.extend(((path,), None) for path in message["files"])
    if message["text"] is not None:
        updated.append((message["text"], None))
    return updated, gr.MultimodalTextbox(value=None, interactive=False)


# Theme logic
# Indigo accent on Gradio's default theme; all other theme values are defaults.
theme = gr.themes.Default(
    primary_hue="indigo",
)

# Client-side script run on page load (passed to gr.Blocks(js=...)):
# forces light mode by rewriting the URL's __theme query parameter and
# reloading when it is not already 'light'.
js_func = """

function refresh() {

    const url = new URL(window.location);



    if (url.searchParams.get('__theme') !== 'light') {

        url.searchParams.set('__theme', 'light');

        window.location.href = url.href;

    }

}

"""
# Sample prompts (French) shown as the chatbot's empty-state placeholder.
example_questions = ["Quelle est la date du prochain relevé des compteurs ?",
                     "Il y a un dégât des eaux au-dessus de chez moi. Que dois-je faire ?",
                     "Est-ce que mon colis est arrivé ?",
                     "J'ai perdu mes clés. Pouvez-vous m'aider ?"]

# UI wiring: a chatbot pane plus a multimodal input box. Submitting the box
# (1) appends the user's message, (2) streams the bot reply, (3) re-enables
# the input once streaming completes.
with gr.Blocks(theme=theme, js=js_func, fill_height=True) as demo:
    chatbot = gr.Chatbot(
        elem_id="chatbot",
        bubble_full_width=False,
        scale=1,
        # Fix: "60 vh" is invalid CSS (no space allowed between number and
        # unit), so the browser ignored the height declaration entirely.
        height="60vh",
        placeholder="\n\n\n".join(example_questions),
    )

    chat_input = gr.MultimodalTextbox(interactive=True,
                                      file_count="multiple",
                                      placeholder="Entrez un message ou ajoutez des fichiers...", show_label=False)

    chat_msg = chat_input.submit(add_message, [chatbot, chat_input], [chatbot, chat_input])
    bot_msg = chat_msg.then(predict, chatbot, chatbot, api_name="bot_response")
    # Re-enable the input box only after the streamed bot reply has finished.
    bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])

demo.launch()