File size: 3,524 Bytes
a6ea261
 
 
 
 
ecf2909
a6ea261
 
0c6afa2
3c44f19
0c6afa2
a6ea261
 
 
 
 
 
 
 
7ce3ff5
a6ea261
ecf2909
a6ea261
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0c6afa2
3c44f19
a6ea261
0c6afa2
a6ea261
 
4d5736d
a6ea261
 
 
 
4d5736d
 
 
 
 
 
 
 
 
 
 
 
 
 
a6ea261
 
 
 
 
4d5736d
a6ea261
 
 
6ac4e6b
 
 
 
 
 
a6ea261
 
7ce3ff5
 
a6ea261
 
 
6ac4e6b
 
ecf2909
6ac4e6b
 
ecf2909
6ac4e6b
 
 
 
 
 
 
a6ea261
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
"""
A chatbot that will answer using australian slang
"""
import os
import time

import gradio as gr
import openai
from langchain import LLMChain, PromptTemplate
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferWindowMemory

# Configure the OpenAI client with the key from the environment (None if unset).
openai.api_key = os.getenv('OPENAI_API_KEY')


def get_template() -> str:
    """
    Returns the template for the chatbot
    """
    template = """Brissy is an Australian Slang Chatbot based on large language model.

    Brissy is a fair dinkum Aussie model and knows all about Australian slang. It's a top-notch mate and can answer questions about Australia, Aussie culture, and a whole bunch of other topics. It always uses friendly slang and can chat like a true blue Aussie. Brissy start answering every question differently. Brissy will always answer every question within 4000 characters.

    Reckon you can rewrite your response using Australian slang?

    {history}
    Human: {human_input}
    Brissy:"""

    return template


def get_chain() -> LLMChain:
    """Assemble and return the LLMChain that powers the chatbot.

    Combines the Brissy prompt template, a GPT-3.5 chat model at
    temperature 1.0, and a sliding-window memory of the last 5 exchanges.
    """
    prompt = PromptTemplate(
        input_variables=['history', 'human_input'],
        template=get_template(),
    )

    llm = ChatOpenAI(model_name='gpt-3.5-turbo', temperature=1.0)

    return LLMChain(
        llm=llm,
        prompt=prompt,
        verbose=True,
        memory=ConversationBufferWindowMemory(k=5),
    )


def buy_me_a_coffee() -> str:
    """Return the HTML snippet for a 'Buy me a coffee' badge link."""
    badge_html = """
        <p style="margin-bottom: 10px; font-size: 60%">
        <span style="display: flex;align-items: center;justify-content: center;height: 30px;">
        <a href="https://www.buymeacoffee.com/qmaruf">
        <img src="https://badgen.net/badge/icon/Buy%20Me%20A%20Coffee?icon=buymeacoffee&label" alt="Buy me a coffee"></a>
        </span>
        </p>
    """
    return badge_html


def interface() -> None:
    """
    Build and launch the Gradio chat interface.

    Wires together a Chatbot widget, a text input, and a clear button:
    submitting a message first appends it to the history (``user``), then
    streams Brissy's reply character by character (``bot``).
    """
    with gr.Blocks() as demo:
        gr.HTML(buy_me_a_coffee())
        chatbot = gr.Chatbot()
        msg = gr.Textbox()
        clear = gr.Button('Clear')

        # Build the chain once at startup; on failure keep the UI alive and
        # report the error per-message inside bot() instead of crashing here.
        try:
            chatgpt_chain = get_chain()
        except Exception as e:
            print(e)
            chatgpt_chain = None

        def user(user_message, history):
            # Append the new message (bot reply pending as None) and clear
            # the textbox by returning '' for it.
            # NOTE(review): len(history) counts [user, bot] message pairs,
            # not characters — trimming at 3500 pairs looks like it was
            # intended as a character cap; confirm intent.
            if len(history) > 3500:
                history = history[-3500:]
            return '', history + [[user_message, None]]

        def bot(history):
            # Generate Brissy's reply for the last user message; any failure
            # (uninitialized chain, API error) falls through to an apology.
            try:
                human_input = history[-1][0]

                if chatgpt_chain is None:
                    raise Exception('Chatbot not initialized')

                if len(human_input) < 512:
                    response = chatgpt_chain.predict(human_input=human_input)
                else:
                    response = 'Sorry, I can only answer questions shorter than 512 characters.'
            except Exception as e:
                print(e)
                response = 'Sorry, I had trouble answering that question. Please try again.'

            # Stream the reply one character at a time; each yield updates
            # the Chatbot widget, producing a typing effect.
            history[-1][1] = ''
            for character in response:
                history[-1][1] += character
                time.sleep(0.01)
                yield history

        # On submit: update the history synchronously (queue=False), then
        # stream the bot's reply via the generator.
        msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
            bot, chatbot, chatbot
        )
        clear.click(lambda: None, None, chatbot, queue=False)

    demo.queue()
    demo.launch()


# Launch the chatbot UI when run as a script.
if __name__ == '__main__':
    interface()