File size: 3,177 Bytes
c4555db
 
 
a4b071c
 
 
 
 
 
 
0a24f02
a4b071c
 
 
c4555db
 
 
 
 
 
0bc7b50
 
 
 
c4555db
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0a24f02
c4555db
0a24f02
 
c4555db
 
 
 
0a24f02
 
 
 
 
c4555db
 
 
 
 
7beeafa
c4555db
0a24f02
c4555db
 
a4b071c
c4555db
 
 
 
 
 
 
 
a4b071c
c4555db
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
import time
import gradio as gr
from gpt_index import RefinePrompt
from gpt_index import (
    SimpleWebPageReader, 
    WikipediaReader, 
    GPTListIndex, 
    GPTSimpleVectorIndex, 
    LLMPredictor, 
    QuestionAnswerPrompt,
    RefinePrompt,
    PromptHelper
)

# System prompt seeded into every fresh conversation log.
system_message = dict(role="system", content="You are an AI specialized in Atlanta.")


# Build the gradio UI: chat display, text input, clear button, and a
# per-session State holding the OpenAI-style message log.
with gr.Blocks() as demo:
    # Intro banner rendered above the chat widget.
    gr.Markdown(
    '''
    # Customized Atlanta Chatbot Demo
    This chatbot uses the Atlantaga.gov and ATL311.com websites as its custom knowledge base.
    Before starting a new conversation, please refresh the chatbot for the best results.
    If the chatbot is giving incorrect answers, please refresh.
    '''
    )
    chatbot = gr.Chatbot()  # conversation display: list of [user, bot] pairs
    msg = gr.Textbox()  # free-text input for the user's query
    clear = gr.Button("Clear")  # resets the conversation


    # Per-session message log: [{"role": ..., "content": ...}, ...]
    state = gr.State([])

    def user(user_message, history):
        """Record the submitted message and clear the textbox.

        Returns an empty string (resets the input box) and a new history list
        ending with a [user_message, None] pair awaiting the bot's reply.
        """
        pending_turn = [user_message, None]
        return "", [*history, pending_turn]

    def bot(history, messages_history):
        """Produce the assistant reply for the most recent user turn.

        Queries the index via ask_gpt with the latest user message, records
        the reply in both the message log and the gradio chat history, and
        returns the updated (history, messages_history) pair.
        """
        latest_query = history[-1][0]
        reply, messages_history = ask_gpt(latest_query, messages_history)
        # .append mutates in place, matching the original `+=` semantics.
        messages_history.append({"role": "assistant", "content": reply})
        history[-1][1] = reply
        # Brief pause before the UI refresh.
        time.sleep(1)
        return history, messages_history

    def ask_gpt(message, messages_history):
        """Answer a user query against the local vector index.

        Appends the query to messages_history (mutated in place), joins the
        whole conversation into a single query string so earlier turns give
        context, and runs it through the vector index with a custom QA prompt.

        Returns (answer_text, messages_history).
        """
        # .append mutates the caller's list, same as the original `+=`.
        messages_history.append({"role": "user", "content": message})
        QA_PROMPT_TMPL = (
            "You are an conversational AI specialized in Atlanta.\n"
            "If a query does not relate to Atlanta, say you can't answer the query.\n"
            "We have provided context information below. \n"
            "---------------------\n"
            "{context_str}"
            "\n---------------------\n"
            "Given this information, please give a detailed and conversational answer to the query: {query_str} and cite the url source associated with this answer.\n"
            "Use information from previous queries in your response when appropriate.\n"
            "Format the answer to the query like this: Answer: .\n"
            "\nSource: followed by the source in bold.\n"
            "Put the Answer and Source on different lines of the response and the Source is the url source associated with the answer.\n"
        )
        QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL)

        # Load the prebuilt index and query it with the full conversation so the
        # model can draw on previous turns. `turn` avoids shadowing the
        # `message` parameter (the original comprehension reused the name).
        index = GPTSimpleVectorIndex.load_from_disk('index_demo.json')
        combined_query = ' '.join(turn['content'] for turn in messages_history)
        response = index.query(combined_query, text_qa_template=QA_PROMPT)
        return response.response, messages_history

    def init_history(messages_history):
        """Reset the message log to contain only the system prompt.

        The incoming value is ignored; a fresh list is returned so stale
        conversation state cannot leak into a new session.
        """
        return [system_message]

    # On submit: record the user message and clear the textbox (unqueued, so
    # it feels instant), then run the bot to fill in the assistant reply.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, [chatbot, state], [chatbot, state]
    )

    # Clear wipes the chat display, then re-seeds the message log with only
    # the system prompt.
    clear.click(lambda: None, None, chatbot, queue=False).success(init_history, [state], [state])

# Start the gradio server for the demo.
demo.launch()