Hugging Face Space status: Sleeping
| import time | |
| import gradio as gr | |
| from gpt_index import RefinePrompt | |
| from gpt_index import ( | |
| SimpleWebPageReader, | |
| WikipediaReader, | |
| GPTListIndex, | |
| GPTSimpleVectorIndex, | |
| LLMPredictor, | |
| QuestionAnswerPrompt, | |
| RefinePrompt, | |
| PromptHelper | |
| ) | |
# System primer injected at the start of every fresh conversation history
# (see init_history); scopes the assistant to Atlanta-related queries.
system_message = {"role": "system", "content": "You are an AI specialized in Atlanta."}
# Build the Gradio UI: intro text, chat display, input box, clear button,
# and a per-session state holding the OpenAI-style message history.
with gr.Blocks() as demo:
    gr.Markdown(
        '''
# Customized Atlanta Chatbot Demo
This chatbot uses the Atlantaga.gov and ATL311.com websites as its custom knowledge base.
Before starting a new conversation, please refresh the chatbot for the best results.
If the chatbot is giving incorrect answers, please refresh.
'''
    )
    chatbot = gr.Chatbot()      # conversation display widget
    msg = gr.Textbox()          # free-text user input
    clear = gr.Button("Clear")  # resets the conversation
    state = gr.State([])        # per-session message history (list of role/content dicts)
def user(user_message, history):
    """Append the new user turn to the chat log and clear the textbox.

    Returns ("", new_history) — the empty string resets the Textbox, and
    the appended pair has `None` as a placeholder for the pending bot reply.
    """
    pending_turn = [user_message, None]
    return "", history + [pending_turn]
def bot(history, messages_history):
    """Generate the assistant reply for the most recent user turn.

    Delegates to ask_gpt (which also appends the user turn to
    messages_history), records the assistant turn, and fills in the
    placeholder slot of the last chat pair.
    """
    latest_query = history[-1][0]
    reply, messages_history = ask_gpt(latest_query, messages_history)
    messages_history += [{"role": "assistant", "content": reply}]
    history[-1][1] = reply
    # Brief pause so the UI update doesn't feel instantaneous/jarring.
    time.sleep(1)
    return history, messages_history
def ask_gpt(message, messages_history):
    """Answer `message` using the Atlanta vector index plus the chat history.

    Appends the user turn to `messages_history` in place (via ``+=``), then
    queries the index with the concatenation of every turn's content so that
    earlier turns provide conversational context.

    Returns:
        (answer_text, messages_history) — the response string and the
        (mutated) history list.
    """
    messages_history += [{"role": "user", "content": message}]
    QA_PROMPT_TMPL = (
        "You are an conversational AI specialized in Atlanta.\n"
        "If a query does not relate to Atlanta, say you can't answer the query.\n"
        "We have provided context information below. \n"
        "---------------------\n"
        "{context_str}"
        "\n---------------------\n"
        "Given this information, please give a detailed and conversational answer to the query: {query_str} and cite the url source associated with this answer.\n"
        "Use information from previous queries in your response when appropriate.\n"
        "Format the answer to the query like this: Answer: .\n"
        "\nSource: followed by the source in bold.\n"
        "Put the Answer and Source on different lines of the response and the Source is the url source associated with the answer.\n"
    )
    QA_PROMPT = QuestionAnswerPrompt(QA_PROMPT_TMPL)
    # Perf fix: load the vector index once and cache it on the function —
    # the original re-read index_demo.json from disk on every message.
    if getattr(ask_gpt, "_index", None) is None:
        ask_gpt._index = GPTSimpleVectorIndex.load_from_disk('index_demo.json')
    index = ask_gpt._index
    # Bug fix: the original comprehension variable shadowed the `message`
    # parameter; use a distinct name for each history turn.
    query_text = ' '.join(turn['content'] for turn in messages_history)
    response = index.query(query_text, text_qa_template=QA_PROMPT)
    return response.response, messages_history
def init_history(messages_history):
    """Reset the session: return a fresh history holding only the system primer.

    The incoming `messages_history` is ignored (and not mutated); a brand-new
    list is returned so stale turns cannot leak into the next conversation.
    """
    return [system_message]
# Wire the UI events. Event listeners must be attached inside a Blocks
# context, so re-enter `demo` here (a no-op if already inside it).
with demo:
    # Enter in the textbox: record the user turn immediately (queue=False for
    # a snappy echo), then chain the bot response into chatbot + state.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, [chatbot, state], [chatbot, state]
    )
    # Clear button: wipe the chat display, then reset the message history
    # back to just the system primer once the wipe succeeds.
    clear.click(lambda: None, None, chatbot, queue=False).success(
        init_history, [state], [state]
    )
demo.launch()