khadija3818 committed
Commit 0d9889d
1 Parent(s): ec9f834

Update app.py

Files changed (1)
  1. app.py +158 -0
app.py CHANGED
@@ -0,0 +1,158 @@
+ from typing import Dict, Any
+ import asyncio
+ import os
+
+ # Set your OpenAI API key
+ os.environ["OPENAI_API_KEY"] = "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"
+
+ # Create a new event loop
+ loop = asyncio.new_event_loop()
+
+ # Set the event loop as the current event loop
+ asyncio.set_event_loop(loop)
+
+ from llama_index import (
+     VectorStoreIndex,
+     ServiceContext,
+     download_loader,
+ )
+ from llama_index.llama_pack.base import BaseLlamaPack
+ from llama_index.llms import OpenAI
+
+
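+ # A LlamaPack that serves a Streamlit chat UI over a single Wikipedia page indexed with LlamaIndex.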
+ class StreamlitChatPack(BaseLlamaPack):
+     """Streamlit chatbot pack."""
+
+     def __init__(
+         self,
+         wikipedia_page: str = "Snowflake Inc.",
+         run_from_main: bool = False,
+         **kwargs: Any,
+     ) -> None:
+         """Init params."""
+         if not run_from_main:
+             raise ValueError(
+                 "Please run this llama-pack directly with "
+                 "streamlit run [download_dir]/streamlit_chatbot/base.py"
+             )
+
+         self.wikipedia_page = wikipedia_page
+
+     def get_modules(self) -> Dict[str, Any]:
+         """Get modules."""
+         return {}
+
+     def run(self, *args: Any, **kwargs: Any) -> Any:
+         """Run the pipeline."""
+         import streamlit as st
+         from streamlit_pills import pills
+
+         st.set_page_config(
+             page_title=f"Chat with {self.wikipedia_page}'s Wikipedia page, powered by LlamaIndex",
+             page_icon="🦙",
+             layout="centered",
+             initial_sidebar_state="auto",
+             menu_items=None,
+         )
+
+         if "messages" not in st.session_state:  # Initialize the chat messages history
+             st.session_state["messages"] = [
+                 {"role": "assistant", "content": "Ask me a question about Snowflake!"}
+             ]
+
+         st.title(
+             f"Chat with {self.wikipedia_page}'s Wikipedia page, powered by LlamaIndex 💬🦙"
+         )
+         st.info(
+             "This example is powered by the **[Llama Hub Wikipedia Loader](https://llamahub.ai/l/wikipedia)**. Use any of [Llama Hub's many loaders](https://llamahub.ai/) to retrieve and chat with your data via a Streamlit app.",
+             icon="ℹ️",
+         )
+
+         def add_to_message_history(role, content):
+             message = {"role": role, "content": str(content)}
+             st.session_state["messages"].append(
+                 message
+             )  # Add response to message history
+
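+         # Download the Wikipedia page and build the vector index once; st.cache_resource keeps it cached across Streamlit reruns.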
+         @st.cache_resource
+         def load_index_data():
+             WikipediaReader = download_loader(
+                 "WikipediaReader", custom_path="local_dir"
+             )
+             loader = WikipediaReader()
+             docs = loader.load_data(pages=[self.wikipedia_page])
+             service_context = ServiceContext.from_defaults(
+                 llm=OpenAI(model="gpt-3.5-turbo", temperature=0.5)
+             )
+             index = VectorStoreIndex.from_documents(
+                 docs, service_context=service_context
+             )
+             return index
+
+         index = load_index_data()
+
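+         # Offer a few starter questions as clickable pills; `selected` holds the clicked question (or None if nothing is selected).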
+         selected = pills(
+             "Choose a question to get started or write your own below.",
+             [
+                 "What is Snowflake?",
+                 "What company did Snowflake announce they would acquire in October 2023?",
+                 "What company did Snowflake acquire in March 2022?",
+                 "When did Snowflake IPO?",
+             ],
+             clearable=True,
+             index=None,
+         )
+
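+         # Create the chat engine once; "context" mode retrieves relevant chunks from the index for every turn.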
+         if "chat_engine" not in st.session_state:  # Initialize the chat engine
+             st.session_state["chat_engine"] = index.as_chat_engine(
+                 chat_mode="context", verbose=True
+             )
+
+         for message in st.session_state["messages"]:  # Display the prior chat messages
+             with st.chat_message(message["role"]):
+                 st.write(message["content"])
+
+         # To avoid duplicated display of answered pill questions each rerun
+         if selected and selected not in st.session_state.get(
+             "displayed_pill_questions", set()
+         ):
+             st.session_state.setdefault("displayed_pill_questions", set()).add(selected)
+             with st.chat_message("user"):
+                 st.write(selected)
+             with st.chat_message("assistant"):
+                 # Stream the answer token by token into a placeholder container
+                 response = st.session_state["chat_engine"].stream_chat(selected)
+                 response_str = ""
+                 response_container = st.empty()
+                 for token in response.response_gen:
+                     response_str += token
+                     response_container.write(response_str)
+                 add_to_message_history("user", selected)
+                 add_to_message_history("assistant", response_str)
+
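+         # Free-form input path: record the user's question, stream the reply, and append both to the history.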
+         if prompt := st.chat_input(
+             "Your question"
+         ):  # Prompt for user input and save to chat history
+             add_to_message_history("user", prompt)
+
+             # Display the new question immediately after it is entered
+             with st.chat_message("user"):
+                 st.write(prompt)
+
+             # If last message is not from assistant, generate a new response
+             # if st.session_state["messages"][-1]["role"] != "assistant":
+             with st.chat_message("assistant"):
+                 response = st.session_state["chat_engine"].stream_chat(prompt)
+                 response_str = ""
+                 response_container = st.empty()
+                 for token in response.response_gen:
+                     response_str += token
+                     response_container.write(response_str)
+                 # st.write(response.response)
+                 add_to_message_history("assistant", response.response)
+
+                 # Save the state of the generator
+                 st.session_state["response_gen"] = response.response_gen
+
+
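+ # Entry point when the script is executed directly (e.g. via `streamlit run app.py`).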
+ if __name__ == "__main__":
+     StreamlitChatPack(run_from_main=True).run()