1MR committed on
Commit
bdf523c
Β·
verified Β·
1 Parent(s): 6a86d06

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +117 -40
app.py CHANGED
@@ -97,19 +97,39 @@ def get_conversation_chain(vectorstore):
97
  return conversation_chain
98
 
99
  # μ‚¬μš©μž μž…λ ₯을 μ²˜λ¦¬ν•˜λŠ” ν•¨μˆ˜μž…λ‹ˆλ‹€.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
100
  def handle_userinput(user_question):
101
- # λŒ€ν™” 체인을 μ‚¬μš©ν•˜μ—¬ μ‚¬μš©μž μ§ˆλ¬Έμ— λŒ€ν•œ 응닡을 μƒμ„±ν•©λ‹ˆλ‹€.
102
- response = st.session_state.conversation({'question': user_question})
103
- # λŒ€ν™” 기둝을 μ €μž₯ν•©λ‹ˆλ‹€.
104
- st.session_state.chat_history = response['chat_history']
105
 
106
- for i, message in enumerate(st.session_state.chat_history):
107
- if i % 2 == 0:
108
- st.write(user_template.replace(
109
- "{{MSG}}", message.content), unsafe_allow_html=True)
110
- else:
111
- st.write(bot_template.replace(
112
- "{{MSG}}", message.content), unsafe_allow_html=True)
 
 
 
 
 
 
113
 
114
 
115
  def main():
@@ -123,7 +143,7 @@ def main():
123
  if "chat_history" not in st.session_state:
124
  st.session_state.chat_history = None
125
 
126
- st.header("Chat with multiple Files :")
127
  user_question = st.text_input("Ask a question about your documents:")
128
  if user_question:
129
  handle_userinput(user_question)
@@ -137,38 +157,95 @@ def main():
137
  docs = st.file_uploader(
138
  "Upload your PDFs here and click on 'Process'", accept_multiple_files=True)
139
  if st.button("Process"):
140
- with st.spinner("Processing"):
141
- # get pdf text
142
- doc_list = []
143
-
144
- for file in docs:
145
- print('file - type : ', file.type)
146
- if file.type == 'text/plain':
147
- # file is .txt
148
- doc_list.extend(get_text_file(file))
149
- elif file.type in ['application/octet-stream', 'application/pdf']:
150
- # file is .pdf
151
- doc_list.extend(get_pdf_text(file))
152
- elif file.type == 'text/csv':
153
- # file is .csv
154
- doc_list.extend(get_csv_file(file))
155
- elif file.type == 'application/json':
156
- # file is .json
157
- doc_list.extend(get_json_file(file))
158
-
159
- # get the text chunks
160
- text_chunks = get_text_chunks(doc_list)
161
-
162
- # create vector store
163
- vectorstore = get_vectorstore(text_chunks)
164
-
165
- # create conversation chain
166
- st.session_state.conversation = get_conversation_chain(
167
- vectorstore)
 
 
 
168
 
169
 
170
  if __name__ == '__main__':
171
  main()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
172
 
173
  # import streamlit as st
174
  # # from dotenv import load_dotenv
 
97
  return conversation_chain
98
 
99
  # μ‚¬μš©μž μž…λ ₯을 μ²˜λ¦¬ν•˜λŠ” ν•¨μˆ˜μž…λ‹ˆλ‹€.
100
+ # def handle_userinput(user_question):
101
+ # # λŒ€ν™” 체인을 μ‚¬μš©ν•˜μ—¬ μ‚¬μš©μž μ§ˆλ¬Έμ— λŒ€ν•œ 응닡을 μƒμ„±ν•©λ‹ˆλ‹€.
102
+ # response = st.session_state.conversation({'question': user_question})
103
+ # # λŒ€ν™” 기둝을 μ €μž₯ν•©λ‹ˆλ‹€.
104
+ # st.session_state.chat_history = response['chat_history']
105
+
106
+ # for i, message in enumerate(st.session_state.chat_history):
107
+ # if i % 2 == 0:
108
+ # st.write(user_template.replace(
109
+ # "{{MSG}}", message.content), unsafe_allow_html=True)
110
+ # else:
111
+ # st.write(bot_template.replace(
112
+ # "{{MSG}}", message.content), unsafe_allow_html=True)
113
+
114
+
115
  def handle_userinput(user_question):
116
+ if not st.session_state.conversation:
117
+ st.error("Please upload and process your documents first.")
118
+ return
 
119
 
120
+ try:
121
+ response = st.session_state.conversation({'question': user_question})
122
+ st.session_state.chat_history = response['chat_history']
123
+
124
+ for i, message in enumerate(st.session_state.chat_history):
125
+ if i % 2 == 0:
126
+ st.write(user_template.replace(
127
+ "{{MSG}}", message.content), unsafe_allow_html=True)
128
+ else:
129
+ st.write(bot_template.replace(
130
+ "{{MSG}}", message.content), unsafe_allow_html=True)
131
+ except Exception as e:
132
+ st.error(f"An error occurred: {e}")
133
 
134
 
135
  def main():
 
143
  if "chat_history" not in st.session_state:
144
  st.session_state.chat_history = None
145
 
146
+ st.header("Chat with multiple Files:")
147
  user_question = st.text_input("Ask a question about your documents:")
148
  if user_question:
149
  handle_userinput(user_question)
 
157
  docs = st.file_uploader(
158
  "Upload your PDFs here and click on 'Process'", accept_multiple_files=True)
159
  if st.button("Process"):
160
+ if not docs:
161
+ st.error("Please upload at least one document.")
162
+ return
163
+
164
+ with st.spinner("Processing..."):
165
+ try:
166
+ doc_list = []
167
+
168
+ for file in docs:
169
+ if file.type == 'text/plain':
170
+ doc_list.extend(get_text_file(file))
171
+ elif file.type in ['application/octet-stream', 'application/pdf']:
172
+ doc_list.extend(get_pdf_text(file))
173
+ elif file.type == 'text/csv':
174
+ doc_list.extend(get_csv_file(file))
175
+ elif file.type == 'application/json':
176
+ doc_list.extend(get_json_file(file))
177
+
178
+ if not doc_list:
179
+ st.error("No valid documents processed. Please check your files.")
180
+ return
181
+
182
+ text_chunks = get_text_chunks(doc_list)
183
+
184
+ vectorstore = get_vectorstore(text_chunks)
185
+
186
+ st.session_state.conversation = get_conversation_chain(vectorstore)
187
+
188
+ st.success("Documents processed successfully!")
189
+ except Exception as e:
190
+ st.error(f"An error occurred during processing: {e}")
191
 
192
 
193
  if __name__ == '__main__':
194
  main()
195
+ # def main():
196
+ # load_dotenv()
197
+ # st.set_page_config(page_title="Chat with multiple Files",
198
+ # page_icon=":books:")
199
+ # st.write(css, unsafe_allow_html=True)
200
+
201
+ # if "conversation" not in st.session_state:
202
+ # st.session_state.conversation = None
203
+ # if "chat_history" not in st.session_state:
204
+ # st.session_state.chat_history = None
205
+
206
+ # st.header("Chat with multiple Files :")
207
+ # user_question = st.text_input("Ask a question about your documents:")
208
+ # if user_question:
209
+ # handle_userinput(user_question)
210
+
211
+ # with st.sidebar:
212
+ # openai_key = st.text_input("Paste your OpenAI API key (sk-...)")
213
+ # if openai_key:
214
+ # os.environ["OPENAI_API_KEY"] = openai_key
215
+
216
+ # st.subheader("Your documents")
217
+ # docs = st.file_uploader(
218
+ # "Upload your PDFs here and click on 'Process'", accept_multiple_files=True)
219
+ # if st.button("Process"):
220
+ # with st.spinner("Processing"):
221
+ # # get pdf text
222
+ # doc_list = []
223
+
224
+ # for file in docs:
225
+ # print('file - type : ', file.type)
226
+ # if file.type == 'text/plain':
227
+ # # file is .txt
228
+ # doc_list.extend(get_text_file(file))
229
+ # elif file.type in ['application/octet-stream', 'application/pdf']:
230
+ # # file is .pdf
231
+ # doc_list.extend(get_pdf_text(file))
232
+ # elif file.type == 'text/csv':
233
+ # # file is .csv
234
+ # doc_list.extend(get_csv_file(file))
235
+ # elif file.type == 'application/json':
236
+ # # file is .json
237
+ # doc_list.extend(get_json_file(file))
238
+
239
+ # # get the text chunks
240
+ # text_chunks = get_text_chunks(doc_list)
241
+
242
+ # # create vector store
243
+ # vectorstore = get_vectorstore(text_chunks)
244
+
245
+ # # create conversation chain
246
+ # st.session_state.conversation = get_conversation_chain(
247
+ # vectorstore)
248
+
249
 
250
  # import streamlit as st
251
  # # from dotenv import load_dotenv