Deepakkori45 committed on
Commit
4a0abee
·
verified ·
1 Parent(s): bc831a1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +244 -119
app.py CHANGED
@@ -1,3 +1,174 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import openai
2
  import streamlit as st
3
  from openai import OpenAI
@@ -6,166 +177,120 @@ import time
6
  import os
7
  from dotenv import load_dotenv
8
 
9
- # Initialize the OpenAI client with your API key
10
- # Load environment variables from the .env file
11
  load_dotenv()
12
-
13
- # Get the OpenAI API key
14
  OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
15
-
 
16
  client = OpenAI(api_key=OPENAI_API_KEY)
17
- vector_store_id = "vs_sHLBqA88ng66RDZYMgaBmDuX" # Vector Store ID to use
18
-
19
- # all_files = list(client.beta.vector_stores.files.list(vector_store_id))
20
- # for file in all_files:
21
- # # print(file)
22
- # file_id = file.id
23
- # st.write(file_id)
24
-
25
- # Set the assistant ID
26
- assistant_id = "asst_UeIHpkpVejAgrIz3RnisxSLm" # Replace with your own assistant ID
27
 
 
28
  def ensure_single_thread_id():
29
  if "thread_id" not in st.session_state:
30
  thread = client.beta.threads.create()
31
  st.session_state.thread_id = thread.id
32
  return st.session_state.thread_id
33
 
 
34
  def safe_message_send(prompt, thread_id):
35
  try:
36
- message = client.beta.threads.messages.create(
37
  thread_id=thread_id,
38
  role="user",
39
  content=prompt
40
  )
41
- return message
42
  except Exception as e:
43
  if "active" in str(e):
44
- print("Waiting for the current run to finish...")
45
- time.sleep(1) # wait a bit before retrying
46
- return safe_message_send(prompt, thread_id) # retry sending the message
47
- else:
48
- raise e
49
 
 
50
  def stream_generator(prompt, thread_id):
51
- # print(f'First time thread in the function {thread_id}')
52
- message = safe_message_send(prompt, thread_id) # use the new safe send function
53
-
54
- with st.spinner("Wait... Generating response..."):
55
- try:
56
- stream = client.beta.threads.runs.create(
57
- thread_id=thread_id,
58
- assistant_id=assistant_id,
59
- stream=True
60
- )
61
-
62
- for event in stream:
63
- if event.data.object == "thread.message.delta":
64
- for content in event.data.delta.content:
65
- if content.type == 'text':
66
- yield content.text.value
67
- time.sleep(0.01)
68
- elif event.data.object == "thread.run.stop":
69
- break # Break if the run stops
70
- except Exception as e:
71
- print(f"Error during streaming: {str(e)}")
72
 
 
73
  def upload_and_add_to_vector_store(uploaded_file):
74
- """Upload a file to OpenAI and add it to the specified vector store."""
75
  try:
76
- # Convert the uploaded file to a BytesIO stream for uploading
77
  file_stream = io.BytesIO(uploaded_file.getvalue())
78
- file_stream.name = uploaded_file.name # Preserve the file name
79
- # Upload the file to the vector store
80
- file_batch = client.beta.vector_stores.file_batches.upload_and_poll(
81
- vector_store_id=vector_store_id,
82
  files=[file_stream]
83
  )
84
-
85
-
86
- st.success(f"File '{uploaded_file.name}' processed and added to vector store. Status: {file_batch.status}")
87
  except Exception as e:
88
- st.error(f"Failed to process file: {str(e)}")
89
-
90
-
91
 
 
92
  def list_all_files_in_vector_store():
93
- """List all files in the specified vector store."""
94
  try:
95
- all_files = list(client.beta.vector_stores.files.list(vector_store_id=vector_store_id))
96
- # st.write(all_files)
97
- for file in all_files:
98
- file_id = file.id
99
- st.write(file_id)
100
  except Exception as e:
101
- st.error(f"Failed to list files: {str(e)}")
102
- return {}
103
 
104
- def delete_file_from_vector_store(vector_store_id, file_id):
105
- """Delete a file from the specified vector store."""
106
  try:
107
- client.beta.vector_stores.files.delete(
108
- vector_store_id=vector_store_id,
109
  file_id=file_id
110
  )
111
- st.success(f"File with ID '{file_id}' deleted from vector store '{vector_store_id}'.")
112
  except Exception as e:
113
- st.error(f"Failed to delete file. File id is not Found.")
114
 
115
- # Interface to delete files from vector store
116
- st.sidebar.subheader("Delete File from Vector Store")
117
- file_id_to_delete = st.sidebar.text_input("Enter File ID to Delete", "")
118
  if st.sidebar.button("Delete File"):
119
- delete_file_from_vector_store(vector_store_id, file_id_to_delete)
 
 
 
 
 
 
120
 
121
- # Streamlit interface setup
122
- st.title("💬Chatbot")
123
- st.caption("🚀 A Streamlit Custom Chatbot")
124
 
 
125
 
126
- with st.sidebar:
127
- st.write("Upload PDF File")
128
- uploaded_file = st.file_uploader("Choose a file", type=['pdf', 'docx'], key='file_uploader')
 
129
 
130
- if st.button('Upload File', key='process_file'):
131
- if uploaded_file is not None:
132
- upload_and_add_to_vector_store(uploaded_file)
133
- st.success("File successfully uploaded and processed.")
134
- else:
135
- st.error("Please upload a file to process.")
 
 
136
 
137
- # List all uploaded files
138
- st.write("### Uploaded Files")
139
- if 'uploaded_files' in st.session_state and st.session_state.uploaded_files:
140
- for file_name, file_id in st.session_state.uploaded_files.items():
141
- st.write(f"{file_name}: {file_id}")
142
-
143
- # List all files in the vector store
144
- st.write("## All Files in Vector Store")
145
- all_files = list_all_files_in_vector_store()
146
 
147
- # Initialize session state for chat
148
- st.session_state.start_chat = True
149
- if 'start_chat' not in st.session_state:
150
- st.session_state.start_chat = False
151
-
152
- # Main chat interface
153
- if st.session_state.start_chat:
154
- # if "messages" not in st.session_state:
155
- # st.session_state.messages = []
156
-
157
- for message in st.session_state.messages:
158
- with st.chat_message(message["role"]):
159
- st.markdown(message["content"])
160
-
161
- prompt = st.chat_input("Enter your message")
162
- if prompt:
163
- thread_id = ensure_single_thread_id()
164
- with st.chat_message("user"):
165
- st.markdown(prompt)
166
- st.session_state.messages.append({"role": "user", "content": prompt})
167
-
168
- with st.chat_message("assistant"):
169
- response = st.write_stream(stream_generator(prompt, thread_id))
170
- st.session_state.messages.append({"role": "assistant", "content": response})
171
-
 
1
+ # import openai
2
+ # import streamlit as st
3
+ # from openai import OpenAI
4
+ # import io
5
+ # import time
6
+ # import os
7
+ # from dotenv import load_dotenv
8
+
9
+ # # Initialize the OpenAI client with your API key
10
+ # # Load environment variables from the .env file
11
+ # load_dotenv()
12
+
13
+ # # Get the OpenAI API key
14
+ # OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
15
+
16
+ # client = OpenAI(api_key=OPENAI_API_KEY)
17
+ # vector_store_id = "vs_sHLBqA88ng66RDZYMgaBmDuX" # Vector Store ID to use
18
+
19
+ # # all_files = list(client.beta.vector_stores.files.list(vector_store_id))
20
+ # # for file in all_files:
21
+ # # # print(file)
22
+ # # file_id = file.id
23
+ # # st.write(file_id)
24
+
25
+ # # Set the assistant ID
26
+ # assistant_id = "asst_UeIHpkpVejAgrIz3RnisxSLm" # Replace with your own assistant ID
27
+
28
+ # def ensure_single_thread_id():
29
+ # if "thread_id" not in st.session_state:
30
+ # thread = client.beta.threads.create()
31
+ # st.session_state.thread_id = thread.id
32
+ # return st.session_state.thread_id
33
+
34
+ # def safe_message_send(prompt, thread_id):
35
+ # try:
36
+ # message = client.beta.threads.messages.create(
37
+ # thread_id=thread_id,
38
+ # role="user",
39
+ # content=prompt
40
+ # )
41
+ # return message
42
+ # except Exception as e:
43
+ # if "active" in str(e):
44
+ # print("Waiting for the current run to finish...")
45
+ # time.sleep(1) # wait a bit before retrying
46
+ # return safe_message_send(prompt, thread_id) # retry sending the message
47
+ # else:
48
+ # raise e
49
+
50
+ # def stream_generator(prompt, thread_id):
51
+ # # print(f'First time thread in the function {thread_id}')
52
+ # message = safe_message_send(prompt, thread_id) # use the new safe send function
53
+
54
+ # with st.spinner("Wait... Generating response..."):
55
+ # try:
56
+ # stream = client.beta.threads.runs.create(
57
+ # thread_id=thread_id,
58
+ # assistant_id=assistant_id,
59
+ # stream=True
60
+ # )
61
+
62
+ # for event in stream:
63
+ # if event.data.object == "thread.message.delta":
64
+ # for content in event.data.delta.content:
65
+ # if content.type == 'text':
66
+ # yield content.text.value
67
+ # time.sleep(0.01)
68
+ # elif event.data.object == "thread.run.stop":
69
+ # break # Break if the run stops
70
+ # except Exception as e:
71
+ # print(f"Error during streaming: {str(e)}")
72
+
73
+ # def upload_and_add_to_vector_store(uploaded_file):
74
+ # """Upload a file to OpenAI and add it to the specified vector store."""
75
+ # try:
76
+ # # Convert the uploaded file to a BytesIO stream for uploading
77
+ # file_stream = io.BytesIO(uploaded_file.getvalue())
78
+ # file_stream.name = uploaded_file.name # Preserve the file name
79
+ # # Upload the file to the vector store
80
+ # file_batch = client.beta.vector_stores.file_batches.upload_and_poll(
81
+ # vector_store_id=vector_store_id,
82
+ # files=[file_stream]
83
+ # )
84
+
85
+
86
+ # st.success(f"File '{uploaded_file.name}' processed and added to vector store. Status: {file_batch.status}")
87
+ # except Exception as e:
88
+ # st.error(f"Failed to process file: {str(e)}")
89
+
90
+
91
+
92
+ # def list_all_files_in_vector_store():
93
+ # """List all files in the specified vector store."""
94
+ # try:
95
+ # all_files = list(client.beta.vector_stores.files.list(vector_store_id=vector_store_id))
96
+ # # st.write(all_files)
97
+ # for file in all_files:
98
+ # file_id = file.id
99
+ # st.write(file_id)
100
+ # except Exception as e:
101
+ # st.error(f"Failed to list files: {str(e)}")
102
+ # return {}
103
+
104
+ # def delete_file_from_vector_store(vector_store_id, file_id):
105
+ # """Delete a file from the specified vector store."""
106
+ # try:
107
+ # client.beta.vector_stores.files.delete(
108
+ # vector_store_id=vector_store_id,
109
+ # file_id=file_id
110
+ # )
111
+ # st.success(f"File with ID '{file_id}' deleted from vector store '{vector_store_id}'.")
112
+ # except Exception as e:
113
+ # st.error(f"Failed to delete file. File id is not Found.")
114
+
115
+ # # Interface to delete files from vector store
116
+ # st.sidebar.subheader("Delete File from Vector Store")
117
+ # file_id_to_delete = st.sidebar.text_input("Enter File ID to Delete", "")
118
+ # if st.sidebar.button("Delete File"):
119
+ # delete_file_from_vector_store(vector_store_id, file_id_to_delete)
120
+
121
+ # # Streamlit interface setup
122
+ # st.title("💬Chatbot")
123
+ # st.caption("🚀 A Streamlit Custom Chatbot")
124
+
125
+
126
+ # with st.sidebar:
127
+ # st.write("Upload PDF File")
128
+ # uploaded_file = st.file_uploader("Choose a file", type=['pdf', 'docx'], key='file_uploader')
129
+
130
+ # if st.button('Upload File', key='process_file'):
131
+ # if uploaded_file is not None:
132
+ # upload_and_add_to_vector_store(uploaded_file)
133
+ # st.success("File successfully uploaded and processed.")
134
+ # else:
135
+ # st.error("Please upload a file to process.")
136
+
137
+ # # List all uploaded files
138
+ # st.write("### Uploaded Files")
139
+ # if 'uploaded_files' in st.session_state and st.session_state.uploaded_files:
140
+ # for file_name, file_id in st.session_state.uploaded_files.items():
141
+ # st.write(f"{file_name}: {file_id}")
142
+
143
+ # # List all files in the vector store
144
+ # st.write("## All Files in Vector Store")
145
+ # all_files = list_all_files_in_vector_store()
146
+
147
+ # # Initialize session state for chat
148
+ # st.session_state.start_chat = True
149
+ # if 'start_chat' not in st.session_state:
150
+ # st.session_state.start_chat = False
151
+
152
+ # # Main chat interface
153
+ # if st.session_state.start_chat:
154
+ # if "messages" not in st.session_state:
155
+ # st.session_state.messages = []
156
+
157
+ # for message in st.session_state.messages:
158
+ # with st.chat_message(message["role"]):
159
+ # st.markdown(message["content"])
160
+
161
+ # prompt = st.chat_input("Enter your message")
162
+ # if prompt:
163
+ # thread_id = ensure_single_thread_id()
164
+ # with st.chat_message("user"):
165
+ # st.markdown(prompt)
166
+ # st.session_state.messages.append({"role": "user", "content": prompt})
167
+
168
+ # with st.chat_message("assistant"):
169
+ # response = st.write_stream(stream_generator(prompt, thread_id))
170
+ # st.session_state.messages.append({"role": "assistant", "content": response})
171
+
172
import openai
import streamlit as st
from openai import OpenAI

import os
from dotenv import load_dotenv

# Initialize the OpenAI client.
# Load OPENAI_API_KEY (and any other settings) from a local .env file.
load_dotenv()

# NOTE(review): os.getenv returns None when the key is absent; OpenAI() will
# then fail on the first API call rather than here — confirm this is intended.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
# Target vector store for all upload/list/delete operations below.
VECTOR_STORE_ID = "vs_sHLBqA88ng66RDZYMgaBmDuX"
# Assistant used for every chat run on the thread.
ASSISTANT_ID = "asst_UeIHpkpVejAgrIz3RnisxSLm"
client = OpenAI(api_key=OPENAI_API_KEY)
 
 
 
 
 
 
 
 
 
 
186
 
187
# One Assistants-API thread per Streamlit session.
def ensure_single_thread_id():
    """Return this session's conversation thread id, creating it on first use."""
    if "thread_id" not in st.session_state:
        st.session_state.thread_id = client.beta.threads.create().id
    return st.session_state.thread_id
193
 
194
# Send a user message, waiting out any run that still holds the thread.
def safe_message_send(prompt, thread_id):
    """Add *prompt* as a user message on *thread_id*, retrying while a run is active.

    The Assistants API rejects new messages while a run is in progress on the
    thread; that surfaces as an error mentioning "active". In that case we back
    off briefly and retry. Any other error is re-raised unchanged.

    Returns the created message object.
    """
    # Loop instead of recursing: a long-running run would otherwise grow the
    # call stack by one frame per retry.
    while True:
        try:
            return client.beta.threads.messages.create(
                thread_id=thread_id,
                role="user",
                content=prompt,
            )
        except Exception as e:
            if "active" not in str(e):
                raise  # unrelated failure — preserve the original traceback
            time.sleep(1)  # thread is busy with an active run; wait and retry
 
 
207
 
208
# Generator yielding assistant text deltas as they stream in.
def stream_generator(prompt, thread_id):
    """Post *prompt* to the thread, start a streamed run, and yield text chunks."""
    safe_message_send(prompt, thread_id)
    run_stream = client.beta.threads.runs.create(
        thread_id=thread_id,
        assistant_id=ASSISTANT_ID,
        stream=True,
    )
    for event in run_stream:
        kind = event.data.object
        if kind == "thread.run.stop":
            break
        if kind != "thread.message.delta":
            continue
        for part in event.data.delta.content:
            if part.type == 'text':
                yield part.text.value
 
 
 
 
 
 
 
 
223
 
224
# Upload a Streamlit-uploaded file into the configured vector store.
def upload_and_add_to_vector_store(uploaded_file):
    """Wrap *uploaded_file* in a named BytesIO stream and upload-and-poll it."""
    try:
        payload = io.BytesIO(uploaded_file.getvalue())
        # The API reads the stream's .name attribute for the stored filename.
        payload.name = uploaded_file.name
        client.vector_stores.file_batches.upload_and_poll(
            vector_store_id=VECTOR_STORE_ID,
            files=[payload],
        )
        st.success(f"Uploaded '{uploaded_file.name}' to vector store.")
    except Exception as e:
        st.error(f"Upload failed: {e}")
 
 
236
 
237
# Show every file id currently attached to the vector store.
def list_all_files_in_vector_store():
    """Write the id of each file in the configured vector store to the page."""
    try:
        listing = client.vector_stores.files.list(vector_store_id=VECTOR_STORE_ID)
        for entry in listing.data:
            st.write(entry.id)
    except Exception as e:
        st.error(f"List files failed: {e}")
 
245
 
246
# Remove one file from the configured vector store.
def delete_file_from_vector_store(file_id):
    """Detach the file identified by *file_id* from the vector store."""
    try:
        client.vector_stores.files.delete(
            vector_store_id=VECTOR_STORE_ID,
            file_id=file_id,
        )
        st.success(f"Deleted file {file_id}")
    except Exception as e:
        st.error(f"Delete failed: {e}")
256
 
257
# ---- Sidebar: vector-store file management ----
st.sidebar.header("File Management")
file_id = st.sidebar.text_input("File ID to delete")
if st.sidebar.button("Delete File"):
    # Guard: without this, clicking with an empty box sends "" to the API
    # and surfaces a confusing API error instead of a clear prompt.
    if file_id.strip():
        delete_file_from_vector_store(file_id.strip())
    else:
        st.sidebar.error("Enter a file ID first.")

uploaded = st.sidebar.file_uploader("Upload PDF/DOCX", type=['pdf', 'docx'])
if st.sidebar.button("Upload File"):
    # Give explicit feedback when no file is selected instead of silently
    # doing nothing (the old `button(...) and uploaded` form swallowed it).
    if uploaded:
        upload_and_add_to_vector_store(uploaded)
    else:
        st.sidebar.error("Choose a file first.")

st.sidebar.markdown("---")
st.sidebar.subheader("All Files in Vector Store")
list_all_files_in_vector_store()

# ---- Chat state ----
if "messages" not in st.session_state:
    st.session_state.messages = []

st.title("💬 Chatbot")

# Replay history so the transcript survives Streamlit's rerun-on-interaction.
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

# ---- Chat input and streamed reply ----
prompt = st.chat_input("Your question...")
if prompt:
    thread_id = ensure_single_thread_id()
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("assistant"):
        # st.write_stream renders chunks live as they arrive and returns the
        # concatenated text — no manual accumulate-and-markdown loop needed.
        full_response = st.write_stream(stream_generator(prompt, thread_id))
    st.session_state.messages.append({"role": "assistant", "content": full_response})
 
 
296