Deepakkori45 committed on
Commit
2268362
·
verified ·
1 Parent(s): c022297

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -176
app.py CHANGED
@@ -1,175 +1,3 @@
1
- # import openai
2
- # import streamlit as st
3
- # from openai import OpenAI
4
- # import io
5
- # import time
6
- # import os
7
- # from dotenv import load_dotenv
8
-
9
- # # Initialize the OpenAI client with your API key
10
- # # Load environment variables from the .env file
11
- # load_dotenv()
12
-
13
- # # Get the OpenAI API key
14
- # OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
15
-
16
- # client = OpenAI(api_key=OPENAI_API_KEY)
17
- # vector_store_id = "vs_sHLBqA88ng66RDZYMgaBmDuX" # Vector Store ID to use
18
- # # vector_store_id = os.getenv("VECTOR_STORE_ID") # Vector Store ID to use
19
- # # all_files = list(client.beta.vector_stores.files.list(vector_store_id))
20
- # # for file in all_files:
21
- # # # print(file)
22
- # # file_id = file.id
23
- # # st.write(file_id)
24
-
25
- # # Set the assistant ID
26
- # assistant_id = "asst_UeIHpkpVejAgrIz3RnisxSLm" # Replace with your own assistant ID
27
- # # assistant_id = os.getenv("ASSISTANT_ID") # Replace with your own assistant ID
28
-
29
- # def ensure_single_thread_id():
30
- # if "thread_id" not in st.session_state:
31
- # thread = client.beta.threads.create()
32
- # st.session_state.thread_id = thread.id
33
- # return st.session_state.thread_id
34
-
35
- # def safe_message_send(prompt, thread_id):
36
- # try:
37
- # message = client.beta.threads.messages.create(
38
- # thread_id=thread_id,
39
- # role="user",
40
- # content=prompt
41
- # )
42
- # return message
43
- # except Exception as e:
44
- # if "active" in str(e):
45
- # print("Waiting for the current run to finish...")
46
- # time.sleep(1) # wait a bit before retrying
47
- # return safe_message_send(prompt, thread_id) # retry sending the message
48
- # else:
49
- # raise e
50
-
51
- # def stream_generator(prompt, thread_id):
52
- # # print(f'First time thread in the function {thread_id}')
53
- # message = safe_message_send(prompt, thread_id) # use the new safe send function
54
-
55
- # with st.spinner("Wait... Generating response..."):
56
- # try:
57
- # stream = client.beta.threads.runs.create(
58
- # thread_id=thread_id,
59
- # assistant_id=assistant_id,
60
- # stream=True
61
- # )
62
-
63
- # for event in stream:
64
- # if event.data.object == "thread.message.delta":
65
- # for content in event.data.delta.content:
66
- # if content.type == 'text':
67
- # yield content.text.value
68
- # time.sleep(0.01)
69
- # elif event.data.object == "thread.run.stop":
70
- # break # Break if the run stops
71
- # except Exception as e:
72
- # print(f"Error during streaming: {str(e)}")
73
-
74
- # def upload_and_add_to_vector_store(uploaded_file):
75
- # """Upload a file to OpenAI and add it to the specified vector store."""
76
- # try:
77
- # # Convert the uploaded file to a BytesIO stream for uploading
78
- # file_stream = io.BytesIO(uploaded_file.getvalue())
79
- # file_stream.name = uploaded_file.name # Preserve the file name
80
- # # Upload the file to the vector store
81
- # file_batch = client.beta.vector_stores.file_batches.upload_and_poll(
82
- # vector_store_id=vector_store_id,
83
- # files=[file_stream]
84
- # )
85
-
86
-
87
- # st.success(f"File '{uploaded_file.name}' processed and added to vector store. Status: {file_batch.status}")
88
- # except Exception as e:
89
- # st.error(f"Failed to process file: {str(e)}")
90
-
91
-
92
-
93
- # def list_all_files_in_vector_store():
94
- # """List all files in the specified vector store."""
95
- # try:
96
- # all_files = list(client.beta.vector_stores.files.list(vector_store_id=vector_store_id))
97
- # # st.write(all_files)
98
- # for file in all_files:
99
- # file_id = file.id
100
- # st.write(file_id)
101
- # except Exception as e:
102
- # st.error(f"Failed to list files: {str(e)}")
103
- # return {}
104
-
105
- # def delete_file_from_vector_store(vector_store_id, file_id):
106
- # """Delete a file from the specified vector store."""
107
- # try:
108
- # client.beta.vector_stores.files.delete(
109
- # vector_store_id=vector_store_id,
110
- # file_id=file_id
111
- # )
112
- # st.success(f"File with ID '{file_id}' deleted from vector store '{vector_store_id}'.")
113
- # except Exception as e:
114
- # st.error(f"Failed to delete file. File id is not Found.")
115
-
116
- # # Interface to delete files from vector store
117
- # st.sidebar.subheader("Delete File from Vector Store")
118
- # file_id_to_delete = st.sidebar.text_input("Enter File ID to Delete", "")
119
- # if st.sidebar.button("Delete File"):
120
- # delete_file_from_vector_store(vector_store_id, file_id_to_delete)
121
-
122
- # # Streamlit interface setup
123
- # st.title("💬Chatbot")
124
- # st.caption("🚀 A Streamlit Custom Chatbot")
125
-
126
-
127
- # with st.sidebar:
128
- # st.write("Upload PDF File")
129
- # uploaded_file = st.file_uploader("Choose a file", type=['pdf', 'docx'], key='file_uploader')
130
-
131
- # if st.button('Upload File', key='process_file'):
132
- # if uploaded_file is not None:
133
- # upload_and_add_to_vector_store(uploaded_file)
134
- # st.success("File successfully uploaded and processed.")
135
- # else:
136
- # st.error("Please upload a file to process.")
137
-
138
- # # List all uploaded files
139
- # st.write("### Uploaded Files")
140
- # if 'uploaded_files' in st.session_state and st.session_state.uploaded_files:
141
- # for file_name, file_id in st.session_state.uploaded_files.items():
142
- # st.write(f"{file_name}: {file_id}")
143
-
144
- # # List all files in the vector store
145
- # st.write("## All Files in Vector Store")
146
- # all_files = list_all_files_in_vector_store()
147
-
148
- # # Initialize session state for chat
149
- # st.session_state.start_chat = True
150
- # if 'start_chat' not in st.session_state:
151
- # st.session_state.start_chat = False
152
-
153
- # # Main chat interface
154
- # if st.session_state.start_chat:
155
- # if "messages" not in st.session_state:
156
- # st.session_state.messages = []
157
-
158
- # for message in st.session_state.messages:
159
- # with st.chat_message(message["role"]):
160
- # st.markdown(message["content"])
161
-
162
- # prompt = st.chat_input("Enter your message")
163
- # if prompt:
164
- # thread_id = ensure_single_thread_id()
165
- # with st.chat_message("user"):
166
- # st.markdown(prompt)
167
- # st.session_state.messages.append({"role": "user", "content": prompt})
168
-
169
- # with st.chat_message("assistant"):
170
- # response = st.write_stream(stream_generator(prompt, thread_id))
171
- # st.session_state.messages.append({"role": "assistant", "content": response})
172
-
173
  import openai
174
  import streamlit as st
175
  from openai import OpenAI
@@ -187,7 +15,7 @@ OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
187
 
188
  client = OpenAI(api_key=OPENAI_API_KEY)
189
  vector_store_id = "vs_sHLBqA88ng66RDZYMgaBmDuX" # Vector Store ID to use
190
-
191
  # all_files = list(client.beta.vector_stores.files.list(vector_store_id))
192
  # for file in all_files:
193
  # # print(file)
@@ -196,6 +24,7 @@ vector_store_id = "vs_sHLBqA88ng66RDZYMgaBmDuX" # Vector Store ID to use
196
 
197
  # Set the assistant ID
198
  assistant_id = "asst_UeIHpkpVejAgrIz3RnisxSLm" # Replace with your own assistant ID
 
199
 
200
  def ensure_single_thread_id():
201
  if "thread_id" not in st.session_state:
@@ -264,7 +93,7 @@ def upload_and_add_to_vector_store(uploaded_file):
264
  def list_all_files_in_vector_store():
265
  """List all files in the specified vector store."""
266
  try:
267
- all_files = list(client.beta.vector_stores.files.list(vector_store_id=vector_store_id))
268
  # st.write(all_files)
269
  for file in all_files:
270
  file_id = file.id
@@ -276,7 +105,7 @@ def list_all_files_in_vector_store():
276
  def delete_file_from_vector_store(vector_store_id, file_id):
277
  """Delete a file from the specified vector store."""
278
  try:
279
- client.beta.vector_stores.files.delete(
280
  vector_store_id=vector_store_id,
281
  file_id=file_id
282
  )
@@ -339,4 +168,4 @@ if st.session_state.start_chat:
339
 
340
  with st.chat_message("assistant"):
341
  response = st.write_stream(stream_generator(prompt, thread_id))
342
- st.session_state.messages.append({"role": "assistant", "content": response})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import openai
2
  import streamlit as st
3
  from openai import OpenAI
 
15
 
16
  client = OpenAI(api_key=OPENAI_API_KEY)
17
  vector_store_id = "vs_sHLBqA88ng66RDZYMgaBmDuX" # Vector Store ID to use
18
+ # vector_store_id = os.getenv("VECTOR_STORE_ID") # Vector Store ID to use
19
  # all_files = list(client.beta.vector_stores.files.list(vector_store_id))
20
  # for file in all_files:
21
  # # print(file)
 
24
 
25
  # Set the assistant ID
26
  assistant_id = "asst_UeIHpkpVejAgrIz3RnisxSLm" # Replace with your own assistant ID
27
+ # assistant_id = os.getenv("ASSISTANT_ID") # Replace with your own assistant ID
28
 
29
  def ensure_single_thread_id():
30
  if "thread_id" not in st.session_state:
 
93
  def list_all_files_in_vector_store():
94
  """List all files in the specified vector store."""
95
  try:
96
+ all_files = list(client.vector_stores.files.list(vector_store_id=vector_store_id))
97
  # st.write(all_files)
98
  for file in all_files:
99
  file_id = file.id
 
105
  def delete_file_from_vector_store(vector_store_id, file_id):
106
  """Delete a file from the specified vector store."""
107
  try:
108
+ client.vector_stores.files.delete(
109
  vector_store_id=vector_store_id,
110
  file_id=file_id
111
  )
 
168
 
169
  with st.chat_message("assistant"):
170
  response = st.write_stream(stream_generator(prompt, thread_id))
171
+ st.session_state.messages.append({"role": "assistant", "content": response})