Deepakkori45 commited on
Commit
c6f97d3
·
verified ·
1 Parent(s): 6aaef19

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +167 -0
app.py ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import openai
import streamlit as st
from openai import OpenAI
import io
import time
import os
from dotenv import load_dotenv

# BUG FIX: load_dotenv was imported but never called, so VECTOR_STORE_ID
# resolved to None unless the variable was already set in the process
# environment. Load the local .env file before reading it.
load_dotenv()

# OpenAI credentials / configuration
OPENAI_API_KEY = st.secrets["OPENAI_API_KEY"]
VECTOR_STORE_ID = os.getenv("VECTOR_STORE_ID")

client = OpenAI(api_key=OPENAI_API_KEY)
vector_store_id = VECTOR_STORE_ID  # Vector Store ID to use

# Set the assistant ID
assistant_id = "asst_UeIHpkpVejAgrIz3RnisxSLm"  # Replace with your own assistant ID
25
def ensure_single_thread_id():
    """Return this session's conversation thread id, creating the thread once.

    The id is cached in st.session_state so all messages in a browser
    session share a single Assistants-API thread.
    """
    if "thread_id" not in st.session_state:
        new_thread = client.beta.threads.create()
        st.session_state.thread_id = new_thread.id
    return st.session_state.thread_id
30
+
31
def safe_message_send(prompt, thread_id):
    """Add a user message to *thread_id*, waiting out any active run.

    The Assistants API rejects new messages while a run is active on the
    thread; when that error is seen we sleep briefly and retry.

    BUG FIX: the original retried via unbounded recursion, which grows the
    call stack for as long as the run stays active; this loop retries with
    identical behavior but constant stack depth.

    Args:
        prompt: the user's message text.
        thread_id: id of the thread to append to.

    Returns:
        The created message object.

    Raises:
        Exception: re-raised unchanged when the failure is not the
        "active run" condition.
    """
    while True:
        try:
            return client.beta.threads.messages.create(
                thread_id=thread_id,
                role="user",
                content=prompt,
            )
        except Exception as e:
            if "active" not in str(e):
                raise
            print("Waiting for the current run to finish...")
            time.sleep(1)  # give the active run a moment to complete
46
+
47
def stream_generator(prompt, thread_id):
    """Yield assistant response text chunks for *prompt* on *thread_id*.

    Sends the user message first (waiting out any active run via
    safe_message_send), then starts a streaming assistant run and yields
    each text delta as it arrives — suitable for st.write_stream.

    Streaming errors are printed rather than raised so a transient API
    failure does not crash the Streamlit script.
    """
    # Send the user message; the returned message object is not needed
    # here (the original bound it to an unused local).
    safe_message_send(prompt, thread_id)

    with st.spinner("Wait... Generating response..."):
        try:
            stream = client.beta.threads.runs.create(
                thread_id=thread_id,
                assistant_id=assistant_id,
                stream=True,
            )
            for event in stream:
                if event.data.object == "thread.message.delta":
                    for content in event.data.delta.content:
                        if content.type == 'text':
                            yield content.text.value
                            time.sleep(0.01)  # small pacing delay for smoother display
                # NOTE(review): "thread.run.stop" does not match the documented
                # run event names (e.g. thread.run.completed) — confirm against
                # the Assistants API streaming events; kept as-is here.
                elif event.data.object == "thread.run.stop":
                    break  # stop streaming if the run halts
        except Exception as e:
            print(f"Error during streaming: {str(e)}")
69
+
70
def upload_and_add_to_vector_store(uploaded_file):
    """Upload a file to OpenAI and add it to the specified vector store."""
    try:
        # Wrap the uploaded bytes in a named in-memory stream so the API
        # receives a filename along with the content.
        stream = io.BytesIO(uploaded_file.getvalue())
        stream.name = uploaded_file.name
        # Upload and poll until the vector store finishes processing.
        batch = client.beta.vector_stores.file_batches.upload_and_poll(
            vector_store_id=vector_store_id,
            files=[stream],
        )
        st.success(f"File '{uploaded_file.name}' processed and added to vector store. Status: {batch.status}")
    except Exception as e:
        st.error(f"Failed to process file: {str(e)}")
86
+
87
+
88
+
89
def list_all_files_in_vector_store():
    """Write every file id in the vector store to the UI and return the ids.

    BUG FIX: the original returned None on success but ``{}`` on failure,
    despite its name promising a listing; both paths now return a list so
    callers can iterate the result safely.

    Returns:
        list: the file ids found; an empty list when the API call fails
        (the error is shown in the UI via st.error).
    """
    try:
        files = client.beta.vector_stores.files.list(vector_store_id=vector_store_id)
        file_ids = [f.id for f in files]
        for file_id in file_ids:
            st.write(file_id)
        return file_ids
    except Exception as e:
        st.error(f"Failed to list files: {str(e)}")
        return []
100
+
101
def delete_file_from_vector_store(vector_store_id, file_id):
    """Delete a file from the specified vector store.

    Args:
        vector_store_id: id of the vector store to delete from.
        file_id: id of the file to remove.

    Success and failure are reported in the Streamlit UI; no value is
    returned.
    """
    try:
        client.beta.vector_stores.files.delete(
            vector_store_id=vector_store_id,
            file_id=file_id
        )
        st.success(f"File with ID '{file_id}' deleted from vector store '{vector_store_id}'.")
    except Exception as e:
        # BUG FIX: the original discarded the caught exception and asserted
        # an unverified cause ("File id is not Found") in a placeholder-free
        # f-string; surface the real API error instead.
        st.error(f"Failed to delete file '{file_id}': {str(e)}")
111
+
112
# Sidebar interface to delete a file from the vector store by id.
st.sidebar.subheader("Delete File from Vector Store")
file_id_to_delete = st.sidebar.text_input("Enter File ID to Delete", "")
if st.sidebar.button("Delete File"):
    # Guard against firing the delete API call with an empty id
    # (the original called the API unconditionally on button press).
    if file_id_to_delete.strip():
        delete_file_from_vector_store(vector_store_id, file_id_to_delete)
    else:
        st.sidebar.error("Please enter a file ID to delete.")
117
+
118
# Streamlit interface setup
st.title("💬Chatbot")
st.caption("🚀 A Streamlit Custom Chatbot")


with st.sidebar:
    st.write("Upload PDF File")
    uploaded_file = st.file_uploader("Choose a file", type=['pdf', 'docx'], key='file_uploader')

    if st.button('Upload File', key='process_file'):
        # Guard clause: complain if nothing was chosen, otherwise process.
        if uploaded_file is None:
            st.error("Please upload a file to process.")
        else:
            upload_and_add_to_vector_store(uploaded_file)
            st.success("File successfully uploaded and processed.")

# List all uploaded files tracked in this session.
# NOTE(review): nothing visible in this file populates
# st.session_state.uploaded_files, so this section may never render —
# confirm whether another code path fills it.
st.write("### Uploaded Files")
session_files = st.session_state.get('uploaded_files')
if session_files:
    for name, fid in session_files.items():
        st.write(f"{name}: {fid}")

# List all files in the vector store
st.write("## All Files in Vector Store")
all_files = list_all_files_in_vector_store()
143
+
144
# Initialize session state for chat.
# BUG FIX (dead code): the original set start_chat = True and then
# checked `if 'start_chat' not in st.session_state` — a branch that could
# never run because the key had just been assigned. The chat was therefore
# always enabled; that effective behavior is kept, minus the dead branch.
st.session_state.start_chat = True

# Main chat interface
if st.session_state.start_chat:
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the conversation so far on each script rerun.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    prompt = st.chat_input("Enter your message")
    if prompt:
        thread_id = ensure_single_thread_id()
        with st.chat_message("user"):
            st.markdown(prompt)
        st.session_state.messages.append({"role": "user", "content": prompt})

        with st.chat_message("assistant"):
            # Stream the assistant's reply into the UI and keep the full
            # text for the session history.
            response = st.write_stream(stream_generator(prompt, thread_id))
            st.session_state.messages.append({"role": "assistant", "content": response})