"""Streamlit chatbot backed by an OpenAI Assistant, with a vector-store file manager.

Sidebar: upload PDF/DOCX files into a vector store, list its files, delete by id.
Main page: a streaming chat whose history survives Streamlit reruns via session state.
"""

import io
import os
import time

import openai  # noqa: F401 — kept: may be relied on for module-level config elsewhere
import streamlit as st
from dotenv import load_dotenv
from openai import OpenAI

# Load credentials and resource IDs from the .env file.
load_dotenv()
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
client = OpenAI(api_key=OPENAI_API_KEY)

vector_store_id = os.getenv("VECTOR_STORE_ID")  # Vector store holding uploaded documents
assistant_id = os.getenv("ASSISTANT_ID")        # Assistant that answers chat messages


def ensure_single_thread_id():
    """Create the conversation thread once per session and return its id."""
    if "thread_id" not in st.session_state:
        thread = client.beta.threads.create()
        st.session_state.thread_id = thread.id
    return st.session_state.thread_id


def safe_message_send(prompt, thread_id, max_retries=60):
    """Add a user message to *thread_id*, waiting out any still-active run.

    The Assistants API rejects new messages while a run is active on the
    thread. Retry in a bounded loop (the original recursed without a depth
    limit, which could exhaust the stack if a run never finished).

    Raises:
        Exception: re-raised unchanged when the error is not the "active run"
            case, or when max_retries attempts are exhausted.
    """
    for attempt in range(max_retries):
        try:
            return client.beta.threads.messages.create(
                thread_id=thread_id,
                role="user",
                content=prompt,
            )
        except Exception as e:
            # The API error text contains "active" while a run is in flight;
            # anything else — or running out of retries — is fatal.
            if "active" not in str(e) or attempt == max_retries - 1:
                raise
            print("Waiting for the current run to finish...")
            time.sleep(1)


def stream_generator(prompt, thread_id):
    """Send *prompt* to the assistant and yield text deltas as they stream in."""
    safe_message_send(prompt, thread_id)
    with st.spinner("Wait... Generating response..."):
        try:
            stream = client.beta.threads.runs.create(
                thread_id=thread_id,
                assistant_id=assistant_id,
                stream=True,
            )
            for event in stream:
                if event.data.object == "thread.message.delta":
                    for content in event.data.delta.content:
                        if content.type == 'text':
                            yield content.text.value
                            time.sleep(0.01)  # small delay for a smoother typing effect
                elif event.data.object == "thread.run.stop":
                    break  # run stopped; no further deltas will arrive
        except Exception as e:
            print(f"Error during streaming: {str(e)}")


def upload_and_add_to_vector_store(uploaded_file):
    """Upload a Streamlit-uploaded file to OpenAI and add it to the vector store."""
    try:
        # Wrap the upload in a named BytesIO so the API receives a filename.
        file_stream = io.BytesIO(uploaded_file.getvalue())
        file_stream.name = uploaded_file.name
        # Use the non-beta namespace, consistent with list/delete below
        # (vector stores moved out of `client.beta` in current SDK versions).
        file_batch = client.vector_stores.file_batches.upload_and_poll(
            vector_store_id=vector_store_id,
            files=[file_stream],
        )
        st.success(
            f"File '{uploaded_file.name}' processed and added to vector store. "
            f"Status: {file_batch.status}"
        )
    except Exception as e:
        st.error(f"Failed to process file: {str(e)}")


def list_all_files_in_vector_store():
    """Render the id of every file in the vector store and return the ids.

    Returns:
        list[str]: file ids on success; an empty list when the listing fails
        (the original returned None on success and {} on failure — callers
        ignore the value, so this is backward-compatible).
    """
    try:
        all_files = list(client.vector_stores.files.list(vector_store_id=vector_store_id))
        file_ids = []
        for file in all_files:
            st.write(file.id)
            file_ids.append(file.id)
        return file_ids
    except Exception as e:
        st.error(f"Failed to list files: {str(e)}")
        return []


def delete_file_from_vector_store(vector_store_id, file_id):
    """Delete *file_id* from the given vector store, reporting the outcome in the UI."""
    try:
        client.vector_stores.files.delete(
            vector_store_id=vector_store_id,
            file_id=file_id,
        )
        st.success(f"File with ID '{file_id}' deleted from vector store '{vector_store_id}'.")
    except Exception:
        # Best-effort UI feedback; the most common cause is an unknown file id.
        st.error("Failed to delete file. File id is not Found.")


# ---- Sidebar: delete a file from the vector store ----
st.sidebar.subheader("Delete File from Vector Store")
file_id_to_delete = st.sidebar.text_input("Enter File ID to Delete", "")
if st.sidebar.button("Delete File"):
    delete_file_from_vector_store(vector_store_id, file_id_to_delete)

# ---- Main page ----
st.title("💬Chatbot")
st.caption("🚀 A Streamlit Custom Chatbot")

with st.sidebar:
    st.write("Upload PDF File")
    uploaded_file = st.file_uploader("Choose a file", type=['pdf', 'docx'], key='file_uploader')
    if st.button('Upload File', key='process_file'):
        if uploaded_file is not None:
            upload_and_add_to_vector_store(uploaded_file)
            st.success("File successfully uploaded and processed.")
        else:
            st.error("Please upload a file to process.")

    # List files recorded in this session.
    # NOTE(review): nothing in this file populates st.session_state.uploaded_files —
    # presumably another module does; confirm or remove.
    st.write("### Uploaded Files")
    if 'uploaded_files' in st.session_state and st.session_state.uploaded_files:
        for file_name, file_id in st.session_state.uploaded_files.items():
            st.write(f"{file_name}: {file_id}")

# List all files currently stored in the vector store.
st.write("## All Files in Vector Store")
all_files = list_all_files_in_vector_store()

# Chat is always enabled. (The original also had an `if 'start_chat' not in
# st.session_state` fallback, but it was dead code: the flag is assigned
# unconditionally on the line above it.)
st.session_state.start_chat = True

if st.session_state.start_chat:
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the stored history so earlier turns survive Streamlit reruns.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    prompt = st.chat_input("Enter your message")
    if prompt:
        thread_id = ensure_single_thread_id()
        with st.chat_message("user"):
            st.markdown(prompt)
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("assistant"):
            # st.write_stream consumes the generator and returns the full text.
            response = st.write_stream(stream_generator(prompt, thread_id))
        st.session_state.messages.append({"role": "assistant", "content": response})