# Spaces: Sleeping  (Hugging Face Space status captured at scrape time — not code)
import contextlib
import os

import streamlit as st
from openai import AssistantEventHandler, OpenAI
from typing_extensions import override
def list_files_in_directory(directory):
    """Return the full paths of every file under *directory*, recursively.

    Walks the tree rooted at ``directory`` and collects each file's
    complete path (directory component included), so the results can be
    opened directly by callers.
    """
    return [
        os.path.join(parent, filename)
        for parent, _subdirs, filenames in os.walk(directory)
        for filename in filenames
    ]
class EventHandler(AssistantEventHandler):
    """Streams assistant output to stdout and resolves file-search citations.

    Passed to ``client.beta.threads.runs.stream(...)``; the OpenAI SDK
    invokes these callbacks as streaming events arrive.
    """

    def on_text_created(self, text) -> None:
        # Announce the assistant turn before the text deltas stream in.
        # (Was an f-string with no placeholders — plain literal suffices.)
        print("\nassistant > ", end="", flush=True)

    def on_tool_call_created(self, tool_call):
        # Log which tool (e.g. file_search) the assistant invoked.
        print(f"\nassistant > {tool_call.type}\n", flush=True)

    def on_message_done(self, message) -> None:
        """Print the finished message, replacing citation markers with [n]
        indices and listing the cited filenames underneath."""
        if not message.content:
            # Guard: indexing message.content[0] would raise on an
            # empty message; nothing to print in that case.
            return
        message_content = message.content[0].text
        annotations = message_content.annotations
        citations = []
        for index, annotation in enumerate(annotations):
            # Replace the raw annotation marker text with a readable [n].
            message_content.value = message_content.value.replace(
                annotation.text, f"[{index}]"
            )
            if file_citation := getattr(annotation, "file_citation", None):
                # NOTE(review): relies on the module-level `client` that is
                # only created once an API key has been entered — confirm a
                # key is always present before any message completes.
                cited_file = client.files.retrieve(file_citation.file_id)
                citations.append(f"[{index}] {cited_file.filename}")
        print(message_content.value)
        print("\n".join(citations))
# --- Page chrome ----------------------------------------------------------
st.title("AutoMethylML Chatbot")
st.caption("Please ask questions regarding your Methylation Classification Report here.")

# Collect the user's OpenAI credential; masked so it is not echoed on screen.
api_key = st.text_input("Enter your OpenAI API key", type="password")
| # Initialize the OpenAI client with the provided API key | |
if api_key:
    # Client is module-level so EventHandler.on_message_done can resolve
    # cited files through it.
    client = OpenAI(api_key=api_key)

    # NOTE(review): everything below runs again on every Streamlit rerun
    # (each widget interaction), re-creating the assistant, vector store,
    # and thread and re-uploading the SOP files — consider caching these in
    # st.session_state. Left as-is here to preserve existing behavior.
    assistant = client.beta.assistants.create(
        name="Methylation Classification Assistant",
        instructions="You are a helpful molecular pathologist, expert in CNS tumor classification using methylation array data.",
        model="gpt-4o",
        tools=[{"type": "file_search"}],
    )

    # Vector store that backs the assistant's file_search tool.
    vector_store = client.beta.vector_stores.create(name="MAA_SOP")

    # Ready the files for upload to OpenAI.
    file_paths = list_files_in_directory('SOP')

    # Open every SOP file inside an ExitStack so all handles are closed
    # once the upload finishes (the previous code leaked them).
    with contextlib.ExitStack() as stack:
        file_streams = [stack.enter_context(open(path, "rb")) for path in file_paths]
        # Upload the files, attach them to the vector store, and poll the
        # file batch until processing completes.
        file_batch = client.beta.vector_stores.file_batches.upload_and_poll(
            vector_store_id=vector_store.id, files=file_streams
        )

    # Point the assistant's file_search tool at the populated vector store.
    assistant = client.beta.assistants.update(
        assistant_id=assistant.id,
        tool_resources={"file_search": {"vector_store_ids": [vector_store.id]}},
    )

    # Conversation thread for this session.
    thread = client.beta.threads.create()

    prompt = st.text_area("Enter your message")
    if prompt:
        # Stream the run; EventHandler mirrors events to stdout while
        # st.write_stream renders the text deltas in the chat UI.
        with client.beta.threads.runs.stream(
            thread_id=thread.id,
            assistant_id=assistant.id,
            instructions=f"Please answer the users question: {prompt}",
            event_handler=EventHandler(),
        ) as stream:
            with st.chat_message("assistant"):
                response = st.write_stream(stream.text_deltas)
            stream.until_done()