# Smart Retrieval — Streamlit app: sidebar UI for managing services/documents
# plus a chat interface over retrieved document context.
from utlis.helper import *

initialize_session_state()


def _multiselect_with_all(options):
    """Render a multiselect over *options* with an "ALL" shortcut.

    When two or more options exist, an "ALL" sentinel is appended (mutating
    *options*, as the original inline code did) and preselected; choosing
    "ALL" expands the selection to every real option. The effective
    selection (sentinel stripped) is echoed via ``st.write`` and returned.
    """
    if len(options) >= 2:
        options.append("ALL")
    if "ALL" in options:
        selected = st.multiselect("", options, default="ALL")
    elif len(options) == 1:
        selected = st.multiselect("", options, default=options[0])
    else:
        selected = st.multiselect("", options)
    if "ALL" in selected:
        # Expand the sentinel into the full option list, minus the sentinel.
        selected = [opt for opt in options if opt != "ALL"]
    st.write("You selected:", selected)
    return selected


with st.sidebar:
    st.title("Smart Retrieval")
    # Available LLM backends the user can route generation through.
    llms = ['Cohere', 'Gemini-Pro', 'Mistral-7B-Instruct-v0.3', 'gemma-2b',
            'Meta-Llama-3-8B-Instruct', 'Phi-3-mini-4k-instruct',
            'zephyr-7b-beta']
    st.session_state.llm = st.selectbox("Choose LLM", llms)

    genre = st.radio(
        "Choose option",
        ["Select document(s)", "Add document(s)", "Delete service(s)", "Delete document(s)"])

    if genre == "Add document(s)":
        st.title('Add Document(s)')
        add_new_service = st.checkbox("Add new service")
        if add_new_service:
            new_service = st.text_input("Enter service name")
            # Fetch the list of embedding models exposed by the backend.
            res_request = requests.get(EMBEDDING_MODELS_API)
            embidding_models = json.loads(res_request.text)
            embdding_model = st.selectbox("Choose Embidding model", embidding_models["Model_Names_paid"])
            if new_service and st.button('Add'):
                add_service(st.session_state.token, new_service, embdding_model)
        # Existing services for this token.
        services = requests.get(SERVICES_API + st.session_state.token)
        services = json.loads(services.text)
        if len(services) > 0:
            st.session_state.service = st.selectbox("Choose Service", services)
        # Send uploaded document(s) to the ingestion API.
        if st.session_state.service:
            st.session_state.uploaded_files = st.file_uploader(
                "Upload PDF files",
                type=["pdf", "png", "jpg", "jpeg"],
                accept_multiple_files=True)
            if st.session_state.uploaded_files:
                st.session_state.process = st.button('Process')
                if st.session_state.process:
                    add_document(st.session_state.token, st.session_state.service)

    elif genre == "Select document(s)":
        st.title('Chat with Document(s)')
        services = requests.get(SERVICES_API + st.session_state.token)
        services = json.loads(services.text)
        if len(services) > 0:
            st.session_state.service_slected_to_chat = st.selectbox("Choose Service", services)
            st.session_state.top_k = st.number_input("Top k ", min_value=1, value=5)
            # Documents previously ingested into the chosen service.
            history_document = requests.get(
                DOCUMENT_API + f'/{st.session_state.token}/{st.session_state.service_slected_to_chat}')
            history_document = json.loads(history_document.text).get("documents", [])
            st.session_state.doument_slected_to_chat = _multiselect_with_all(history_document)

    elif genre == "Delete service(s)":
        st.title('Delete Service(s)')
        services = requests.get(SERVICES_API + st.session_state.token)
        services = json.loads(services.text)
        service_slected = _multiselect_with_all(services)
        if len(service_slected) > 0:
            st.session_state.delete = st.button('Delete')
            if st.session_state.delete:
                delete_service(st.session_state.token, service_slected)

    elif genre == "Delete document(s)":
        st.title('Delete Document(s)')
        services = requests.get(SERVICES_API + st.session_state.token)
        services = json.loads(services.text)
        if len(services) > 0:
            service = st.selectbox("Choose Service", services)
            history_document = requests.get(DOCUMENT_API + f'/{st.session_state.token}/{service}')
            history_document = json.loads(history_document.text).get("documents", [])
            document_slected_to_delete = _multiselect_with_all(history_document)
            if len(document_slected_to_delete) > 0:
                st.session_state.delete = st.button('Delete')
                if st.session_state.delete:
                    # BUG FIX: delete from the service selected just above,
                    # not st.session_state.service, which is only assigned in
                    # the "Add document(s)" branch and may be stale or unset.
                    delete_document(st.session_state.token, service, document_slected_to_delete)
# Replay the stored chat history with role-specific avatars.
for msg in st.session_state.messages:
    avatar = "🧑‍💻" if msg["role"] == "user" else "🤖"
    st.chat_message(msg["role"], avatar=avatar).write(msg["content"])

if prompt := st.chat_input():
    st.session_state.messages.append({"role": "user", "content": prompt})
    st.chat_message("user", avatar="🧑‍💻").write(prompt)
    # Retrieve the top-k context chunks for the prompt from the selected service.
    context = get_context(prompt,
                          st.session_state.token,
                          st.session_state.service_slected_to_chat,
                          st.session_state.top_k)
    # Assemble the numbered-chunk context string in one pass (join instead of
    # quadratic += concatenation); leading space kept from the original template.
    template = " " + "".join(f"Chunk{i}: {chunk}\n" for i, chunk in enumerate(context))
    response = generate_response(st.session_state.llm, prompt, context=template)
    st.session_state.messages.append({"role": "assistant", "content": response})
    st.chat_message("assistant", avatar="🤖").write(response)