# HR Chatbot — Streamlit app (Hugging Face Space)
"""Bootstrap: authenticate with the Hugging Face Hub, sync the agent code, import it."""
import os
import time

import streamlit as st
from huggingface_hub import Repository, login

# Authenticate so the (possibly private) dataset below can be cloned and pulled.
login(token=os.environ['HF_TOKEN'])

# Clone (or reuse) the dataset repository that ships the agent implementation.
repo = Repository(
    local_dir="agent_function",
    repo_type="dataset",
    clone_from=os.environ['DATASET'],
    token=True,
)
# Pull before importing so the freshest agent code is on disk.
repo.git_pull()

# Deliberately imported after git_pull: the module lives in the cloned repo.
from agent_function.agent import Agent
def response_generator(query):
    """Yield the agent's answer word by word to emulate a streamed response.

    Args:
        query: The user's chat message.

    Yields:
        Each space-delimited word of the answer followed by a single space,
        pausing briefly after each word for a typing effect.
    """
    full_answer = Agent().ask(query)
    for chunk in full_answer.split(' '):
        yield f"{chunk} "
        time.sleep(0.05)
st.title("HR Chatbot")

# Session-state bootstrap (survives Streamlit reruns).
# NOTE(review): conversation_id is initialized here but not read in this file —
# presumably consumed by the agent backend; verify before removing.
st.session_state.setdefault('conversation_id', '')
# Running chat transcript: list of {"role": ..., "content": ...} dicts.
st.session_state.setdefault("messages", [])

# Replay the full transcript on every rerun.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])

# Handle a new user turn, if one was submitted.
user_text = st.chat_input("What is up?")
if user_text:
    # Record and render the user's message.
    st.session_state.messages.append({"role": "user", "content": user_text})
    with st.chat_message("user"):
        st.markdown(user_text)
    # Stream the assistant's reply into the chat container.
    with st.chat_message("assistant"):
        reply = st.write_stream(response_generator(user_text))
    # Persist the fully assembled reply so it survives the next rerun.
    st.session_state.messages.append({"role": "assistant", "content": reply})