# NOTE(review): the lines here originally read "Spaces: / Sleeping / Sleeping" —
# residue of the Hugging Face Space status page this file was scraped from,
# not part of the program. Kept as a comment so the file stays valid Python.
import os

import streamlit as st
from PIL import Image
from huggingface_hub import Repository, login

# Authenticate against the Hugging Face Hub, then clone (or reuse) the
# private dataset repo that ships the agent code and base map image, and
# sync it to the latest revision before anything is imported from it.
login(token=os.environ['HF_TOKEN'])

repo = Repository(
    local_dir="agent_function",
    repo_type="dataset",
    clone_from=os.environ['DATASET'],
    token=True,  # True = reuse the token cached by login() above
)
repo.git_pull()
# Deliberately imported here, after repo.git_pull(): the module lives in the
# dataset repo that was just cloned/updated at runtime.
from agent_function.function import generate_answer

# Disable PIL's decompression-bomb guard — the map overlay is a very large
# image that comes from our own (trusted) dataset repo.
Image.MAX_IMAGE_PIXELS = None

st.set_page_config(layout="wide")
st.title("Mapbot")
col1, col2 = st.columns([1, 2])  # narrow chat pane, wide map pane

# Seed per-session state once; Streamlit reruns this script on every
# interaction, so guard each key before initializing it.
if "messages" not in st.session_state:
    st.session_state.messages = []
if "overlay" not in st.session_state:
    # Plain string literal — the original used an f-string with no
    # placeholders, which is needless (ruff F541).
    st.session_state.overlay = Image.open('agent_function/base.png')
with col1:
    # Fixed-height scrollable container so the chat log doesn't push the
    # input box off-screen.
    messages_box = st.container(height=500)

    # Replay the stored conversation on every Streamlit rerun.
    for msg in st.session_state.messages:
        with messages_box.chat_message(msg["role"]):
            st.markdown(msg["content"])

    # A freshly submitted prompt (if any) is handled within this rerun.
    if prompt := st.chat_input("What is up?"):
        messages_box.chat_message("user").markdown(prompt)
        st.session_state.messages.append({"role": "user", "content": prompt})

        response = generate_answer(prompt)
        with messages_box.chat_message("assistant"):
            st.markdown(response)
        # Record the reply so it survives subsequent reruns.
        st.session_state.messages.append(
            {"role": "assistant", "content": response}
        )
with col2:
    # Map pane: show a spinner while the (potentially huge) overlay image
    # held in session state is rendered.
    with st.spinner():
        st.image(st.session_state.overlay)