| |
| |
|
|
| import streamlit as st |
| import hmac |
| import config |
| from openai import OpenAI |
|
|
| |
| |
|
|
def check_password():
    """Return True once the visitor has supplied the correct password."""

    def password_entered():
        """on_change callback: validate the submitted password."""
        # compare_digest performs a constant-time comparison (timing-safe).
        ok = hmac.compare_digest(
            st.session_state["password"], st.secrets["password"]
        )
        st.session_state["password_correct"] = ok
        if ok:
            # Drop the raw password from session state once verified.
            del st.session_state["password"]

    # Already authenticated earlier in this session — skip the prompt.
    if st.session_state.get("password_correct", False):
        return True

    # Render the password field; the callback fires when the user submits.
    st.text_input(
        "Password", type="password", on_change=password_entered, key="password"
    )
    # The flag only exists after an attempt, so this marks a failed try.
    if "password_correct" in st.session_state:
        st.error("😕 Password incorrect")
    return False
|
|
# Gate the whole app: nothing below renders for unauthenticated visitors.
if not check_password():
    st.stop()
|
|
| |
| |
|
|
| |
# Page chrome: wide layout, sidebar starts collapsed.
st.set_page_config(
    layout="wide",
    page_title="Modular Chatbot",
    page_icon=":lightbulb:",
    initial_sidebar_state="collapsed",
)
|
|
| |
| |
| |
| |
|
|
| |
|
|
|
|
| |
# Seed message carrying the system prompt from the project config.
initial_context = {"role": "system", "content": config.prompt}

# OpenAI client authenticated via Streamlit secrets.
client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])

# Session-scoped defaults, set once per browser session:
# the model name and the conversation history (starting with the system prompt).
st.session_state.setdefault("openai_model", config.ai_model)
st.session_state.setdefault("display_messages", [initial_context])
|
|
| |
# Chat input box, pinned to the bottom of the page by Streamlit.
prompt = st.chat_input("Type your message here...")

if prompt:
    # Defensive re-seed: restore the system prompt if history was emptied.
    if not st.session_state["display_messages"]:
        st.session_state["display_messages"] = [initial_context]
    # Record the user's turn before rendering / calling the model.
    st.session_state["display_messages"].append(
        {"role": "user", "content": prompt}
    )
|
|
| |
def reset_chat_history():
    """Wipe the conversation back to just the system prompt, then redraw."""
    st.session_state["display_messages"] = [initial_context]
    # Force an immediate rerun so the cleared state is visible right away.
    st.rerun()
|
|
| |
with st.container(border=False):
    # Index 0 is the hidden system prompt — render only the visible turns.
    for entry in st.session_state["display_messages"][1:]:
        # Anything that isn't the user is shown as the assistant.
        speaker = "user" if entry["role"] == "user" else "assistant"
        with st.chat_message(speaker):
            st.markdown(entry["content"])
|
|
| |
if prompt:
    with st.chat_message("assistant"):
        try:
            # Send the full conversation (system prompt included) to the API.
            payload = [
                {"role": m["role"], "content": m["content"]}
                for m in st.session_state["display_messages"]
            ]
            stream = client.chat.completions.create(
                model=st.session_state["openai_model"],
                messages=payload,
                stream=True,
                temperature=config.temperature,
                max_tokens=config.max_tokens,
                frequency_penalty=config.frequency_penalty,
                presence_penalty=config.presence_penalty,
            )

            placeholder = st.empty()
            reply = ""

            # Render tokens as they arrive; "▌" acts as a typing cursor.
            for part in stream:
                delta = part.choices[0].delta.content
                if delta is not None:
                    reply += delta
                    placeholder.markdown(reply + "▌")

            # Final render without the cursor glyph.
            placeholder.markdown(reply)

            # Persist the finished reply so later reruns re-display it.
            st.session_state["display_messages"].append(
                {"role": "assistant", "content": reply}
            )
        except Exception as e:
            # Surface API/network failures to the user instead of crashing.
            st.error(f"An error occurred: {str(e)}")
|
|
| |
with st.sidebar:
    # Configurable notice/disclaimer from config (may contain HTML).
    st.markdown(config.warning_message, unsafe_allow_html=True)

    # Resets the conversation to just the system prompt.
    if st.button("Clear Chat History"):
        reset_chat_history()

    st.markdown("---")
    st.markdown("""
    <small>Licensed under [GNU GPL v3.0](https://www.gnu.org/licenses/gpl-3.0.en.html)</small>
    """, unsafe_allow_html=True)