# Modular Chatbot — password-gated Streamlit app backed by the OpenAI Chat Completions API.
############################################################################################################
# Importing Libraries
import streamlit as st
import hmac
import config
from openai import OpenAI
############################################################################################################
# Password protection
def check_password():
    """Return True once the user has entered the correct app password.

    Renders a password prompt until authentication succeeds; the result is
    cached in ``st.session_state["password_correct"]`` for the session.
    """

    def password_entered():
        """on_change callback: compare the typed password against secrets."""
        if hmac.compare_digest(st.session_state["password"], st.secrets["password"]):
            st.session_state["password_correct"] = True
            # Never retain the raw password in session state.
            del st.session_state["password"]
        else:
            st.session_state["password_correct"] = False

    # Already authenticated earlier in this session -- nothing to render.
    if st.session_state.get("password_correct", False):
        return True

    # Show the prompt; password_entered fires when the user submits.
    st.text_input(
        "Password", type="password", on_change=password_entered, key="password"
    )
    # A False flag means a previous attempt was wrong.
    if "password_correct" in st.session_state:
        st.error("😕 Password incorrect")
    return False
# Gate the rest of the app behind the password check.
if not check_password():
    st.stop()  # Halt script execution for unauthenticated users.
############################################################################################################
# Streamlit page configuration: wide layout, sidebar starts collapsed.
st.set_page_config(
    page_title="Modular Chatbot",
    page_icon=":lightbulb:",
    layout="wide",
    initial_sidebar_state="collapsed",
)
# Streamlit app layout
# st.title(config.app_title)
# with st.expander("INSTRUCTIONS FOR STUDENTS:"):
# st.markdown(config.instructions)
############################################################################################################
# System prompt that seeds every conversation.
initial_context = {"role": "system", "content": config.prompt}

# OpenAI client, authenticated via Streamlit secrets.
client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])

# --- One-time session-state bootstrap ---
if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = config.ai_model

if "display_messages" not in st.session_state:
    # History always starts with the system prompt at index 0.
    st.session_state["display_messages"] = [initial_context]

if "chat_initialized" not in st.session_state:
    st.session_state["chat_initialized"] = True
    # Greet the user exactly once per session.
    st.session_state["display_messages"].append(
        {"role": "assistant", "content": config.opening_message}
    )
# Read the next user message (None until the user submits something).
prompt = st.chat_input("Type your message here...")

if prompt:
    # Defensive: restore the system prompt if the history was emptied,
    # then record the user's turn.
    if not st.session_state["display_messages"]:
        st.session_state["display_messages"] = [initial_context]
    st.session_state["display_messages"].append({"role": "user", "content": prompt})
def reset_chat_history():
    """Drop the conversation back to the system prompt plus the greeting, then rerun."""
    st.session_state["display_messages"] = [
        initial_context,
        {"role": "assistant", "content": config.opening_message},
    ]
    st.rerun()
# Main chat area: replay the history, then stream a reply to any new prompt.
with st.container(border=False):
    # Render every turn after the system prompt (index 0), oldest first.
    for message in st.session_state["display_messages"][1:]:
        role = "user" if message["role"] == "user" else "assistant"
        with st.chat_message(role):
            st.markdown(message["content"])

    # A fresh user message triggers a streamed assistant response.
    if prompt:
        with st.chat_message("assistant"):
            try:
                stream = client.chat.completions.create(
                    model=st.session_state["openai_model"],
                    messages=[
                        {"role": m["role"], "content": m["content"]}
                        for m in st.session_state["display_messages"]
                    ],
                    stream=True,
                    temperature=config.temperature,
                    max_tokens=config.max_tokens,
                    frequency_penalty=config.frequency_penalty,
                    presence_penalty=config.presence_penalty,
                )
                full_response = ""
                placeholder = st.empty()
                # Accumulate streamed deltas, showing a cursor while typing.
                for chunk in stream:
                    delta = chunk.choices[0].delta.content
                    if delta is not None:
                        full_response += delta
                        placeholder.markdown(full_response + "▌")
                # Swap the cursor view for the finished message.
                placeholder.markdown(full_response)
                # Persist the reply so later reruns re-display it.
                st.session_state["display_messages"].append(
                    {"role": "assistant", "content": full_response}
                )
            except Exception as e:
                # Boundary handler: surface API/stream failures in the UI.
                st.error(f"An error occurred: {str(e)}")
# Sidebar: warning banner, chat-reset button, and license notice.
with st.sidebar:
    st.markdown(config.warning_message, unsafe_allow_html=True)

    # Wipe the conversation back to its initial state on demand.
    if st.button("Clear Chat History"):
        reset_chat_history()

    st.markdown("---")  # Separator line
    st.markdown("""
<small>Licensed under [GNU GPL v3.0](https://www.gnu.org/licenses/gpl-3.0.en.html)</small>
""", unsafe_allow_html=True)