# aaspbury's picture
# Update app.py
# f568a4d verified
############################################################################################################
# Importing Libraries
import streamlit as st
import hmac
import config
from openai import OpenAI
import matplotlib.pyplot as plt
import numpy as np
############################################################################################################
# Password protection
def check_password():
    """Return ``True`` if the user has entered the correct password.

    Renders a password input on first run; on submit, compares the entry
    against ``st.secrets["password"]`` and caches the result in session
    state so the check survives Streamlit reruns.
    """

    def password_entered():
        """Check the entered password and record the result in session state."""
        # hmac.compare_digest gives a constant-time comparison, resisting timing attacks
        if hmac.compare_digest(st.session_state["password"], st.secrets["password"]):
            st.session_state["password_correct"] = True
            del st.session_state["password"]  # Don't store the password.
        else:
            st.session_state["password_correct"] = False

    # Already validated earlier in this session — skip the prompt entirely.
    if st.session_state.get("password_correct", False):
        return True

    st.text_input("Password", type="password", on_change=password_entered, key="password")
    # Key exists only after at least one attempt; reaching here means it was wrong.
    if "password_correct" in st.session_state:
        # FIX: repaired mojibake emoji (was "๐Ÿ˜•" — UTF-8 mis-decoded)
        st.error("😕 Password incorrect")
    return False
# Gate the entire app: render nothing below this point until the password check passes.
if not check_password():
    st.stop()
############################################################################################################
# Streamlit app layout
st.set_page_config(
    layout="wide",
    page_title="Modular Chatbot + Growth Model",
    page_icon=":chart_with_upwards_trend:",
    initial_sidebar_state="expanded"
)
# Initial context for chatbot: the system prompt from config, always kept as message [0]
# (the display loop below skips index 0 so it is never shown to the user).
initial_context = {
    "role": "system",
    "content": config.prompt
}
# OpenAI client authenticated via Streamlit secrets (key never appears in source).
client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])
# One-time session-state initialization; these guards make the setup idempotent
# across the reruns Streamlit performs on every user interaction.
if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = config.ai_model
if "display_messages" not in st.session_state:
    st.session_state["display_messages"] = [initial_context]
if "chat_initialized" not in st.session_state:
    st.session_state["chat_initialized"] = True
    # Greet the user exactly once per session with the configured opening message.
    st.session_state["display_messages"].append({"role": "assistant", "content": config.opening_message})
prompt = st.chat_input("Type your message here...")
if prompt:
    # Defensive: re-seed the system prompt if the history was somehow emptied.
    if not st.session_state["display_messages"]:
        st.session_state["display_messages"] = [initial_context]
    st.session_state["display_messages"].append({"role": "user", "content": prompt})
def reset_chat_history():
    """Discard the conversation, restore the system prompt and greeting, and rerun the app."""
    st.session_state["display_messages"] = [
        initial_context,
        {"role": "assistant", "content": config.opening_message},
    ]
    st.rerun()
# Main chat display
with st.container(border=False):
    # Replay the conversation, skipping index 0 (the hidden system prompt).
    for message in st.session_state["display_messages"][1:]:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])
    # A fresh user prompt was appended above — stream the assistant's reply.
    if prompt:
        with st.chat_message("assistant"):
            try:
                stream = client.chat.completions.create(
                    model=st.session_state["openai_model"],
                    # Full history (including the system prompt) gives the model context.
                    messages=[{"role": m["role"], "content": m["content"]} for m in st.session_state["display_messages"]],
                    stream=True,
                    temperature=config.temperature,
                    max_tokens=config.max_tokens,
                    frequency_penalty=config.frequency_penalty,
                    presence_penalty=config.presence_penalty,
                )
                full_response = ""
                message_placeholder = st.empty()
                # Accumulate deltas and redraw the placeholder so the reply appears live.
                for chunk in stream:
                    if chunk.choices[0].delta.content is not None:
                        full_response += chunk.choices[0].delta.content
                        # FIX: repaired mojibake typing-cursor glyph (was "โ–Œ" — UTF-8 mis-decoded)
                        message_placeholder.markdown(full_response + "▌")
                # Final redraw without the cursor glyph.
                message_placeholder.markdown(full_response)
                st.session_state["display_messages"].append({"role": "assistant", "content": full_response})
            except Exception as e:
                # Surface API/network failures in the UI instead of crashing the app.
                st.error(f"An error occurred: {str(e)}")
############################################################################################################
# Sidebar and Growth Model Controls
with st.sidebar:
    # Disclaimer text comes from config and may contain HTML.
    st.markdown(config.warning_message, unsafe_allow_html=True)
    if st.button("Clear Chat History"):
        reset_chat_history()
    st.markdown("---")
    st.markdown("<small>Licensed under [GNU GPL v3.0](https://www.gnu.org/licenses/gpl-3.0.en.html)</small>", unsafe_allow_html=True)
    # FIX: repaired mojibake glyphs (heading emoji was "๐Ÿ“ˆ", subscript was "Nโ‚€" — UTF-8 mis-decoded)
    st.markdown("## 📈 Population Growth Model")
    # A form batches the inputs so the plot regenerates only on submit, not per keystroke.
    with st.form("growth_model_form"):
        model_type = st.radio("Model type:", ["Exponential", "Logistic"])
        N0 = st.number_input("Initial population size (N₀):", min_value=0.0, value=10.0)
        r = st.number_input("Growth rate (r):", value=0.1)
        K = None  # Carrying capacity is only meaningful for the logistic model.
        if model_type == "Logistic":
            K = st.number_input("Carrying capacity (K):", min_value=1.0, value=100.0)
        t_max = st.number_input("Time span (t max):", min_value=1.0, value=50.0)
        submit_model = st.form_submit_button("Generate Growth Plot")
############################################################################################################
# Growth Model Plot
if submit_model:
    try:
        t = np.linspace(0, t_max, 500)
        if model_type == "Exponential":
            # N(t) = N0 * e^(r*t)
            N = N0 * np.exp(r * t)
        elif model_type == "Logistic":
            # FIX: guard against N0 == 0 (the widget's min_value allows it), which previously
            # raised a cryptic ZeroDivisionError from the (K - N0) / N0 term.
            if N0 <= 0:
                raise ValueError("Initial population (N₀) must be positive for the logistic model.")
            # N(t) = K / (1 + ((K - N0) / N0) * e^(-r*t))
            N = K / (1 + ((K - N0) / N0) * np.exp(-r * t))
        else:
            raise ValueError("Invalid model type selected.")
        # FIX: repaired mojibake emoji (was "๐Ÿ“Š" — UTF-8 mis-decoded)
        st.subheader("📊 Population Growth Plot")
        fig, ax = plt.subplots()
        ax.plot(t, N, label=model_type, color="green" if model_type == "Logistic" else "blue")
        ax.set_xlabel("Time")
        ax.set_ylabel("Population Size")
        ax.set_title(f"{model_type} Growth Model")
        ax.grid(True)
        ax.legend()
        st.pyplot(fig)
    except Exception as e:
        # Report both validation and plotting failures in the UI.
        st.error(f"Error generating plot: {e}")