Spaces:
Sleeping
Sleeping
File size: 5,875 Bytes
ad6d2ba f568a4d ad6d2ba f568a4d ad6d2ba f568a4d ad6d2ba f568a4d ad6d2ba f568a4d ad6d2ba f568a4d f9b1046 ad6d2ba f9b1046 ad6d2ba f568a4d ad6d2ba f568a4d ad6d2ba f568a4d ad6d2ba f568a4d ad6d2ba f568a4d ad6d2ba f568a4d |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 |
############################################################################################################
# Importing Libraries
import streamlit as st
import hmac
import config
from openai import OpenAI
import matplotlib.pyplot as plt
import numpy as np
############################################################################################################
# Password protection
def check_password():
    """Return ``True`` once the user has authenticated with the shared password.

    Renders a password input on first visit; on a wrong attempt an error is
    shown and the input is rendered again. The cleartext password is removed
    from session state as soon as it has been checked.
    """

    def _on_password_entered():
        """Callback: compare the submitted password against the stored secret."""
        # hmac.compare_digest gives a constant-time comparison (no timing leak).
        if hmac.compare_digest(st.session_state["password"], st.secrets["password"]):
            st.session_state["password_correct"] = True
            del st.session_state["password"]  # Don't store the password.
        else:
            st.session_state["password_correct"] = False

    # Already authenticated earlier in this session — nothing to render.
    if st.session_state.get("password_correct", False):
        return True

    st.text_input("Password", type="password", on_change=_on_password_entered, key="password")
    # The key exists only after an attempt; at this point it can only be False.
    if "password_correct" in st.session_state:
        st.error("😕 Password incorrect")
    return False


# Gate the whole app behind the password check.
if not check_password():
    st.stop()
############################################################################################################
# Streamlit app layout
st.set_page_config(
    page_title="Modular Chatbot + Growth Model",
    page_icon=":chart_with_upwards_trend:",
    layout="wide",
    initial_sidebar_state="expanded",
)

# System prompt that seeds every conversation (kept at index 0, never shown).
initial_context = {"role": "system", "content": config.prompt}

client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])

# One-time, per-session initialisation of model choice and chat history.
if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = config.ai_model
if "display_messages" not in st.session_state:
    st.session_state["display_messages"] = [initial_context]
if "chat_initialized" not in st.session_state:
    st.session_state["chat_initialized"] = True
    st.session_state["display_messages"].append(
        {"role": "assistant", "content": config.opening_message}
    )

prompt = st.chat_input("Type your message here...")
if prompt:
    # Defensive: if the history was somehow emptied, reseed the system prompt
    # before recording the user's turn.
    if not st.session_state["display_messages"]:
        st.session_state["display_messages"] = [initial_context]
    st.session_state["display_messages"].append({"role": "user", "content": prompt})
def reset_chat_history():
    """Wipe the conversation, reseed it with the system prompt and the
    opening assistant message, then rerun the app to refresh the UI."""
    st.session_state["display_messages"] = [
        initial_context,
        {"role": "assistant", "content": config.opening_message},
    ]
    st.rerun()
# Main chat display
with st.container(border=False):
    # Render every visible turn; index 0 is the hidden system prompt.
    for msg in st.session_state["display_messages"][1:]:
        with st.chat_message(msg["role"]):
            st.markdown(msg["content"])

    if prompt:
        with st.chat_message("assistant"):
            try:
                # Full history (system prompt included) goes to the API.
                history = [
                    {"role": m["role"], "content": m["content"]}
                    for m in st.session_state["display_messages"]
                ]
                stream = client.chat.completions.create(
                    model=st.session_state["openai_model"],
                    messages=history,
                    stream=True,
                    temperature=config.temperature,
                    max_tokens=config.max_tokens,
                    frequency_penalty=config.frequency_penalty,
                    presence_penalty=config.presence_penalty,
                )
                placeholder = st.empty()
                pieces = []
                for chunk in stream:
                    delta = chunk.choices[0].delta.content
                    if delta is not None:
                        pieces.append(delta)
                        # "▌" acts as a live typing cursor while streaming.
                        placeholder.markdown("".join(pieces) + "▌")
                full_response = "".join(pieces)
                placeholder.markdown(full_response)
                st.session_state["display_messages"].append(
                    {"role": "assistant", "content": full_response}
                )
            except Exception as e:
                st.error(f"An error occurred: {str(e)}")
############################################################################################################
# Sidebar and Growth Model Controls
with st.sidebar:
    st.markdown(config.warning_message, unsafe_allow_html=True)

    if st.button("Clear Chat History"):
        reset_chat_history()

    st.markdown("---")
    st.markdown(
        "<small>Licensed under [GNU GPL v3.0](https://www.gnu.org/licenses/gpl-3.0.en.html)</small>",
        unsafe_allow_html=True,
    )

    st.markdown("## 📈 Population Growth Model")
    # A form batches the inputs so the app only reruns on submit,
    # not on every widget interaction.
    with st.form("growth_model_form"):
        model_type = st.radio("Model type:", ["Exponential", "Logistic"])
        N0 = st.number_input("Initial population size (N₀):", min_value=0.0, value=10.0)
        r = st.number_input("Growth rate (r):", value=0.1)
        K = None  # Carrying capacity — only meaningful for the logistic model.
        if model_type == "Logistic":
            K = st.number_input("Carrying capacity (K):", min_value=1.0, value=100.0)
        t_max = st.number_input("Time span (t max):", min_value=1.0, value=50.0)
        submit_model = st.form_submit_button("Generate Growth Plot")
############################################################################################################
# Growth Model Plot
if submit_model:
    try:
        t = np.linspace(0, t_max, 500)
        if model_type == "Exponential":
            # N(t) = N0 * e^(r t)
            N = N0 * np.exp(r * t)
        elif model_type == "Logistic":
            # The N₀ widget allows 0.0, which would divide by zero below —
            # give the user a clear message instead of a ZeroDivisionError.
            if N0 <= 0:
                raise ValueError("Initial population N₀ must be positive for the logistic model.")
            # N(t) = K / (1 + ((K - N0)/N0) * e^(-r t))
            N = K / (1 + ((K - N0) / N0) * np.exp(-r * t))
        else:
            raise ValueError("Invalid model type selected.")
        st.subheader("📊 Population Growth Plot")
        fig, ax = plt.subplots()
        ax.plot(t, N, label=model_type, color="green" if model_type == "Logistic" else "blue")
        ax.set_xlabel("Time")
        ax.set_ylabel("Population Size")
        ax.set_title(f"{model_type} Growth Model")
        ax.grid(True)
        ax.legend()
        st.pyplot(fig)
        # Close the figure explicitly: Streamlit reruns the script on every
        # interaction, and unclosed pyplot figures accumulate in memory.
        plt.close(fig)
    except Exception as e:
        st.error(f"Error generating plot: {e}")