aaspbury committed on
Commit
f568a4d
·
verified ·
1 Parent(s): 720bd69

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +61 -65
app.py CHANGED
@@ -5,6 +5,8 @@ import streamlit as st
5
  import hmac
6
  import config
7
  from openai import OpenAI
 
 
8
 
9
  ############################################################################################################
10
  # Password protection
@@ -20,137 +22,131 @@ def check_password():
20
  else:
21
  st.session_state["password_correct"] = False
22
 
23
- # Return True if the password is validated.
24
  if st.session_state.get("password_correct", False):
25
  return True
26
 
27
- # Show input for password.
28
- st.text_input(
29
- "Password", type="password", on_change=password_entered, key="password"
30
- )
31
  if "password_correct" in st.session_state:
32
  st.error("๐Ÿ˜• Password incorrect")
33
  return False
34
 
35
  if not check_password():
36
- st.stop() # Do not continue if check_password is not True.
37
 
38
  ############################################################################################################
39
  # Streamlit app layout
40
 
41
- # Set the page to wide or centered mode
42
- st.set_page_config(layout="wide",
43
- page_title="Modular Chatbot",
44
- page_icon=":lightbulb:",
45
- initial_sidebar_state="collapsed"
46
- )
47
 
48
- # Streamlit app layout
49
- # st.title(config.app_title)
50
- # with st.expander("INSTRUCTIONS FOR STUDENTS:"):
51
- # st.markdown(config.instructions)
52
-
53
- ############################################################################################################
54
-
55
-
56
- # Define a basic initial context at the beginning of your script
57
  initial_context = {
58
  "role": "system",
59
  "content": config.prompt
60
  }
61
 
62
- # Initialize the OpenAI client
63
  client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])
64
 
65
- # Initialize the session state variables if they don't exist
66
  if "openai_model" not in st.session_state:
67
  st.session_state["openai_model"] = config.ai_model
68
 
69
- # Corrected initialization of display_messages:
70
  if "display_messages" not in st.session_state:
71
- st.session_state["display_messages"] = [initial_context]
72
-
73
- # Initialize chat with opening message if it doesn't exist
74
  if "chat_initialized" not in st.session_state:
75
  st.session_state["chat_initialized"] = True
76
- # Add the opening message to the display
77
  st.session_state["display_messages"].append({"role": "assistant", "content": config.opening_message})
78
 
79
- # Get user input
80
  prompt = st.chat_input("Type your message here...")
81
 
82
- # Input for new messages
83
  if prompt:
84
- # Ensure initial context is in the session state and then append user messages
85
  if not st.session_state["display_messages"]:
86
  st.session_state["display_messages"] = [initial_context]
87
  st.session_state["display_messages"].append({"role": "user", "content": prompt})
88
 
89
- # Function to reset all chat history
90
  def reset_chat_history():
91
  st.session_state["display_messages"] = [initial_context]
92
- # Re-add opening message when chat is reset
93
  st.session_state["display_messages"].append({"role": "assistant", "content": config.opening_message})
94
  st.rerun()
95
 
96
- # Main chat container
97
  with st.container(border=False):
98
- # Display chat history in reverse order including new messages
99
  for message in st.session_state["display_messages"][1:]:
100
- if message["role"] == "user":
101
- with st.chat_message("user"):
102
- st.markdown(message["content"])
103
- else:
104
- with st.chat_message("assistant"):
105
- st.markdown(message["content"])
106
 
107
- # Generate assistant's response and add it to the messages
108
  if prompt:
109
  with st.chat_message("assistant"):
110
  try:
111
  stream = client.chat.completions.create(
112
  model=st.session_state["openai_model"],
113
- messages=[
114
- {"role": m["role"], "content": m["content"]}
115
- for m in st.session_state["display_messages"]
116
- ],
117
  stream=True,
118
  temperature=config.temperature,
119
  max_tokens=config.max_tokens,
120
  frequency_penalty=config.frequency_penalty,
121
  presence_penalty=config.presence_penalty,
122
  )
123
-
124
- # Initialize an empty string to store the full response
125
  full_response = ""
126
  message_placeholder = st.empty()
127
-
128
- # Iterate through the stream to get each chunk
129
  for chunk in stream:
130
  if chunk.choices[0].delta.content is not None:
131
  full_response += chunk.choices[0].delta.content
132
  message_placeholder.markdown(full_response + "โ–Œ")
133
-
134
- # Replace the placeholder with the complete message
135
  message_placeholder.markdown(full_response)
136
-
137
- # Append the full response to the session state for display
138
- st.session_state["display_messages"].append(
139
- {"role": "assistant", "content": full_response}
140
- )
141
  except Exception as e:
142
  st.error(f"An error occurred: {str(e)}")
143
 
144
- # Create a sidebar
 
 
145
  with st.sidebar:
146
  st.markdown(config.warning_message, unsafe_allow_html=True)
147
 
148
- # Add Clear Chat History button to sidebar
149
  if st.button("Clear Chat History"):
150
  reset_chat_history()
151
-
152
- # Add license link with markdown
153
- st.markdown("---") # Separator line
154
- st.markdown("""
155
- <small>Licensed under [GNU GPL v3.0](https://www.gnu.org/licenses/gpl-3.0.en.html)</small>
156
- """, unsafe_allow_html=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  import hmac
6
  import config
7
  from openai import OpenAI
8
+ import matplotlib.pyplot as plt
9
+ import numpy as np
10
 
11
  ############################################################################################################
12
  # Password protection
 
22
  else:
23
  st.session_state["password_correct"] = False
24
 
 
25
  if st.session_state.get("password_correct", False):
26
  return True
27
 
28
+ st.text_input("Password", type="password", on_change=password_entered, key="password")
 
 
 
29
  if "password_correct" in st.session_state:
30
  st.error("๐Ÿ˜• Password incorrect")
31
  return False
32
 
33
# Gate the entire app: render nothing below this point until the
# password check in check_password() has succeeded.
if not check_password():
    st.stop()
35
 
36
  ############################################################################################################
37
  # Streamlit app layout
38
 
39
# Wide layout with the sidebar expanded so the growth-model controls
# are visible as soon as the page loads.
st.set_page_config(
    page_title="Modular Chatbot + Growth Model",
    page_icon=":chart_with_upwards_trend:",
    layout="wide",
    initial_sidebar_state="expanded",
)
45
 
46
+ # Initial context for chatbot
 
 
 
 
 
 
 
 
47
# System message that seeds every conversation with the configured prompt.
initial_context = {"role": "system", "content": config.prompt}
51
 
 
52
# OpenAI client, authenticated via Streamlit's secrets store.
client = OpenAI(api_key=st.secrets["OPENAI_API_KEY"])
53
 
 
54
# Pick the chat model once per browser session, defaulting to config.ai_model.
if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = config.ai_model

# display_messages holds the full transcript, starting with the system prompt.
if "display_messages" not in st.session_state:
    st.session_state["display_messages"] = [initial_context]

# On the first run of a session, seed the visible chat with the canned
# opening message.  NOTE: this must run after display_messages exists above.
if "chat_initialized" not in st.session_state:
    st.session_state["chat_initialized"] = True
    st.session_state["display_messages"].append({"role": "assistant", "content": config.opening_message})
63
 
 
64
# Chat input box; returns None until the user submits a message this rerun.
prompt = st.chat_input("Type your message here...")

if prompt:
    # Defensive: re-seed the transcript with the system prompt if it was
    # somehow emptied, then record the user's message.
    if not st.session_state["display_messages"]:
        st.session_state["display_messages"] = [initial_context]
    st.session_state["display_messages"].append({"role": "user", "content": prompt})
70
 
 
71
def reset_chat_history():
    """Reset the transcript to the system prompt plus the opening message, then rerun."""
    st.session_state["display_messages"] = [
        initial_context,
        {"role": "assistant", "content": config.opening_message},
    ]
    st.rerun()
75
 
76
# Render every stored message except the leading system prompt.
with st.container(border=False):
    transcript = st.session_state["display_messages"]
    for msg in transcript[1:]:
        with st.chat_message(msg["role"]):
            st.markdown(msg["content"])
 
 
 
 
81
 
 
82
# Once the user's message is stored, stream the assistant's reply.
if prompt:
    with st.chat_message("assistant"):
        try:
            # Send the full transcript (system prompt included) to the API.
            stream = client.chat.completions.create(
                model=st.session_state["openai_model"],
                # Strip to role/content only, in case extra keys were stored.
                messages=[{"role": m["role"], "content": m["content"]} for m in st.session_state["display_messages"]],
                stream=True,
                temperature=config.temperature,
                max_tokens=config.max_tokens,
                frequency_penalty=config.frequency_penalty,
                presence_penalty=config.presence_penalty,
            )
            # Accumulate streamed tokens, re-rendering the placeholder with a
            # trailing cursor glyph as each chunk arrives.
            full_response = ""
            message_placeholder = st.empty()
            for chunk in stream:
                if chunk.choices[0].delta.content is not None:
                    full_response += chunk.choices[0].delta.content
                    message_placeholder.markdown(full_response + "โ–Œ")
            # Final render without the cursor, then persist the reply so the
            # next rerun shows it in the history loop above.
            message_placeholder.markdown(full_response)
            st.session_state["display_messages"].append({"role": "assistant", "content": full_response})
        except Exception as e:
            # UI-boundary handler: surface any API/stream failure to the user.
            st.error(f"An error occurred: {str(e)}")
104
 
105
############################################################################################################
# Sidebar and Growth Model Controls

with st.sidebar:
    st.markdown(config.warning_message, unsafe_allow_html=True)

    if st.button("Clear Chat History"):
        reset_chat_history()

    st.markdown("---")
    st.markdown("<small>Licensed under [GNU GPL v3.0](https://www.gnu.org/licenses/gpl-3.0.en.html)</small>", unsafe_allow_html=True)

    st.markdown("## ๐Ÿ“ˆ Population Growth Model")
    # All model inputs are batched in a form so the plot only regenerates
    # on explicit submit.
    with st.form("growth_model_form"):
        model_type = st.radio("Model type:", ["Exponential", "Logistic"])
        N0 = st.number_input("Initial population size (Nโ‚€):", min_value=0.0, value=10.0)
        r = st.number_input("Growth rate (r):", value=0.1)
        # BUG FIX: widgets inside an st.form do NOT rerun the script when
        # changed, so conditioning this input on the radio value meant the
        # carrying-capacity field never appeared until after a submit.
        # Show K unconditionally; it is only read by the Logistic model.
        K = st.number_input("Carrying capacity (K, Logistic only):", min_value=1.0, value=100.0)
        t_max = st.number_input("Time span (t max):", min_value=1.0, value=50.0)
        submit_model = st.form_submit_button("Generate Growth Plot")
127
+
128
############################################################################################################
# Growth Model Plot

if submit_model:
    try:
        # 500 evenly spaced sample times over [0, t_max].
        t = np.linspace(0, t_max, 500)
        if model_type == "Exponential":
            # N(t) = N0 * e^(r t)
            N = N0 * np.exp(r * t)
        elif model_type == "Logistic":
            # Closed-form logistic solution: N(t) = K / (1 + ((K - N0)/N0) e^(-r t)).
            # BUG FIX: the form allows N0 == 0 (min_value=0.0), which would
            # divide by zero below — reject it with a clear message instead.
            if N0 <= 0:
                raise ValueError("Initial population size must be greater than 0 for the logistic model.")
            N = K / (1 + ((K - N0) / N0) * np.exp(-r * t))
        else:
            raise ValueError("Invalid model type selected.")

        st.subheader("๐Ÿ“Š Population Growth Plot")
        fig, ax = plt.subplots()
        ax.plot(t, N, label=model_type, color="green" if model_type == "Logistic" else "blue")
        ax.set_xlabel("Time")
        ax.set_ylabel("Population Size")
        ax.set_title(f"{model_type} Growth Model")
        ax.grid(True)
        ax.legend()
        st.pyplot(fig)

    except Exception as e:
        # UI-boundary handler: report compute/plot failures to the user.
        st.error(f"Error generating plot: {e}")