Adityaganesh committed on
Commit 2480bce · verified · 1 parent: 8c96f94

Update app.py

Files changed (1):
  1. app.py +116 -118
app.py CHANGED
@@ -21,96 +21,26 @@ st.markdown("""
     font-size: 20px;
     color: #333333;
 }
-</style>
-""", unsafe_allow_html=True)
-
-# Sidebar Controls
-st.sidebar.title("🧠 Neural Network Settings")
-
-task_type = st.sidebar.selectbox("Task Type", ["Classification", "Regression"])
-dataset = st.sidebar.selectbox("Choose Dataset", ["Circles", "Exclusive OR", "Gaussian", "Spiral"])
-learning_rate = st.sidebar.slider("Learning Rate", 0.001, 1.0, 0.03, 0.001, format="%.3f")
-hidden_layers = st.sidebar.slider("Number of Hidden Layers", 1, 5, 3)
-
-neuron_counts = []
-for i in range(hidden_layers):
-    neuron_counts.append(st.sidebar.slider(f"Neurons in Hidden Layer {i+1}", 1, 20, 5))
-
-activation = st.sidebar.selectbox("Activation Function", ["relu", "sigmoid", "tanh", "linear"])
-epochs = st.sidebar.slider("Epochs", 1, 200, 100)
-
-regularization = st.sidebar.selectbox("Regularization", ["None", "L1", "L2"])
-if regularization != "None":
-    regularization_rate = st.sidebar.slider("Regularization Rate", 0.0, 0.1, 0.01, 0.001, format="%.3f")
-else:
-    regularization_rate = 0.0
-
-st.sidebar.markdown("---")
-st.sidebar.markdown(f"🔁 **Epochs:** {epochs} &nbsp;&nbsp; 🚀 **Learning Rate:** {learning_rate:.3f}")
-if regularization != "None":
-    st.sidebar.markdown(f"🧪 **Regularization:** {regularization} @ {regularization_rate}")
-
-# Title
-st.title("🎯 Neural Network Playground")
-
-# Dataset Generator
-def generate_dataset(dataset):
-    if dataset == "Circles":
-        X, y = make_circles(n_samples=1000, noise=0.1, factor=0.5, random_state=0)
-    elif dataset == "Exclusive OR":
-        X = np.random.randn(1000, 2) * 2
-        y = np.logical_xor(X[:, 0] > 0, X[:, 1] > 0).astype(np.float32)
-    elif dataset == "Gaussian":
-        X, y = make_blobs(n_samples=1000, centers=2, n_features=2, random_state=0)
-        y = y.astype(np.float32)
-    elif dataset == "Spiral":
-        n = 1000
-        n_class = 2
-        X = np.zeros((n * n_class, 2))
-        y = np.zeros(n * n_class, dtype=np.float32)
-        for j in range(n_class):
-            ix = range(n * j, n * (j + 1))
-            r = np.linspace(0.0, 1, n)
-            t = np.linspace(j * 4, (j + 1) * 4, n) + np.random.randn(n) * 0.2
-            X[ix] = np.c_[r * np.sin(t), r * np.cos(t)]
-            y[ix] = j
-    X, y = shuffle(X, y)
-    return X, y
-
-# Data setup
-X, y = generate_dataset(dataset)
-scaler = StandardScaler()
-X = scaler.fit_transform(X)
-X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
-
-# Build model
-model = keras.Sequential()
-for i, count in enumerate(neuron_counts):
-    kwargs = {
-        "units": count,
-        "activation": activation,
-        "input_shape": (2,) if i == 0 else None
-    }
-    if regularization == "L1":
-        kwargs["kernel_regularizer"] = keras.regularizers.l1(regularization_rate)
-    elif regularization == "L2":
-        kwargs["kernel_regularizer"] = keras.regularizers.l2(regularization_rate)
-    model.add(keras.layers.Dense(**{k: v for k, v in kwargs.items() if v is not None}))
-
-if task_type == "Classification":
-    model.add(keras.layers.Dense(1, activation="sigmoid"))
-    loss = "binary_crossentropy"
-    metrics = ["accuracy"]
-else:
-    model.add(keras.layers.Dense(1, activation="linear"))
-    loss = "mse"
-    metrics = ["mae"]
-
-# Compile and train
-model.compile(optimizer=keras.optimizers.Adam(learning_rate), loss=loss, metrics=metrics)
-history = model.fit(X_train, y_train, epochs=epochs, batch_size=32, verbose=0, validation_split=0.2)
+.sidebar .css-19rxjzo {
+    background-color: #e9ecef;
+    border: 1px solid #ced4da;
+    border-radius: 4px;
+    color: #495057;
+    font-size: 14px;
+    padding: 10px;
+    margin-top: 10px;
+}
+.sidebar .css-14qlr5i {
+    background-color: #6c757d;
+    color: white;
+    border-radius: 20px;
+    padding: 5px 10px;
+    font-size: 16px;
+}
+</style>
+""", unsafe_allow_html=True)
 
-# Visualization Functions
+# Helper functions
 def draw_neural_network(model):
     G = nx.DiGraph()
     pos = {}
@@ -132,18 +62,18 @@ def draw_neural_network(model):
     G.add_node(output_node, layer=len(hidden_nodes) + 1)
     pos[output_node] = (len(hidden_nodes) + 1, -0.5)
 
+    all_nodes = input_nodes + sum(hidden_nodes, []) + [output_node]
+    colors = ["lightblue"] * len(input_nodes) + ["lightcoral"] * sum(len(layer) for layer in hidden_nodes) + ["lightgreen"]
+
     for inp in input_nodes:
         for hid in hidden_nodes[0]:
             G.add_edge(inp, hid)
-    for i in range(len(hidden_nodes) - 1):
-        for src in hidden_nodes[i]:
-            for dst in hidden_nodes[i + 1]:
-                G.add_edge(src, dst)
-    for node in hidden_nodes[-1]:
-        G.add_edge(node, output_node)
-
-    all_nodes = input_nodes + sum(hidden_nodes, []) + [output_node]
-    colors = ["lightblue"] * len(input_nodes) + ["lightcoral"] * sum(len(layer) for layer in hidden_nodes) + ["lightgreen"]
+    for layer_idx in range(len(hidden_nodes) - 1):
+        for node1 in hidden_nodes[layer_idx]:
+            for node2 in hidden_nodes[layer_idx + 1]:
+                G.add_edge(node1, node2)
+    for hid in hidden_nodes[-1]:
+        G.add_edge(hid, output_node)
 
     fig, ax = plt.subplots(figsize=(10, 8))
     nx.draw(G, pos, with_labels=True, node_color=colors, edgecolors="black", node_size=1500, font_size=12, ax=ax, width=2, edge_color="gray", arrowsize=20)
@@ -152,18 +82,19 @@ def draw_neural_network(model):
     st.pyplot(fig)
 
 def plot_decision_boundary(X, y, model):
+    if task_type == "Regression": return
     x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
     y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
     xx, yy = np.meshgrid(np.linspace(x_min, x_max, 200), np.linspace(y_min, y_max, 200))
     Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
     Z = (Z > 0.5).astype(int).reshape(xx.shape)
-
     plt.figure(figsize=(10, 8))
     plt.contourf(xx, yy, Z, alpha=0.8, cmap="coolwarm")
     plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors="k", cmap="coolwarm", s=100)
-    plt.xlabel("X1", fontsize=14)
-    plt.ylabel("X2", fontsize=14)
-    plt.title("Decision Boundary", fontsize=16)
+    plt.xlabel("X1")
+    plt.ylabel("X2")
+    plt.title("Decision Boundary")
+    plt.colorbar(label="Class")
     st.pyplot(plt)
 
 def plot_regression_surface(X, y, model):
@@ -171,34 +102,101 @@ def plot_regression_surface(X, y, model):
     y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
     xx, yy = np.meshgrid(np.linspace(x_min, x_max, 200), np.linspace(y_min, y_max, 200))
     Z = model.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
-
     fig = plt.figure(figsize=(10, 8))
     ax = fig.add_subplot(111, projection='3d')
     ax.plot_surface(xx, yy, Z, alpha=0.7, cmap='viridis')
     ax.scatter(X[:, 0], X[:, 1], y, c=y, cmap='viridis', s=20)
-    ax.set_xlabel("X1", fontsize=14)
-    ax.set_ylabel("X2", fontsize=14)
-    ax.set_zlabel("Predicted", fontsize=14)
-    ax.set_title("Regression Surface", fontsize=16)
+    ax.set_xlabel("X1")
+    ax.set_ylabel("X2")
+    ax.set_zlabel("Predicted Value")
+    ax.set_title("Regression Surface")
     st.pyplot(fig)
 
 def plot_learning_curves(history):
     plt.figure(figsize=(10, 6))
     if task_type == "Classification":
-        plt.plot(history.history['accuracy'], label='Training Accuracy')
-        plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
-        plt.ylabel('Accuracy', fontsize=14)
+        plt.plot(history.history['accuracy'], label='Train Acc')
+        plt.plot(history.history['val_accuracy'], label='Val Acc')
+        plt.ylabel('Accuracy')
     else:
-        plt.plot(history.history['mae'], label='Training MAE')
-        plt.plot(history.history['val_mae'], label='Validation MAE')
-        plt.ylabel('Mean Absolute Error', fontsize=14)
-    plt.title(f'Model {task_type} Over Epochs', fontsize=16)
-    plt.xlabel('Epoch', fontsize=14)
-    plt.legend(fontsize=12)
+        plt.plot(history.history['mae'], label='Train MAE')
+        plt.plot(history.history['val_mae'], label='Val MAE')
+        plt.ylabel('MAE')
+    plt.title('Learning Curves')
+    plt.xlabel('Epoch')
+    plt.legend()
     st.pyplot(plt)
 
-# Visualizations
-st.subheader("Network Architecture")
+st.title("Neural Network Playground")
+
+task_type = st.selectbox("Task Type", ["Classification", "Regression"])
+dataset = st.selectbox("Choose Dataset", ["Circles", "Exclusive OR", "Gaussian", "Spiral"])
+epochs = st.slider("Epochs", 1, 200, 50)
+
+col1, col2, col3 = st.columns(3)
+with col1:
+    learning_rate = st.slider("Learning Rate", 0.001, 1.0, 0.03, 0.001)
+with col2:
+    hidden_layers = st.slider("Hidden Layers", 1, 5, 3)
+    neuron_counts = [st.slider(f"Neurons in Layer {i+1}", 1, 20, 5) for i in range(hidden_layers)]
+with col3:
+    activation = st.selectbox("Activation", ["relu", "sigmoid", "tanh", "linear"])
+    regularization = st.selectbox("Regularization", ["None", "L1", "L2"])
+    reg_rate = st.slider("Reg. Rate", 0.0, 0.1, 0.01, 0.001) if regularization != "None" else 0.0
+
+def generate_dataset(name):
+    if name == "Circles":
+        return make_circles(n_samples=1000, noise=0.1, factor=0.5)
+    elif name == "Exclusive OR":
+        X = np.random.randn(1000, 2) * 2
+        y = np.logical_xor(X[:, 0] > 0, X[:, 1] > 0).astype(np.float32)
+        return X, y
+    elif name == "Gaussian":
+        X, y = make_blobs(n_samples=1000, centers=2, n_features=2)
+        return X, y.astype(np.float32)
+    elif name == "Spiral":
+        n = 1000
+        X = np.zeros((n * 2, 2))
+        y = np.zeros(n * 2)
+        for j in range(2):
+            ix = range(n * j, n * (j + 1))
+            r = np.linspace(0, 1, n)
+            t = np.linspace(j * 4, (j + 1) * 4, n) + np.random.randn(n) * 0.2
+            X[ix] = np.c_[r * np.sin(t), r * np.cos(t)]
+            y[ix] = j
+        return shuffle(X, y)
+
+X, y = generate_dataset(dataset)
+scaler = StandardScaler()
+X = scaler.fit_transform(X)
+X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)
+
+model = keras.Sequential()
+first = True
+for count in neuron_counts:
+    layer_args = dict(units=count, activation=activation)
+    if first:
+        layer_args['input_shape'] = (2,)
+    if regularization == "L1":
+        layer_args['kernel_regularizer'] = keras.regularizers.l1(reg_rate)
+    elif regularization == "L2":
+        layer_args['kernel_regularizer'] = keras.regularizers.l2(reg_rate)
+    model.add(keras.layers.Dense(**layer_args))
+    first = False
+
+if task_type == "Classification":
+    model.add(keras.layers.Dense(1, activation="sigmoid"))
+    loss = "binary_crossentropy"
+    metrics = ["accuracy"]
+else:
+    model.add(keras.layers.Dense(1, activation="linear"))
+    loss = "mse"
+    metrics = ["mae"]
+
+model.compile(optimizer=keras.optimizers.Adam(learning_rate), loss=loss, metrics=metrics)
+history = model.fit(X_train, y_train, epochs=epochs, batch_size=32, verbose=0, validation_split=0.2)
+
+st.subheader("Network Structure")
 draw_neural_network(model)
 
 if task_type == "Classification":
@@ -208,7 +206,7 @@ else:
     st.subheader("Regression Surface")
     plot_regression_surface(X, y, model)
 
-st.subheader(f"Learning Curves for {task_type}")
+st.subheader("Learning Curves")
 plot_learning_curves(history)
 
 if st.checkbox("Show Model Summary"):
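Note: the hunks reference names defined above the first changed line (st, np, keras, nx, plt, and several scikit-learn helpers), so the import block of app.py never appears in this commit. A minimal import header consistent with the code — an assumption, since those lines are untouched here — would be:

# Presumed imports for app.py (not shown in the diff; they sit above the first hunk)
import streamlit as st
import numpy as np
import networkx as nx
import matplotlib.pyplot as plt
from tensorflow import keras  # assumed; a standalone `import keras` would also fit this code
from sklearn.datasets import make_circles, make_blobs
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle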