Adityaganesh committed on
Commit 8c96f94 · verified · 1 Parent(s): 44b19c8

Update app.py

Files changed (1): app.py +201 -95
app.py CHANGED
@@ -1,110 +1,216 @@
  import streamlit as st
  import numpy as np
- import matplotlib.pyplot as plt
  import tensorflow as tf
- from sklearn.datasets import make_moons, make_circles, make_classification
  from sklearn.model_selection import train_test_split
  from sklearn.preprocessing import StandardScaler
- from tensorflow import keras
- import plotly.graph_objects as go
- import plotly.figure_factory as ff
-
-
- # Sidebar: Neural Network Settings
- st.sidebar.header("Neural Network Settings")
- problem_type = st.sidebar.selectbox("Problem type", ["Classification", "Regression"])
- dataset_choice = st.sidebar.selectbox("Dataset", ["Moons", "Circles", "Linear"])
- train_ratio = st.sidebar.slider("Training Data Ratio", 0.1, 0.9, 0.5, 0.05)
- noise = st.sidebar.slider("Noise Level", 0.0, 0.5, 0.1, 0.01)
- batch_size = st.sidebar.slider("Batch Size", 5, 100, 10, 5)
- hidden_layers = st.sidebar.slider("Hidden Layers", 1, 5, 2)
- neurons_per_layer = st.sidebar.slider("Neurons per Hidden Layer", 2, 10, 4)
- activation_function = st.sidebar.selectbox("Activation Function", ["relu", "sigmoid", "tanh"])
- learning_rate = st.sidebar.slider("Learning Rate", 0.001, 0.1, 0.03, step=0.001)
  regularization = st.sidebar.selectbox("Regularization", ["None", "L1", "L2"])
- reg_rate = st.sidebar.slider("Regularization Rate", 0.0, 0.1, 0.0, step=0.01)
- epochs = st.sidebar.slider("Epochs", 10, 500, 100)
-
- # Generate Dataset
- if dataset_choice == "Moons":
-     X, y = make_moons(n_samples=1000, noise=noise, random_state=42)
- elif dataset_choice == "Circles":
-     X, y = make_circles(n_samples=1000, noise=noise, factor=0.5, random_state=42)
  else:
-     X, y = make_classification(n_samples=1000, n_features=2, n_classes=2, n_redundant=0, random_state=42)

- X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=1-train_ratio, random_state=42)
  scaler = StandardScaler()
- X_train = scaler.fit_transform(X_train)
- X_test = scaler.transform(X_test)
-
- # Build Neural Network
- model = keras.Sequential([keras.layers.InputLayer(input_shape=(2,))])
- reg = None
- if regularization == "L1":
-     reg = keras.regularizers.l1(reg_rate)
- elif regularization == "L2":
-     reg = keras.regularizers.l2(reg_rate)
- for _ in range(hidden_layers):
-     model.add(keras.layers.Dense(neurons_per_layer, activation=activation_function, kernel_regularizer=reg))
- model.add(keras.layers.Dense(1, activation="sigmoid"))
-
- optimizer = keras.optimizers.Adam(learning_rate=learning_rate)
- model.compile(optimizer=optimizer, loss="binary_crossentropy", metrics=["accuracy"])
-
- # Train the model
- history = model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, verbose=0, validation_data=(X_test, y_test))
-
- # Training Progress Visualization
- st.subheader("Training Progress")
- fig, ax = plt.subplots(1, 2, figsize=(12, 4))
- ax[0].plot(history.history['loss'], label="Train Loss")
- ax[0].plot(history.history['val_loss'], label="Validation Loss")
- ax[0].set_title("Loss Curve")
- ax[0].legend()
- ax[1].plot(history.history['accuracy'], label="Train Accuracy")
- ax[1].plot(history.history['val_accuracy'], label="Validation Accuracy")
- ax[1].set_title("Accuracy Curve")
- ax[1].legend()
- st.pyplot(fig)
-
- # Neural Network Visualization
- st.subheader("Neural Network Structure")
- fig = go.Figure()
- layer_x_positions = [0] + [i * 2 for i in range(1, hidden_layers + 1)] + [hidden_layers * 2 + 2]
- layer_names = ["Input Layer"] + [f"Hidden Layer {i+1}" for i in range(hidden_layers)] + ["Output Layer"]
- for i, x in enumerate(layer_x_positions):
-     y_positions = np.linspace(-1, 1, neurons_per_layer if i != 0 and i != len(layer_x_positions) - 1 else 2)
-     fig.add_trace(go.Scatter(
-         x=[x] * len(y_positions), y=y_positions, mode='markers+text',
-         marker=dict(size=20, color='blue'),
-         text=[f"N{j+1}" for j in range(len(y_positions))], textposition="middle right", name=layer_names[i]
-     ))
- for i in range(len(layer_x_positions) - 1):
-     prev_y_positions = np.linspace(-1, 1, neurons_per_layer if i != 0 else 2)
-     next_y_positions = np.linspace(-1, 1, neurons_per_layer if i + 1 != len(layer_x_positions) - 1 else 1)
-     for y1 in prev_y_positions:
-         for y2 in next_y_positions:
-             fig.add_trace(go.Scatter(
-                 x=[layer_x_positions[i], layer_x_positions[i + 1]],
-                 y=[y1, y2], mode='lines', line=dict(color='gray', width=2), showlegend=False
-             ))
- st.plotly_chart(fig, use_container_width=True)
-
- # Decision Boundary Plot
- def plot_decision_boundary(model, X, y):
-     x_min, x_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
-     y_min, y_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
-     xx, yy = np.meshgrid(np.linspace(x_min, x_max, 100),
-                          np.linspace(y_min, y_max, 100))
-
      Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
      Z = (Z > 0.5).astype(int).reshape(xx.shape)

-     plt.contourf(xx, yy, Z, alpha=0.3)
-     plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors="k")
-     plt.title("Decision Boundary")
      st.pyplot(plt)

- st.subheader("Decision Boundary")
- plot_decision_boundary(model, X_test, y_test)

  import streamlit as st
  import numpy as np
  import tensorflow as tf
+ from tensorflow import keras
+ import matplotlib.pyplot as plt
+ import networkx as nx
+ from sklearn.datasets import make_circles, make_moons, make_blobs
  from sklearn.model_selection import train_test_split
  from sklearn.preprocessing import StandardScaler
+ from sklearn.utils import shuffle
+
+ # Custom CSS
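+ # Note: these .css-* class names are autogenerated by Streamlit and can change between versions, so the selectors may need updating after an upgrade.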
+ st.markdown("""
+ <style>
+ .sidebar .css-12oz5g7 {
+     background-color: #f0f2f6;
+     padding: 20px;
+     border-radius: 10px;
+ }
+ .sidebar .css-1xarl3l {
+     font-size: 20px;
+     color: #333333;
+ }
+ </style>
+ """, unsafe_allow_html=True)
+
+ # Sidebar Controls
+ st.sidebar.title("🧠 Neural Network Settings")
+
+ task_type = st.sidebar.selectbox("Task Type", ["Classification", "Regression"])
+ dataset = st.sidebar.selectbox("Choose Dataset", ["Circles", "Exclusive OR", "Gaussian", "Spiral"])
+ learning_rate = st.sidebar.slider("Learning Rate", 0.001, 1.0, 0.03, 0.001, format="%.3f")
+ hidden_layers = st.sidebar.slider("Number of Hidden Layers", 1, 5, 3)
+
+ neuron_counts = []
+ for i in range(hidden_layers):
+     neuron_counts.append(st.sidebar.slider(f"Neurons in Hidden Layer {i+1}", 1, 20, 5))
+
+ activation = st.sidebar.selectbox("Activation Function", ["relu", "sigmoid", "tanh", "linear"])
+ epochs = st.sidebar.slider("Epochs", 1, 200, 100)
+
  regularization = st.sidebar.selectbox("Regularization", ["None", "L1", "L2"])
+ if regularization != "None":
+     regularization_rate = st.sidebar.slider("Regularization Rate", 0.0, 0.1, 0.01, 0.001, format="%.3f")
  else:
+     regularization_rate = 0.0
+
+ st.sidebar.markdown("---")
+ st.sidebar.markdown(f"🔁 **Epochs:** {epochs} &nbsp;&nbsp; 🚀 **Learning Rate:** {learning_rate:.3f}")
+ if regularization != "None":
+     st.sidebar.markdown(f"🧪 **Regularization:** {regularization} @ {regularization_rate}")
+
+ # Title
+ st.title("🎯 Neural Network Playground")

+ # Dataset Generator
+ def generate_dataset(dataset):
+     if dataset == "Circles":
+         X, y = make_circles(n_samples=1000, noise=0.1, factor=0.5, random_state=0)
+     elif dataset == "Exclusive OR":
+         X = np.random.randn(1000, 2) * 2
+         y = np.logical_xor(X[:, 0] > 0, X[:, 1] > 0).astype(np.float32)
+     elif dataset == "Gaussian":
+         X, y = make_blobs(n_samples=1000, centers=2, n_features=2, random_state=0)
+         y = y.astype(np.float32)
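+     # Two interleaved spirals: radius grows linearly from 0 to 1 while the angle sweeps 4 radians per class, with Gaussian jitter added to the angle.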
+     elif dataset == "Spiral":
+         n = 1000
+         n_class = 2
+         X = np.zeros((n * n_class, 2))
+         y = np.zeros(n * n_class, dtype=np.float32)
+         for j in range(n_class):
+             ix = range(n * j, n * (j + 1))
+             r = np.linspace(0.0, 1, n)
+             t = np.linspace(j * 4, (j + 1) * 4, n) + np.random.randn(n) * 0.2
+             X[ix] = np.c_[r * np.sin(t), r * np.cos(t)]
+             y[ix] = j
+     X, y = shuffle(X, y)
+     return X, y
+
+ # Data setup
+ X, y = generate_dataset(dataset)
  scaler = StandardScaler()
+ X = scaler.fit_transform(X)
+ X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
+
+ # Build model
+ model = keras.Sequential()
+ for i, count in enumerate(neuron_counts):
+     kwargs = {
+         "units": count,
+         "activation": activation,
+         "input_shape": (2,) if i == 0 else None
+     }
+     if regularization == "L1":
+         kwargs["kernel_regularizer"] = keras.regularizers.l1(regularization_rate)
+     elif regularization == "L2":
+         kwargs["kernel_regularizer"] = keras.regularizers.l2(regularization_rate)
+     model.add(keras.layers.Dense(**{k: v for k, v in kwargs.items() if v is not None}))
+
+ if task_type == "Classification":
+     model.add(keras.layers.Dense(1, activation="sigmoid"))
+     loss = "binary_crossentropy"
+     metrics = ["accuracy"]
+ else:
+     model.add(keras.layers.Dense(1, activation="linear"))
+     loss = "mse"
+     metrics = ["mae"]

+ # Compile and train
+ model.compile(optimizer=keras.optimizers.Adam(learning_rate), loss=loss, metrics=metrics)
+ history = model.fit(X_train, y_train, epochs=epochs, batch_size=32, verbose=0, validation_split=0.2)
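+ # validation_split holds out the last 20% of the training arrays for per-epoch validation metrics.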
+
+ # Visualization Functions
+ def draw_neural_network(model):
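+     # Lay the network out as a left-to-right DAG: inputs in column 0, one column per hidden layer, the output node last.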
+     G = nx.DiGraph()
+     pos = {}
+     input_nodes = ["X1", "X2"]
+     for i, node in enumerate(input_nodes):
+         G.add_node(node, layer=0)
+         pos[node] = (0, -i)
+
+     hidden_nodes = []
+     for layer_idx, layer in enumerate(model.layers[:-1]):
+         if isinstance(layer, keras.layers.Dense):
+             layer_nodes = [f"H{layer_idx+1}_{i+1}" for i in range(layer.units)]
+             hidden_nodes.append(layer_nodes)
+             for i, node in enumerate(layer_nodes):
+                 G.add_node(node, layer=layer_idx + 1)
+                 pos[node] = (layer_idx + 1, -i)
+
+     output_node = "Y"
+     G.add_node(output_node, layer=len(hidden_nodes) + 1)
+     pos[output_node] = (len(hidden_nodes) + 1, -0.5)
+
+     for inp in input_nodes:
+         for hid in hidden_nodes[0]:
+             G.add_edge(inp, hid)
+     for i in range(len(hidden_nodes) - 1):
+         for src in hidden_nodes[i]:
+             for dst in hidden_nodes[i + 1]:
+                 G.add_edge(src, dst)
+     for node in hidden_nodes[-1]:
+         G.add_edge(node, output_node)
+
+     all_nodes = input_nodes + sum(hidden_nodes, []) + [output_node]
+     colors = ["lightblue"] * len(input_nodes) + ["lightcoral"] * sum(len(layer) for layer in hidden_nodes) + ["lightgreen"]
+
+     fig, ax = plt.subplots(figsize=(10, 8))
+     nx.draw(G, pos, with_labels=True, node_color=colors, edgecolors="black", node_size=1500, font_size=12, ax=ax, width=2, edge_color="gray", arrowsize=20)
+     ax.axis("off")
+     ax.set_title("Neural Network Architecture", fontsize=16)
+     st.pyplot(fig)
+
+ def plot_decision_boundary(X, y, model):
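+     # Classify every point of a 200x200 grid spanning the data range, thresholding the sigmoid output at 0.5 to shade the two predicted regions.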
+     x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
+     y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
+     xx, yy = np.meshgrid(np.linspace(x_min, x_max, 200), np.linspace(y_min, y_max, 200))
      Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
      Z = (Z > 0.5).astype(int).reshape(xx.shape)

+     plt.figure(figsize=(10, 8))
+     plt.contourf(xx, yy, Z, alpha=0.8, cmap="coolwarm")
+     plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors="k", cmap="coolwarm", s=100)
+     plt.xlabel("X1", fontsize=14)
+     plt.ylabel("X2", fontsize=14)
+     plt.title("Decision Boundary", fontsize=16)
      st.pyplot(plt)

+ def plot_regression_surface(X, y, model):
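+     # Evaluate the model over the same kind of grid and render the predictions as a 3-D surface above the data points.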
+     x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
+     y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
+     xx, yy = np.meshgrid(np.linspace(x_min, x_max, 200), np.linspace(y_min, y_max, 200))
+     Z = model.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
+
+     fig = plt.figure(figsize=(10, 8))
+     ax = fig.add_subplot(111, projection='3d')
+     ax.plot_surface(xx, yy, Z, alpha=0.7, cmap='viridis')
+     ax.scatter(X[:, 0], X[:, 1], y, c=y, cmap='viridis', s=20)
+     ax.set_xlabel("X1", fontsize=14)
+     ax.set_ylabel("X2", fontsize=14)
+     ax.set_zlabel("Predicted", fontsize=14)
+     ax.set_title("Regression Surface", fontsize=16)
+     st.pyplot(fig)
+
+ def plot_learning_curves(history):
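+     # Chart the tracked metric per epoch: accuracy for classification, MAE for regression.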
+     plt.figure(figsize=(10, 6))
+     if task_type == "Classification":
+         plt.plot(history.history['accuracy'], label='Training Accuracy')
+         plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
+         plt.ylabel('Accuracy', fontsize=14)
+     else:
+         plt.plot(history.history['mae'], label='Training MAE')
+         plt.plot(history.history['val_mae'], label='Validation MAE')
+         plt.ylabel('Mean Absolute Error', fontsize=14)
+     plt.title(f'Model {task_type} Over Epochs', fontsize=16)
+     plt.xlabel('Epoch', fontsize=14)
+     plt.legend(fontsize=12)
+     st.pyplot(plt)
+
+ # Visualizations
+ st.subheader("Network Architecture")
+ draw_neural_network(model)
+
+ if task_type == "Classification":
+     st.subheader("Decision Boundary")
+     plot_decision_boundary(X, y, model)
+ else:
+     st.subheader("Regression Surface")
+     plot_regression_surface(X, y, model)
+
+ st.subheader(f"Learning Curves for {task_type}")
+ plot_learning_curves(history)
+
+ if st.checkbox("Show Model Summary"):
+     st.subheader("Model Summary")
+     model.summary(print_fn=lambda x: st.text(x))