Gowthamvemula committed on
Commit
27a79fc
·
verified ·
1 Parent(s): ec904b0

Update Home.py

Browse files
Files changed (1) hide show
  1. Home.py +261 -186
Home.py CHANGED
@@ -1,192 +1,267 @@
1
- pip install scikit-learn
2
  import streamlit as st
3
  import numpy as np
4
- from sklearn.datasets import load_iris
5
- from sklearn.model_selection import train_test_split
6
- from sklearn.preprocessing import StandardScaler, OneHotEncoder
7
- from tensorflow import keras
8
- from tensorflow.keras import layers, regularizers
9
-
10
- def main():
11
- st.title("Neural Network Playground (TensorFlow + Streamlit)")
12
- st.write("""
13
- This demo trains a simple feed-forward neural network on the Iris dataset.
14
- Adjust the hyperparameters below and click *Train* to see how they affect performance.
15
- """)
16
-
17
- # -----------------------------
18
- # 1. SIDEBAR / HYPERPARAMETERS
19
- # -----------------------------
20
- st.sidebar.header("Hyperparameters")
21
-
22
- # Learning Rate
23
- learning_rate = st.sidebar.slider(
24
- "Learning Rate",
25
- min_value=1e-5,
26
- max_value=1.0,
27
- value=0.01,
28
- step=1e-5
29
- )
30
-
31
- # Regularization type
32
- reg_type = st.sidebar.selectbox(
33
- "Regularization Type",
34
- ["None", "L1", "L2", "L1L2"]
35
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
 
37
- # Regularization value
38
- reg_value = st.sidebar.number_input(
39
- "Regularization Value",
40
- value=0.01,
41
- step=0.01,
42
- min_value=0.0
43
- )
44
-
45
- # Activation function
46
- activation_fn = st.sidebar.selectbox(
47
- "Activation Function",
48
- ["sigmoid", "tanh", "relu"]
49
- )
50
-
51
- # Number of hidden layers
52
- num_hidden_layers = st.sidebar.slider(
53
- "Number of Hidden Layers",
54
- min_value=0,
55
- max_value=5,
56
- value=1
57
- )
58
-
59
- # Neurons per hidden layer
60
- neurons_per_layer = st.sidebar.slider(
61
- "Neurons per Hidden Layer",
62
- min_value=1,
63
- max_value=128,
64
- value=16
65
- )
66
-
67
- # Ratio of training to test
68
- test_size = st.sidebar.slider(
69
- "Test Set Ratio",
70
- min_value=0.05,
71
- max_value=0.95,
72
- value=0.2,
73
- step=0.05
74
- )
75
-
76
- # Batch size & Epochs
77
- batch_size = st.sidebar.selectbox(
78
- "Batch Size",
79
- [8, 16, 32, 64, 128]
80
- )
81
- epochs = st.sidebar.slider(
82
- "Epochs",
83
- min_value=1,
84
- max_value=200,
85
- value=50
86
- )
87
-
88
- # -----------------------------
89
- # 2. DATA PREPARATION
90
- # -----------------------------
91
- iris = load_iris()
92
- X = iris.data # shape (150, 4)
93
- y = iris.target.reshape(-1, 1) # shape (150, 1)
94
-
95
- # One-hot encode target
96
- encoder = OneHotEncoder(sparse_output=False)
97
- y_encoded = encoder.fit_transform(y) # shape (150, 3)
98
-
99
- # Split data
100
- X_train, X_test, y_train, y_test = train_test_split(
101
- X, y_encoded,
102
- test_size=test_size,
103
- random_state=42
104
- )
105
-
106
- # Scale features
107
- scaler = StandardScaler()
108
- X_train = scaler.fit_transform(X_train)
109
- X_test = scaler.transform(X_test)
110
-
111
- # -----------------------------
112
- # 3. BUILD MODEL FUNCTION
113
- # -----------------------------
114
- def build_model(lr, reg_t, reg_v, activation, n_hidden, n_neurons):
115
- # Choose the correct regularizer
116
- if reg_t == "None":
117
- reg = None
118
- elif reg_t == "L1":
119
- reg = regularizers.l1(reg_v)
120
- elif reg_t == "L2":
121
- reg = regularizers.l2(reg_v)
122
- else:
123
- reg = regularizers.l1_l2(reg_v, reg_v)
124
 
125
- model = keras.Sequential()
126
- # Input layer shape = 4 (for Iris)
127
- model.add(layers.Input(shape=(4,)))
128
-
129
- # Add hidden layers
130
- for _ in range(n_hidden):
131
- model.add(
132
- layers.Dense(
133
- n_neurons,
134
- activation=activation,
135
- kernel_regularizer=reg
136
- )
137
- )
138
 
139
- # Output layer (3 classes for Iris)
140
- model.add(layers.Dense(3, activation='softmax'))
141
-
142
- # Compile the model
143
- model.compile(
144
- optimizer=keras.optimizers.Adam(learning_rate=lr),
145
- loss='categorical_crossentropy',
146
- metrics=['accuracy']
147
- )
148
- return model
149
-
150
- # -----------------------------
151
- # 4. TRAINING
152
- # -----------------------------
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
153
  if st.button("Train"):
154
- st.write("### Training in progress...")
155
- model = build_model(
156
- learning_rate,
157
- reg_type,
158
- reg_value,
159
- activation_fn,
160
- num_hidden_layers,
161
- neurons_per_layer
162
- )
163
-
164
- history = model.fit(
165
- X_train,
166
- y_train,
167
- epochs=epochs,
168
- batch_size=batch_size,
169
- validation_split=0.2,
170
- verbose=0
171
- )
172
-
173
- # Plot training history
174
- st.write("#### Accuracy")
175
- st.line_chart({
176
- "Train": history.history['accuracy'],
177
- "Val": history.history['val_accuracy']
178
- })
179
-
180
- st.write("#### Loss")
181
- st.line_chart({
182
- "Train": history.history['loss'],
183
- "Val": history.history['val_loss']
184
- })
185
-
186
- # Evaluate on test set
187
- loss, acc = model.evaluate(X_test, y_test, verbose=0)
188
- st.write(f"#### Test Loss: {loss:.4f}")
189
- st.write(f"#### Test Accuracy: {acc:.4f}")
190
-
191
- if __name__ == "__main__":
192
- main()
 
 
 
1
  import streamlit as st
2
  import numpy as np
3
+ import matplotlib.pyplot as plt
4
+ import seaborn as sns
5
+ import graphviz
6
+ import time
7
+ from sklearn.datasets import make_moons, make_circles, make_classification
8
+ from sklearn.datasets import make_regression
9
+
10
+ # Set Streamlit page title
11
+ st.set_page_config(page_title="Neural Network Trainer", layout="wide")
12
+
13
+ # ================= Session State for Training Controls =================
14
+ if "epoch" not in st.session_state:
15
+ st.session_state.epoch = 0
16
+ if "running" not in st.session_state:
17
+ st.session_state.running = False
18
+
19
+ # ================= TRAINING CONTROL PANEL (Top) =================
20
+ st.markdown("### Training Controls")
21
+ col1, col2, col3, col4, col5, col6, col7, col8, col9 = st.columns(9)
22
+
23
+ with col1:
24
+ if st.button("↩️ Reset"):
25
+ st.session_state.epoch = 0
26
+ st.session_state.running = False
27
+ with col2:
28
+ if st.button("▶️ Train"):
29
+ st.session_state.running = True
30
+ with col3:
31
+ if st.button("⏸️ Pause"):
32
+ st.session_state.running = False
33
+ with col4:
34
+ activation = st.selectbox("Activation", ["ReLU", "Sigmoid", "Tanh", "LeakyReLU"])
35
+ with col5:
36
+ regularization = st.selectbox("Regularization", ["None", "L1", "L2"])
37
+ with col6:
38
+ reg_rate = st.selectbox("Regularization Rate", [0.0001, 0.001, 0.01, 0.1]) if regularization in ["L1", "L2"] else 0
39
+ with col7:
40
+ problem_type = st.selectbox("Problem Type", ["Classification", "Regression"])
41
+ with col8:
42
+ learning_rate = st.selectbox("Learning Rate", [0.0001, 0.001, 0.01, 0.03, 0.1])
43
+ with col9:
44
+ st.write(f"Epoch: {st.session_state.epoch}")
45
+
46
+ # 🚀 Fix: Run training loop without breaking Streamlit
47
+ if st.session_state.running:
48
+ time.sleep(1) # Simulating training
49
+ st.session_state.epoch += 1
50
+
51
+ # ================= MAIN LAYOUT =================
52
+ col_features, col_hidden, col_output = st.columns([2, 2, 2])
53
+
54
+ # ========== FEATURE SELECTION MOVED TO MIDDLE ==========
55
+ with col_features:
56
+ st.header("FEATURE SELECTION")
57
+ feature_dict = {
58
+ "X₁": st.checkbox("X₁", value=True),
59
+ "X₂": st.checkbox("X₂", value=True),
60
+ "X₁²": st.checkbox("X₁²"),
61
+ "X₂²": st.checkbox("X₂²"),
62
+ "X₁X₂": st.checkbox("X₁X₂"),
63
+ "sin(X₁)": st.checkbox("sin(X₁)"),
64
+ "sin(X₂)": st.checkbox("sin(X₂)"),
65
+ }
66
+ selected_features = [f for f, v in feature_dict.items() if v]
67
+
68
+ # ========== HIDDEN LAYERS PANEL (Middle) ========== #
69
+ with col_hidden:
70
+ st.header("HIDDEN LAYERS")
71
+ hidden_layers = st.slider("Number of Hidden Layers", 1, 7, 2)
72
+
73
+ neurons = []
74
+ for i in range(hidden_layers):
75
+ neurons.append(st.slider(f"Neurons in Layer {i+1}", 1, 20, 4))
76
+
77
+ # ========== OUTPUT PANEL (Right) ========== #
78
+ with col_output:
79
+ st.header("OUTPUT")
80
+ st.write("Test Loss: 0.501")
81
+ st.write("Training Loss: 0.507")
82
+
83
+ # Spiral Plot with Updated Color Palette
84
+ x = np.linspace(-6, 6, 300)
85
+ y = np.sin(x) + np.random.normal(0, 0.1, x.shape)
86
+
87
+ fig, ax = plt.subplots()
88
+ sns.scatterplot(x=x, y=y, hue=x, palette="plasma", ax=ax)
89
+ st.pyplot(fig)
90
+
91
+ show_test_data = st.checkbox("Show test data")
92
+ discretize_output = st.checkbox("Discretize output")
93
+
94
+
95
 
96
+ # Sidebar for dataset selection
97
+ st.sidebar.header("Dataset Selection")
98
+ data_type = st.sidebar.radio("Choose Data Type", ["Classification", "Regression"])
99
+
100
+ # Generate classification data
101
def generate_classification_data():
    """Render the classification sidebar controls and return a synthetic 2-D dataset.

    Returns:
        (X, y): feature matrix of shape (samples, 2) and integer class labels,
        produced by the sklearn generator chosen in the sidebar.
    """
    st.sidebar.subheader("Classification Settings")
    dataset_type = st.sidebar.selectbox("Dataset Type", ["Moons", "Circles", "Classification"])
    noise = st.sidebar.slider("Noise Level", 0.0, 1.0, 0.2, step=0.05)
    samples = st.sidebar.slider("Number of Samples", 100, 1000, 500, step=50)

    # Guard-clause dispatch on the chosen generator.
    if dataset_type == "Moons":
        return make_moons(n_samples=samples, noise=noise)
    if dataset_type == "Circles":
        return make_circles(n_samples=samples, noise=noise, factor=0.5)
    # "Classification": noise is reused as the label-flip probability here.
    return make_classification(
        n_samples=samples,
        n_features=2,
        n_classes=2,
        n_clusters_per_class=1,
        flip_y=noise,
    )
115
+
116
+ # Generate regression data
117
def generate_regression_data():
    """Render the regression sidebar controls and return a 1-feature synthetic dataset.

    Returns:
        (X, y): feature matrix of shape (samples, 1) and continuous targets
        from sklearn's make_regression with the sidebar-selected noise level.
    """
    st.sidebar.subheader("Regression Settings")
    samples = st.sidebar.slider("Number of Samples", 100, 1000, 500, step=50)
    noise = st.sidebar.slider("Noise Level", 0.0, 10.0, 2.0, step=0.5)
    return make_regression(n_samples=samples, n_features=1, noise=noise)
124
+
125
+ # Select dataset type
126
+ if data_type == "Classification":
127
+ X, y = generate_classification_data()
128
+ cmap = "coolwarm"
129
+ title = "Classification Data"
130
+ is_classification = True
131
+ else:
132
+ X, y = generate_regression_data()
133
+ cmap = "plasma"
134
+ title = "Regression Data"
135
+ is_classification = False
136
+
137
+ # 🎯 Reduced Size of the Plot
138
+ fig, ax = plt.subplots(figsize=(4, 2)) # Smaller size (width=4, height=2)
139
+
140
+ if is_classification:
141
+ scatter = ax.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap, edgecolors="white", alpha=0.8)
142
+ ax.set_xlabel("Feature 1", fontsize=8)
143
+ ax.set_ylabel("Feature 2", fontsize=8)
144
+ else:
145
+ scatter = ax.scatter(X[:, 0], y, c=y, cmap=cmap, edgecolors="white", alpha=0.8)
146
+ sns.kdeplot(x=X[:, 0], y=y, fill=True, cmap=cmap, alpha=0.3, ax=ax)
147
+ ax.set_xlabel("Feature 1", fontsize=8)
148
+ ax.set_ylabel("Target", fontsize=8)
149
+
150
+ ax.set_title(title, fontsize=10)
151
+ ax.tick_params(axis='both', labelsize=7)
152
+ ax.grid(True, linewidth=0.5)
153
+
154
+ # Display in Streamlit
155
+ st.pyplot(fig)
156
+
157
+
158
+ # ================= NEURAL NETWORK VISUALIZATION =================
159
def draw_neural_network():
    """Build a left-to-right graphviz diagram of the configured network.

    Reads the module-level `selected_features` (input layer) and `neurons`
    (hidden-layer widths) chosen in the UI. Returns a graphviz.Digraph.
    """
    graph = graphviz.Digraph(engine="dot")

    # Input layer: one light-blue node per selected feature.
    input_nodes = list(selected_features)
    for feature in input_nodes:
        graph.node(feature, feature, shape="circle", style="filled", fillcolor="lightblue", width="0.6", height="0.6")

    # Hidden layers: each layer is fully connected to the previous one.
    prev_layer = input_nodes
    for layer_idx, width in enumerate(neurons, start=1):
        layer_nodes = [f"H{layer_idx}_{unit + 1}" for unit in range(width)]
        for node in layer_nodes:
            graph.node(node, node, shape="circle", style="filled", fillcolor="lightyellow", width="0.6", height="0.6")
        for src in prev_layer:
            for dst in layer_nodes:
                graph.edge(src, dst)
        prev_layer = layer_nodes  # next layer connects from here

    # Single output node, fed by the last hidden layer (or inputs if no hidden layers).
    graph.node("Output", "Output", shape="circle", style="filled", fillcolor="lightgreen", width="0.6", height="0.6")
    for src in prev_layer:
        graph.edge(src, "Output")

    graph.attr(rankdir="LR")  # horizontal layout, left to right
    return graph
196
+
197
+ # =================== DISPLAY NEURAL NETWORK ===================
198
+ st.graphviz_chart(draw_neural_network())
199
+
200
+
201
# =================== DISPLAY DATA PLOT ===================
# Sidebar thumbnail of the generated dataset. Regression data from
# make_regression(n_features=1) has shape (n, 1), so indexing X[:, 1]
# would raise IndexError; plot feature-vs-target in that case instead.
st.sidebar.subheader("Dataset Visualization")
fig, ax = plt.subplots()
if X.shape[1] > 1:
    # Classification: two features, colored by class label.
    ax.scatter(X[:, 0], X[:, 1], c=y, cmap="plasma", edgecolors="k")
else:
    # Regression: single feature on x, target on y.
    ax.scatter(X[:, 0], y, c=y, cmap="plasma", edgecolors="k")
st.sidebar.pyplot(fig)
206
+ import streamlit as st
207
+ import numpy as np
208
+ import matplotlib.pyplot as plt
209
+ import time
210
+
211
+ # Initialize session state
212
+ if "epoch" not in st.session_state:
213
+ st.session_state.epoch = 0
214
+ if "running" not in st.session_state:
215
+ st.session_state.running = False
216
+ if "loss_history" not in st.session_state:
217
+ st.session_state.loss_history = []
218
+
219
+ # Training controls
220
+ col1, col2, col3 = st.columns(3)
221
+ with col1:
222
+ if st.button("Reset"):
223
+ st.session_state.epoch = 0
224
+ st.session_state.running = False
225
+ st.session_state.loss_history = []
226
+ with col2:
227
  if st.button("Train"):
228
+ st.session_state.running = True
229
+ with col3:
230
+ if st.button("Pause"):
231
+ st.session_state.running = False
232
+
233
+ # Training loop simulation
234
+ if st.session_state.running:
235
+ for _ in range(10):
236
+ time.sleep(0.5)
237
+ st.session_state.epoch += 1
238
+ simulated_loss = np.exp(-0.1 * st.session_state.epoch) + np.random.normal(0, 0.02)
239
+ st.session_state.loss_history.append(simulated_loss)
240
+
241
+ # Epoch vs Training Loss Plot (Smaller Size)
242
+ st.header("Epoch vs Training Loss")
243
+ fig, ax = plt.subplots(figsize=(4, 2)) # Reduce plot size (width=4, height=2)
244
+ ax.plot(range(1, len(st.session_state.loss_history) + 1), st.session_state.loss_history, marker="o", linestyle="-", color="blue")
245
+ ax.set_xlabel("Epoch")
246
+ ax.set_ylabel("Training Loss")
247
+ ax.set_title("Training Loss Over Epochs", fontsize=10)
248
+ ax.tick_params(axis='both', labelsize=8)
249
+ ax.grid(True, linewidth=0.5)
250
+
251
+ st.pyplot(fig)
252
+
253
# Display current epoch and training loss below the plot.
# (The original pasted this block twice verbatim; shown once here.)
if st.session_state.loss_history:
    st.write(f"Epoch: {st.session_state.epoch}")
    st.write(f"Training Loss: {st.session_state.loss_history[-1]:.4f}")

# =================== TRAINING STATUS ===================
# Running flag is toggled by the Train/Pause buttons above; epoch > 0
# distinguishes "paused mid-run" from "never started".
if st.session_state.running:
    st.write("🚀 Training started...")
elif not st.session_state.running and st.session_state.epoch > 0:
    st.write("⏸️ Training paused.")