shubham680 committed on
Commit
f8b39fb
·
verified ·
1 Parent(s): 904ff98

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +344 -149
app.py CHANGED
@@ -171,201 +171,396 @@
171
 
172
 
173
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
174
  import streamlit as st
175
  import numpy as np
176
  import pandas as pd
177
  from sklearn.datasets import make_circles, make_moons, make_blobs
178
  from sklearn.preprocessing import StandardScaler
179
  from sklearn.model_selection import train_test_split
180
- from tensorflow.keras.models import Sequential
181
- from tensorflow.keras.layers import InputLayer, Dense
182
- from tensorflow.keras.optimizers import SGD
183
- from tensorflow.keras.regularizers import l1, l2, l1_l2
184
- from tensorflow.keras.callbacks import EarlyStopping
185
-
186
  import matplotlib.pyplot as plt
 
 
 
 
 
187
  from mlxtend.plotting import plot_decision_regions
188
- import graphviz
189
-
190
- # -----------------------------
191
- # Streamlit UI
192
- # -----------------------------
193
- st.title("TensorFlow Playground")
194
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
195
  with st.sidebar:
196
- st.header("Dataset Options")
197
- dataset_mode = st.radio("Choose Dataset Mode", ["Synthetic", "Upload CSV"])
198
 
199
- if dataset_mode == "Synthetic":
200
- dataset = st.selectbox("Select Dataset", ["Blobs", "Circles", "Moons"])
201
- noise = st.slider("Noise", 0.0, 1.0, 0.1)
202
- test_size = st.slider("Test Size", 0.1, 0.5, 0.2)
203
- else:
204
- uploaded_file = st.file_uploader("Upload your CSV", type=["csv"])
205
- if uploaded_file is not None:
206
- df = pd.read_csv(uploaded_file)
207
- if df.shape[1] < 3:
208
- st.error("Your dataset must have at least 3 columns (2 features + 1 target).")
209
- st.stop()
210
- feature_cols = st.multiselect("Select exactly 2 features", df.columns[:-1])
211
- target_col = st.selectbox("Select target column", df.columns)
212
- if len(feature_cols) != 2:
213
- st.error("Please select exactly 2 features.")
214
- st.stop()
215
- X = df[feature_cols].values
216
- y = df[target_col].values
217
- test_size = st.slider("Test Size", 0.1, 0.5, 0.2)
218
- else:
219
- st.warning("Upload a CSV file to continue.")
220
- st.stop()
221
 
222
  st.header("Model Hyperparameters")
223
  hl = st.number_input("Hidden Layers", 1, 10, step=1)
224
  numbers = st.text_input("Neurons for each hidden layer", placeholder="e.g. 8,16,32")
225
  input_func = lambda x: [int(i.strip()) for i in x.split(",") if i.strip() != ""]
226
  nn = input_func(numbers)
227
- n_epochs = st.number_input("Epochs", 1, 10000, step=1, value=50)
228
 
229
  col1, col2 = st.columns(2)
230
  with col1:
231
- af = st.selectbox("Activation Function", ["sigmoid", "tanh", "relu"], index=2)
232
  with col2:
233
- lr = st.selectbox("Learning Rate", [0.1, 0.01, 0.02, 0.2], index=1)
234
 
235
  reg = st.selectbox("Regularizer", ["None", "L1", "L2", "ElasticNet"])
236
  if reg != "None":
237
  reg_rate = st.slider("Regularization rate", 0.0, 0.1, 0.01)
238
 
239
- early_stop_option = st.selectbox("Early Stopping", ["No", "Yes"], index=0)
240
- if early_stop_option == "Yes":
241
  col3, col4 = st.columns(2)
242
  with col3:
243
- min_delta = st.number_input("Minimum Delta", 0.001, 0.9, step=0.01)
244
  with col4:
245
  patience = st.number_input("Patience", 3, 20, step=1)
246
 
247
- # -----------------------------
248
- # Train Button
249
- # -----------------------------
250
- if st.sidebar.button("Train"):
251
- # ---------------- Dataset ----------------
252
- if dataset_mode == "Synthetic":
253
- if dataset == "Circles":
254
- X, y = make_circles(n_samples=1000, noise=noise, random_state=42, factor=0.5)
255
- elif dataset == "Moons":
256
- X, y = make_moons(n_samples=1000, noise=noise, random_state=42)
257
- elif dataset == "Blobs":
258
- X, y = make_blobs(n_samples=1000, centers=2, cluster_std=noise+0.5, random_state=42)
259
-
260
- # Split dataset
261
- X_train, X_test, y_train, y_test = train_test_split(
262
- X, y, test_size=test_size, random_state=42, stratify=y
263
- )
264
-
265
- # Standardize
266
- std = StandardScaler()
267
- X_train = std.fit_transform(X_train)
268
- X_test = std.transform(X_test)
269
-
270
- # ---------------- Regularizer ----------------
271
- if reg == "L1":
272
- reg = l1(reg_rate)
273
- elif reg == "L2":
274
- reg = l2(reg_rate)
275
- elif reg == "ElasticNet":
276
- reg = l1_l2(l1=reg_rate, l2=reg_rate)
277
  else:
278
- reg = None
279
-
280
- # ---------------- Model ----------------
281
- model = Sequential()
282
- model.add(InputLayer(shape=(2,)))
283
- if hl == len(nn):
284
- for units in nn:
285
- model.add(Dense(units=units, activation=af, kernel_regularizer=reg))
286
-
287
- model.add(Dense(units=1, activation="sigmoid", kernel_regularizer=reg))
288
- sgd = SGD(learning_rate=lr)
289
- model.compile(loss="binary_crossentropy", optimizer=sgd, metrics=["accuracy"])
290
-
291
- # ---------------- Callbacks ----------------
292
- callbacks = []
293
- if early_stop_option == "Yes":
294
- es = EarlyStopping(
295
- monitor="val_loss",
296
- min_delta=min_delta,
297
- patience=patience,
298
- verbose=1,
299
- restore_best_weights=True,
300
- start_from_epoch=50,
301
- )
302
- callbacks.append(es)
303
-
304
- # ---------------- Training ----------------
305
- hist = model.fit(
306
- X_train,
307
- y_train,
308
- epochs=n_epochs,
309
- batch_size=len(X_train),
310
- validation_data=(X_test, y_test),
311
- verbose=0,
312
- callbacks=callbacks,
313
- )
314
-
315
- # ---------------- Decision Boundary ----------------
 
 
 
 
 
 
 
 
 
 
 
316
  st.subheader("Decision Boundary")
317
  fig, ax = plt.subplots()
318
- plot_decision_regions(X_test, y_test, clf=model, legend=2)
319
- plt.xlabel("Feature 1")
320
- plt.ylabel("Feature 2")
321
  st.pyplot(fig)
322
 
323
- # ---------------- Loss Curves ----------------
324
  st.subheader("Training vs Validation Loss")
325
  fig2, ax2 = plt.subplots()
326
  ax2.plot(hist.history["loss"], label="Train Loss")
327
  ax2.plot(hist.history["val_loss"], label="Validation Loss")
328
- ax2.set_xlabel("Epochs")
329
- ax2.set_ylabel("Loss")
330
  ax2.legend()
331
  st.pyplot(fig2)
332
 
333
- # ---------------- Accuracy Curves ----------------
334
- st.subheader("Training vs Validation Accuracy")
335
  fig3, ax3 = plt.subplots()
336
- ax3.plot(hist.history["accuracy"], label="Train Accuracy")
337
- ax3.plot(hist.history["val_accuracy"], label="Validation Accuracy")
338
- ax3.set_xlabel("Epochs")
339
- ax3.set_ylabel("Accuracy")
340
- ax3.legend()
 
 
 
 
341
  st.pyplot(fig3)
342
 
343
- # ---------------- Graphical NN Architecture ----------------
344
- st.subheader("Neural Network Architecture")
345
-
346
- def visualize_nn(layers):
347
- dot = graphviz.Digraph()
348
- dot.attr(rankdir="LR")
349
-
350
- dot.node("Input", "Input Layer\nfeatures=2", shape="box", style="filled", color="lightblue")
351
-
352
- for i, units in enumerate(layers):
353
- dot.node(f"H{i}", f"Hidden {i+1}\nunits={units}", shape="box", style="filled", color="lightgreen")
354
- if i == 0:
355
- dot.edge("Input", f"H{i}")
356
- else:
357
- dot.edge(f"H{i-1}", f"H{i}")
358
-
359
- dot.node("Output", "Output Layer\nunits=1", shape="box", style="filled", color="lightcoral")
360
- dot.edge(f"H{len(layers)-1}" if layers else "Input", "Output")
361
-
362
- return dot
363
-
364
- dot = visualize_nn(nn)
365
- st.graphviz_chart(dot)
366
-
367
-
368
-
369
 
370
 
371
 
 
171
 
172
 
173
 
174
# =====================================================================
# TensorFlow Playground
# Interactive Streamlit app: train a small dense neural network on a
# 2-D toy dataset (Blobs / Circles / Moons) or an uploaded CSV, then
# visualize the decision boundary, the loss curves, and the network
# architecture.
# =====================================================================

import streamlit as st
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.datasets import make_circles, make_moons, make_blobs
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
# Single canonical import path for Keras (the previous mix of
# `from tensorflow import keras` and bare `from keras...` could bind two
# different Keras installations). Unused `Dropout` import removed.
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import InputLayer, Dense
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.regularizers import l1, l2, l1_l2
from tensorflow.keras.callbacks import EarlyStopping
from mlxtend.plotting import plot_decision_regions

# ========== Custom CSS ==========
st.markdown("""
<style>
/* Background */
.stApp {
    background: linear-gradient(to right, #f8f9fa, #eef2f3);
    font-family: 'Segoe UI', sans-serif;
}
/* Sidebar */
section[data-testid="stSidebar"] {
    background-color: #2C3E50 !important;
}
section[data-testid="stSidebar"] h1,
section[data-testid="stSidebar"] h2,
section[data-testid="stSidebar"] h3,
section[data-testid="stSidebar"] label {
    color: white !important;
}
/* Title */
h1 {
    text-align: center;
    color: #2C3E50;
    font-size: 40px;
    font-weight: bold;
    margin-bottom: 20px;
}
/* Buttons */
div.stButton > button {
    background: #3498DB;
    color: white;
    border-radius: 8px;
    padding: 10px 24px;
    border: none;
    font-size: 16px;
    transition: 0.3s;
}
div.stButton > button:hover {
    background: #2980B9;
}
/* Select & Input */
.stSelectbox, .stNumberInput, .stSlider {
    border-radius: 10px !important;
}
/* Charts */
.css-1v0mbdj, .css-1y0tads {
    background-color: white;
    padding: 20px;
    border-radius: 12px;
    box-shadow: 0px 4px 15px rgba(0,0,0,0.1);
}
</style>
""", unsafe_allow_html=True)

# ========== Title ==========
st.title("✨ TensorFlow Playground")

# ---------------- Sidebar: dataset + hyperparameters ----------------
with st.sidebar:
    st.header("Choose Dataset")
    dataset = st.selectbox("Select Dataset", ["Blobs", "Circles", "Moons", "Upload CSV"])

    # Noise only applies to the synthetic generators.
    if dataset != "Upload CSV":
        noise = st.slider("Noise", 0.0, 1.0, 0.1)
    test_size = st.slider("Test Size", 0.1, 0.5, 0.2)

    st.header("Model Hyperparameters")
    hl = st.number_input("Hidden Layers", 1, 10, step=1)
    numbers = st.text_input("Neurons for each hidden layer", placeholder="e.g. 8,16,32")
    # Parse "8,16,32" -> [8, 16, 32]; blank tokens are ignored.
    nn = [int(tok.strip()) for tok in numbers.split(",") if tok.strip() != ""]
    epochs = st.number_input("Epochs", 1, 10000, step=1, value=10)

    col1, col2 = st.columns(2)
    with col1:
        af = st.selectbox("Activation Function", ["Sigmoid", "Tanh", "Relu"], index=2)
    with col2:
        lr = st.selectbox("Learning Rate", [0.1, 0.01, 0.02, 0.2])

    reg_choice = st.selectbox("Regularizer", ["None", "L1", "L2", "ElasticNet"])
    if reg_choice != "None":
        reg_rate = st.slider("Regularization rate", 0.0, 0.1, 0.01)

    es = st.selectbox("Early Stopping", ["No", "Yes"], index=0)
    if es == "Yes":
        col3, col4 = st.columns(2)
        with col3:
            min_delta = st.number_input("Minimum Delta", 0.001, 0.9, step=0.1)
        with col4:
            patience = st.number_input("Patience", 3, 20, step=1)

# ---------------- Dataset ----------------
if dataset == "Upload CSV":
    uploaded_file = st.sidebar.file_uploader("Upload your CSV", type=["csv"])
    if uploaded_file is not None:
        df = pd.read_csv(uploaded_file)
        if df.shape[1] < 3:
            st.error("Your dataset must have at least 3 columns (2 features + 1 target).")
            st.stop()
        feature_cols = st.sidebar.multiselect("Select exactly 2 features", df.columns[:-1])
        target_col = st.sidebar.selectbox("Select target column", df.columns)
        if len(feature_cols) != 2:
            st.error("Please select exactly 2 features.")
            st.stop()
        X = df[feature_cols].values
        # NOTE(review): assumes the target column holds binary class labels
        # (required by binary_crossentropy and stratify below) — not validated here.
        y = df[target_col].values
    else:
        st.warning("Upload a CSV file to continue.")
        st.stop()
else:
    if dataset == "Circles":
        X, y = make_circles(n_samples=1000, noise=noise, random_state=42, factor=0.5)
    elif dataset == "Moons":
        X, y = make_moons(n_samples=1000, noise=noise, random_state=42)
    elif dataset == "Blobs":
        # BUG FIX: cluster_std=noise degenerates to point clusters at noise=0;
        # offset by 0.5 so the blobs always have a non-zero spread.
        X, y = make_blobs(n_samples=1000, centers=2, cluster_std=noise + 0.5, random_state=42)

# Train-test split, then standardize (fit on train only to avoid leakage).
x_train, x_test, y_train, y_test = train_test_split(
    X, y, test_size=test_size, random_state=42, stratify=y
)
std = StandardScaler()
x_train = std.fit_transform(x_train)
x_test = std.transform(x_test)

# ---------------- Regularizer ----------------
if reg_choice == "L1":
    reg = l1(reg_rate)
elif reg_choice == "L2":
    reg = l2(reg_rate)
elif reg_choice == "ElasticNet":
    reg = l1_l2(l1=reg_rate, l2=reg_rate)
else:
    reg = None


class _KerasBinaryClassifier:
    """sklearn-style adapter for mlxtend's plot_decision_regions.

    mlxtend expects `clf.predict(X)` to return integer class labels, while
    a Keras binary model outputs probabilities of shape (n, 1) — this
    wrapper thresholds at 0.5 and flattens.
    """

    def __init__(self, model):
        self.model = model

    def predict(self, X):
        return (self.model.predict(X, verbose=0) > 0.5).astype(int).ravel()


# ---------------- Train & visualize ----------------
# Model construction happens inside the button handler so the network is
# built once per explicit request instead of on every Streamlit rerun.
if st.sidebar.button("🚀 Train Model"):
    # BUG FIX: the original `if hl == len(nn)` silently skipped ALL hidden
    # layers on a mismatch (including the initial empty input), training a
    # plain logistic regression without telling the user.
    if len(nn) != hl:
        st.error(f"Please enter exactly {int(hl)} comma-separated neuron counts (got {len(nn)}).")
        st.stop()

    model = Sequential()
    model.add(InputLayer(shape=(2,)))
    for units in nn:
        model.add(Dense(units=units, activation=af.lower(), kernel_regularizer=reg))
    model.add(Dense(units=1, activation="sigmoid", kernel_regularizer=reg))

    model.compile(loss="binary_crossentropy",
                  optimizer=SGD(learning_rate=lr),
                  metrics=["accuracy"])

    callbacks = []
    if es == "Yes":
        callbacks.append(EarlyStopping(monitor="val_loss", min_delta=min_delta,
                                       patience=patience, verbose=1,
                                       restore_best_weights=True))

    hist = model.fit(x_train, y_train, epochs=int(epochs),
                     validation_data=(x_test, y_test),
                     batch_size=32, verbose=0, callbacks=callbacks)

    # Decision boundary — BUG FIX: plot in the *standardized* feature space
    # the model was actually trained in (the original plotted raw X, so the
    # boundary was drawn against data on a different scale), and wrap the
    # Keras model so .predict returns labels as mlxtend requires.
    st.subheader("Decision Boundary")
    fig, ax = plt.subplots()
    plot_decision_regions(x_test, np.asarray(y_test).astype(int),
                          clf=_KerasBinaryClassifier(model), legend=2)
    plt.xlabel("Feature 1")
    plt.ylabel("Feature 2")
    st.pyplot(fig)

    # Loss curves (axis labels restored).
    st.subheader("Training vs Validation Loss")
    fig2, ax2 = plt.subplots()
    ax2.plot(hist.history["loss"], label="Train Loss")
    ax2.plot(hist.history["val_loss"], label="Validation Loss")
    ax2.set_xlabel("Epochs")
    ax2.set_ylabel("Loss")
    ax2.legend()
    st.pyplot(fig2)

    # Neural network diagram: one scatter column per layer (input, hidden
    # layers, output), fully connected with faint gray edges.
    st.subheader("Neural Network Architecture")
    fig3, ax3 = plt.subplots()
    layer_sizes = [2] + nn + [1]
    for i, size in enumerate(layer_sizes):
        for j in range(size):
            ax3.scatter(i, j, s=800, color="skyblue", edgecolors="black")
        if i < len(layer_sizes) - 1:
            for j in range(size):
                for k in range(layer_sizes[i + 1]):
                    ax3.plot([i, i + 1], [j, k], color="gray", alpha=0.3)
    ax3.axis("off")
    st.pyplot(fig3)
563
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
564
 
565
 
566