Surendradjh committed on
Commit
353106f
·
verified ·
1 Parent(s): 185f23f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +85 -55
app.py CHANGED
@@ -3,15 +3,14 @@ import numpy as np
3
  import matplotlib.pyplot as plt
4
 
5
  from tensorflow.keras.models import Sequential
6
- from tensorflow.keras.layers import Dense, Input
7
  from tensorflow.keras.optimizers import Adam
 
8
 
9
  from sklearn.datasets import make_moons, make_circles, make_blobs
10
  from sklearn.model_selection import train_test_split
11
  from sklearn.preprocessing import StandardScaler
12
 
13
-
14
- # Sidebar - UI Controls
15
  st.sidebar.title("πŸ”§ Model Settings")
16
 
17
  with st.sidebar.expander("🧠 Dataset Settings"):
@@ -28,7 +27,11 @@ with st.sidebar.expander("βš™οΈ Model Hyperparameters"):
28
  num_neurons = st.slider("Neurons per Hidden Layer", 1, 100, 16)
29
  hidden_layers = st.slider("Number of Hidden Layers", 1, 5, 2)
30
 
31
- # Dataset Generator
 
 
 
 
32
  if dataset == "Moons":
33
  x, y = make_moons(n_samples=n_samples, noise=noise, random_state=42)
34
  elif dataset == "Circles":
@@ -41,60 +44,87 @@ x = scaler.fit_transform(x)
41
 
42
  x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=split, random_state=27)
43
 
44
- # Model Definition
45
- model = Sequential()
46
- model.add(Input(shape=(2,)))
47
- for _ in range(hidden_layers):
48
- model.add(Dense(units=num_neurons, activation=activation))
49
- model.add(Dense(1, activation="sigmoid"))
50
-
51
- model.compile(optimizer=Adam(learning_rate=lr), loss='binary_crossentropy', metrics=['accuracy'])
52
-
53
- # Training
54
- history = model.fit(x_train, y_train,
55
- validation_data=(x_test, y_test),
56
- batch_size=batch,
57
- epochs=epochs,
58
- verbose=0)
59
-
60
- test_loss, test_acc = model.evaluate(x_test, y_test, verbose=0)
61
-
62
- # UI Display
63
- st.title("πŸ§ͺ Neural Network Playground")
64
- st.markdown("Visualize how a simple feedforward neural network learns to classify synthetic datasets.")
65
-
66
- st.subheader("πŸ“ˆ Model Performance")
67
- st.success(f"**Test Accuracy:** {test_acc:.4f}")
68
- st.info(f"**Test Loss:** {test_loss:.4f}")
69
-
70
- # Decision Boundary Plot
71
- st.subheader("πŸ” Decision Boundary")
72
-
73
- x_min, x_max = x[:, 0].min() - 1, x[:, 0].max() + 1
74
- y_min, y_max = x[:, 1].min() - 1, x[:, 1].max() + 1
75
- xx, yy = np.meshgrid(np.linspace(x_min, x_max, 300),
76
- np.linspace(y_min, y_max, 300))
77
- grid = np.c_[xx.ravel(), yy.ravel()]
78
- preds = model.predict(grid, verbose=0).reshape(xx.shape)
79
-
80
- fig, ax = plt.subplots(figsize=(7, 6))
81
- ax.contourf(xx, yy, preds, cmap='RdBu', alpha=0.6)
82
- ax.scatter(x[:, 0], x[:, 1], c=y, cmap='RdBu', edgecolors='k', s=30)
83
- ax.set_title("Decision Boundary")
84
- ax.set_xlabel("Feature 1")
85
- ax.set_ylabel("Feature 2")
86
- st.pyplot(fig)
87
-
88
- # Training History Plot
89
- def plot_loss(history):
 
 
 
 
 
 
 
 
 
90
  fig, ax = plt.subplots()
91
  ax.plot(history.history['loss'], label='Train Loss')
92
- ax.plot(history.history['val_loss'], label='Validation Loss')
93
- ax.set_title("Training vs Validation Loss")
94
- ax.set_xlabel("Epoch")
95
  ax.set_ylabel("Loss")
96
  ax.legend()
97
  return fig
98
 
99
- st.subheader("πŸ“‰ Training Loss")
100
- st.pyplot(plot_loss(history))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3
  import matplotlib.pyplot as plt
4
 
5
  from tensorflow.keras.models import Sequential
6
+ from tensorflow.keras.layers import Dense, Input, Dropout
7
  from tensorflow.keras.optimizers import Adam
8
+ from tensorflow.keras.callbacks import EarlyStopping
9
 
10
  from sklearn.datasets import make_moons, make_circles, make_blobs
11
  from sklearn.model_selection import train_test_split
12
  from sklearn.preprocessing import StandardScaler
13
 
 
 
14
  st.sidebar.title("πŸ”§ Model Settings")
15
 
16
  with st.sidebar.expander("🧠 Dataset Settings"):
 
27
  num_neurons = st.slider("Neurons per Hidden Layer", 1, 100, 16)
28
  hidden_layers = st.slider("Number of Hidden Layers", 1, 5, 2)
29
 
30
+ with st.sidebar.expander("πŸ›  Regularization"):
31
+ use_earlystop = st.checkbox("Use EarlyStopping", value=True)
32
+ use_dropout = st.checkbox("Use Dropout", value=True)
33
+ dropout_rate = st.slider("Dropout Rate", 0.0, 0.5, 0.3) if use_dropout else 0.0
34
+
35
  if dataset == "Moons":
36
  x, y = make_moons(n_samples=n_samples, noise=noise, random_state=42)
37
  elif dataset == "Circles":
 
44
 
45
  x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=split, random_state=27)
46
 
47
# EarlyStopping halts training once val_loss stops improving for 10 epochs
# and rolls back to the best weights seen so far.
callbacks = []
if use_earlystop:
    callbacks.append(EarlyStopping(patience=10, restore_best_weights=True))
49
def build_model(with_dropout=False):
    """Build and compile a feedforward binary classifier.

    Architecture and optimizer settings come from the sidebar globals
    (``hidden_layers``, ``num_neurons``, ``activation``, ``lr``,
    ``dropout_rate``), so each call reflects the current UI state.

    Args:
        with_dropout: insert a Dropout layer after each hidden Dense layer.

    Returns:
        A compiled ``Sequential`` model with a single sigmoid output unit.
    """
    model = Sequential()
    model.add(Input(shape=(2,)))  # two synthetic input features
    for _ in range(hidden_layers):
        model.add(Dense(units=num_neurons, activation=activation))
        # Skip no-op Dropout(0.0) layers — possible when "Use Dropout" is
        # unchecked but the dropout model is still built for comparison.
        if with_dropout and dropout_rate > 0:
            model.add(Dropout(dropout_rate))
    model.add(Dense(1, activation="sigmoid"))
    model.compile(optimizer=Adam(learning_rate=lr),
                  loss='binary_crossentropy',
                  metrics=['accuracy'])
    return model
59
+
60
def _train(model, cbs):
    """Fit *model* on the train split, validating on the held-out split.

    Shared by all three configurations so the training arguments
    (batch size, epochs, validation data) cannot drift apart.
    Returns the Keras History object.
    """
    return model.fit(x_train, y_train,
                     validation_data=(x_test, y_test),
                     batch_size=batch,
                     epochs=epochs,
                     callbacks=cbs,
                     verbose=0)

# Three configurations: plain, early-stopped, and dropout-regularized.
base_model = build_model(False)
early_model = build_model(False)
dropout_model = build_model(True)

base_hist = _train(base_model, [])             # baseline: no regularization
early_hist = _train(early_model, callbacks)    # EarlyStopping (if enabled)
dropout_hist = _train(dropout_model, callbacks)
84
+
85
def plot_decision_boundary(model, title):
    """Render the model's predicted probability surface over the data.

    Evaluates the model on a 300x300 mesh spanning the (scaled) feature
    space, padded by one unit on every side, then overlays the samples.
    Returns the matplotlib Figure.
    """
    pad = 1
    lo = x.min(axis=0) - pad
    hi = x.max(axis=0) + pad
    xx, yy = np.meshgrid(np.linspace(lo[0], hi[0], 300),
                         np.linspace(lo[1], hi[1], 300))
    grid = np.column_stack((xx.ravel(), yy.ravel()))
    preds = model.predict(grid, verbose=0).reshape(xx.shape)

    fig, ax = plt.subplots()
    ax.contourf(xx, yy, preds, cmap='RdBu', alpha=0.6)
    ax.scatter(x[:, 0], x[:, 1], c=y, cmap='RdBu', edgecolors='k', s=25)
    ax.set_title(title)
    ax.set_xlabel("Feature 1")
    ax.set_ylabel("Feature 2")
    return fig
100
+
101
def plot_loss(history, title):
    """Plot training vs. validation loss curves from a Keras History."""
    fig, ax = plt.subplots()
    # Same two curves as before, drawn data-driven.
    for key, label in (('loss', 'Train Loss'), ('val_loss', 'Val Loss')):
        ax.plot(history.history[key], label=label)
    ax.set_title(title)
    ax.set_xlabel("Epochs")
    ax.set_ylabel("Loss")
    ax.legend()
    return fig
110
 
111
st.title("πŸ§ͺ Regularization Comparison")

st.markdown("Below are the decision boundaries and loss curves for three configurations:")

cols = st.columns(3)

# One panel per configuration:
# (column header, model, boundary title, history, loss-plot title)
panels = [
    ("### πŸ”Ή Base Model", base_model, "Base Model", base_hist, "Base Model Loss"),
    ("### 🟒 With EarlyStopping", early_model, "EarlyStopping", early_hist, "EarlyStopping Loss"),
    ("### πŸ”Έ With Dropout", dropout_model, "Dropout Model", dropout_hist, "Dropout Loss"),
]

for col, (header, mdl, boundary_title, hist, loss_title) in zip(cols, panels):
    with col:
        st.markdown(header)
        st.pyplot(plot_decision_boundary(mdl, boundary_title))
        st.pyplot(plot_loss(hist, loss_title))