# Streamlit demo app: trains three small Keras binary classifiers (baseline,
# early stopping, dropout) on a synthetic 2-D dataset and lets the user compare
# their decision boundaries and loss curves interactively.
import streamlit as st
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Input, Dropout
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping
from sklearn.datasets import make_moons, make_circles, make_blobs
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
# Caching utility
@st.cache_resource
def train_models(params: dict) -> dict:
    """Train three model variants (base, early-stopping, dropout) on one dataset.

    Cached with ``st.cache_resource`` so the (expensive) training runs only
    once per unique ``params`` dict; any slider change produces a new dict
    and therefore a retrain.

    Args:
        params: Settings and data bundle with keys ``x``, ``y``, ``x_train``,
            ``x_test``, ``y_train``, ``y_test``, ``activation``, ``lr``,
            ``batch``, ``epochs``, ``num_neurons``, ``hidden_layers``,
            ``dropout_rate``.

    Returns:
        Dict keyed by mode name ('Base Model', 'EarlyStopping', 'Dropout'),
        each value holding the fitted model, its Keras History, test loss/
        accuracy, and pre-rendered decision-boundary and loss-curve figures.
    """
    models_data = {}

    def build_model(use_dropout: bool = False):
        # Build and compile a fresh MLP for 2-D binary classification.
        model = Sequential()
        model.add(Input(shape=(2,)))
        for _ in range(params['hidden_layers']):
            model.add(Dense(params['num_neurons'], activation=params['activation']))
            # Dropout after each hidden layer, only for the 'Dropout' variant.
            if use_dropout:
                model.add(Dropout(params['dropout_rate']))
        model.add(Dense(1, activation="sigmoid"))
        model.compile(optimizer=Adam(learning_rate=params['lr']),
                      loss='binary_crossentropy', metrics=['accuracy'])
        return model

    # Early stopping watches val_loss (Keras default monitor) and restores
    # the best weights seen; only attached to the 'EarlyStopping' variant.
    callbacks = [EarlyStopping(patience=2, restore_best_weights=True)]
    for mode in ['Base Model', 'EarlyStopping', 'Dropout']:
        use_dropout = (mode == 'Dropout')
        use_callbacks = callbacks if mode == 'EarlyStopping' else []
        model = build_model(use_dropout)
        # NOTE(review): the test split doubles as the validation set here, so
        # early stopping is tuned on the same data used for the final
        # evaluation — acceptable for a demo, but a leakage risk otherwise.
        history = model.fit(params['x_train'], params['y_train'],
                            validation_data=(params['x_test'], params['y_test']),
                            batch_size=params['batch'],
                            epochs=params['epochs'],
                            callbacks=use_callbacks,
                            verbose=0)
        test_loss, test_acc = model.evaluate(params['x_test'], params['y_test'], verbose=0)
        # Figures are rendered here so they get cached along with the models.
        models_data[mode] = {
            'model': model,
            'history': history,
            'test_loss': test_loss,
            'test_acc': test_acc,
            'decision_fig': plot_decision_boundary(model, params['x'], params['y']),
            'loss_fig': plot_loss_curve(history)
        }
    return models_data
# Plotting functions
def plot_decision_boundary(model, x, y):
    """Render the model's predicted probability surface over the 2-D feature
    plane, with the dataset scattered on top.

    Args:
        model: Fitted Keras model exposing ``predict``.
        x: Feature matrix of shape (n_samples, 2).
        y: Class labels used to color the scatter points.

    Returns:
        The matplotlib Figure containing the plot.
    """
    pad = 1
    axis1 = np.linspace(x[:, 0].min() - pad, x[:, 0].max() + pad, 300)
    axis2 = np.linspace(x[:, 1].min() - pad, x[:, 1].max() + pad, 300)
    xx, yy = np.meshgrid(axis1, axis2)
    # Evaluate the model on every point of the 300x300 grid in one batch.
    mesh_points = np.column_stack([xx.ravel(), yy.ravel()])
    surface = model.predict(mesh_points, verbose=0).reshape(xx.shape)
    fig, ax = plt.subplots(figsize=(7, 5))
    ax.contourf(xx, yy, surface, cmap='RdBu', alpha=0.6)
    ax.scatter(x[:, 0], x[:, 1], c=y, cmap='RdBu', edgecolors='k', s=25)
    ax.set_title("Decision Boundary")
    ax.set_xlabel("Feature 1")
    ax.set_ylabel("Feature 2")
    return fig
def plot_loss_curve(history):
    """Plot training vs. validation loss per epoch from a Keras History.

    Args:
        history: Keras History object whose ``history`` dict contains the
            'loss' and 'val_loss' series.

    Returns:
        The matplotlib Figure containing the plot.
    """
    fig, ax = plt.subplots(figsize=(7, 4))
    for series, label in (('loss', 'Train Loss'), ('val_loss', 'Val Loss')):
        ax.plot(history.history[series], label=label)
    ax.set_title("Loss Curve")
    ax.set_xlabel("Epoch")
    ax.set_ylabel("Loss")
    ax.legend()
    return fig
# UI: Sidebar Parameters — every widget below feeds into the `params` dict,
# so changing any of them invalidates the training cache and retrains.
st.sidebar.title("Model Controls")
dataset = st.sidebar.selectbox("Dataset", ["Moons", "Circles", "Blobs"])
noise = st.sidebar.slider("Noise Level", 0.0, 0.2, 0.1)
n_samples = st.sidebar.slider("Number of Samples", 100, 1000, 300, step=50)
activation = st.sidebar.selectbox("Activation", ['relu', 'sigmoid', 'tanh', 'elu'])
lr = st.sidebar.slider("Learning Rate", 0.001, 0.1, 0.01)
# This fraction is passed to train_test_split as test_size.
split = st.sidebar.slider("Train-Test Split", 0.1, 0.9, 0.2)
batch = st.sidebar.select_slider("Batch Size", list(range(8, 129, 8)), value=32)
epochs = st.sidebar.slider("Epochs", 10, 200, 50)
num_neurons = st.sidebar.slider("Neurons per Hidden Layer", 1, 100, 16)
hidden_layers = st.sidebar.slider("Hidden Layers", 1, 5, 2)
# Fix: the slider previously omitted a default, so it started at its minimum
# (0.0) and the "Dropout" variant trained with no dropout at all. Start at a
# conventional 0.2 so the regularization effect is visible out of the box.
dropout_rate = st.sidebar.slider("Dropout Rate", 0.0, 1.0, 0.2)
# Data Preparation: generate the selected synthetic 2-class dataset
# (fixed seeds keep the data and split reproducible across reruns),
# standardize features, then split off the test set.
if dataset == "Moons":
    x, y = make_moons(n_samples=n_samples, noise=noise, random_state=42)
elif dataset == "Circles":
    x, y = make_circles(n_samples=n_samples, noise=noise, random_state=42)
else:
    # "Blobs": two Gaussian clusters; has no noise parameter, uses spread instead.
    x, y = make_blobs(n_samples=n_samples, centers=2, cluster_std=1.5, random_state=42)
x = StandardScaler().fit_transform(x)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=split, random_state=27)
# Bundle the data and every hyperparameter into one dict; it is both the
# argument to the cached trainer and, effectively, its cache key.
params = dict(
    x=x,
    y=y,
    x_train=x_train,
    x_test=x_test,
    y_train=y_train,
    y_test=y_test,
    activation=activation,
    lr=lr,
    batch=batch,
    epochs=epochs,
    num_neurons=num_neurons,
    hidden_layers=hidden_layers,
    dropout_rate=dropout_rate,
)
# Train all models ONCE (st.cache_resource makes repeat reruns instant).
with st.spinner("Training models, please wait..."):
    model_results = train_models(params)

# UI: Select which model and plot to show
st.title("⚡ Neural Net Regularization Visualizer")
model_choice = st.radio("Choose Model", ["Base Model", "EarlyStopping", "Dropout"])
plot_choice = st.selectbox("Select Plot", ["Decision Boundary", "Loss Curve"])

# Display results for the chosen variant.
selected = model_results[model_choice]
st.subheader(f"Test Accuracy: {selected['test_acc']:.4f}")
st.caption(f"Test Loss: {selected['test_loss']:.4f}")
figure_key = 'decision_fig' if plot_choice == "Decision Boundary" else 'loss_fig'
st.pyplot(selected[figure_key])