# Neural net regularization visualizer — Streamlit demo app.
| import streamlit as st | |
| import numpy as np | |
| import matplotlib.pyplot as plt | |
| from tensorflow.keras.models import Sequential | |
| from tensorflow.keras.layers import Dense, Input, Dropout | |
| from tensorflow.keras.optimizers import Adam | |
| from tensorflow.keras.callbacks import EarlyStopping | |
| from sklearn.datasets import make_moons, make_circles, make_blobs | |
| from sklearn.model_selection import train_test_split | |
| from sklearn.preprocessing import StandardScaler | |
# Train three variants of the same network (plain, early-stopped, dropout)
# on identical data so their behavior can be compared side by side.
def train_models(params):
    """Train the three model variants and return their results keyed by variant name.

    Each entry holds the fitted model, its training history, test loss/accuracy,
    and pre-rendered decision-boundary and loss-curve figures.
    """
    def make_network(with_dropout=False):
        # Build and compile a small MLP binary classifier from the params dict.
        net = Sequential()
        net.add(Input(shape=(2,)))
        for _ in range(params['hidden_layers']):
            net.add(Dense(params['num_neurons'], activation=params['activation']))
            if with_dropout:
                net.add(Dropout(params['dropout_rate']))
        net.add(Dense(1, activation="sigmoid"))
        net.compile(
            optimizer=Adam(learning_rate=params['lr']),
            loss='binary_crossentropy',
            metrics=['accuracy'],
        )
        return net

    stopper = [EarlyStopping(patience=2, restore_best_weights=True)]
    results = {}
    for variant in ('Base Model', 'EarlyStopping', 'Dropout'):
        net = make_network(with_dropout=(variant == 'Dropout'))
        hist = net.fit(
            params['x_train'], params['y_train'],
            validation_data=(params['x_test'], params['y_test']),
            batch_size=params['batch'],
            epochs=params['epochs'],
            # Only the EarlyStopping variant gets the callback.
            callbacks=stopper if variant == 'EarlyStopping' else [],
            verbose=0,
        )
        loss, acc = net.evaluate(params['x_test'], params['y_test'], verbose=0)
        results[variant] = {
            'model': net,
            'history': hist,
            'test_loss': loss,
            'test_acc': acc,
            'decision_fig': plot_decision_boundary(net, params['x'], params['y']),
            'loss_fig': plot_loss_curve(hist),
        }
    return results
| # Plotting functions | |
def plot_decision_boundary(model, x, y):
    """Return a figure of the model's predicted probability field over the 2-D plane.

    A 300x300 grid spanning the data (plus a 1-unit margin) is classified and
    drawn as filled contours, with the data points scattered on top.
    """
    pad = 1
    axis_x = np.linspace(x[:, 0].min() - pad, x[:, 0].max() + pad, 300)
    axis_y = np.linspace(x[:, 1].min() - pad, x[:, 1].max() + pad, 300)
    xx, yy = np.meshgrid(axis_x, axis_y)
    flat_grid = np.column_stack((xx.ravel(), yy.ravel()))
    probs = model.predict(flat_grid, verbose=0).reshape(xx.shape)
    fig, ax = plt.subplots(figsize=(7, 5))
    ax.contourf(xx, yy, probs, cmap='RdBu', alpha=0.6)
    ax.scatter(x[:, 0], x[:, 1], c=y, cmap='RdBu', edgecolors='k', s=25)
    ax.set_title("Decision Boundary")
    ax.set_xlabel("Feature 1")
    ax.set_ylabel("Feature 2")
    return fig
def plot_loss_curve(history):
    """Return a figure plotting training vs. validation loss per epoch."""
    fig, ax = plt.subplots(figsize=(7, 4))
    for key, label in (('loss', 'Train Loss'), ('val_loss', 'Val Loss')):
        ax.plot(history.history[key], label=label)
    ax.set_title("Loss Curve")
    ax.set_xlabel("Epoch")
    ax.set_ylabel("Loss")
    ax.legend()
    return fig
# UI: sidebar hyper-parameter controls.
st.sidebar.title("Model Controls")
dataset = st.sidebar.selectbox("Dataset", ["Moons", "Circles", "Blobs"])
noise = st.sidebar.slider("Noise Level", 0.0, 0.2, 0.1)
n_samples = st.sidebar.slider("Number of Samples", 100, 1000, 300, step=50)
activation = st.sidebar.selectbox("Activation", ['relu', 'sigmoid', 'tanh', 'elu'])
lr = st.sidebar.slider("Learning Rate", 0.001, 0.1, 0.01)
split = st.sidebar.slider("Train-Test Split", 0.1, 0.9, 0.2)
batch = st.sidebar.select_slider("Batch Size", list(range(8, 129, 8)), value=32)
epochs = st.sidebar.slider("Epochs", 10, 200, 50)
num_neurons = st.sidebar.slider("Neurons per Hidden Layer", 1, 100, 16)
hidden_layers = st.sidebar.slider("Hidden Layers", 1, 5, 2)
# BUG FIX: the slider previously had no default value, so it fell back to the
# minimum (0.0) and the "Dropout" variant trained with no dropout at all —
# identical to the base model. Default to 0.2 and cap at 0.9, since a rate of
# 1.0 would zero every activation and make training degenerate.
dropout_rate = st.sidebar.slider("Dropout Rate", 0.0, 0.9, 0.2)
# Data preparation: generate the chosen toy dataset, split, then standardize.
if dataset == "Moons":
    x, y = make_moons(n_samples=n_samples, noise=noise, random_state=42)
elif dataset == "Circles":
    x, y = make_circles(n_samples=n_samples, noise=noise, random_state=42)
else:
    x, y = make_blobs(n_samples=n_samples, centers=2, cluster_std=1.5, random_state=42)
# BUG FIX: the scaler was previously fit on the FULL dataset before splitting,
# leaking test-set statistics (mean/std) into training. Split first, fit the
# scaler on the training portion only, then transform everything with it.
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=split, random_state=27)
scaler = StandardScaler()
x_train = scaler.fit_transform(x_train)
x_test = scaler.transform(x_test)
x = scaler.transform(x)  # full set in the same scaled space, used for plotting
# Bundle the data and hyper-parameters the training routine needs.
params = dict(
    x=x,
    y=y,
    x_train=x_train,
    x_test=x_test,
    y_train=y_train,
    y_test=y_test,
    activation=activation,
    lr=lr,
    batch=batch,
    epochs=epochs,
    num_neurons=num_neurons,
    hidden_layers=hidden_layers,
    dropout_rate=dropout_rate,
)
# Train all three variants once, then let the user pick what to inspect.
with st.spinner("Training models, please wait..."):
    model_results = train_models(params)

# UI: choose which trained variant and which figure to show.
st.title("⚡ Neural Net Regularization Visualizer")
model_choice = st.radio("Choose Model", ["Base Model", "EarlyStopping", "Dropout"])
plot_choice = st.selectbox("Select Plot", ["Decision Boundary", "Loss Curve"])

# Display the chosen variant's metrics and the requested figure.
selected = model_results[model_choice]
st.subheader(f"Test Accuracy: {selected['test_acc']:.4f}")
st.caption(f"Test Loss: {selected['test_loss']:.4f}")
fig_key = 'decision_fig' if plot_choice == "Decision Boundary" else 'loss_fig'
st.pyplot(selected[fig_key])