|
|
import streamlit as st |
|
|
import base64 |
|
|
import matplotlib.pyplot as plt |
|
|
import seaborn as sns |
|
|
from sklearn.datasets import make_circles, make_moons, make_classification |
|
|
from sklearn.model_selection import train_test_split |
|
|
from sklearn.preprocessing import StandardScaler |
|
|
from keras.models import Sequential |
|
|
from keras.layers import Dense |
|
|
from keras.optimizers import SGD |
|
|
from mlxtend.plotting import plot_decision_regions |
|
|
import numpy as np |
|
|
import tensorflow as tf |
|
|
|
|
|
|
|
|
# Must be the first Streamlit call in the script; sets tab title and wide layout.
st.set_page_config(layout="wide", page_title="π§ Neural Network Explorer")
|
|
|
|
|
|
|
|
def set_blurred_background(image_path):
    """Render the image at *image_path* as a blurred, full-viewport page background.

    The file is read from disk, base64-encoded, and inlined into a CSS
    ``data:`` URI, so no static-file hosting is required. The CSS is injected
    with ``st.markdown(..., unsafe_allow_html=True)``.

    Args:
        image_path: Path to the background image (encoded as image/png in the
            data URI regardless of actual format — browsers sniff the bytes,
            but this presumably expects a raster image; confirm for new assets).

    Raises:
        FileNotFoundError: If *image_path* does not exist.
    """
    with open(image_path, "rb") as img_file:
        img_base64 = base64.b64encode(img_file.read()).decode()
    # A fixed, full-viewport div is pinned behind the app (z-index: -1); the
    # image lives on its ::before pseudo-element so only the background is
    # blurred, never the page content drawn on top of it.
    st.markdown(
        f"""
        <style>
        .blur-background {{
            position: fixed;
            top: 0;
            left: 0;
            width: 100vw;
            height: 100vh;
            z-index: -1;
        }}

        .blur-background::before {{
            content: "";
            background-image: url("data:image/png;base64,{img_base64}");
            background-size: cover;
            background-position: center;
            background-attachment: fixed;
            filter: blur(8px);
            position: absolute;
            top: 0;
            left: 0;
            width: 100%;
            height: 100%;
        }}
        </style>
        <div class="blur-background"></div>
        """,
        unsafe_allow_html=True
    )
|
|
|
|
|
|
|
|
# Apply the page background; expects ann.jpeg alongside this script
# (raises FileNotFoundError at startup if the file is missing).
set_blurred_background("ann.jpeg")
|
|
|
|
|
|
|
|
# Page header — raw HTML so the titles can be centered and colored.
st.markdown("""
<h1 style='text-align: center; color: #4B0082;'>β¨ Neural Network Explorer</h1>
<h4 style='text-align: center; color: #2F4F4F;'>Visualize and train simple neural networks interactively</h4>
""", unsafe_allow_html=True)
|
|
|
|
|
|
|
|
# Restyle the sidebar container (light-blue background, padding, rounded
# corners). NOTE(review): targets Streamlit's internal data-testid DOM —
# may break across Streamlit upgrades.
st.sidebar.markdown("""
<style>
section[data-testid="stSidebar"] > div:first-child {
    background-color: #F0F8FF;
    padding: 1rem;
    border-radius: 10px;
}
</style>
""", unsafe_allow_html=True)
|
|
|
|
|
# Sidebar controls: dataset size/noise plus every training hyper-parameter.
# Widget creation order defines on-screen order — keep these together.
st.sidebar.header("π§ Configure Model")
# Dataset generation knobs (consumed by make_circles/make_moons/make_classification).
num_points = st.sidebar.slider("Number of Samples", 100, 10000, 1000, step=100)
noise = st.sidebar.slider("Dataset Noise", 0.01, 0.9, 0.1)
# Training knobs (consumed by model.fit and the SGD optimizer).
batch_size = st.sidebar.slider("Batch Size", 1, 512, 32)
epochs = st.sidebar.slider("Epochs", 1, 100, 20)
learning_rate = st.sidebar.slider("Learning Rate", 0.0001, 1.0, 0.01, step=0.0001, format="%.4f")
# Architecture knobs (number/width of Dense layers and their activation).
hidden_layers = st.sidebar.slider("Hidden Layers", 1, 5, 2)
neurons_per_layer = st.sidebar.slider("Neurons per Layer", 1, 128, 16)
activation_name = st.sidebar.selectbox("Activation", ["relu", "tanh", "sigmoid", "linear"])
|
|
|
|
|
|
|
|
# Dataset picker: build a 2-feature binary-classification set matching the
# selection; a fixed random_state keeps each dataset reproducible across reruns.
st.markdown("## π§ͺ Dataset Selection")
dataset_option = st.selectbox("Select a dataset", ("circle", "moons", "classification"))

if dataset_option == "moons":
    x, y = make_moons(n_samples=num_points, noise=noise, random_state=42)
elif dataset_option == "circle":
    x, y = make_circles(n_samples=num_points, noise=noise, factor=0.5, random_state=42)
else:
    x, y = make_classification(
        n_samples=num_points,
        n_features=2,
        n_informative=2,
        n_redundant=0,
        n_clusters_per_class=1,
        random_state=42,
    )
|
|
|
|
|
|
|
|
# Optional preview: scatter the raw samples in 2-D, colored by class label.
if st.button("π Show Dataset"):
    st.subheader("π― Sample Distribution")
    scatter_fig, scatter_ax = plt.subplots()
    sns.scatterplot(x=x[:, 0], y=x[:, 1], hue=y, palette="coolwarm", ax=scatter_ax)
    st.pyplot(scatter_fig)
|
|
|
|
|
|
|
|
# Train button: fit a configurable MLP on the selected dataset, plot the loss
# curves, and draw the learned decision boundary.
if st.button("π Train Model"):
    st.subheader("βοΈ Training the Model...")

    # Hold out 20% for test, stratified so both classes keep their ratio;
    # standardise features, fitting the scaler on train only (no leakage).
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=0.2, random_state=42, stratify=y
    )
    scaler = StandardScaler()
    x_train = scaler.fit_transform(x_train)
    x_test = scaler.transform(x_test)

    # Build the MLP: `hidden_layers` Dense layers of `neurons_per_layer`
    # units each, then a single sigmoid unit for binary classification.
    model = Sequential()
    model.add(Dense(neurons_per_layer, input_shape=(2,), activation=activation_name))
    for _ in range(hidden_layers - 1):
        model.add(Dense(neurons_per_layer, activation=activation_name))
    model.add(Dense(1, activation="sigmoid"))

    optimizer = SGD(learning_rate=learning_rate)
    model.compile(optimizer=optimizer, loss="binary_crossentropy", metrics=["accuracy"])
    # validation_split carves a further 20% off the training data for val loss.
    history = model.fit(
        x_train, y_train,
        batch_size=batch_size, epochs=epochs,
        validation_split=0.2, verbose=0,
    )

    # BUG FIX: this message was a single string literal broken across two
    # source lines (a SyntaxError); rejoined into one literal.
    st.success("β Model trained successfully!")

    # Loss curves over epochs (train vs. validation).
    st.subheader("π Training Metrics")
    fig, ax = plt.subplots()
    ax.plot(history.history['loss'], label='Train Loss')
    ax.plot(history.history['val_loss'], label='Val Loss')
    ax.set_title("Loss Over Epochs")
    ax.legend()
    st.pyplot(fig)

    st.write(f"π’ Final Training Loss: **{history.history['loss'][-1]:.4f}**")
    st.write(f"π Final Validation Loss: **{history.history['val_loss'][-1]:.4f}**")

    class ModelWrapper:
        """Adapt the Keras model to the scikit-learn-style ``predict``
        contract that mlxtend's plot_decision_regions expects
        (1-D integer class labels)."""

        def __init__(self, model):
            self.model = model

        def predict(self, X):
            # Keras returns probabilities of shape (n, 1); threshold at 0.5
            # and ravel to 1-D — mlxtend rejects 2-D prediction arrays.
            # verbose=0 suppresses Keras's per-call progress output.
            return (self.model.predict(X, verbose=0) > 0.5).astype("int32").ravel()

    st.subheader("π Decision Boundary")
    fig, ax = plt.subplots()
    plot_decision_regions(X=x_train, y=y_train, clf=ModelWrapper(model), ax=ax)
    st.pyplot(fig)
|
|
|