# NeuroCanvas — pages/1_User_Defined_DataLab.py
# Streamlit page: interactively generate a synthetic 2-D dataset and train a
# small Keras neural network on it, visualising the decision boundary.
import streamlit as st
from sklearn.datasets import make_classification
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, InputLayer
from tensorflow.keras.optimizers import SGD
from sklearn.model_selection import train_test_split
from mlxtend.plotting import plot_decision_regions
import numpy as np
# --- Page setup ---
st.set_page_config(layout="wide")
st.title("๐Ÿง  Neural Network Playground - Custom Dataset")

# make_classification is already imported at the top of the file; only the
# circle/moon generators need importing here.
from sklearn.datasets import make_circles, make_moons

# --- Sidebar: synthetic 2-feature dataset settings ---
st.sidebar.title("๐Ÿ“ฆ Dataset Settings")
dataset_type = st.sidebar.selectbox(
    "Choose Dataset Type",
    ["Linear (make_classification)", "Circles (make_circles)", "Moons (make_moons)"],
)
# Sample count, label/feature noise, and RNG seed shared by all generators.
n_samples = st.sidebar.slider("Number of Samples", 100, 1000, 300)
noise = st.sidebar.slider("Noise Level", 0.0, 1.0, 0.2)
random_state = st.sidebar.number_input("Random State", value=42)
# --- Generate the synthetic 2-feature dataset chosen in the sidebar ---
if dataset_type == "Linear (make_classification)":
    # For the linear dataset, `noise` is used as the label-flip fraction.
    n_clusters_per_class = st.sidebar.slider("Number of Clusters", 1, 4, 1)
    X, y = make_classification(n_samples=n_samples, n_features=2, n_redundant=0,
                               n_informative=2, n_clusters_per_class=n_clusters_per_class,
                               flip_y=noise, random_state=random_state, class_sep=4)
elif dataset_type == "Circles (make_circles)":
    X, y = make_circles(n_samples=n_samples, noise=noise, factor=0.5, random_state=random_state)
else:
    # Last selectbox option ("Moons"). Using a plain `else` instead of a third
    # `elif` guarantees X and y are always bound, whatever the widget returns.
    X, y = make_moons(n_samples=n_samples, noise=noise, random_state=random_state)

# Tabular view: the two features plus the binary class label.
df = pd.DataFrame(X, columns=["X1", "X2"])
df["label"] = y
# Show the first few rows of the generated dataset.
st.write("### ๐Ÿ“„ Preview of Generated Data")
st.dataframe(df.head())

# Visualise the two features in the plane, coloured by class label.
st.write("### ๐ŸŽฏ Feature Scatter Plot by Class Label")
scatter_fig, scatter_ax = plt.subplots()
sns.scatterplot(data=df, x="X1", y="X2", hue="label", palette="deep", ax=scatter_ax)
st.pyplot(scatter_fig)
# --- Sidebar: network architecture & training hyper-parameters ---
st.sidebar.title("๐Ÿ› ๏ธ Neural Network Configuration")

# Zero hidden layers degenerates the model to plain logistic regression.
n_layers = st.sidebar.slider("Number of Hidden Layers", 0, 4, 2)
hidden_activation = "tanh"
if n_layers > 0:
    hidden_activation = st.sidebar.selectbox("Activation for Hidden Layers", ["tanh", "sigmoid"])
    # One neuron-count slider per hidden layer, keyed so widgets stay distinct.
    hidden_config = [
        st.sidebar.slider(f"Neurons in Layer {i+1}", 1, 10, 4, key=f"layer_{i}")
        for i in range(n_layers)
    ]
else:
    hidden_config = []

learning_rate = st.sidebar.slider("Learning Rate", 0.0001, 1.0, 0.1, step=0.0001, format="%.4f")
batch_size = st.sidebar.slider("Batch Size", 1, 512, 64)
epochs = st.sidebar.slider("Epochs", 100, 2000, 350, step=50)
# --- Train the network and visualise the result ---
if st.button("๐Ÿš€ Train Model"):
    with st.spinner("Training in progress... please wait โณ"):
        # Hold out 20% for a final accuracy check; stratify keeps the class
        # balance identical across splits. NOTE(review): the split seed is
        # fixed at 42 and intentionally independent of the sidebar
        # "Random State" used for data generation — confirm this is desired.
        X_train, X_test, y_train, y_test = train_test_split(
            X, y, test_size=0.2, random_state=42, stratify=y)

        # Build the model: optional Dense hidden stack + sigmoid output unit.
        model = Sequential()
        model.add(InputLayer(shape=(2,)))
        if n_layers == 0:
            st.info("๐Ÿงฎ Building Logistic Regression (no hidden layers)")
        else:
            for units in hidden_config:
                model.add(Dense(units, activation=hidden_activation))
        model.add(Dense(1, activation="sigmoid"))
        model.compile(optimizer=SGD(learning_rate=learning_rate),
                      loss='binary_crossentropy',
                      metrics=['accuracy'])
        history = model.fit(X_train, y_train, batch_size=batch_size,
                            epochs=epochs, validation_split=0.2, verbose=0)

        # Minimal adapter: mlxtend's plot_decision_regions only needs a
        # .predict() that returns hard integer class labels.
        class KerasClassifierWrapper:
            def __init__(self, model):
                self.model = model

            def predict(self, X):
                # verbose=0 stops Keras from writing a progress bar to the
                # log for every batch of grid points the plotter evaluates.
                return (self.model.predict(X, verbose=0) > 0.5).astype(int).flatten()

        wrapped_model = KerasClassifierWrapper(model)

        # ๐ŸŽฏ Decision boundary over the full dataset
        st.subheader("๐Ÿงญ Decision Region")
        fig2, ax2 = plt.subplots()
        plot_decision_regions(X=X, y=y.astype(int),
                              clf=wrapped_model, legend=2, ax=ax2)
        st.pyplot(fig2)

        # ๐Ÿ“‰ Training curves
        st.subheader("๐Ÿ“ˆ Training & Validation Loss")
        fig3, ax3 = plt.subplots()
        ax3.plot(history.history['loss'], label='Train Loss')
        ax3.plot(history.history['val_loss'], label='Val Loss')
        ax3.set_xlabel("Epochs")
        ax3.set_ylabel("Loss")
        ax3.legend()
        st.pyplot(fig3)

        # Report held-out accuracy — previously X_test/y_test were split off
        # but never used, so the page showed no generalisation metric.
        test_loss, test_acc = model.evaluate(X_test, y_test, verbose=0)
        st.write(f"### ๐Ÿงช Test Accuracy: {test_acc:.2%}")

    st.success("โœ… Model training completed!")