"""Streamlit playground: train a small Keras network on synthetic 2-D classification datasets."""
import streamlit as st
from sklearn.datasets import make_classification
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, InputLayer
from tensorflow.keras.optimizers import SGD
from sklearn.model_selection import train_test_split
from mlxtend.plotting import plot_decision_regions
import numpy as np
# --- Page setup and dataset-selection sidebar ---
st.set_page_config(layout="wide")
st.title("🧠 Neural Network Playground - Custom Dataset")

# Generators for the three synthetic 2-feature dataset shapes
from sklearn.datasets import make_classification, make_circles, make_moons

# Sidebar: dataset settings; these values drive the generation branch below.
st.sidebar.title("📦 Dataset Settings")
dataset_type = st.sidebar.selectbox(
    "Choose Dataset Type",
    ["Linear (make_classification)", "Circles (make_circles)", "Moons (make_moons)"],
)
n_samples = st.sidebar.slider("Number of Samples", 100, 1000, 300)
# For make_classification this is reused as flip_y (label noise); for the
# others it is Gaussian feature noise.
noise = st.sidebar.slider("Noise Level", 0.0, 1.0, 0.2)
random_state = st.sidebar.number_input("Random State", value=42)
# --- Generate the chosen 2-feature synthetic dataset ---
if dataset_type == "Linear (make_classification)":
    n_clusters_per_class = st.sidebar.slider("Number of Clusters", 1, 4, 1)
    # flip_y reuses the noise slider: fraction of labels randomly flipped.
    # class_sep=4 keeps the clusters well separated so the boundary is learnable.
    X, y = make_classification(n_samples=n_samples, n_features=2, n_redundant=0,
                               n_informative=2, n_clusters_per_class=n_clusters_per_class,
                               flip_y=noise, random_state=random_state, class_sep=4)
elif dataset_type == "Circles (make_circles)":
    X, y = make_circles(n_samples=n_samples, noise=noise, factor=0.5, random_state=random_state)
else:
    # "Moons (make_moons)" — final `else` (not `elif`) so X/y are always bound
    # even if the selectbox options are ever extended without updating this chain.
    X, y = make_moons(n_samples=n_samples, noise=noise, random_state=random_state)

df = pd.DataFrame(X, columns=["X1", "X2"])
df["label"] = y

st.write("### 📊 Preview of Generated Data")
st.dataframe(df.head())

# Scatter plot of the raw features, coloured by class label
st.write("### 🎯 Feature Scatter Plot by Class Label")
fig, ax = plt.subplots()
sns.scatterplot(data=df, x="X1", y="X2", hue="label", palette="deep", ax=ax)
st.pyplot(fig)
# --- Sidebar: network architecture & training hyper-parameters ---
st.sidebar.title("🛠️ Neural Network Configuration")

# 0 hidden layers degenerates to logistic regression (handled at train time).
n_layers = st.sidebar.slider("Number of Hidden Layers", 0, 4, 2)
hidden_config = []          # units per hidden layer, in order
hidden_activation = "tanh"  # default; only selectable when hidden layers exist
if n_layers > 0:
    hidden_activation = st.sidebar.selectbox("Activation for Hidden Layers", ["tanh", "sigmoid"])
for i in range(n_layers):
    # key= makes each slider's state independent across reruns
    units = st.sidebar.slider(f"Neurons in Layer {i+1}", 1, 10, 4, key=f"layer_{i}")
    hidden_config.append(units)

learning_rate = st.sidebar.slider("Learning Rate", 0.0001, 1.0, 0.1, step=0.0001, format="%.4f")
batch_size = st.sidebar.slider("Batch Size", 1, 512, 64)
epochs = st.sidebar.slider("Epochs", 100, 2000, 350, step=50)
# Train model
# --- Train the model and visualize results ---
if st.button("🚀 Train Model"):
    with st.spinner("Training in progress... please wait ⏳"):
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

        # Build model: with 0 hidden layers this is plain logistic regression
        # (a single sigmoid output unit on the raw features).
        model = Sequential()
        model.add(InputLayer(shape=(2,)))
        if n_layers == 0:
            st.info("🧮 Building Logistic Regression (no hidden layers)")
        else:
            for units in hidden_config:
                model.add(Dense(units, activation=hidden_activation))
        model.add(Dense(1, activation="sigmoid"))

        model.compile(optimizer=SGD(learning_rate=learning_rate),
                      loss='binary_crossentropy',
                      metrics=['accuracy'])
        history = model.fit(X_train, y_train, batch_size=batch_size,
                            epochs=epochs, validation_split=0.2, verbose=0)

        # mlxtend's plot_decision_regions expects a sklearn-style estimator
        # exposing .predict that returns integer class labels.
        class KerasClassifierWrapper:
            def __init__(self, model):
                self.model = model

            def predict(self, X):
                # Keras predict yields probabilities of shape (n, 1);
                # threshold at 0.5 and flatten to a 1-D int label array.
                return (self.model.predict(X) > 0.5).astype(int).flatten()

        wrapped_model = KerasClassifierWrapper(model)

        # 🎯 Decision region over the full dataset
        st.subheader("🧭 Decision Region")
        fig2, ax2 = plt.subplots()
        plot_decision_regions(X=X, y=y.astype(int),
                              clf=wrapped_model, legend=2, ax=ax2)
        st.pyplot(fig2)

        # 📈 Training history (train vs validation loss per epoch)
        st.subheader("📉 Training & Validation Loss")
        fig3, ax3 = plt.subplots()
        ax3.plot(history.history['loss'], label='Train Loss')
        ax3.plot(history.history['val_loss'], label='Val Loss')
        ax3.set_xlabel("Epochs")
        ax3.set_ylabel("Loss")
        ax3.legend()
        st.pyplot(fig3)

    st.success("✅ Model training completed!")