# Streamlit app — Interactive Neural Network Lab (Hugging Face Space)
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import streamlit as st
from keras.layers import Dense
from keras.models import Sequential
from keras.optimizers import SGD
from mlxtend.plotting import plot_decision_regions
from sklearn.datasets import make_classification, make_moons, make_circles
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
# --- Config ---
st.set_page_config(page_title="Neural Net Lab", layout="wide")
st.title("π¬ Interactive Neural Network Lab")

# --- Session State Setup ---
# Initialise every persistent key exactly once, so the later steps can
# simply test `is None` instead of probing for missing keys.
for _state_key in ("X", "y", "model", "history", "X_train", "y_train"):
    if _state_key not in st.session_state:
        st.session_state[_state_key] = None
# --- Step 1: Dataset Generator ---
# Lets the user synthesise a 2-D binary-classification dataset and stores
# it in st.session_state.X / st.session_state.y for the later steps.
with st.expander("π STEP 1: Generate Dataset", expanded=True):
    st.markdown("Start by creating a synthetic 2D classification dataset.")
    col1, col2, col3 = st.columns([2, 1, 1])
    dataset_type = col1.selectbox("Choose a dataset type", ["make_classification", "make_moons", "make_circles"])
    n_samples = col2.slider("Number of Samples", 100, 5000, 1000, step=100)
    noise_level = col3.slider("Noise", 0.0, 1.0, 0.2)
    # BUG FIX: make_circles requires 0 < factor < 1; the previous maximum of
    # 1.0 let the user pick a value that raises ValueError inside sklearn,
    # so the slider is capped just below 1.
    circle_factor = st.slider("Factor (Circles only)", 0.1, 0.99, 0.5)
    if st.button("π Generate Dataset"):
        if dataset_type == "make_moons":
            X, y = make_moons(n_samples=n_samples, noise=noise_level, random_state=42)
        elif dataset_type == "make_circles":
            X, y = make_circles(n_samples=n_samples, noise=noise_level, factor=circle_factor, random_state=42)
        else:
            # For make_classification the noise slider is reused as the
            # fraction of randomly flipped labels (flip_y accepts [0, 1]).
            X, y = make_classification(n_samples=n_samples, n_features=2, n_informative=2,
                                       n_redundant=0, n_clusters_per_class=1, flip_y=noise_level, random_state=42)
        st.session_state.X, st.session_state.y = X, y
        st.success("Dataset generated successfully! π")
    # Preview + scatter plot render on every rerun once a dataset exists.
    if st.session_state.X is not None:
        df = pd.DataFrame(st.session_state.X, columns=["x1", "x2"])
        df["label"] = st.session_state.y
        st.markdown("### π Dataset Preview")
        st.dataframe(df.head())
        st.markdown("### π Visualize")
        fig, ax = plt.subplots()
        sns.scatterplot(data=df, x="x1", y="x2", hue="label", palette="coolwarm", ax=ax)
        st.pyplot(fig)
# --- Step 2: Train Neural Network ---
# Splits/scales the generated data, trains a small Keras MLP, and stashes
# the fitted model, its History, and the scaled training data in session
# state for Step 3.
with st.expander("π€ STEP 2: Train Neural Network"):
    if st.session_state.X is None:
        st.warning("β οΈ Please generate the dataset in Step 1.")
    else:
        st.subheader("π οΈ Model Configuration")
        c1, c2, c3 = st.columns(3)
        test_size = c1.slider("Test Split %", 10, 90, 20) / 100
        learning_rate = c2.selectbox("Learning Rate", [0.0001, 0.001, 0.01, 0.1])
        batch_size = c3.slider("Batch Size", 1, 512, 64)
        epochs = st.slider("Epochs", 10, 500, 100)
        if st.button("π§ Train the Model"):
            st.info("π‘ Preprocessing and Training in progress...")
            # The held-out test partition is intentionally discarded —
            # evaluation below relies on Keras' validation_split instead.
            X_train, _, y_train, _ = train_test_split(st.session_state.X, st.session_state.y, test_size=test_size, random_state=1)
            scaler = StandardScaler()
            X_scaled = scaler.fit_transform(X_train)
            # Fixed 2-8-4-1 MLP with a sigmoid head for binary classification.
            model = Sequential([
                Dense(8, activation='relu', input_shape=(2,)),
                Dense(4, activation='relu'),
                Dense(1, activation='sigmoid')
            ])
            model.compile(optimizer=SGD(learning_rate=learning_rate), loss='binary_crossentropy', metrics=['accuracy'])
            history = model.fit(X_scaled, y_train, validation_split=0.2,
                                batch_size=batch_size, epochs=epochs, verbose=0)
            st.session_state.model = model
            st.session_state.history = history
            st.session_state.X_train = X_scaled
            st.session_state.y_train = y_train
            st.success("β Model training complete!")
            # BUG FIX: this metric was labelled "Final Accuracy" but showed
            # the final validation *loss*; report validation accuracy.
            st.metric("Final Accuracy", f"{history.history['val_accuracy'][-1]:.4f}")
            st.progress(100)
# --- Step 3: Visualize Model Output ---
# Plots the decision boundary over the scaled training data and the
# train/validation loss curves recorded by Keras.
with st.expander("π STEP 3: Visualize Model Output"):
    if st.session_state.model is None:
        st.warning("β οΈ Train the model first in Step 2.")
    else:
        # BUG FIX: mlxtend's plot_decision_regions calls clf.predict(X) and
        # expects a 1-D array of integer class labels, while a sigmoid Keras
        # model returns an (n, 1) float array of probabilities. This thin
        # adapter thresholds at 0.5 and flattens so the plot renders.
        class _KerasBinaryAdapter:
            def __init__(self, keras_model):
                self._model = keras_model

            def predict(self, X):
                proba = self._model.predict(X, verbose=0)
                return (proba.ravel() > 0.5).astype(int)

        col1, col2 = st.columns(2)
        with col1:
            st.subheader("π Decision Boundary")
            fig1, ax1 = plt.subplots()
            # y must also be integer-typed for mlxtend's region colouring.
            plot_decision_regions(st.session_state.X_train,
                                  st.session_state.y_train.astype(int),
                                  clf=_KerasBinaryAdapter(st.session_state.model),
                                  ax=ax1, legend=2)
            st.pyplot(fig1)
        with col2:
            st.subheader("π Training Loss Curve")
            fig2, ax2 = plt.subplots()
            ax2.plot(st.session_state.history.history['loss'], label='Train Loss')
            ax2.plot(st.session_state.history.history['val_loss'], label='Val Loss')
            ax2.set_title("Loss over Epochs")
            ax2.legend()
            st.pyplot(fig2)
        st.success("π§ͺ Visualization ready!")