File size: 5,310 Bytes
ff402b5
 
 
 
 
 
 
 
 
 
 
 
 
efd1d7c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ff402b5
efd1d7c
ff402b5
efd1d7c
 
 
 
 
ff402b5
 
efd1d7c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ff402b5
efd1d7c
ff402b5
efd1d7c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
import streamlit as st
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.datasets import make_classification, make_moons, make_circles
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from mlxtend.plotting import plot_decision_regions

# --- Config ---
st.set_page_config(page_title="Neural Net Lab", layout="wide")
st.title("πŸ”¬ Interactive Neural Network Lab")

# --- Session State Setup ---
# Pre-seed every key the later steps read, so attribute access on
# st.session_state never raises on a fresh session.
for _key in ("X", "y", "model", "history", "X_train", "y_train"):
    if _key not in st.session_state:
        st.session_state[_key] = None

# --- Step 1: Dataset Generator ---
# Lets the user synthesize a 2D binary-classification dataset, preview it as a
# table, and scatter-plot it colored by class label.
with st.expander("πŸ“Œ STEP 1: Generate Dataset", expanded=True):
    st.markdown("Start by creating a synthetic 2D classification dataset.")

    left, middle, right = st.columns([2, 1, 1])
    chosen_type = left.selectbox("Choose a dataset type", ["make_classification", "make_moons", "make_circles"])
    sample_count = middle.slider("Number of Samples", 100, 5000, 1000, step=100)
    noise = right.slider("Noise", 0.0, 1.0, 0.2)
    # Only consumed by make_circles; shown unconditionally to keep layout stable.
    factor = st.slider("Factor (Circles only)", 0.1, 1.0, 0.5)

    if st.button("πŸš€ Generate Dataset"):
        if chosen_type == "make_moons":
            features, labels = make_moons(n_samples=sample_count, noise=noise, random_state=42)
        elif chosen_type == "make_circles":
            features, labels = make_circles(n_samples=sample_count, noise=noise, factor=factor, random_state=42)
        else:
            # make_classification has no noise kwarg; flip_y mislabels a fraction.
            features, labels = make_classification(
                n_samples=sample_count, n_features=2, n_informative=2,
                n_redundant=0, n_clusters_per_class=1, flip_y=noise, random_state=42)
        st.session_state.X, st.session_state.y = features, labels
        st.success("Dataset generated successfully! πŸŽ‰")

    if st.session_state.X is not None:
        preview = pd.DataFrame(st.session_state.X, columns=["x1", "x2"])
        preview["label"] = st.session_state.y
        st.markdown("### πŸ” Dataset Preview")
        st.dataframe(preview.head())

        st.markdown("### πŸ“Š Visualize")
        scatter_fig, scatter_ax = plt.subplots()
        sns.scatterplot(data=preview, x="x1", y="x2", hue="label", palette="coolwarm", ax=scatter_ax)
        st.pyplot(scatter_fig)

# --- Step 2: Train Neural Network ---
# Splits/scales the data from Step 1 and fits a small 2-8-4-1 sigmoid MLP,
# storing the fitted model, training history, and scaled training set in
# session state for Step 3.
with st.expander("πŸ€– STEP 2: Train Neural Network"):
    if st.session_state.X is None:
        st.warning("⚠️ Please generate the dataset in Step 1.")
    else:
        st.subheader("πŸ› οΈ Model Configuration")
        c1, c2, c3 = st.columns(3)
        test_size = c1.slider("Test Split %", 10, 90, 20) / 100
        learning_rate = c2.selectbox("Learning Rate", [0.0001, 0.001, 0.01, 0.1])
        batch_size = c3.slider("Batch Size", 1, 512, 64)
        epochs = st.slider("Epochs", 10, 500, 100)

        if st.button("🧠 Train the Model"):
            st.info("πŸ“‘ Preprocessing and Training in progress...")
            # Only the training split is used here; the held-out split is discarded.
            X_train, _, y_train, _ = train_test_split(st.session_state.X, st.session_state.y, test_size=test_size, random_state=1)
            scaler = StandardScaler()
            X_scaled = scaler.fit_transform(X_train)

            model = Sequential([
                Dense(8, activation='relu', input_shape=(2,)),
                Dense(4, activation='relu'),
                Dense(1, activation='sigmoid')
            ])
            model.compile(optimizer=SGD(learning_rate=learning_rate), loss='binary_crossentropy', metrics=['accuracy'])

            # validation_split carves the last 20% of X_scaled off for validation metrics.
            history = model.fit(X_scaled, y_train, validation_split=0.2,
                                batch_size=batch_size, epochs=epochs, verbose=0)

            st.session_state.model = model
            st.session_state.history = history
            st.session_state.X_train = X_scaled
            st.session_state.y_train = y_train
            st.success("βœ… Model training complete!")

            # BUG FIX: the original displayed the final *validation loss*
            # (history.history['val_loss'][-1]) under the label "Final Accuracy".
            # Report the final validation accuracy instead; 'val_acc' is the key
            # name used by older Keras versions.
            hist = history.history
            final_val_acc = hist.get('val_accuracy', hist.get('val_acc'))[-1]
            st.metric("Final Accuracy", f"{final_val_acc:.4f}")
            st.progress(100)

# --- Step 3: Visualize Model Output ---
# Plots the trained model's decision boundary over the scaled training set and
# the train/validation loss curves from the stored training history.
with st.expander("πŸ“ˆ STEP 3: Visualize Model Output"):
    if st.session_state.model is None:
        st.warning("⚠️ Train the model first in Step 2.")
    else:
        col1, col2 = st.columns(2)

        with col1:
            st.subheader("🌐 Decision Boundary")

            # BUG FIX: mlxtend's plot_decision_regions calls clf.predict(X) and
            # expects a 1-D array of integer class labels, but a Keras
            # Sequential's predict() returns an (n, 1) float array of sigmoid
            # probabilities, which breaks the region plot. Adapt the model to
            # an sklearn-style classifier by thresholding at 0.5.
            class _KerasBinaryAdapter:
                """Wraps a Keras binary model with a label-returning predict()."""
                def __init__(self, keras_model):
                    self._model = keras_model
                def predict(self, X):
                    probs = self._model.predict(X, verbose=0)
                    return (probs > 0.5).astype(int).ravel()

            fig1, ax1 = plt.subplots()
            plot_decision_regions(st.session_state.X_train,
                                  st.session_state.y_train.astype(int),
                                  clf=_KerasBinaryAdapter(st.session_state.model),
                                  ax=ax1, legend=2)
            st.pyplot(fig1)

        with col2:
            st.subheader("πŸ“‰ Training Loss Curve")
            fig2, ax2 = plt.subplots()
            ax2.plot(st.session_state.history.history['loss'], label='Train Loss')
            ax2.plot(st.session_state.history.history['val_loss'], label='Val Loss')
            ax2.set_title("Loss over Epochs")
            ax2.legend()
            st.pyplot(fig2)

        st.success("πŸ§ͺ Visualization ready!")