Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -1,91 +1,111 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
import numpy as np
|
| 3 |
import matplotlib.pyplot as plt
|
| 4 |
-
import
|
| 5 |
-
from
|
| 6 |
-
from sklearn.datasets import make_circles
|
| 7 |
from sklearn.model_selection import train_test_split
|
| 8 |
from sklearn.preprocessing import StandardScaler
|
| 9 |
-
|
| 10 |
-
|
| 11 |
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=42)
|
| 15 |
-
scaler = StandardScaler()
|
| 16 |
-
X_train = scaler.fit_transform(X_train)
|
| 17 |
-
X_test = scaler.transform(X_test)
|
| 18 |
-
return X_train, X_test, y_train, y_test
|
| 19 |
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
model.add(keras.layers.InputLayer(input_shape=(2,)))
|
| 23 |
-
|
| 24 |
-
for units in layers:
|
| 25 |
-
model.add(keras.layers.Dense(units, activation=activation))
|
| 26 |
-
|
| 27 |
-
model.add(keras.layers.Dense(1, activation='sigmoid'))
|
| 28 |
-
|
| 29 |
-
optimizer = keras.optimizers.Adam(learning_rate=learning_rate)
|
| 30 |
-
model.compile(optimizer=optimizer, loss='binary_crossentropy', metrics=['accuracy'])
|
| 31 |
-
return model
|
| 32 |
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 39 |
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
|
| 48 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 49 |
G = nx.DiGraph()
|
| 50 |
-
layer_sizes = [2] +
|
| 51 |
-
|
| 52 |
pos = {}
|
| 53 |
-
|
| 54 |
for i, size in enumerate(layer_sizes):
|
| 55 |
for j in range(size):
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
for i in range(len(layer_sizes) - 1):
|
| 61 |
-
for j in range(layer_sizes[i]):
|
| 62 |
-
for k in range(layer_sizes[i+1]):
|
| 63 |
-
G.add_edge(node_idx + j, node_idx + layer_sizes[i] + k)
|
| 64 |
-
node_idx += layer_sizes[i]
|
| 65 |
|
| 66 |
-
|
| 67 |
-
|
|
|
|
| 68 |
st.pyplot(plt)
|
| 69 |
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
layers = st.sidebar.text_input("Network Shape (comma-separated)", "4,2")
|
| 73 |
-
layers = list(map(int, layers.split(',')))
|
| 74 |
-
activation = st.sidebar.selectbox("Activation Function", ['tanh', 'relu', 'sigmoid'])
|
| 75 |
-
learning_rate = st.sidebar.slider("Learning Rate", 0.001, 0.1, 0.03, step=0.001)
|
| 76 |
-
batch_size = st.sidebar.slider("Batch Size", 5, 50, 10, step=5)
|
| 77 |
-
epochs = st.sidebar.slider("Epochs", 10, 100, 50, step=10)
|
| 78 |
-
|
| 79 |
-
X_train, X_test, y_train, y_test = generate_data()
|
| 80 |
-
|
| 81 |
-
model = build_model(layers, activation, learning_rate)
|
| 82 |
-
model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, verbose=0)
|
| 83 |
|
| 84 |
-
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
|
| 88 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 89 |
|
| 90 |
-
|
| 91 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
import numpy as np
|
| 3 |
import matplotlib.pyplot as plt
|
| 4 |
+
import networkx as nx
|
| 5 |
+
from sklearn.datasets import make_classification, make_moons, make_circles, make_regression
|
|
|
|
| 6 |
from sklearn.model_selection import train_test_split
|
| 7 |
from sklearn.preprocessing import StandardScaler
|
| 8 |
+
from tensorflow import keras
|
| 9 |
+
from tensorflow.keras import layers
|
| 10 |
|
| 11 |
+
# Title Bar
st.title("Neural Network Playground")

# Hyperparameter choices, named so the navigation bar below stays compact.
EPOCH_CHOICES = [100, 200, 500, 800, 1000, 1500, 2000]
LR_CHOICES = [0.0001, 0.001, 0.01, 0.1, 0.3, 1, 3, 10]
ACTIVATION_CHOICES = ["ReLU", "Tanh", "Sigmoid", "Linear"]
REG_TYPE_CHOICES = ["L1", "L2", "None"]
PROBLEM_CHOICES = ["Classification", "Regression"]

# Navigation Bar: one column per training control.
col1, col2, col3, col4, col5, col6 = st.columns(6)

with col1:
    epochs = st.selectbox("Epochs", EPOCH_CHOICES)
with col2:
    learning_rate = st.selectbox("Learning Rate", LR_CHOICES)
with col3:
    activation = st.selectbox("Activation Function", ACTIVATION_CHOICES)
with col4:
    reg_type = st.selectbox("Regularization Type", REG_TYPE_CHOICES)
    reg_rate = st.slider("Regularization Rate", 0.0, 0.1, 0.01, step=0.01)
with col5:
    problem_type = st.selectbox("Problem Type", PROBLEM_CHOICES)
with col6:
    # Pressing the button makes `play` True for this rerun, triggering training below.
    play = st.button("Train Model")
|
| 30 |
+
|
| 31 |
+
# Dataset Selection & Preprocessing
st.subheader("Dataset Selection & Preprocessing")
dataset_type = st.selectbox("Select Dataset", ["Binary Classification", "XOR", "Binary Spiral", "Binary Circles", "Regression 1", "Regression 2"])
test_ratio = st.slider("Train-Test Split Ratio", 0.1, 0.5, 0.2, step=0.05)
batch_size = st.slider("Batch Size", 4, 64, 16, step=2)

data = None
X_train, X_test, y_train, y_test = None, None, None, None
if st.button("Generate Dataset"):
    if dataset_type == "Binary Classification":
        data = make_classification(n_samples=1000, n_features=2, n_classes=2, random_state=42)
    elif dataset_type == "XOR":
        # NOTE(review): labelled "XOR" but generated with make_moons — confirm intent.
        data = make_moons(n_samples=1000, noise=0.2, random_state=42)
    elif dataset_type == "Binary Spiral":
        # NOTE(review): labelled "Spiral" but generated with make_circles — confirm intent.
        data = make_circles(n_samples=1000, noise=0.2, factor=0.5, random_state=42)
    elif dataset_type == "Binary Circles":
        # FIX: this option previously had no branch, so selecting it and pressing
        # "Generate Dataset" silently produced nothing.
        data = make_circles(n_samples=1000, noise=0.1, factor=0.3, random_state=42)
    elif dataset_type == "Regression 1":
        data = make_regression(n_samples=1000, n_features=1, noise=5, random_state=42)
    elif dataset_type == "Regression 2":
        # Cubic curve with additive Gaussian noise.
        X = np.linspace(-1, 1, 1000).reshape(-1, 1)
        # FIX: the old `X ** 3 + 0.1 * np.random.randn(1000, 1).flatten()` added a
        # (1000, 1) array to a (1000,) array; NumPy broadcasting turned the target
        # into a (1000, 1000) matrix. Build y as a flat (1000,) vector instead,
        # seeded for reproducibility like the other branches.
        rng = np.random.default_rng(42)
        y = X.ravel() ** 3 + 0.1 * rng.standard_normal(1000)
        data = (X, y)

    if data is not None:
        X, y = data
        # Split first, then fit the scaler on the training portion only so no
        # test-set statistics leak into preprocessing.
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_ratio, random_state=42)
        scaler = StandardScaler()
        X_train = scaler.fit_transform(X_train)
        X_test = scaler.transform(X_test)
        st.success("Dataset Generated Successfully")
|
| 60 |
|
| 61 |
+
# Neural Network Architecture
st.subheader("Neural Network Architecture")
num_layers = st.slider("Number of Hidden Layers", 1, 5, 3)
# One neuron-count slider per hidden layer; the resulting list drives both the
# topology drawing and the Keras model construction below.
hidden_layers = [
    st.slider(f"Neurons in Layer {i+1}", 2, 20, 5)
    for i in range(num_layers)
]
|
| 68 |
+
|
| 69 |
+
def draw_nn(hidden_layers):
    """Render the network topology (2 inputs -> hidden layers -> 1 output) as a directed graph."""
    sizes = [2] + hidden_layers + [1]
    G = nx.DiGraph()

    # One node per neuron, laid out column-by-column: x = layer index, y = -neuron index.
    for layer, width in enumerate(sizes):
        for idx in range(width):
            G.add_node(f"L{layer}_N{idx}", layer=layer, pos=(layer, -idx))

    # Fully connect every layer to the next one.
    for layer in range(1, len(sizes)):
        for src in range(sizes[layer - 1]):
            for dst in range(sizes[layer]):
                G.add_edge(f"L{layer - 1}_N{src}", f"L{layer}_N{dst}")

    coords = nx.get_node_attributes(G, 'pos')
    plt.figure(figsize=(8, 5))
    nx.draw(G, coords, with_labels=False, node_size=600, node_color="lightblue", edge_color="gray")
    st.pyplot(plt)

draw_nn(hidden_layers)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 87 |
|
| 88 |
+
# Model Training
if play:
    if X_train is None:
        # Guard: on a fresh Streamlit rerun the dataset may not have been
        # generated yet; fitting on None would raise deep inside Keras.
        st.error("Generate a dataset before training the model.")
    else:
        model = keras.Sequential()
        # FIX: the input width was hard-coded to (2,), which crashed on the
        # 1-feature regression datasets; infer it from the training data instead.
        model.add(layers.InputLayer(input_shape=(X_train.shape[1],)))
        # One shared weight penalty for every hidden layer (hoisted out of the
        # loop — the old nested ternary rebuilt it per layer and was hard to read).
        if reg_type == "L1":
            regularizer = keras.regularizers.l1(reg_rate)
        elif reg_type == "L2":
            regularizer = keras.regularizers.l2(reg_rate)
        else:
            regularizer = None
        for neurons in hidden_layers:
            model.add(layers.Dense(neurons, activation=activation.lower(), kernel_regularizer=regularizer))
        # Sigmoid head + binary cross-entropy for classification; linear head + MSE for regression.
        model.add(layers.Dense(1, activation="sigmoid" if problem_type == "Classification" else "linear"))
        model.compile(optimizer=keras.optimizers.Adam(learning_rate=learning_rate), loss="binary_crossentropy" if problem_type == "Classification" else "mse")
        history = model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, verbose=0)
        st.success("Model Training Complete")
|
| 98 |
|
| 99 |
+
# Decision Region Visualization
if play:
    if X_train is None or X_train.shape[1] != 2:
        # FIX: the plot indexed X_train[:, 1] unconditionally, which crashed when
        # no dataset had been generated this run or when the dataset has a single
        # feature (the regression options). The 2-D contour only makes sense for
        # exactly two input features.
        st.info("Decision boundary visualization requires a generated 2-feature dataset.")
    else:
        st.subheader("Decision Boundary Visualization")
        # Evaluate the model on a 100x100 grid spanning the training data (+/- 1 margin).
        x_min, x_max = X_train[:, 0].min() - 1, X_train[:, 0].max() + 1
        y_min, y_max = X_train[:, 1].min() - 1, X_train[:, 1].max() + 1
        xx, yy = np.meshgrid(np.linspace(x_min, x_max, 100), np.linspace(y_min, y_max, 100))
        grid = np.c_[xx.ravel(), yy.ravel()]
        preds = model.predict(grid).reshape(xx.shape)
        plt.figure(figsize=(8, 5))
        # Filled contour of the raw model output, with training points overlaid.
        plt.contourf(xx, yy, preds, cmap="coolwarm", alpha=0.6)
        plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap="coolwarm", edgecolors="k")
        st.pyplot(plt)
        st.success("Decision Region Updated")
|