trohith89 commited on
Commit
e99dc11
·
verified ·
1 Parent(s): 0192947

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +92 -72
app.py CHANGED
@@ -1,91 +1,111 @@
1
  import streamlit as st
2
  import numpy as np
3
  import matplotlib.pyplot as plt
4
- import tensorflow as tf
5
- from tensorflow import keras
6
- from sklearn.datasets import make_circles
7
  from sklearn.model_selection import train_test_split
8
  from sklearn.preprocessing import StandardScaler
9
- import seaborn as sns
10
- import networkx as nx
11
 
12
- def generate_data():
13
- X, y = make_circles(n_samples=500, factor=0.5, noise=0.05, random_state=42)
14
- X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=42)
15
- scaler = StandardScaler()
16
- X_train = scaler.fit_transform(X_train)
17
- X_test = scaler.transform(X_test)
18
- return X_train, X_test, y_train, y_test
19
 
20
- def build_model(layers=[4, 2], activation='tanh', learning_rate=0.03):
21
- model = keras.Sequential()
22
- model.add(keras.layers.InputLayer(input_shape=(2,)))
23
-
24
- for units in layers:
25
- model.add(keras.layers.Dense(units, activation=activation))
26
-
27
- model.add(keras.layers.Dense(1, activation='sigmoid'))
28
-
29
- optimizer = keras.optimizers.Adam(learning_rate=learning_rate)
30
- model.compile(optimizer=optimizer, loss='binary_crossentropy', metrics=['accuracy'])
31
- return model
32
 
33
- def plot_decision_boundary(model, X, y):
34
- x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
35
- y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
36
- xx, yy = np.meshgrid(np.linspace(x_min, x_max, 100), np.linspace(y_min, y_max, 100))
37
- grid = np.c_[xx.ravel(), yy.ravel()]
38
- preds = model.predict(grid).reshape(xx.shape)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39
 
40
- plt.figure(figsize=(8, 6))
41
- plt.contourf(xx, yy, preds, alpha=0.3)
42
- plt.scatter(X[:, 0], X[:, 1], c=y, cmap='coolwarm', edgecolor='k')
43
- plt.xlabel('X1')
44
- plt.ylabel('X2')
45
- plt.title('Decision Boundary')
46
- st.pyplot(plt)
47
 
48
- def plot_network(layers):
 
 
 
 
 
 
 
 
49
  G = nx.DiGraph()
50
- layer_sizes = [2] + layers + [1]
51
-
52
  pos = {}
53
- node_idx = 0
54
  for i, size in enumerate(layer_sizes):
55
  for j in range(size):
56
- pos[node_idx] = (i, -j)
57
- node_idx += 1
58
-
59
- node_idx = 0
60
- for i in range(len(layer_sizes) - 1):
61
- for j in range(layer_sizes[i]):
62
- for k in range(layer_sizes[i+1]):
63
- G.add_edge(node_idx + j, node_idx + layer_sizes[i] + k)
64
- node_idx += layer_sizes[i]
65
 
66
- plt.figure(figsize=(8, 6))
67
- nx.draw(G, pos, with_labels=False, node_size=500, edge_color='gray')
 
68
  st.pyplot(plt)
69
 
70
- st.title("TensorFlow Playground Replica with Streamlit")
71
-
72
- layers = st.sidebar.text_input("Network Shape (comma-separated)", "4,2")
73
- layers = list(map(int, layers.split(',')))
74
- activation = st.sidebar.selectbox("Activation Function", ['tanh', 'relu', 'sigmoid'])
75
- learning_rate = st.sidebar.slider("Learning Rate", 0.001, 0.1, 0.03, step=0.001)
76
- batch_size = st.sidebar.slider("Batch Size", 5, 50, 10, step=5)
77
- epochs = st.sidebar.slider("Epochs", 10, 100, 50, step=10)
78
-
79
- X_train, X_test, y_train, y_test = generate_data()
80
-
81
- model = build_model(layers, activation, learning_rate)
82
- model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, verbose=0)
83
 
84
- plot_decision_boundary(model, X_test, y_test)
85
-
86
- st.write("## Model Performance")
87
- loss, accuracy = model.evaluate(X_test, y_test, verbose=0)
88
- st.write(f"Test Accuracy: {accuracy:.4f}")
 
 
 
 
 
89
 
90
- st.write("## Neural Network Structure")
91
- plot_network(layers)
 
 
 
 
 
 
 
 
 
 
 
 
1
  import streamlit as st
2
  import numpy as np
3
  import matplotlib.pyplot as plt
4
+ import networkx as nx
5
+ from sklearn.datasets import make_classification, make_moons, make_circles, make_regression
 
6
  from sklearn.model_selection import train_test_split
7
  from sklearn.preprocessing import StandardScaler
8
+ from tensorflow import keras
9
+ from tensorflow.keras import layers
10
 
11
+ # Title Bar
12
+ st.title("Neural Network Playground")
 
 
 
 
 
13
 
14
+ # Navigation Bar
15
+ col1, col2, col3, col4, col5, col6 = st.columns(6)
 
 
 
 
 
 
 
 
 
 
16
 
17
+ with col1:
18
+ epochs = st.selectbox("Epochs", [100, 200, 500, 800, 1000, 1500, 2000])
19
+ with col2:
20
+ learning_rate = st.selectbox("Learning Rate", [0.0001, 0.001, 0.01, 0.1, 0.3, 1, 3, 10])
21
+ with col3:
22
+ activation = st.selectbox("Activation Function", ["ReLU", "Tanh", "Sigmoid", "Linear"])
23
+ with col4:
24
+ reg_type = st.selectbox("Regularization Type", ["L1", "L2", "None"])
25
+ reg_rate = st.slider("Regularization Rate", 0.0, 0.1, 0.01, step=0.01)
26
+ with col5:
27
+ problem_type = st.selectbox("Problem Type", ["Classification", "Regression"])
28
+ with col6:
29
+ play = st.button("Train Model")
30
+
# Dataset Selection & Preprocessing
st.subheader("Dataset Selection & Preprocessing")
dataset_type = st.selectbox("Select Dataset", ["Binary Classification", "XOR", "Binary Spiral", "Binary Circles", "Regression 1", "Regression 2"])
test_ratio = st.slider("Train-Test Split Ratio", 0.1, 0.5, 0.2, step=0.05)
batch_size = st.slider("Batch Size", 4, 64, 16, step=2)

data = None
X_train, X_test, y_train, y_test = None, None, None, None
if st.button("Generate Dataset"):
    if dataset_type == "Binary Classification":
        # n_redundant must be 0 here: with n_features=2 the sklearn defaults
        # (n_informative=2, n_redundant=2) exceed n_features and raise ValueError.
        data = make_classification(n_samples=1000, n_features=2, n_informative=2,
                                   n_redundant=0, n_classes=2, random_state=42)
    elif dataset_type == "XOR":
        data = make_moons(n_samples=1000, noise=0.2, random_state=42)
    elif dataset_type in ("Binary Spiral", "Binary Circles"):
        # "Binary Circles" previously had no branch at all, so selecting it
        # silently produced nothing. Both options map to concentric circles;
        # a true spiral generator for "Binary Spiral" is a TODO.
        data = make_circles(n_samples=1000, noise=0.2, factor=0.5, random_state=42)
    elif dataset_type == "Regression 1":
        data = make_regression(n_samples=1000, n_features=1, noise=5, random_state=42)
    elif dataset_type == "Regression 2":
        X = np.linspace(-1, 1, 1000).reshape(-1, 1)
        # Flatten X**3 BEFORE adding noise: the original (1000,1) + (1000,)
        # broadcast produced a (1000,1000) target matrix instead of a vector.
        y = (X ** 3).flatten() + 0.1 * np.random.randn(1000)
        data = (X, y)

    if data is not None:
        X, y = data
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_ratio, random_state=42)
        scaler = StandardScaler()
        X_train = scaler.fit_transform(X_train)
        X_test = scaler.transform(X_test)
        # Persist across Streamlit reruns: every widget interaction re-executes
        # the whole script, so a split created only under this button would be
        # gone by the time "Train Model" is clicked.
        st.session_state["nn_playground_split"] = (X_train, X_test, y_train, y_test)
        st.success("Dataset Generated Successfully")

# Restore the most recently generated split on every rerun.
if "nn_playground_split" in st.session_state:
    X_train, X_test, y_train, y_test = st.session_state["nn_playground_split"]
 
# Neural Network Architecture
st.subheader("Neural Network Architecture")
num_layers = st.slider("Number of Hidden Layers", 1, 5, 3)
# One width slider per hidden layer; labels differ per index, which also
# keeps the auto-generated widget keys unique.
hidden_layers = [st.slider(f"Neurons in Layer {idx + 1}", 2, 20, 5) for idx in range(num_layers)]
def draw_nn(hidden_layers):
    """Render the network topology as a layered digraph in the Streamlit app.

    Input layer is fixed at 2 nodes and output at 1 node (matching the model
    built below); `hidden_layers` is a list of hidden-layer widths.
    """
    G = nx.DiGraph()
    layer_sizes = [2] + hidden_layers + [1]
    for i, size in enumerate(layer_sizes):
        for j in range(size):
            # Position nodes on a grid: x = layer index, y = -node index.
            G.add_node(f"L{i}_N{j}", layer=i, pos=(i, -j))
            if i > 0:
                # Fully connect each node to every node of the previous layer.
                for k in range(layer_sizes[i - 1]):
                    G.add_edge(f"L{i-1}_N{k}", f"L{i}_N{j}")

    pos = nx.get_node_attributes(G, 'pos')
    # Create an explicit figure and close it after rendering: Streamlit reruns
    # the script on every interaction, and un-closed pyplot figures accumulate.
    fig = plt.figure(figsize=(8, 5))
    nx.draw(G, pos, with_labels=False, node_size=600, node_color="lightblue", edge_color="gray")
    st.pyplot(fig)  # pass the figure, not the pyplot module (module form is deprecated)
    plt.close(fig)

draw_nn(hidden_layers)
 
 
 
 
 
 
 
 
 
 
 
 
# Model Training
if play:
    if X_train is None:
        # Nothing to fit: the user clicked "Train Model" before generating data.
        st.warning("Generate a dataset before training.")
    else:
        # Map the regularization choice to a keras regularizer (None disables it).
        if reg_type == "L1":
            regularizer = keras.regularizers.l1(reg_rate)
        elif reg_type == "L2":
            regularizer = keras.regularizers.l2(reg_rate)
        else:
            regularizer = None

        model = keras.Sequential()
        # Infer input width from the data: 2 for the 2-D classification sets,
        # 1 for the regression sets. The previous hard-coded (2,) made every
        # regression dataset fail with a shape mismatch.
        model.add(layers.InputLayer(input_shape=(X_train.shape[1],)))
        for neurons in hidden_layers:
            model.add(layers.Dense(neurons, activation=activation.lower(),
                                   kernel_regularizer=regularizer))
        # Sigmoid + binary cross-entropy for classification; linear + MSE otherwise.
        model.add(layers.Dense(1, activation="sigmoid" if problem_type == "Classification" else "linear"))
        model.compile(optimizer=keras.optimizers.Adam(learning_rate=learning_rate),
                      loss="binary_crossentropy" if problem_type == "Classification" else "mse")
        history = model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, verbose=0)
        st.success("Model Training Complete")
98
 
# Decision Region Visualization
if play:
    if X_train is None:
        st.warning("Generate a dataset and train the model to see the decision region.")
    elif problem_type == "Classification" and X_train.shape[1] == 2:
        # A 2-D decision-region plot only makes sense for 2-feature
        # classification data; indexing X_train[:, 1] on the 1-feature
        # regression sets raised IndexError in the original code.
        st.subheader("Decision Boundary Visualization")
        x_min, x_max = X_train[:, 0].min() - 1, X_train[:, 0].max() + 1
        y_min, y_max = X_train[:, 1].min() - 1, X_train[:, 1].max() + 1
        # Evaluate the trained model on a 100x100 grid spanning the data.
        xx, yy = np.meshgrid(np.linspace(x_min, x_max, 100), np.linspace(y_min, y_max, 100))
        grid = np.c_[xx.ravel(), yy.ravel()]
        preds = model.predict(grid).reshape(xx.shape)
        fig = plt.figure(figsize=(8, 5))
        plt.contourf(xx, yy, preds, cmap="coolwarm", alpha=0.6)
        plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap="coolwarm", edgecolors="k")
        st.pyplot(fig)  # pass the figure explicitly; close to avoid leaks across reruns
        plt.close(fig)
        st.success("Decision Region Updated")