shubham680 committed on
Commit
29dfdc3
·
verified ·
1 Parent(s): 55e35ff

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +77 -4
app.py CHANGED
@@ -27,7 +27,7 @@ with st.sidebar:
27
  noise = st.slider("Noise",0.0,1.0,0.1)
28
  test_size = st.slider("Test Size",0.1,0.5,0.05)
29
 
30
- st.header("ANN Hyperparameters")
31
  hl = st.number_input("Hidden Layers",1,10,step=1)
32
  numbers = st.text_input("Neurons for each hidden layer" ,placeholder="e.g. 8,16,32")
33
  input_func = lambda x: [int(i.strip()) for i in x.split(",") if i.strip() != ""]
@@ -39,7 +39,7 @@ with st.sidebar:
39
  col1, col2 = st.columns(2)
40
 
41
  with col1:
42
- af = st.selectbox("Activation Function",["Sigmoid","Tanh","Relu"])
43
  with col2:
44
  lr = st.selectbox("Learning Rate",[0.1,0.01,0.02,0.2])
45
 
@@ -59,11 +59,84 @@ with st.sidebar:
59
  with col4:
60
  patience = st.number_input("Patience",3,20,step=1)
61
 
62
- # if st.sidebar.button("start trainning"):
 
 
 
 
 
 
 
63
 
64
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
 
 
 
 
 
 
 
 
 
 
 
 
 
66
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
67
 
68
 
69
 
 
27
  noise = st.slider("Noise",0.0,1.0,0.1)
28
  test_size = st.slider("Test Size",0.1,0.5,0.05)
29
 
30
+ st.header("Model Hyperparameters")
31
  hl = st.number_input("Hidden Layers",1,10,step=1)
32
  numbers = st.text_input("Neurons for each hidden layer" ,placeholder="e.g. 8,16,32")
33
  input_func = lambda x: [int(i.strip()) for i in x.split(",") if i.strip() != ""]
 
39
  col1, col2 = st.columns(2)
40
 
41
  with col1:
42
+ af = st.selectbox("Activation Function",["sigmoid","tanh","relu"],index=2)
43
  with col2:
44
  lr = st.selectbox("Learning Rate",[0.1,0.01,0.02,0.2])
45
 
 
59
  with col4:
60
  patience = st.number_input("Patience",3,20,step=1)
61
 
62
if st.sidebar.button("Train"):
    if dataset:
        # Generate the selected synthetic 2-class dataset.
        # NOTE(review): option strings are inconsistently cased ("Circles"/"Blobs"
        # vs "moons") — confirm each matches the dataset selectbox exactly,
        # otherwise that branch never fires and x/y stay undefined below.
        if dataset == "Circles":
            x, y = make_circles(n_samples=1000, noise=noise, random_state=42, factor=0.1)
        elif dataset == "moons":
            x, y = make_moons(n_samples=1000, noise=noise, random_state=42)
        elif dataset == "Blobs":
            x, y = make_blobs(n_samples=1000, centers=2, cluster_std=noise, random_state=42)

        # Stratified split keeps the class balance identical in train and test.
        x_train, x_test, y_train, y_test = train_test_split(
            x, y, test_size=test_size, random_state=42, stratify=y
        )

        # Standardize features using statistics fitted on the training set only.
        std = StandardScaler()
        x_train = std.fit_transform(x_train)
        x_test = std.transform(x_test)

        # Map the sidebar's regularizer choice (a string in `reg`) to a Keras
        # regularizer object. Bind to a new name instead of clobbering `reg`,
        # so the user's original selection remains readable.
        if reg == "L1":
            regularizer = l1(reg_rate)
        elif reg == "L2":
            regularizer = l2(reg_rate)
        elif reg == "ElasticNet":
            regularizer = l1_l2(l1=reg_rate, l2=reg_rate)
        else:
            regularizer = None

        # Build the network: 2-D input, `hl` hidden layers, 1-unit sigmoid output.
        model = Sequential()
        model.add(InputLayer(shape=(2,)))
        if hl == len(nn):
            for units in nn:
                model.add(Dense(units=units, activation=af, kernel_regularizer=regularizer))
        else:
            # Surface the mismatch instead of silently training with no hidden layers.
            st.warning(f"Expected {hl} neuron counts but got {len(nn)}; hidden layers skipped.")

        model.add(Dense(units=1, activation="sigmoid", kernel_regularizer=regularizer))
        model.compile(
            loss="binary_crossentropy",
            optimizer=SGD(learning_rate=lr),
            metrics=["accuracy"],
        )

        # 80% of the training rows per batch (original behaviour preserved).
        train_size = round(x_train.shape[0] - x_train.shape[0] * 0.2)

        callbacks = []
        if early_stop_option == "Yes":
            # BUG FIX: the original was missing the comma after `min_delta=min_delta`
            # (a SyntaxError), and `callbacks` was never passed to model.fit, so
            # early stopping could never take effect.
            callbacks.append(
                EarlyStopping(
                    monitor="val_loss",
                    min_delta=min_delta,
                    patience=patience,
                    verbose=1,
                    restore_best_weights=True,
                    start_from_epoch=50,
                )
            )

        hist = model.fit(
            x_train,
            y_train,
            epochs=n_epochs,
            batch_size=train_size,
            validation_data=(x_test, y_test),
            callbacks=callbacks,  # BUG FIX: wire the EarlyStopping callback in
            verbose=False,
        )

        # --- Neural Network Diagram ---
        st.subheader("Neural Network Architecture")
        summary_lines = []
        model.summary(print_fn=lambda line: summary_lines.append(line))
        st.text("\n".join(summary_lines))

        # --- Plot 1: Decision region on the (scaled) test split ---
        st.subheader("Decision Region")
        fig1, ax1 = plt.subplots(figsize=(6, 5))
        plot_decision_regions(X=x_test, y=y_test.astype(np.int_), clf=model, ax=ax1)
        ax1.set_title("Decision Regions", fontsize=12, weight="bold")
        st.pyplot(fig1)

        # --- Plot 2: Training vs Validation Loss ---
        st.subheader("Training vs Validation Loss")
        fig2, ax2 = plt.subplots(figsize=(6, 5))
        ax2.plot(hist.history["loss"], label="Training Loss", linewidth=2)
        ax2.plot(hist.history["val_loss"], label="Validation Loss", linewidth=2, linestyle="--")
        ax2.set_xlabel("Epochs")
        ax2.set_ylabel("Loss")
        ax2.legend()
        ax2.grid(alpha=0.3)
        st.pyplot(fig2)
140
 
141
 
142