jiehou committed
Commit 36ff860 · 1 Parent(s): d0ad25e

Update app.py

Files changed (1): app.py (+30 -8)
app.py CHANGED
@@ -1,6 +1,9 @@
 ### CSCI 4750/5750: regression models and gradient descent
 
 
+### CSCI 4750/5750: regression models
+
+
 import gradio as gr
 import matplotlib
 import matplotlib.pyplot as plt
@@ -14,7 +17,7 @@ def cal_mse(X,y,b,w):
     mse = np.mean((y_predict-y)**2)
     return mse
 
-def gradient_descent(intercept=4, slope=3, intercept_random=4, slope_random=3, gradient_descent=False, learning_rate= 0.01, iteration=1000):
+def gradient_descent(intercept=4, slope=3, intercept_random=4, slope_random=3, gradient_descent=False, learning_rate= 0.01, iteration=100):
     ### (1) generate simulated data points
     X = 2 * np.random.rand(100, 1)
     y = intercept + slope * X + np.random.randn(100, 1)
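Only the tail of `cal_mse` is visible as context in the hunk above. For orientation, a minimal sketch of the whole helper, assuming the predictions come from the current intercept `b` and slope `w` (only the last two lines are confirmed by the diff):

```python
import numpy as np

def cal_mse(X, y, b, w):
    # Assumed: predictions from the current intercept b and slope w.
    y_predict = b + w * X
    # These two lines appear verbatim in the diff context.
    mse = np.mean((y_predict - y) ** 2)
    return mse
```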
@@ -28,9 +31,9 @@ def gradient_descent(intercept=4, slope=3, intercept_random=4, slope_random=3, g
     y_predict
 
     ### (4) Draw baseline linear Line
-    fig = plt.figure(figsize=(12,12))
+    fig = plt.figure(figsize=(12,18))
 
-    plt.subplot(2,1,1)
+    plt.subplot(3,1,1)
     plt.plot(X, y_predict, "r-", linewidth=2, label = "Line of best fit")
     plt.plot(X, y, "b.")
 
@@ -42,26 +45,27 @@ def gradient_descent(intercept=4, slope=3, intercept_random=4, slope_random=3, g
     y_predict = X_new_b.dot(np.array([intercept_random, slope_random]))
     plt.plot(X_new, y_predict, "g-", linewidth=2, label = "Random line")
 
-
+
     ### (4.3) Apply gradient desc
     if gradient_descent:
         b = intercept_random
         w = slope_random
 
         lr = learning_rate # learning rate
-        iteration = 1000
+        iteration = iteration
 
         # Store initial values for plotting.
        b_history = [b]
        w_history = [w]
 
+        train_mse = []
         # Iterations
         for i in range(iteration):
             b_grad = 0.0
             w_grad = 0.0
             for n in range(len(X)):
-                b_grad = b_grad - (y[n] - b - w*X[n])*1.0
-                w_grad = w_grad - (y[n] - b - w*X[n])*X[n]
+                b_grad = b_grad - (y[n,0] - b - w*X[n,0])*1.0
+                w_grad = w_grad - (y[n,0] - b - w*X[n,0])*X[n,0]
             b_grad /= len(X)
             w_grad /= len(X)
 
@@ -72,6 +76,8 @@ def gradient_descent(intercept=4, slope=3, intercept_random=4, slope_random=3, g
             # Store parameters for plotting
             b_history.append(b)
             w_history.append(w)
+
+            train_mse.append(cal_mse(X,y,b,w))
 
     plt.xlabel("$x_1$", fontsize=22)
     plt.ylabel("$y$", rotation=0, fontsize=22)
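Aside (not part of the commit): across the two hunks above, the inner loop accumulates the averaged MSE gradients sample by sample — the new `[n,0]` indexing keeps `b_grad` and `w_grad` as plain Python scalars rather than length-1 arrays — and each iteration now also records the full-data MSE via `cal_mse` for the learning-curve panel added below. A vectorized sketch of one equivalent step; the update rule itself sits between the hunks, so a standard `b -= lr * b_grad` is assumed:

```python
import numpy as np

def gd_step(X, y, b, w, lr):
    # Residuals at the current intercept b and slope w; X and y are (100, 1).
    err = y - b - w * X
    # Same values as the per-sample sums in the loop, divided by len(X).
    b_grad = -np.mean(err)
    w_grad = -np.mean(err * X)
    # Assumed update rule; the actual update lines fall outside these hunks.
    return b - lr * b_grad, w - lr * w_grad
```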
@@ -86,7 +92,7 @@ def gradient_descent(intercept=4, slope=3, intercept_random=4, slope_random=3, g
 
 
     ### (5) Visualize loss function
-    plt.subplot(2,1,2)
+    plt.subplot(3,1,2)
 
     ### (5.1) generate grid of parameters
     b = np.arange(-10,10,0.1) #bias
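The body of section (5) is unchanged by this commit, so it is mostly absent from the diff; presumably it evaluates the loss over a grid of (bias, slope) pairs and draws the surface that the `xlim`/`ylim` calls in the next hunk frame to [-10, 10]. A sketch of that construction under those assumptions, reusing `X`, `y`, `b`, and `cal_mse` from above (the `w` grid, the contour call, and the trajectory overlay are guesses, and the overlay only applies when gradient descent ran):

```python
    w = np.arange(-10,10,0.1)          # slope grid, assumed to mirror the bias grid
    B, W = np.meshgrid(b, w)           # every (bias, slope) combination
    Z = np.zeros_like(B)
    for i in range(B.shape[0]):        # loss at each grid point
        for j in range(B.shape[1]):
            Z[i, j] = cal_mse(X, y, B[i, j], W[i, j])
    plt.contourf(B, W, Z, levels=50)   # loss surface
    plt.plot(b_history, w_history, "k.-", label="GD path")  # stored trajectory
```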
@@ -132,6 +138,19 @@ def gradient_descent(intercept=4, slope=3, intercept_random=4, slope_random=3, g
     plt.yticks(fontsize=18)
     plt.xlim(-10,10)
     plt.ylim(-10,10)
+
+
+    ### 6. Visualize the learning curves
+    if gradient_descent:
+        plt.subplot(3,1,3)
+        plt.plot(train_mse,label="train_loss (lr="+str(learning_rate)+")")
+        plt.xlabel('Iteration',fontweight="bold",fontsize = 22)
+        plt.ylabel('Loss',fontweight="bold",fontsize = 22)
+        plt.title("Learning curve: Loss VS Epochs",fontweight="bold",fontsize = 22)
+        plt.legend(fontsize=18)
+        plt.xticks(fontsize=18)
+        plt.yticks(fontsize=18)
+
     #plt.show()
     fig.tight_layout()
     plt.savefig('plot_line.png', dpi=300)
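Two behavioral notes on this hunk: with the hard-coded `iteration = 1000` replaced by a no-op `iteration = iteration`, the caller's `iteration` argument (default now 100 instead of 1000) finally controls the loop length; and the thirteen added plotting lines are what the enlarged `figsize=(12,18)`, three-row grid from the earlier hunks makes room for, with the new third panel plotting one `train_mse` value per iteration.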
@@ -139,6 +158,9 @@ def gradient_descent(intercept=4, slope=3, intercept_random=4, slope_random=3, g
 
 
 
+
+
+
 #### Define input component
 input_intercept = gr.inputs.Slider(1, 8, step=0.5, default=4, label='(Baseline) Intercept')
 input_slope = gr.inputs.Slider(-8, 8, step=0.5, default=2.8, label='(Baseline) Slope')
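The sliders use the legacy `gr.inputs` API, and the diff ends before the interface is assembled. A minimal sketch of how the pieces are presumably wired together — the remaining input components, the output, and the `gr.Interface` call below are my assumptions, one input per `gradient_descent()` parameter, with the function returning the saved 'plot_line.png':

```python
# Hypothetical remaining inputs (only the two sliders above appear in the diff).
input_intercept_random = gr.inputs.Slider(-8, 8, step=0.5, default=4, label='(Random) Intercept')
input_slope_random = gr.inputs.Slider(-8, 8, step=0.5, default=3, label='(Random) Slope')
input_gd = gr.inputs.Checkbox(default=False, label='Apply gradient descent')
input_lr = gr.inputs.Slider(0.001, 0.5, default=0.01, label='Learning rate')
input_iteration = gr.inputs.Slider(10, 1000, step=10, default=100, label='Iterations')

demo = gr.Interface(
    fn=gradient_descent,
    inputs=[input_intercept, input_slope, input_intercept_random,
            input_slope_random, input_gd, input_lr, input_iteration],
    outputs="image",  # assumes gradient_descent returns 'plot_line.png'
)
demo.launch()
```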
 