jiehou committed on
Commit
8081430
·
1 Parent(s): fff18ee

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +171 -0
app.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ### CSCI 4750/5750: regression models
2
+
3
+
4
+ import gradio as gr
5
+ import matplotlib
6
+ import matplotlib.pyplot as plt
7
+ import numpy as np
8
+ from sklearn.linear_model import LinearRegression
9
+
10
def cal_mse(X, y, b, w):
    """Return the mean squared error of the line y = b + w*x over the data (X, y).

    X is an (n, 1) array of inputs and y an (n, 1) array of targets;
    b is the intercept (bias) and w the slope (weight).
    """
    # Prepend a column of ones so the intercept folds into a single dot product.
    design = np.c_[np.ones((len(X), 1)), X]
    params = np.array([[b], [w]])
    residuals = design.dot(params) - y
    return np.mean(residuals ** 2)
16
+
17
def gradient_descent(intercept=4, slope=3, intercept_random=4, slope_random=3, gradient_descent=False, learning_rate=0.01, iteration=1000):
    """Simulate noisy linear data, fit a regression line, and visualize gradient descent.

    Parameters
    ----------
    intercept, slope : true parameters used to generate the simulated data (baseline line).
    intercept_random, slope_random : parameters of a user-chosen "random" line.
    gradient_descent : bool — when True, run batch gradient descent starting from the
        random line and plot its trajectory on the loss surface. (Note: this flag
        shadows the function's own name inside the body; kept for UI compatibility.)
    learning_rate : gradient-descent step size.
    iteration : number of gradient-descent iterations. BUG FIX: this value was
        previously ignored — a hard-coded ``iteration = 1000`` inside the body
        overrode the slider; the parameter is now honored.

    Returns
    -------
    str : path of the saved figure ('plot_line.png').
    """
    ### (1) generate simulated data points: y = intercept + slope*x + Gaussian noise
    X = 2 * np.random.rand(100, 1)
    y = intercept + slope * X + np.random.randn(100, 1)

    ### (2) fit regression model
    lin_reg = LinearRegression()
    lin_reg.fit(X, y)

    ### (3) make a prediction on training data (line of best fit)
    y_predict = lin_reg.predict(X)

    ### (4) draw baseline linear line and the raw data
    fig = plt.figure(figsize=(12, 12))
    plt.subplot(2, 1, 1)
    plt.plot(X, y_predict, "r-", linewidth=2, label="Line of best fit")
    plt.plot(X, y, "b.")

    ### (4.2) draw the user's random line (skipped when identical to the baseline, to avoid overlap)
    if intercept_random != intercept or slope_random != slope:
        X_new = np.array([[0], [2]])
        X_new_b = np.c_[np.ones((2, 1)), X_new]  # add x0 = 1 to each instance
        y_random = X_new_b.dot(np.array([intercept_random, slope_random]))
        plt.plot(X_new, y_random, "g-", linewidth=2, label="Random line")

    ### (4.3) apply batch gradient descent starting from the random line
    if gradient_descent:
        b = intercept_random
        w = slope_random
        lr = learning_rate  # learning rate
        # BUG FIX: use the caller-supplied iteration count (was hard-coded to 1000,
        # which made the "Iteration" slider a no-op). Cast to int so a float slider
        # value still works with range().
        n_iter = int(iteration)

        # Store initial values for plotting the descent trajectory.
        b_history = [b]
        w_history = [w]

        for _ in range(n_iter):
            # Batch gradient of the (halved) squared error, averaged over all samples.
            b_grad = 0.0
            w_grad = 0.0
            for n in range(len(X)):
                b_grad = b_grad - (y[n] - b - w * X[n]) * 1.0
                w_grad = w_grad - (y[n] - b - w * X[n]) * X[n]
            b_grad /= len(X)
            w_grad /= len(X)

            # Update parameters.
            b = b - lr * b_grad
            w = w - lr * w_grad

            # Store parameters for plotting.
            b_history.append(b)
            w_history.append(w)

    plt.xlabel("$x_1$", fontsize=22)
    plt.ylabel("$y$", rotation=0, fontsize=22)
    plt.xticks(fontsize=18)
    plt.yticks(fontsize=18)
    plt.axis([np.min(X) * 0.1, np.max(X) * 1.1, np.min(y) * 0.1, np.max(y) * 1.1])
    plt.title("Linear Regression model predictions", fontsize=22)
    plt.legend(fontsize=18)

    ### (5) visualize the loss surface in the second subplot
    plt.subplot(2, 1, 2)

    ### (5.1) generate grid of candidate parameters
    b = np.arange(-10, 10, 0.1)  # bias (intercept) axis
    w = np.arange(-10, 10, 0.1)  # weight (slope) axis

    ### (5.2) calculate MSE over the parameter grid; Z[i][j] pairs w[i] with b[j]
    Z = np.zeros((len(w), len(b)))
    for i in range(len(w)):
        for j in range(len(b)):
            Z[i][j] = cal_mse(X, y, b[j], w[i])

    ### (5.3) optimal parameters from the fitted model
    theta0_best = lin_reg.intercept_[0]
    theta1_best = lin_reg.coef_[0][0]

    ### (5.4) draw the contour graph of the loss surface
    plt.contourf(b, w, Z, 50, alpha=0.5, cmap=plt.get_cmap('jet'))

    ### (5.5) mark the optimal loss
    plt.plot(theta0_best, theta1_best, 'x', ms=12, markeredgewidth=3, color='orange')
    plt.text(theta0_best, theta1_best, 'MSE:' + str(np.round(cal_mse(X, y, theta0_best, theta1_best), 2)), color='red', fontsize=22)

    ### (5.6) mark the loss of the random line (skip when it coincides with baseline)
    if intercept_random != intercept or slope_random != slope:
        plt.plot(intercept_random, slope_random, 'o', ms=5, markeredgewidth=3, color='orange')
        plt.text(intercept_random, slope_random, 'MSE:' + str(np.round(cal_mse(X, y, intercept_random, slope_random), 2)), fontsize=22)

    ### (5.7) draw the trajectory of gradient-descent updates
    if gradient_descent:
        plt.plot(b_history, w_history, 'o-', ms=3, lw=1.5, color='black')

    plt.title("Visualization of Gradient Descent Process", fontsize=22)
    plt.xlabel("$Intercept$", fontsize=22)
    plt.ylabel("$Slope$", rotation=0, fontsize=22)
    plt.xticks(fontsize=18)
    plt.yticks(fontsize=18)
    plt.xlim(-10, 10)
    plt.ylim(-10, 10)
    fig.tight_layout()
    plt.savefig('plot_line.png', dpi=300)
    return 'plot_line.png'
139
+
140
+
141
+
142
#### Input widgets for the Gradio UI
# NOTE(review): gr.inputs / gr.outputs is a legacy Gradio API — confirm the
# installed Gradio version still provides it.
in_base_intercept = gr.inputs.Slider(1, 8, step=0.5, default=4, label='(Baseline) Intercept')
in_base_slope = gr.inputs.Slider(-8, 8, step=0.5, default=2.8, label='(Baseline) Slope')

in_rand_intercept = gr.inputs.Slider(-8, 8, step=0.5, default=-7.5, label='(Random) Intercept')
in_rand_slope = gr.inputs.Slider(-8, 8, step=0.5, default=-4.5, label='(Random) Slope')

in_apply_gd = gr.inputs.Checkbox(label="Apply Gradient Descent")

in_learning_rate = gr.inputs.Slider(0, 2, step=0.0001, default=0.001, label='Learning Rate')
in_iterations = gr.inputs.Slider(1, 1000, step=2, default=100, label='Iteration')

#### Output widget: the rendered figure produced by gradient_descent()
out_regression_plot = gr.outputs.Image(label="Regression plot")

### Assemble and launch the app; component docs at https://www.gradio.app/docs/#i_slider
interface = gr.Interface(
    fn=gradient_descent,
    inputs=[in_base_intercept, in_base_slope, in_rand_intercept, in_rand_slope, in_apply_gd, in_learning_rate, in_iterations],
    outputs=[out_regression_plot],
    examples_per_page=2,
    examples=[[4, 3, -7, -5, True, 0.0001, 100], [1, 2, -7, -8, False, 0.0001, 100]],
    title="CSCI4750/5750: Regression models (Gradient Descent)",
    description="Click examples to generate random dataset and select gradient descent parameters",
    theme='huggingface',
    layout='vertical',
)

interface.launch(debug=True)