jiehou committed on
Commit
a38e034
·
1 Parent(s): b5e0e26

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +132 -0
app.py CHANGED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
def homework04_solution(theta0, theta1, theta2, learning_rate):
    """Solve homework 4: one gradient-descent step of 2-feature linear regression.

    Builds a fixed 5-row dataset, evaluates the model ``y_hat = theta0 +
    theta1*x1 + theta2*x2`` at the given parameters, computes the MSE and its
    gradients, performs a single gradient-descent update, and re-evaluates
    the MSE with the updated parameters.

    Parameters
    ----------
    theta0, theta1, theta2 : float
        Initial intercept and slope parameters.
    learning_rate : float
        Step size for the gradient-descent update.

    Returns
    -------
    tuple
        (transposed regression table, initial MSE,
         theta0_grad, theta1_grad, theta2_grad,
         theta0_new, theta1_new, theta2_new, MSE after the update)
    """
    import numpy as np
    import pandas as pd

    def linear_predict(b0, b1, b2, x1, x2):
        """Linear model prediction: b0 + b1*x1 + b2*x2."""
        return b0 + b1 * x1 + b2 * x2

    def get_linear_results(data, theta0, theta1, theta2):
        """Add y_hat/residual columns to ``data`` (in place) and return it
        together with the MSE gradients w.r.t. theta0, theta1, theta2."""
        y_hat_list = []
        theta0_grad = 0.0
        theta1_grad = 0.0
        theta2_grad = 0.0
        n = len(data)
        for i in range(n):
            x1 = data.iloc[i, 0]
            x2 = data.iloc[i, 1]
            y = data.iloc[i, 2]
            y_hat = linear_predict(theta0, theta1, theta2, x1, x2)
            y_hat_list.append(y_hat)
            # Accumulate d/db_j of MSE = (1/n) * sum (y - y_hat)^2.
            # Reuse y_hat instead of re-expanding the model formula.
            residual = y - y_hat
            theta0_grad -= 2 / n * residual * 1.0
            theta1_grad -= 2 / n * residual * x1
            theta2_grad -= 2 / n * residual * x2

        data['y_hat'] = y_hat_list
        data['y-y_hat'] = data['y'] - data['y_hat']
        data['(y-y_hat)^2'] = data['y-y_hat'] * data['y-y_hat']
        return data, theta0_grad, theta1_grad, theta2_grad

    ## (1) load the fixed homework dataset
    X = np.array([[15, 20], [30, 16], [12, 6.5], [13, 20], [18, 18]])
    y = [4.9, 5.8, 6.5, 7.3, 7.2]
    data = pd.DataFrame(X, columns=['X1', 'X2'])
    data['y'] = y

    ## (2) regression table and gradients at the initial parameters
    data, theta0_grad, theta1_grad, theta2_grad = get_linear_results(
        data, theta0, theta1, theta2)

    ## (3) question 3a: transposed, rounded table with an explicit name column
    data_t = data.T
    data_t = data_t.round(2)  # round() copies, so data_t no longer aliases data
    data_t.insert(loc=0, column='Name',
                  value=['X1', 'X2', 'y', 'y_hat', 'y-y_hat', '(y-y_hat)^2'])

    ## (4) question 3b: MSE before the update
    q3_mse = data['(y-y_hat)^2'].mean()

    ## question 4 (3): update parameters by one gradient-descent step
    theta0_new = theta0 - learning_rate * theta0_grad
    theta1_new = theta1 - learning_rate * theta1_grad
    theta2_new = theta2 - learning_rate * theta2_grad

    ## (5) recompute the regression table with the updated parameters.
    ## Pass a copy: get_linear_results assigns columns in place, and the
    ## original code silently overwrote `data` here (data4 aliased data).
    data4, _, _, _ = get_linear_results(data.copy(), theta0_new,
                                        theta1_new, theta2_new)

    ## (6) question 4 (4): MSE after the gradient-descent step
    q4_mse = data4['(y-y_hat)^2'].mean()

    ## (7) return all results for the Gradio visualization
    return (data_t, q3_mse, theta0_grad, theta1_grad, theta2_grad,
            theta0_new, theta1_new, theta2_new, q4_mse)
82
+
83
import numpy as np

import gradio as gr


### configure inputs: four numeric fields feeding homework04_solution
# NOTE(review): gr.inputs / gr.outputs is the legacy Gradio 2.x namespace,
# removed in modern Gradio releases — this app needs an old pinned gradio
# version, or a migration to gr.Number() / gr.Dataframe() / gr.Textbox().
# Confirm the deployed gradio version before changing.
set_theta0 = gr.inputs.Number()
set_theta1 = gr.inputs.Number()
set_theta2 = gr.inputs.Number()
set_ita = gr.inputs.Number()  # "ita" = eta, the learning rate


### configure outputs — one widget per value in homework04_solution's
### 9-tuple return, in the same order
set_output_q3a = gr.outputs.Dataframe(type='pandas', label ='Question 3a')
set_output_q3b = gr.outputs.Textbox(label ='Question: What\'s Initial MSE loss')
set_output_q4a0 = gr.outputs.Textbox(label ='Question: What\'s theta0_grad')
set_output_q4a1 = gr.outputs.Textbox(label ='Question: What\'s theta1_grad')
set_output_q4a2 = gr.outputs.Textbox(label ='Question: What\'s theta2_grad')

set_output_q4b0 = gr.outputs.Textbox(label ='Question: What\'s theta0_new: updated by gradient descent')
set_output_q4b1 = gr.outputs.Textbox(label ='Question: What\'s theta1_new: updated by gradient descent')
set_output_q4b2 = gr.outputs.Textbox(label ='Question: What\'s theta2_new: updated by gradient descent')

set_output_q4b4 = gr.outputs.Textbox(label ='Question: What\'s New MSE after update the parameters using gradient descent')

### configure Gradio
# Each example row is three random initial thetas in [0, 1), rounded to
# 2 decimals, plus a fixed learning rate of 0.001. The rows are drawn
# once at startup, so the example list changes on every app restart.
interface = gr.Interface(fn=homework04_solution,
                  inputs=[set_theta0, set_theta1, set_theta2, set_ita],
                  outputs=[set_output_q3a, set_output_q3b,
                           set_output_q4a0, set_output_q4a1, set_output_q4a2,
                           set_output_q4b0, set_output_q4b1, set_output_q4b2,
                           set_output_q4b4],
                  examples_per_page = 2,
                  examples=[
                      np.round(np.random.uniform(0, 1, (3,)),2).tolist()+[0.001],
                      np.round(np.random.uniform(0, 1, (3,)),2).tolist()+[0.001],
                      np.round(np.random.uniform(0, 1, (3,)),2).tolist()+[0.001],
                      np.round(np.random.uniform(0, 1, (3,)),2).tolist()+[0.001],
                      np.round(np.random.uniform(0, 1, (3,)),2).tolist()+[0.001],
                      np.round(np.random.uniform(0, 1, (3,)),2).tolist()+[0.001],
                  ],
                  title="CSCI4750/5750(gradient descent): Linear Regression/Optimization",
                  description= "Click examples below for a quick demo",
                  theme = 'huggingface',   # legacy 2.x theme name — verify on upgrade
                  layout = 'horizontal',   # legacy 2.x layout kwarg — verify on upgrade
                  live=True                # recompute on every input change
                  )


# Launches the web server (blocking) — module has this side effect on import.
interface.launch()