# app.py — CSCI4750/5750 gradient-descent homework demo
# (uploaded by jiehou; commit ee01add, verified)
def homework04_solution(theta0, theta1, theta2, learning_rate):
    """Demonstrate one step of gradient descent for 2-feature linear regression.

    Builds a small hard-coded data set, evaluates the model
    ``y_hat = theta0 + theta1*x1 + theta2*x2`` with the supplied parameters,
    computes the MSE loss and its gradients, performs a single
    gradient-descent update with step size ``learning_rate``, and
    re-evaluates the loss with the updated parameters.

    Parameters
    ----------
    theta0, theta1, theta2 : float
        Initial intercept and feature coefficients.
    learning_rate : float
        Gradient-descent step size (eta).

    Returns
    -------
    tuple
        ``(data_t, q3_mse, theta0_grad, theta1_grad, theta2_grad,
        theta0_new, theta1_new, theta2_new, q4_mse)`` where ``data_t`` is
        the regression table transposed for display (rounded to 2 decimals),
        ``q3_mse`` the initial MSE, and ``q4_mse`` the MSE after the update.
    """
    import numpy as np
    import pandas as pd

    def _linear_predict(b0, b1, b2, x1, x2):
        # Evaluate the linear model b0 + b1*x1 + b2*x2.
        return b0 + b1 * x1 + b2 * x2

    def _get_linear_results(data, t0, t1, t2):
        # Add y_hat / residual / squared-residual columns to ``data``
        # (in place) and return the table plus the MSE gradients with
        # respect to each parameter.
        n = len(data)
        y_hat_list = []
        t0_grad = t1_grad = t2_grad = 0.0
        for i in range(n):
            x1 = data.iloc[i, 0]
            x2 = data.iloc[i, 1]
            y = data.iloc[i, 2]
            y_hat_list.append(_linear_predict(t0, t1, t2, x1, x2))
            # d(MSE)/d(theta_k) accumulated term by term:
            #   -2/n * (y - y_hat) * d(y_hat)/d(theta_k)
            residual = y - t0 - t1 * x1 - t2 * x2
            t0_grad -= 2 / n * residual * 1.0
            t1_grad -= 2 / n * residual * x1
            t2_grad -= 2 / n * residual * x2
        data['y_hat'] = y_hat_list
        data['y-y_hat'] = data['y'] - data['y_hat']
        data['(y-y_hat)^2'] = data['y-y_hat'] * data['y-y_hat']
        return data, t0_grad, t1_grad, t2_grad

    # (1) hard-coded homework data set: 5 samples, 2 features
    X = np.array([[15, 20], [30, 16], [12, 6.5], [13, 20], [18, 18]])
    y = [4.9, 5.8, 6.5, 7.3, 7.2]
    data = pd.DataFrame(X, columns=['X1', 'X2'])
    data['y'] = y

    # (2) regression table and gradients at the initial parameters
    data, theta0_grad, theta1_grad, theta2_grad = _get_linear_results(
        data, theta0, theta1, theta2)

    # (3a) transpose for display; .round(2) copies, so the later in-place
    # update of ``data`` does not affect this table
    data_t = data.T.round(2)
    data_t.insert(loc=0, column='Name',
                  value=['X1', 'X2', 'y', 'y_hat', 'y-y_hat', '(y-y_hat)^2'])
    data_t.columns = ['Name', '0', '1', '2', '3', '4']

    # (3b) initial MSE loss
    q3_mse = data['(y-y_hat)^2'].mean()

    # (4) one gradient-descent step on each parameter
    theta0_new = theta0 - learning_rate * theta0_grad
    theta1_new = theta1 - learning_rate * theta1_grad
    theta2_new = theta2 - learning_rate * theta2_grad

    # (5) re-evaluate the regression table and MSE with the updated thetas
    data4, _, _, _ = _get_linear_results(data, theta0_new, theta1_new,
                                         theta2_new)
    q4_mse = data4['(y-y_hat)^2'].mean()

    # (6) return all results for Gradio visualization
    return (data_t, q3_mse, theta0_grad, theta1_grad, theta2_grad,
            theta0_new, theta1_new, theta2_new, q4_mse)
import numpy as np
import gradio as gr
### Gradio wiring: input widgets, output widgets, and the interface.

# Four numeric inputs matching homework04_solution's signature:
# theta0, theta1, theta2, learning rate (eta).
input_widgets = [
    gr.Number(value=0.1),
    gr.Number(value=0.1),
    gr.Number(value=0.1),
    gr.Number(value=0.001),
]

# One output widget per value returned by homework04_solution, in order.
output_widgets = [
    gr.Dataframe(type='pandas', label='Question 3a'),
    gr.Textbox(label="Question: What's Initial MSE loss"),
    gr.Textbox(label="Question: What's theta0_grad"),
    gr.Textbox(label="Question: What's theta1_grad"),
    gr.Textbox(label="Question: What's theta2_grad"),
    gr.Textbox(label="Question: What's theta0_new: updated by gradient descent"),
    gr.Textbox(label="Question: What's theta1_new: updated by gradient descent"),
    gr.Textbox(label="Question: What's theta2_new: updated by gradient descent"),
    gr.Textbox(label="Question: What's New MSE after update the parameters using gradient descent"),
]

# Build and launch the demo UI.
interface = gr.Interface(
    fn=homework04_solution,
    inputs=input_widgets,
    outputs=output_widgets,
    title="CSCI4750/5750(gradient descent): Linear Regression/Optimization",
    description="Click examples below for a quick demo",
    theme='huggingface',
)
interface.launch(debug=True)