| |
|
|
|
|
| |
|
|
|
|
| import gradio as gr |
| import matplotlib |
| import matplotlib.pyplot as plt |
| import numpy as np |
| from sklearn.linear_model import LinearRegression |
|
|
def cal_mse(X, y, b, w):
    """Mean squared error of the line ``y = b + w*x`` over the dataset.

    X is an (n, 1) feature column, y an (n, 1) target column;
    b is the intercept and w the slope of the candidate line.
    Returns the scalar MSE as a numpy float.
    """
    design = np.c_[np.ones((len(X), 1)), X]   # prepend a bias column of ones
    params = np.array([[b], [w]])             # column vector (intercept, slope)
    residuals = design.dot(params) - y
    return np.mean(residuals ** 2)
|
|
def gradient_descent(intercept=4, slope=3, intercept_random=4, slope_random=3,
                     gradient_descent=False, learning_rate=0.01, iteration=100):
    """Render the regression / gradient-descent demo figure for the Gradio app.

    Generates a fresh random dataset ``y = intercept + slope*x + N(0, 1)``,
    fits a closed-form LinearRegression baseline, optionally runs batch
    gradient descent starting from ``(intercept_random, slope_random)``, and
    draws up to three stacked panels:

      1. the data, the best-fit line, the user's "random" starting line, and
         snapshots of the descent near 1/4, 1/3, 1/2 and the final iteration;
      2. the MSE contour over (intercept, slope) with the descent path;
      3. the training-loss curve (only when gradient descent is enabled).

    Parameters
    ----------
    intercept, slope : float
        Coefficients of the true data-generating line.
    intercept_random, slope_random : float
        The extra "random line", also the gradient-descent starting point.
    gradient_descent : bool
        Whether to run and visualize gradient descent.  (The parameter
        deliberately shares the function's name; Gradio passes inputs
        positionally, so the shadowing is harmless inside the body.)
    learning_rate : float
        Step size for gradient descent.
    iteration : int
        Number of gradient-descent iterations.

    Returns
    -------
    str
        Path of the saved figure, ``'plot_line.png'``.
    """
    # Fresh, unseeded synthetic data on every call: x uniform in [0, 2).
    X = 2 * np.random.rand(100, 1)
    y = intercept + slope * X + np.random.randn(100, 1)

    # Closed-form least-squares baseline ("line of best fit").
    lin_reg = LinearRegression()
    lin_reg.fit(X, y)
    y_predict = lin_reg.predict(X)

    fig = plt.figure(figsize=(12, 20))

    def _draw_line(b, w, color, label):
        # Draw y = b + w*x across the data range x in [0, 2] on the current axes.
        x_ends = np.array([[0], [2]])
        y_ends = np.c_[np.ones((2, 1)), x_ends].dot(np.array([b, w]))
        plt.plot(x_ends, y_ends, color, linewidth=2, label=label)

    # ------ Panel 1: data + lines ------
    plt.subplot(3, 1, 1)
    plt.plot(X, y_predict, "r-", linewidth=2, label="Line of best fit")
    plt.plot(X, y, "b.")

    # Only draw the extra line when it differs from the baseline.
    if intercept_random != intercept or slope_random != slope:
        _draw_line(intercept_random, slope_random, "g-", "Random line")

    if gradient_descent:
        b = intercept_random
        w = slope_random
        lr = learning_rate

        b_history = [b]
        w_history = [w]
        train_mse = []

        # Iterations at which an intermediate fitted line is drawn.  Kept as
        # a list of (index, color) pairs so duplicate indices (possible for
        # tiny iteration counts) all get plotted, as in the original code.
        snapshots = [
            (int(iteration / 4), "brown"),
            (int(iteration / 3), "blue"),
            (int(iteration / 2), "gray"),
            (iteration - 1, "black"),
        ]

        x_col = X[:, 0]
        y_col = y[:, 0]
        for i in range(iteration):
            # Batch gradients of (1/2)*MSE — identical to the original
            # per-sample accumulation loop, computed vectorized.
            residual = y_col - b - w * x_col
            b_grad = -np.mean(residual)
            w_grad = -np.mean(residual * x_col)

            b = b - lr * b_grad
            w = w - lr * w_grad

            b_history.append(b)
            w_history.append(w)
            train_mse.append(cal_mse(X, y, b, w))

            for snap_i, color in snapshots:
                if i == snap_i:
                    _draw_line(b, w, color, "Fitted line in iteration " + str(i))

    plt.xlabel("$x_1$", fontsize=22)
    plt.ylabel("$y$", rotation=0, fontsize=22)
    plt.xticks(fontsize=18)
    plt.yticks(fontsize=18)
    # NOTE(review): the lower bounds are scaled by 0.1 (pulling them toward 0);
    # 0.9 may have been intended — kept as-is to preserve behavior.
    plt.axis([np.min(X)*0.1, np.max(X)*1.1, np.min(y)*0.1, np.max(y)*1.1])
    plt.title("Linear Regression model predictions", fontsize=22)
    plt.legend(fontsize=18)

    # ------ Panel 2: MSE contour over (intercept, slope) ------
    plt.subplot(3, 1, 2)

    b_axis = np.arange(-10, 10, 0.1)
    w_axis = np.arange(-10, 10, 0.1)

    # Vectorized MSE surface; replaces a 200x200 Python double loop of
    # cal_mse calls.  residual[n, i, j] = b_axis[j] + w_axis[i]*x_n - y_n.
    B, W = np.meshgrid(b_axis, w_axis)
    residual = (B[None, :, :]
                + W[None, :, :] * X[:, 0][:, None, None]
                - y[:, 0][:, None, None])
    Z = np.mean(residual ** 2, axis=0)

    theta0_best = lin_reg.intercept_[0]
    theta1_best = lin_reg.coef_[0][0]

    plt.contourf(b_axis, w_axis, Z, 50, alpha=0.5, cmap=plt.get_cmap('jet'))

    # Mark the closed-form optimum and its MSE.
    plt.plot(theta0_best, theta1_best, 'x', ms=12, markeredgewidth=3, color='orange')
    plt.text(theta0_best, theta1_best, 'MSE:'+str(np.round(cal_mse(X, y, theta0_best, theta1_best), 2)), color='red', fontsize=22)

    # Mark the user's starting point when it differs from the baseline.
    if intercept_random != intercept or slope_random != slope:
        plt.plot(intercept_random, slope_random, 'o', ms=5, markeredgewidth=3, color='orange')
        plt.text(intercept_random, slope_random, 'MSE:'+str(np.round(cal_mse(X, y, intercept_random, slope_random), 2)), fontsize=22)

    # Overlay the descent trajectory in (intercept, slope) space.
    if gradient_descent:
        plt.plot(b_history, w_history, 'o-', ms=3, lw=1.5, color='black')

    plt.title("Visualization of Gradient Descent Process", fontsize=22)
    plt.xlabel("$Intercept$", fontsize=22)
    plt.ylabel("$Slope$", rotation=0, fontsize=22)
    plt.xticks(fontsize=18)
    plt.yticks(fontsize=18)
    plt.xlim(-10, 10)
    plt.ylim(-10, 10)

    # ------ Panel 3: learning curve ------
    if gradient_descent:
        plt.subplot(3, 1, 3)
        plt.plot(train_mse, label="train_loss (lr="+str(learning_rate)+")")
        plt.xlabel('Iteration', fontweight="bold", fontsize=22)
        plt.ylabel('Loss', fontweight="bold", fontsize=22)
        plt.title("Learning curve: Loss VS Epochs", fontweight="bold", fontsize=22)
        plt.legend(fontsize=18)
        plt.xticks(fontsize=18)
        plt.yticks(fontsize=18)

    fig.tight_layout()
    plt.savefig('plot_line.png', dpi=300)
    # Fix: release the figure so repeated Gradio calls don't accumulate
    # open matplotlib figures (memory leak + "too many figures" warning).
    plt.close(fig)
    return 'plot_line.png'
|
|
|
|
|
|
|
|
|
|
|
|
| |
# --- Gradio UI wiring ---
# NOTE(review): gr.inputs.* / gr.outputs.* is the legacy pre-3.0 Gradio API,
# removed in Gradio 3.x — confirm the pinned Gradio version for this app.

# Sliders for the "true" data-generating line (baseline).
input_intercept = gr.inputs.Slider(1, 8, step=0.5, default=4, label='(Baseline) Intercept')
input_slope = gr.inputs.Slider(-8, 8, step=0.5, default=2.8, label='(Baseline) Slope')

# Sliders for the user-chosen "random" line, which is also the
# gradient-descent starting point.
input_intercept_random = gr.inputs.Slider(-8, 8, step=0.5, default=4, label='(Random) Intercept')
input_slope_random = gr.inputs.Slider(-8, 8, step=0.5, default=-4.5, label='(Random) Slope')

# Toggle: run batch gradient descent from the random line.
input_gradients = gr.inputs.Checkbox(label="Apply Gradient Descent")

# Gradient-descent hyperparameters (name "input_interation" is a typo for
# "iteration" but is only referenced locally below).
input_learningrate = gr.inputs.Slider(0,1, step=0.001, default=0.005, label='Learning Rate')
input_interation = gr.inputs.Slider(1, 1000, step=2, default=150, label='Iteration')

# Single output: the 3-panel figure file returned by gradient_descent().
output_plot1 = gr.outputs.Image(label="Regression plot")

# Input order must match gradient_descent()'s positional parameters.
interface = gr.Interface(fn=gradient_descent,
                         inputs=[input_intercept, input_slope, input_intercept_random, input_slope_random, input_gradients, input_learningrate, input_interation],
                         outputs=[output_plot1],
                         examples_per_page = 2,
                         examples = [[4, 3, -7, -5, True, 0.0001, 100], [1, 2, -7, -8, False, 0.0001, 100]],
                         title="CSCI4750/5750: Regression models \n (Function approximation by Gradient Descent)",
                         description= "Click examples to generate random dataset and select gradient descent parameters",
                         theme = 'huggingface',
                         layout = 'vertical'
                         )

# debug=True blocks and streams errors to the console.
interface.launch(debug=True)
|
|