File size: 4,115 Bytes
320f1a7
 
 
 
 
 
 
 
 
15d10ec
320f1a7
1fbc5fc
320f1a7
 
 
 
 
 
 
 
 
686c2a3
320f1a7
686c2a3
06dcf0c
320f1a7
 
06dcf0c
31883e6
320f1a7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1fbc5fc
 
 
895da39
8298e15
895da39
1fbc5fc
 
 
 
 
8298e15
1fbc5fc
 
 
cf5597a
1fbc5fc
 
320f1a7
 
 
 
 
746064b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15d10ec
746064b
 
 
 
 
320f1a7
 
 
 
 
 
 
 
 
 
45d47b4
320f1a7
 
45d47b4
5ec571d
45d47b4
320f1a7
 
 
 
1fbc5fc
320f1a7
 
 
3f4bbb2
320f1a7
 
45cc446
 
 
320f1a7
 
 
 
edd1cb5
5ad6952
1fbc5fc
b396772
 
320f1a7
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139

### (1) Generate simulated data

import numpy.random as rnd

import gradio as gr
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
import sympy as sp


# Fix the RNG seed so the simulated data set is reproducible across runs.
np.random.seed(42)

# Training set: m noisy samples of the quadratic y = x1 + 0.5*x1^2 + 4 + eps.
m = 100
X1 = 6 * np.random.rand(m, 1) - 3                  # x1 uniform on [-3, 3)
X2 = X1 ** 2                                       # quadratic feature
y = X1 + 0.5 * X2 + 4 + np.random.randn(m, 1)      # standard-normal noise

# Held-out test set drawn from the same generating process.
m = 50
X1_test = 6 * np.random.rand(m, 1) - 3
X2_test = X1_test ** 2
y_test = X1_test + 0.5 * X2_test + 4 + np.random.randn(m, 1)

# Tabular view of the training data (pandas is already imported at the top
# of the file; the previous duplicate import was removed).
data = pd.DataFrame(
    {'y': y.flatten(), 'X1': X1.flatten(), 'X2': X2.flatten()},
    columns=['y', 'X1', 'X2'],
)




def draw_polynomial(degree=2):
    """Fit a polynomial regression of the given degree to the module-level
    training data (X1, y), evaluate it on the held-out set, and render plots.

    Parameters
    ----------
    degree : int, default 2
        Degree of the polynomial feature expansion (bias column excluded;
        the LinearRegression intercept plays that role).

    Returns
    -------
    tuple[str, pandas.DataFrame, str]
        - LaTeX string of the fitted polynomial ('y = ...'),
        - one-row DataFrame with training / test mean squared error,
        - path of the saved PNG figure ('plot_line.png').
    """
    # Local import keeps the module-level import block unchanged.
    from sklearn.preprocessing import PolynomialFeatures

    # Expand x1 into [x1, x1^2, ..., x1^degree].
    poly_features = PolynomialFeatures(degree=degree, include_bias=False)
    X_poly = poly_features.fit_transform(X1)

    # LinearRegression is imported at module level; no need to re-import it.
    lin_reg = LinearRegression()
    lin_reg.fit(X_poly, y)

    # Coefficients ordered [intercept, c1, c2, ...] so that index == power.
    coefficients = [np.round(val, 3)
                    for val in list(lin_reg.intercept_) + list(lin_reg.coef_[0])]
    print("coefficients: ", coefficients)

    # Build a symbolic polynomial and convert it to a LaTeX string for display.
    x = sp.symbols('x')
    polynomial_expr = sum(coeff * x**power
                          for power, coeff in enumerate(coefficients))
    latex_expression = sp.latex(polynomial_expr, mode='inline')
    latex_expression = 'y = ' + latex_expression.replace('$', '')

    # Dense grid over the input range for a smooth prediction curve.
    X_new = np.linspace(-3, 3, 100).reshape(100, 1)
    y_new = lin_reg.predict(poly_features.transform(X_new))

    # Training MSE — reuse X_poly from the fit instead of re-transforming X1.
    y_predict = lin_reg.predict(X_poly)
    train_error = np.mean((y_predict - y) ** 2)
    print("train_error: ", train_error)

    # Test MSE on the held-out set.
    X_test_poly = poly_features.transform(X1_test)
    y_test_predict = lin_reg.predict(X_test_poly)
    test_error = np.mean((y_test_predict - y_test) ** 2)
    print("test_error: ", test_error)

    results = pd.DataFrame(np.array([[train_error, test_error]]),
                           columns=['Training Error', 'Test Error'])

    # --- Figure: data + fitted curve (top), coefficient values (bottom) ---
    fig = plt.figure(figsize=(12, 10))

    plt.subplot(2, 1, 1)
    plt.plot(X1, y, "b.")
    plt.plot(X1_test, y_test, "g.", markersize=12)
    plt.plot(X_new, y_new, "r-", linewidth=2, label="Predictions")
    plt.xlabel("$x_1$", fontsize=18)
    plt.ylabel("$y$", rotation=0, fontsize=18)
    plt.legend(loc="upper left", fontsize=14)
    plt.axis([-3, 3, 0, 10])

    plt.subplot(2, 1, 2)
    param_idx = [i + 1 for i in range(len(lin_reg.coef_[0]))]
    plt.plot(param_idx, lin_reg.coef_[0], 'r-')
    plt.xlabel("Parameters", fontsize=14)
    plt.ylabel("Values", fontsize=14)
    plt.xticks(param_idx, param_idx)
    plt.xlim(0, len(lin_reg.coef_[0]) + 1)

    fig.tight_layout()
    plt.savefig('plot_line.png', dpi=300)
    # Close the figure so repeated Gradio calls don't accumulate open figures.
    plt.close(fig)
    return latex_expression, results, 'plot_line.png'


#### Define input component: polynomial degree, 1..64 in integer steps.
input_degree = gr.Slider(1, 64, step=1, value=2,
                         label='Degree of Polynomial Regression')

#### Define output components, one per value returned by draw_polynomial.
set_formula = gr.Textbox(label='Polynomial Model')
set_output = gr.Dataframe(type='pandas', label='Evaluation Results')
# draw_polynomial returns the path of the saved PNG, so the image component
# should declare type='filepath' (type='pil' expects a PIL.Image object).
output_plot1 = gr.Image(label="Regression plot", type='filepath')


### Configure Gradio; details at https://www.gradio.app/docs/#i_slider
interface = gr.Interface(fn=draw_polynomial,
                         inputs=[input_degree],
                         outputs=[set_formula, set_output, output_plot1],
                         title="ML Demo: Polynomial Regression models \n (Model Complexity)",
                         theme='huggingface')

interface.launch(debug=True)