jiehou committed on
Commit
a0fa3b9
·
1 Parent(s): a34c4cc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +237 -0
app.py CHANGED
@@ -0,0 +1,237 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import numpy as np
3
+ import pandas as pd
4
+ from matplotlib import pyplot as plt
5
+ import matplotlib.colors as colors
6
+ import itertools
7
+ from scipy.stats import norm
8
+ from scipy import stats
9
+ from sklearn.naive_bayes import GaussianNB
10
+
11
+
12
+
13
def gaussian(x, n, u, s):
    """Evaluate a Gaussian pdf with mean ``u`` and std ``s`` on an even grid.

    The data ``x`` is used only to pick the grid range [x.min(), x.max()];
    the density itself is parameterized entirely by ``u`` and ``s``.

    Parameters
    ----------
    x : np.ndarray
        Sample values whose min/max bound the evaluation grid.
    n : int
        Number of evenly spaced grid points.
    u : float
        Mean of the Gaussian.
    s : float
        Standard deviation of the Gaussian (assumed non-zero).

    Returns
    -------
    tuple
        (grid points, pdf values at those points, u, s).
    """
    # n evenly spaced evaluation points spanning the observed data range.
    grid = np.linspace(x.min(), x.max(), n)

    # Standard normal density: exp(-(g-u)^2 / (2 s^2)) / (s * sqrt(2 pi)).
    exponent = ((grid - u) ** 2) / (2 * (s ** 2))
    density = np.exp(-exponent) / (s * np.sqrt(2 * np.pi))

    return grid, density, u, s
24
+
25
# Gradio input/output widget definitions (legacy gr.inputs / gr.outputs API,
# consistent with the rest of this file).
# NOTE(review): gr.inputs/gr.outputs were removed in Gradio 4.x — if the Space
# upgrades Gradio these need to become gr.Slider(...), gr.Checkbox(...), etc.
# BUG FIX: removed a duplicate `import gradio as gr` — gradio is already
# imported at the top of the file.

# 1. Mean / variance sliders for class 1 (two features + their covariance).
set_fea1_mean_class1 = gr.inputs.Slider(0, 20, step=0.5, default=1, label='Feature_1 Mean (Class 1)')
set_fea1_var_class1 = gr.inputs.Slider(0, 10, step=0.5, default=1.5, label='Feature_1 Variance (Class 1)')

set_fea2_mean_class1 = gr.inputs.Slider(0, 20, step=0.5, default=2, label='Feature_2 Mean (Class 1)')
set_fea2_var_class1 = gr.inputs.Slider(0, 10, step=0.5, default=1.5, label='Feature_2 Variance (Class 1)')

set_fea_covariance_class1 = gr.inputs.Slider(0, 10, step=0.5, default=1.5, label='Feature_1_2 Co-Variance (Class 1)')

# 2. Mean / variance sliders for class 2 (two features + their covariance).
set_fea1_mean_class2 = gr.inputs.Slider(0, 20, step=0.5, default=5, label='Feature_1 Mean (Class 2)')
set_fea1_var_class2 = gr.inputs.Slider(0, 10, step=0.5, default=1.5, label='Feature_1 Variance (Class 2)')

set_fea2_mean_class2 = gr.inputs.Slider(0, 20, step=0.5, default=6, label='Feature_2 Mean (Class 2)')
set_fea2_var_class2 = gr.inputs.Slider(0, 10, step=0.5, default=1.5, label='Feature_2 Variance (Class 2)')

set_fea_covariance_class2 = gr.inputs.Slider(0, 10, step=0.5, default=1.5, label='Feature_1_2 Co-Variance (Class 2)')

# 3. Number of samples drawn per class.
set_number_points = gr.inputs.Slider(10, 100, step=5, default=20, label='Number of samples in each class')

# 4. Whether to overlay the true class-conditional density contours.
set_show_dist = gr.inputs.Checkbox(label="Show data distribution")

# 5. Which classifier's decision regions to draw ("None" draws nothing).
set_classifier = gr.inputs.Dropdown(["None", "LDA", "QDA", "NaiveBayes"])

# 6. Outputs: the rendered figure and the simulated dataset as a table.
set_out_plot_images = gr.outputs.Image(label="Data visualization")

set_out_plot_table = gr.outputs.Dataframe(type='pandas', label='Simulated Dataset')
59
+
60
+
61
+
62
def plot_figure_twofeature(N, fea1_u1, fea1_var1, fea2_u1, fea2_var1, covariance1,
                           fea1_u2, fea1_var2, fea2_u2, fea2_var2, covariance2,
                           show_dist, classifier=None):
    """Simulate two bivariate-Gaussian classes, plot them, and return the plot.

    Draws N samples per class, scatter-plots them, optionally overlays each
    class's true density contours, optionally shades the decision regions of a
    fitted classifier, saves the figure to 'plot.png', and returns the path
    plus the simulated dataset.

    Parameters
    ----------
    N : int
        Number of samples drawn per class.
    fea1_u1, fea2_u1 / fea1_u2, fea2_u2 : float
        Feature means for class 1 / class 2.
    fea1_var1, fea2_var1 / fea1_var2, fea2_var2 : float
        Feature variances for class 1 / class 2.
    covariance1, covariance2 : float
        Off-diagonal covariance between the two features of each class.
    show_dist : bool
        If True, overlay contours of each class's true Gaussian density.
    classifier : str or None
        One of 'LDA', 'QDA', 'NaiveBayes'; any other value draws no regions.

    Returns
    -------
    tuple
        ('plot.png', pandas.DataFrame with columns 'Feature 1 (X)',
        'Feature 2 (X)', 'Label (Y)').
    """
    import numpy as np
    import matplotlib.pyplot as pp
    from matplotlib.colors import ListedColormap

    pp.style.use('default')
    # Fixed seed so the same slider settings always produce the same data.
    np.random.seed(seed=3)

    # BUG FIX: the sliders provide *variances* and a *covariance*, so the
    # covariance matrices must use those values directly.  The original code
    # passed np.sqrt(...) of every entry to np.random.multivariate_normal,
    # i.e. it used standard deviations where variances were expected.
    # NOTE(review): if covariance > sqrt(var1 * var2) the matrix is not
    # positive semi-definite; numpy warns but still samples.
    mu1 = [fea1_u1, fea2_u1]
    sigma1 = [[fea1_var1, covariance1], [covariance1, fea2_var1]]
    points_class1_fea1, points_class1_fea2 = np.random.multivariate_normal(mu1, sigma1, N).T

    mu2 = [fea1_u2, fea2_u2]
    sigma2 = [[fea1_var2, covariance2], [covariance2, fea2_var2]]
    points_class2_fea1, points_class2_fea2 = np.random.multivariate_normal(mu2, sigma2, N).T

    mu_list = [mu1, mu2]
    sigma_list = [sigma1, sigma2]
    color_list = ['darkblue', 'darkgreen']

    # Tabular view of the simulated data (class 1 labeled 0, class 2 labeled 1).
    pd_class1 = pd.DataFrame({'Feature 1 (X)': points_class1_fea1,
                              'Feature 2 (X)': points_class1_fea2,
                              'Label (Y)': np.repeat(0, len(points_class1_fea1))})
    pd_class2 = pd.DataFrame({'Feature 1 (X)': points_class2_fea1,
                              'Feature 2 (X)': points_class2_fea2,
                              'Label (Y)': np.repeat(1, len(points_class2_fea1))})
    pd_all = pd.concat([pd_class1, pd_class2]).reset_index(drop=True)

    # BUG FIX: build the design matrix so each row is one sample's
    # (feature 1, feature 2) pair.  The original hstack/vstack/.T combination
    # paired class 1's feature 1 with class 2's feature 1 in each row, so the
    # classifiers below were trained on scrambled data.
    X_data = np.vstack((np.column_stack((points_class1_fea1, points_class1_fea2)),
                        np.column_stack((points_class2_fea1, points_class2_fea2))))
    y_labels = np.hstack((np.zeros(N), np.ones(N)))

    fig = pp.figure(figsize=(8, 6))  # figure size in inches
    fig.subplots_adjust(left=0, right=1, bottom=0, top=1, hspace=0.3, wspace=0.05)

    val = 0.  # vertical offset for the scatter points (kept at 0)
    pp.plot(points_class1_fea1, points_class1_fea2 + val, 'x', label='Class 1', markersize=10)
    pp.plot(points_class2_fea1, points_class2_fea2 + val, 'o', label='Class 2', markersize=10)

    # Fixed plot window so the view stays stable while sliders move.
    x_min, x_max = -5, 15
    y_min, y_max = -5, 15
    X_grid, Y_grid = np.meshgrid(np.linspace(x_min, x_max, 100),
                                 np.linspace(y_min, y_max, 100))

    if show_dist:
        # Contours of each class's *true* density on the 100x100 mesh.
        for i in range(2):
            mesh_points = np.column_stack((np.ravel(X_grid), np.ravel(Y_grid)))
            zz = stats.multivariate_normal.pdf(mesh_points, mean=mu_list[i], cov=sigma_list[i])
            # BUG FIX: reshape to the full 2-D grid shape.  The original
            # zz.reshape(X_grid.shape[0]) raised a ValueError (10000 values
            # cannot reshape to (100,)) whenever the checkbox was ticked.
            Z = zz.reshape(X_grid.shape)
            pp.contour(X_grid, Y_grid, Z, 5, alpha=.3, colors=color_list[i])

    # Light background colormap for the classifier decision regions.
    cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA'])

    # Select the requested classifier; the three branches only differ in the
    # model class, so fitting and region shading are shared below.
    model_sk = None
    if classifier == 'LDA':
        from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
        model_sk = LinearDiscriminantAnalysis()
    elif classifier == 'QDA':
        from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
        model_sk = QuadraticDiscriminantAnalysis()
    elif classifier == 'NaiveBayes':
        from sklearn.naive_bayes import GaussianNB
        model_sk = GaussianNB(priors=None)

    if model_sk is not None:
        model_sk.fit(X_data, y_labels)
        # Coarser N x N prediction grid, as in the original code.
        Xg, Yg = np.meshgrid(np.linspace(x_min, x_max, N),
                             np.linspace(y_min, y_max, N))
        # Vectorized prediction over the whole grid (the original called
        # predict() once per grid point, which is very slow).
        Z = model_sk.predict(np.column_stack((np.ravel(Xg), np.ravel(Yg)))).reshape(Xg.shape)
        pp.pcolormesh(Xg, Yg, Z, cmap=cmap_light, alpha=0.2)

    pp.xlim([x_min, x_max])
    pp.ylim([y_min, y_max])
    pp.xlabel("Feature 1 (X)", size=20)
    pp.xticks(fontsize=20)
    pp.yticks(fontsize=20)
    pp.ylabel("Feature 2 (X)", size=20)
    pp.legend(loc='upper right', borderpad=0, handletextpad=0, fontsize=20)
    pp.savefig('plot.png')

    return 'plot.png', pd_all
225
+
226
+
227
### configure gradio, detailed can be found at https://www.gradio.app/docs/#i_slider
# Wire the widgets to the plotting function.  The inputs list order must match
# plot_figure_twofeature's parameter order exactly: N first, then class-1
# stats (mean/var per feature + covariance), class-2 stats, the distribution
# checkbox, and the classifier dropdown.  live=True re-runs the function on
# every widget change.
# NOTE(review): theme='huggingface' and layout='vertical' are legacy Gradio
# 2.x/3.x options — confirm the pinned Gradio version before upgrading.
interface = gr.Interface(fn=plot_figure_twofeature, inputs=[set_number_points,set_fea1_mean_class1,set_fea1_var_class1,set_fea2_mean_class1,set_fea2_var_class1,set_fea_covariance_class1,set_fea1_mean_class2,set_fea1_var_class2,set_fea2_mean_class2,set_fea2_var_class2,set_fea_covariance_class2, set_show_dist, set_classifier],
                         outputs=[set_out_plot_images,set_out_plot_table],
                         examples_per_page = 2,
                         #examples = get_sample_data(10),
                         title="CSCI4750/5750 Demo: Web Application for Probabilistic Classifier (Two feature)",
                         description= "Click examples below for a quick demo",
                         theme = 'huggingface',
                         layout = 'vertical', live=True
                         )
# debug=True blocks the main thread and streams tracebacks to the console
# (useful when hosted on Spaces).
interface.launch(debug=True)