joel-woodfield committed on
Commit
1dcf98e
·
1 Parent(s): 1f2626d

Use ElasticNet's regularization path generator when using l2 loss

Browse files
Files changed (1) hide show
  1. regularization.py +31 -14
regularization.py CHANGED
@@ -17,8 +17,7 @@ from sklearn.svm import LinearSVC
17
  from sklearn.datasets import load_iris
18
  from sklearn.metrics import classification_report, mean_squared_error, mean_absolute_error
19
  from sklearn.datasets import make_regression
20
- from sklearn.linear_model import Ridge
21
- from sklearn.linear_model import Lasso
22
 
23
  import traceback
24
  import yaml
@@ -65,6 +64,20 @@ def l2_loss(W, y, X):
65
  return np.mean((y - preds) ** 2, axis=1).reshape(num_dots, num_dots)
66
 
67
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
68
  class Regularization:
69
  def __init__(self, width, height):
70
  # initialized in draw_plot
@@ -211,17 +224,21 @@ class Regularization:
211
 
212
  # plot path
213
  if self.plot_regularization_path:
214
- min_loss_reg = regs.ravel()[np.argmin(losses)]
215
- path_reg_levels = np.linspace(0, min_loss_reg, 20)
216
- path_w = []
217
- for reg_level in path_reg_levels:
218
- mask = regs <= reg_level
219
- if np.sum(mask) == 0:
220
- continue
221
- idx = np.argmin(losses[mask])
222
- path_w.append(stacked[mask][idx])
223
-
224
- path_w = np.array(path_w)
 
 
 
 
225
  ax.plot(path_w[:, 0], path_w[:, 1], "r-")
226
 
227
  # custom legend
@@ -333,7 +350,7 @@ class Regularization:
333
 
334
  with gr.Row():
335
  # regularizer type
336
- regularizer_type = gr.Dropdown(choices=['l1', 'l2', 'elastic-net'],
337
  label='Regularizer type',
338
  value='l2',
339
  visible=True)
 
17
  from sklearn.datasets import load_iris
18
  from sklearn.metrics import classification_report, mean_squared_error, mean_absolute_error
19
  from sklearn.datasets import make_regression
20
+ from sklearn.linear_model import ElasticNet
 
21
 
22
  import traceback
23
  import yaml
 
64
  return np.mean((y - preds) ** 2, axis=1).reshape(num_dots, num_dots)
65
 
66
 
67
def l2_loss_regularization_path(y, X, regularization_type):
    """Compute the coefficient path for a squared-error (l2-loss) model.

    Uses scikit-learn's ElasticNet path generator: ``l1_ratio=0`` gives a
    pure l2 (ridge-like) penalty, ``l1_ratio=1`` a pure l1 (lasso) penalty.

    Parameters
    ----------
    y : array-like of shape (n_samples,)
        Regression targets.
    X : array-like of shape (n_samples, n_features)
        Design matrix.
    regularization_type : str
        Either ``'l1'`` or ``'l2'``.

    Returns
    -------
    numpy.ndarray of shape (n_alphas, n_features)
        One coefficient vector per regularization strength along the path.

    Raises
    ------
    ValueError
        If ``regularization_type`` is neither ``'l1'`` nor ``'l2'``.
    """
    if regularization_type == "l2":
        # Automatic alpha-grid generation is not supported by sklearn for
        # l1_ratio=0, so an explicit grid is supplied here.
        # NOTE(review): the grid starts at alpha=0 (unpenalized OLS), where
        # coordinate descent may emit a ConvergenceWarning — confirm this
        # endpoint is intentional.
        l1_ratio = 0
        alphas = np.arange(0, 100, 0.02)
    elif regularization_type == "l1":
        # Pure lasso path: let sklearn choose the alpha grid automatically.
        l1_ratio = 1
        alphas = None
    else:
        # Report the offending value so the caller can see what was passed.
        raise ValueError(
            f"regularization_type must be 'l1' or 'l2', got {regularization_type!r}"
        )

    # ElasticNet.path (enet_path) returns coefs of shape
    # (n_features, n_alphas); transpose so each row is one point on the path.
    _, coefs, *_ = ElasticNet.path(X, y, l1_ratio=l1_ratio, alphas=alphas)
    return coefs.T
80
+
81
  class Regularization:
82
  def __init__(self, width, height):
83
  # initialized in draw_plot
 
224
 
225
  # plot path
226
  if self.plot_regularization_path:
227
+ if self.loss_type == "l2":
228
+ path_w = l2_loss_regularization_path(y, X, regularization_type=self.reg_type)
229
+ else:
230
+ min_loss_reg = regs.ravel()[np.argmin(losses)]
231
+ path_reg_levels = np.linspace(0, min_loss_reg, 20)
232
+ path_w = []
233
+ for reg_level in path_reg_levels:
234
+ mask = regs <= reg_level
235
+ if np.sum(mask) == 0:
236
+ continue
237
+ idx = np.argmin(losses[mask])
238
+ path_w.append(stacked[mask][idx])
239
+
240
+ path_w = np.array(path_w)
241
+
242
  ax.plot(path_w[:, 0], path_w[:, 1], "r-")
243
 
244
  # custom legend
 
350
 
351
  with gr.Row():
352
  # regularizer type
353
+ regularizer_type = gr.Dropdown(choices=['l1', 'l2'],
354
  label='Regularizer type',
355
  value='l2',
356
  visible=True)