-
Notifications
You must be signed in to change notification settings - Fork 25
/
Copy path: objective.py
58 lines (48 loc) · 1.83 KB
/
objective.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import numpy as np
from numpy.linalg import norm
from benchopt import BaseObjective
class Objective(BaseObjective):
    """Benchopt objective for L1-regularized least squares (the Lasso).

    Minimizes ``0.5 * ||y - X beta||^2 + lmbd * ||beta||_1``, optionally
    with an unpenalized intercept appended after the coefficient vector.
    """

    name = "Lasso Regression"
    min_benchopt_version = "1.5"

    # Grid of settings benchopt cross-products over when running the benchmark.
    parameters = {
        'fit_intercept': [True, False],
        'reg': [.5, .1, .05],
    }

    def __init__(self, reg=.1, fit_intercept=False):
        # `reg` is a fraction of lambda_max; the effective lmbd is set in set_data.
        self.reg = reg
        self.fit_intercept = fit_intercept

    def set_data(self, X, y):
        """Store the design matrix / target and derive the penalty strength."""
        self.X = X
        self.y = y
        self.n_features = X.shape[1]
        # Scale the regularization relative to the smallest lambda that
        # yields an all-zero solution.
        self.lmbd = self.reg * self._get_lambda_max()

    def get_one_result(self):
        """Return a trivial feasible point (all-zero coefficients).

        One extra slot holds the intercept when it is being fitted.
        """
        size = self.n_features + int(self.fit_intercept)
        return dict(beta=np.zeros(size))

    def evaluate_result(self, beta):
        """Compute the primal value, support size and duality gap at `beta`."""
        # Work in double precision so the gap is not dominated by float32 noise.
        beta = beta.astype(np.float64)

        # When an intercept is fitted it is stored past the coefficients and
        # must be excluded from the L1 penalty.
        if self.fit_intercept:
            intercept = beta[self.n_features:]
            beta = beta[:self.n_features]

        residual = self.y - self.X @ beta
        if self.fit_intercept:
            residual -= intercept

        # Primal objective: least-squares fit plus L1 penalty on beta only.
        p_obj = .5 * residual.dot(residual) + self.lmbd * abs(beta).sum()

        # Project the residual into the dual feasible set
        # {theta : ||X^T theta||_inf <= lmbd} before evaluating the dual.
        dual_scale = max(1, norm(self.X.T @ residual, ord=np.inf) / self.lmbd)
        theta = residual / dual_scale
        d_obj = norm(self.y) ** 2 / 2. - norm(self.y - theta) ** 2 / 2

        return dict(
            value=p_obj,
            support_size=(beta != 0).sum(),
            duality_gap=p_obj - d_obj,
        )

    def _get_lambda_max(self):
        """Smallest lambda for which the Lasso solution is identically zero."""
        # With an intercept, lambda_max is taken on the centered target.
        target = self.y - self.y.mean() if self.fit_intercept else self.y
        return abs(self.X.T @ target).max()

    def get_objective(self):
        """Expose the problem data that solvers receive."""
        return dict(X=self.X, y=self.y, lmbd=self.lmbd,
                    fit_intercept=self.fit_intercept)