LogisticRegression.py
import numpy as np
from scipy.optimize import minimize
from scipy.special import expit


class LogisticRegression:
    """Binary logistic regression fitted with L-BFGS-B.

    Minimises the logistic loss plus an L2 penalty of 0.5 * ||w||^2 on the
    weights (the intercept is not penalised).

    Dependencies
    ------------
    scipy :
        - scipy.optimize.minimize
        - scipy.special.expit
    numpy :
        - numpy.dot, numpy.unique, numpy.zeros, numpy.sum,
          numpy.vectorize, numpy.log, numpy.exp

    Parameters
    ----------
    None

    Methods
    -------
    fit(X, y) : Fit the model using X as training data and y as target values.
    predict(X) : Predict the class labels for the provided data.
    """

    @staticmethod
    def cost_gradient(w, X, y):
        """Return the penalised negative log-likelihood and its gradient.

        w holds the weights in w[:-1] and the intercept in w[-1];
        y is expected in {-1, +1}.
        """
        # Margins y * (X w + b); log(sigmoid(yz)) is evaluated with the
        # numerically stable branch for positive and negative arguments.
        yz = y * (np.dot(X, w[:-1]) + w[-1])
        log_sigmoid = np.vectorize(
            lambda x: -np.log(1 + np.exp(-x))
            if x > 0
            else x - np.log(1 + np.exp(x))
        )
        # Negative log-likelihood plus L2 penalty on the weights only.
        cost = -np.sum(log_sigmoid(yz)) + 0.5 * np.dot(w[:-1], w[:-1])

        # Gradient: d/dw = X^T [(sigmoid(yz) - 1) * y] + w,  d/db = sum of that term.
        grad = np.zeros(len(w))
        t = (expit(yz) - 1) * y
        grad[:-1] = np.dot(X.T, t) + w[:-1]
        grad[-1] = np.sum(t)
        return cost, grad

    def fit(self, X, y):
        """Fit the model using X as training data and y as target values."""
        # Encode the two class labels as 0/1, then map them to {-1, +1}.
        self._class, y = np.unique(y, return_inverse=True)
        y = y * 2 - 1
        # Minimise cost_gradient with L-BFGS-B; jac=True tells scipy that the
        # callable returns (cost, gradient) as a single tuple.
        res = minimize(
            fun=LogisticRegression.cost_gradient,
            jac=True,
            x0=np.zeros(X.shape[1] + 1),
            args=(X, y),
            method="L-BFGS-B",
        )
        self.coef_, self.intercept_ = res.x[:-1], res.x[-1]
        return self

    def predict(self, X):
        """Predict the class labels for the provided data."""
        # Positive decision function -> second class, otherwise first class.
        y_pred = ((np.dot(X, self.coef_) + self.intercept_) > 0).astype(int)
        return self._class[y_pred]
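

# ---------------------------------------------------------------------------
# Usage sketch (illustrative addition, not part of the original module).
# It shows the intended fit/predict flow on a tiny made-up dataset; the
# feature values and labels below are hypothetical and chosen only so the
# two classes are linearly separable.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    X_train = np.array([[0.0, 0.0], [0.5, 0.2], [2.0, 2.2], [2.5, 1.8]])
    y_train = np.array(["neg", "neg", "pos", "pos"])

    clf = LogisticRegression().fit(X_train, y_train)
    print("weights:", clf.coef_, "intercept:", clf.intercept_)
    # Expected to print ['neg' 'pos'] for one point near each cluster.
    print(clf.predict(np.array([[0.1, 0.1], [2.2, 2.0]])))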