-
Notifications
You must be signed in to change notification settings - Fork 21
/
Copy pathdiscrete_adaboost.py
53 lines (43 loc) · 1.5 KB
/
discrete_adaboost.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import numpy as np
import decision_stump
class DiscreteAdaboost:
    def fit(self, X, y, n_estimators):
        '''
        Train a discrete AdaBoost ensemble of decision stumps.

        Parameters
        ----------
        X : shape (n_samples, n_features)
            Training data
        y : shape (n_samples,)
            Target values, 1 or -1
        n_estimators : The number of estimators at which boosting is terminated
        '''
        n_samples = X.shape[0]
        self.__alpha = np.zeros(n_estimators)
        self.__estimators = []

        # Start from uniform sample weights.
        w = np.full(n_samples, 1 / n_samples)
        for i in range(n_estimators):
            model = decision_stump.DecisionStump()
            model.fit(X, y, w)
            h = model.predict(X)

            # Weighted training error of this round's stump.
            eta = np.sum(w[np.flatnonzero(h != y)]) / np.sum(w)

            # beta = sqrt((1 - eta) / eta), so log(beta) = 0.5 * log((1 - eta) / eta),
            # the classic discrete-AdaBoost alpha. The 1e-8 guards against
            # division by zero when the stump classifies everything correctly.
            beta = np.sqrt((1 - eta) / (eta + 1e-8))

            # Multiplicative reweighting: w_i *= exp(-alpha * y_i * h_i),
            # i.e. up-weight misclassified samples, down-weight correct ones.
            w *= beta ** (-y * h)
            # Renormalize so the weights do not underflow to zero over many
            # rounds. eta is a ratio (its denominator is sum(w)), so this
            # rescaling does not change the learned ensemble.
            w /= np.sum(w)

            self.__alpha[i] = np.log(beta)
            self.__estimators.append(model)

    def predict(self, X):
        '''
        Predict class labels from the sign of the ensemble score.

        Parameters
        ----------
        X : shape (n_samples, n_features)
            Predicting data

        Returns
        -------
        y : shape (n_samples,)
            Predicted class label per sample, 1 or -1 (a score of exactly
            zero maps to 1)
        '''
        h = self.score(X)
        y_pred = np.ones_like(h)
        y_pred[np.flatnonzero(h < 0)] = -1
        return y_pred

    def score(self, X):
        '''Return the alpha-weighted sum of the stumps' predictions for X.'''
        return sum(alpha * estimator.predict(X)
                   for alpha, estimator in zip(self.__alpha, self.__estimators))