
Commit 66b691b

Author: Xavier Tannier (committed)
Parent: e457746

Fixed deprecation warnings for the use of n_iter in the SGD and PassiveAggressive classifiers:
- removed the n_iter parameter
- added a search space for the parameters max_iter and tol
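For context: from scikit-learn 0.19 onwards, passing n_iter to SGDClassifier, SGDRegressor or PassiveAggressiveClassifier triggers a DeprecationWarning; the estimators expect max_iter and tol instead. A minimal sketch of the post-deprecation call, with illustrative values rather than the ones hpsklearn actually searches over:

    # Sketch of the post-deprecation scikit-learn API; the values below are
    # illustrative only, hpsklearn samples them from a hyperopt search space.
    from sklearn.linear_model import SGDClassifier, PassiveAggressiveClassifier

    clf = SGDClassifier(loss='hinge', max_iter=1000, tol=1e-3)      # previously: n_iter=5
    pac = PassiveAggressiveClassifier(max_iter=1000, tol=1e-3)      # previously: n_iter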

1 file changed: +15 −12 lines

hpsklearn/components.py

Lines changed: 15 additions & 12 deletions
@@ -1025,7 +1025,8 @@ def sgd(name,
     alpha=None,           # default - 0.0001
     l1_ratio=None,        # default - 0.15, must be within [0, 1]
     fit_intercept=True,   # default - True
-    n_iter=5,             # default - 5
+    max_iter=None,
+    tol=None,
     shuffle=True,         # default - True
     random_state=None,    # default - None
     epsilon=None,
@@ -1057,7 +1058,9 @@ def _name(msg):
        l1_ratio=(_sgd_l1_ratio(_name('l1ratio'))
                  if l1_ratio is None else l1_ratio),
        fit_intercept=fit_intercept,
-       n_iter=n_iter,
+       tol=_svm_tol(_name('tol')) if tol is None else tol,
+       max_iter=(_svm_max_iter(_name('maxiter'))
+                 if max_iter is None else max_iter),
        learning_rate=(_sgdc_learning_rate(_name('learning_rate'))
                       if learning_rate is None else learning_rate),
        eta0=_sgd_eta0(_name('eta0')) if eta0 is None else eta0,
@@ -1077,7 +1080,8 @@ def sgd_regression(name,
     alpha=None,           # default - 0.0001
     l1_ratio=None,        # default - 0.15, must be within [0, 1]
     fit_intercept=True,   # default - True
-    n_iter=5,             # default - 5
+    tol=None,
+    max_iter=None,
     shuffle=None,         # default - False
     random_state=None,    # default - None
     epsilon=None,         # default - 0.1
@@ -1102,7 +1106,9 @@ def _name(msg):
        l1_ratio=(_sgd_l1_ratio(_name('l1ratio'))
                  if l1_ratio is None else l1_ratio),
        fit_intercept=fit_intercept,
-       n_iter=n_iter,
+       tol=_svm_tol(name_func('tol')) if tol is None else tol,
+       max_iter=(_svm_max_iter(name_func('maxiter'))
+                 if max_iter is None else max_iter),
        # For regression, use the SVM epsilon instead of the SGD one.
        epsilon=_svm_epsilon(_name('epsilon')) if epsilon is None else epsilon,
        learning_rate=(_sgdr_learning_rate(_name('learning_rate'))
@@ -1305,7 +1311,8 @@ def passive_aggressive(name,
     loss=None,
     C=None,
     fit_intercept=False,
-    n_iter=None,
+    tol=None,
+    max_iter=None,
     n_jobs=1,
     shuffle=True,
     random_state=None,
@@ -1324,13 +1331,9 @@ def _name(msg):
            np.log(10),
            ) if C is None else C,
        fit_intercept=fit_intercept,
-       n_iter=scope.int(
-           hp.qloguniform(
-               _name('n_iter'),
-               np.log(1),
-               np.log(1000),
-               q=1,
-           )) if n_iter is None else n_iter,
+       tol=_svm_tol(_name('tol')) if tol is None else tol,
+       max_iter=(_svm_max_iter(_name('maxiter'))
+                 if max_iter is None else max_iter),
        n_jobs=n_jobs,
        random_state=_random_state(_name('rstate'), random_state),
        verbose=verbose
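The replacement expressions reuse _svm_tol and _svm_max_iter, search-space helpers defined elsewhere in components.py; their bodies are not part of this diff. As a rough, hypothetical sketch of what such hyperopt helpers look like, with bounds chosen here purely for illustration:

    # Hypothetical sketch only: the real _svm_tol / _svm_max_iter live elsewhere
    # in hpsklearn/components.py and may use different bounds.
    import numpy as np
    from hyperopt import hp
    from hyperopt.pyll import scope

    def _svm_tol(name):
        # tolerance sampled log-uniformly around sklearn's 1e-3 default
        return hp.loguniform(name, np.log(1e-5), np.log(1e-2))

    def _svm_max_iter(name):
        # integer epoch budget, sampled log-uniformly
        return scope.int(hp.qloguniform(name, np.log(1e2), np.log(1e4), q=1))

With these spaces wired in, callers that previously tuned n_iter (for example something like HyperoptEstimator(classifier=sgd('mysgd')) in hpsklearn) now search over max_iter and tol without triggering the deprecation warning.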
