
Commit 4bc2864

Merge pull request #201 from mandjevant/master
Sklearn 1.3 update
2 parents d050420 + 5168d04 commit 4bc2864

Showing 26 changed files with 84 additions and 80 deletions.

.github/workflows/tests.yml

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest]
-        python-version: ['3.7', '3.8', '3.9']
+        python-version: ['3.9', '3.10', '3.11']

     steps:
       - uses: actions/checkout@v2

README.md

Lines changed: 0 additions & 3 deletions
@@ -5,9 +5,6 @@
 [scikit-learn](http://scikit-learn.org/).

 See how to use hyperopt-sklearn through [examples](http://hyperopt.github.io/hyperopt-sklearn/#documentation)
-or older
-[notebooks](http://nbviewer.ipython.org/github/hyperopt/hyperopt-sklearn/tree/master/notebooks)
-
 More examples can be found in the Example Usage section of the SciPy paper

 Komer B., Bergstra J., and Eliasmith C. "Hyperopt-Sklearn: automatic hyperparameter configuration for Scikit-learn" Proc. SciPy 2014. http://conference.scipy.org/proceedings/scipy2014/pdfs/komer.pdf

hpsklearn/components/__init__.py

Lines changed: 3 additions & 3 deletions
@@ -189,7 +189,7 @@ def any_classifier(name):
         random_forest_classifier(name + ".random_forest"),
         extra_tree_classifier(name + ".extra_trees"),
         ada_boost_classifier(name + ".ada_boost"),
-        gradient_boosting_classifier(name + ".grad_boosting", loss="deviance"),
+        gradient_boosting_classifier(name + ".grad_boosting"),
         sgd_classifier(name + ".sgd")
     ]

@@ -207,7 +207,7 @@ def any_sparse_classifier(name):
     sparse_classifiers = [
         linear_svc(name + ".linear_svc"),
         sgd_classifier(name + ".sgd"),
-        k_neighbors_classifier(name + ".knn", metric="euclidean", p=2),
+        k_neighbors_classifier(name + ".knn", p=2),
         multinomial_nb(name + ".multinomial_nb")
     ]

@@ -242,7 +242,7 @@ def any_sparse_regressor(name):
     """
     sparse_regressors = [
         sgd_regressor(name + ".sgd"),
-        k_neighbors_regressor(name + ".knn", metric="euclidean", p=2)
+        k_neighbors_regressor(name + ".knn", p=2)
     ]

     return hp.choice(name, sparse_regressors)
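
Context for these removals: scikit-learn 1.3 drops the "deviance" loss alias from GradientBoostingClassifier, and the KNeighbors estimators already use Euclidean distance by default (metric="minkowski" with p=2), so the pinned arguments were invalid or redundant. A minimal sketch of the equivalent direct scikit-learn calls (illustrative, not part of the commit):

import numpy as np
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.neighbors import KNeighborsClassifier

# sklearn >= 1.3: "deviance" is gone; "log_loss" is the same logistic loss
gb = GradientBoostingClassifier(loss="log_loss")

# metric defaults to "minkowski"; with p=2 that is the Euclidean distance,
# so dropping metric="euclidean" does not change the neighbours found
knn = KNeighborsClassifier(p=2)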

hpsklearn/components/cluster/_kmeans.py

Lines changed: 3 additions & 3 deletions
@@ -61,8 +61,8 @@ def _kmeans_hp_space(


 @validate(params=["algorithm"],
-          validation_test=lambda param: not isinstance(param, str) or param in ["auto", "full", "elkan"],
-          msg="Invalid parameter '%s' with value '%s'. Value must be 'auto', 'full' or 'elkan'")
+          validation_test=lambda param: not isinstance(param, str) or param in ["lloyd", "elkan"],
+          msg="Invalid parameter '%s' with value '%s'. Value must be 'lloyd' or 'elkan'")
 def k_means(name: str,
             n_init: typing.Union[int, Apply] = None,
             max_iter: typing.Union[int, Apply] = None,

@@ -94,7 +94,7 @@ def _name(msg):
     hp_space["max_iter"] = scope.int(hp.uniform(_name("max_iter"), 100, 500)) if max_iter is None else max_iter
     hp_space["tol"] = hp.uniform(_name("tol"), 1e-5, 1e-3) if tol is None else tol
     hp_space["copy_x"] = copy_x
-    hp_space["algorithm"] = hp.choice(_name("algorithm"), ["auto", "full", "elkan"]) if algorithm is None else algorithm
+    hp_space["algorithm"] = hp.choice(_name("algorithm"), ["lloyd", "elkan"]) if algorithm is None else algorithm

     return scope.sklearn_KMeans(**hp_space)
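
The rename tracks scikit-learn, where KMeans dropped the "auto" and "full" options in 1.3; "lloyd" is the classic iterative algorithm and the default, while "elkan" uses the triangle inequality to skip distance computations. A quick illustrative check on toy data (values are arbitrary):

import numpy as np
from sklearn.cluster import KMeans

X = np.array([[0.0, 0.0], [0.1, 0.2], [5.0, 5.1], [5.2, 4.9]])

# sklearn >= 1.3 accepts only "lloyd" (the default) or "elkan" here
km = KMeans(n_clusters=2, algorithm="lloyd", n_init=10).fit(X)
print(km.labels_)  # e.g. [0 0 1 1]; cluster ids may be swapped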

hpsklearn/components/ensemble/_bagging.py

Lines changed: 2 additions & 2 deletions
@@ -82,7 +82,7 @@ def _bagging_random_state(name: str):
           msg="Invalid parameter '%s' with value '%s'. Parameter value must exceed 1.")
 def _bagging_hp_space(
         name_func,
-        base_estimator=None,
+        estimator=None,
         n_estimators: typing.Union[int, Apply] = None,
         max_samples: typing.Union[float, Apply] = None,
         max_features: typing.Union[float, Apply] = None,

@@ -100,7 +100,7 @@ def _bagging_hp_space(
     bagging regressor
     """
     hp_space = dict(
-        base_estimator=base_estimator,
+        estimator=estimator,
         n_estimators=_bagging_n_estimators(name_func("n_estimators")) if n_estimators is None else n_estimators,
         max_samples=_bagging_max_samples(name_func("max_samples")) if max_samples is None else max_samples,
         max_features=_bagging_max_features(name_func("max_features")) if max_features is None else max_features,
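
This follows scikit-learn's rename of base_estimator to estimator on the Bagging meta-estimators (deprecated in 1.2). A minimal sketch of the corresponding direct call (illustrative):

from sklearn.ensemble import BaggingClassifier
from sklearn.tree import DecisionTreeClassifier

# sklearn >= 1.2: the wrapped model is passed as `estimator`
bag = BaggingClassifier(estimator=DecisionTreeClassifier(), n_estimators=10)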

hpsklearn/components/ensemble/_gb.py

Lines changed: 4 additions & 4 deletions
@@ -23,7 +23,7 @@ def _gb_clf_loss(name: str):
     """
     Declaration search space 'loss' parameter for _gb classifier
     """
-    return hp.choice(name, ["deviance", "exponential"])
+    return hp.choice(name, ["log_loss", "exponential"])


 def _gb_reg_loss(name: str):

@@ -211,16 +211,16 @@ def _gb_hp_space(


 @validate(params=["loss"],
-          validation_test=lambda param: not isinstance(param, str) or param in ("deviance", "exponential"),
-          msg="Invalid parameter '%s' with value '%s'. Choose 'deviance' or 'exponential'.")
+          validation_test=lambda param: not isinstance(param, str) or param in ("log_loss", "exponential"),
+          msg="Invalid parameter '%s' with value '%s'. Choose 'log_loss' or 'exponential'.")
 def gradient_boosting_classifier(name: str, loss: typing.Union[str, Apply] = None, **kwargs):
     """
     Return a pyll graph with hyperparameters that will construct
     a sklearn.ensemble.GradientBoostingClassifier model.

     Args:
         name: name | str
-        loss: choose 'deviance' or 'exponential' | str
+        loss: choose 'log_loss' or 'exponential' | str

     See help(hpsklearn.components._gb._gb_hp_space) for info on
     additional available GradientBoosting arguments.
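
With the rename in place, pinning the loss through hpsklearn would look like the sketch below, assuming the package-level exports shown in the project README (HyperoptEstimator and the component functions):

from hpsklearn import HyperoptEstimator, gradient_boosting_classifier

# sklearn >= 1.3: "log_loss" replaces the removed "deviance" alias
estim = HyperoptEstimator(
    classifier=gradient_boosting_classifier("my_gb", loss="log_loss"),
)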

hpsklearn/components/ensemble/_hist_gradient_boosting.py

Lines changed: 7 additions & 4 deletions
@@ -26,7 +26,10 @@ def _hist_gradient_boosting_reg_loss(name: str):
     hist gradient boosting regressor

     Parameter 'poisson' is also available. Not implemented since
-    'poisson' is only available for non-negative y data
+    'poisson' is only available for non-zero, non-negative y data
+
+    Parameter 'gamma' is also available. Not implemented since
+    'gamma' is only available for non-negative y data
     """
     return hp.choice(name, ["squared_error", "absolute_error"])

@@ -141,14 +144,14 @@ def _hist_gradient_boosting_hp_space(
                                                                                 "categorical_crossentropy"),
           msg="Invalid parameter '%s' with value '%s'. "
               "Choose 'auto', 'binary_crossentropy', 'categorical_crossentropy'")
-def hist_gradient_boosting_classifier(name: str, loss: typing.Union[str, Apply] = "auto", **kwargs):
+def hist_gradient_boosting_classifier(name: str, loss: typing.Union[str, Apply] = "log_loss", **kwargs):
     """
     Return a pyll graph with hyperparameters that will construct
     a sklearn.ensemble.HistGradientBoostingClassifier model.

     Args:
         name: name | str
-        loss: choose 'auto', 'binary_crossentropy' or 'categorical_crossentropy' | str
+        loss: 'log_loss' | str

     See help(hpsklearn.components._hist_gradient_boosting._hist_gradient_boosting_regressor) for info on
     additional available HistGradientBoosting arguments.

@@ -165,7 +168,7 @@ def _name(msg):

 @validate(params=["loss"],
           validation_test=lambda param: not isinstance(param, str) or param in ("squared_error", "absolute_error",
-                                                                                "poisson"),
+                                                                                "poisson", "quantile", "gamma"),
           msg="Invalid parameter '%s' with value '%s'. "
               "Choose 'squared_error', 'absolute_error', 'poisson'")
 def hist_gradient_boosting_regressor(name: str, loss: typing.Union[str, Apply] = None, **kwargs):
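
For reference, in scikit-learn 1.3 the classifier's only loss is "log_loss" (the old "auto", "binary_crossentropy" and "categorical_crossentropy" aliases were removed), while the regressor accepts "quantile" and "gamma" alongside "squared_error", "absolute_error" and "poisson". An illustrative regressor call with strictly positive targets, which are valid for every loss:

import numpy as np
from sklearn.ensemble import HistGradientBoostingRegressor

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 3))
y = rng.uniform(0.1, 10.0, size=100)  # strictly positive targets

# loss may be "squared_error", "absolute_error", "poisson",
# "quantile" or "gamma"; "quantile" also takes the quantile to fit
reg = HistGradientBoostingRegressor(loss="quantile", quantile=0.5).fit(X, y)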

hpsklearn/components/ensemble/_weight_boosting.py

Lines changed: 2 additions & 2 deletions
@@ -58,7 +58,7 @@ def _weight_boosting_random_state(name: str):
           msg="Invalid parameter '%s' with value '%s'. Parameter value must be non-negative and greater than 0.")
 def _weight_boosting_hp_space(
         name_func,
-        base_estimator=None,
+        estimator=None,
         n_estimators: typing.Union[int, Apply] = None,
         learning_rate: typing.Union[float, Apply] = None,
         random_state=None

@@ -69,7 +69,7 @@ def _weight_boosting_hp_space(
     AdaBoost regressor
     """
     hp_space = dict(
-        base_estimator=base_estimator,
+        estimator=estimator,
         n_estimators=_weight_boosting_n_estimators(name_func("n_estimators")) if n_estimators is None else n_estimators,
         learning_rate=_weight_boosting_learning_rate(name_func("learning_rate"))
         if learning_rate is None else learning_rate,
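
The same base_estimator to estimator rename applies to the AdaBoost estimators. An illustrative direct call:

from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier

# sklearn >= 1.2: `estimator` replaces `base_estimator` here as well
ada = AdaBoostClassifier(estimator=DecisionTreeClassifier(max_depth=1),
                         n_estimators=50)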

hpsklearn/components/lightgbm.py

Lines changed: 3 additions & 1 deletion
@@ -141,12 +141,14 @@ def _lightgbm_hp_space(
     lightgbm regressor
     """
     hp_space = dict(
-        max_depth=_lightgbm_max_depth(name_func("max_depth")) if max_depth is None else max_depth,
+        # max_depth=_lightgbm_max_depth(name_func("max_depth")) if max_depth is None else max_depth,
+        max_depth=-1,
         num_leaves=_lightgbm_num_leaves(name_func("num_leaves")) if num_leaves is None else num_leaves,
         learning_rate=_lightgbm_learning_rate(name_func("learning_rate")) if learning_rate is None else learning_rate,
         n_estimators=_lightgbm_n_estimators(name_func("n_estimators")) if n_estimators is None else n_estimators,
         min_child_weight=_lightgbm_min_child_weight(name_func("min_child_weight"))
         if min_child_weight is None else min_child_weight,
+        # min_child_samples=5,
         max_delta_step=max_delta_step,
         subsample=_lightgbm_subsample(name_func("subsample")) if subsample is None else subsample,
         colsample_bytree=_lightgbm_colsample_bytree(name_func("colsample_bytree"))
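
Background on the pinned value: in LightGBM, max_depth=-1 means no depth limit, so tree complexity is controlled mainly by num_leaves, which this search space still tunes. A sketch of the equivalent direct call (values are arbitrary):

from lightgbm import LGBMRegressor

# max_depth=-1 disables the depth limit; num_leaves bounds tree size
model = LGBMRegressor(max_depth=-1, num_leaves=31, n_estimators=100)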

hpsklearn/components/linear_model/_bayes.py

Lines changed: 5 additions & 5 deletions
@@ -18,9 +18,9 @@ def sklearn_ARDRegression(*args, **kwargs):
     return linear_model.ARDRegression(*args, **kwargs)


-def _bayes_n_iter(name: str):
+def _bayes_max_iter(name: str):
     """
-    Declaration search space 'n_iter' parameter
+    Declaration search space 'max_iter' parameter
     """
     return scope.int(hp.qloguniform(name, low=np.log(150), high=np.log(450), q=1.0))

@@ -40,15 +40,15 @@ def _bayes_alpha_lambda(name: str):
     return hp.lognormal(name, mu=np.log(1e-6), sigma=np.log(10))


-@validate(params=["n_iter"],
+@validate(params=["max_iter"],
           validation_test=lambda param: not isinstance(param, int) or param > 1,
           msg="Invalid parameter '%s' with value '%s'. Parameter value must exceed 1.")
 @validate(params=["alpha_1", "alpha_2", "lambda_1", "lambda_2"],
           validation_test=lambda param: not isinstance(param, float) or param >= 0,
           msg="Invalid parameter '%s' with value '%s'. Parameter value must be equal to or exceed 0.")
 def _bayes_hp_space(
         name_func,
-        n_iter: typing.Union[int, Apply] = None,
+        max_iter: typing.Union[int, Apply] = None,
         tol: typing.Union[float, Apply] = None,
         alpha_1: typing.Union[float, Apply] = None,
         alpha_2: typing.Union[float, Apply] = None,

@@ -65,7 +65,7 @@ def _bayes_hp_space(
     ard regression
     """
     hp_space = dict(
-        n_iter=_bayes_n_iter(name_func("n_iter")) if n_iter is None else n_iter,
+        max_iter=_bayes_max_iter(name_func("max_iter")) if max_iter is None else max_iter,
         tol=_bayes_tol(name_func("tol")) if tol is None else tol,
         alpha_1=_bayes_alpha_lambda(name_func("alpha_1")) if alpha_1 is None else alpha_1,
         alpha_2=_bayes_alpha_lambda(name_func("alpha_2")) if alpha_2 is None else alpha_2,
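
This mirrors scikit-learn 1.3, which deprecates n_iter on BayesianRidge and ARDRegression in favour of max_iter. Illustrative direct calls:

from sklearn.linear_model import ARDRegression, BayesianRidge

# sklearn >= 1.3: use `max_iter`; `n_iter` is deprecated
ridge = BayesianRidge(max_iter=300)
ard = ARDRegression(max_iter=300)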
