Skip to content

Commit 4cb6ab9

Browse files
authored
Merge pull request #2792 from mlr-org/cran-2.18.0
CRAN release v2.18.0
2 parents 9548443 + 62446e0 commit 4cb6ab9

12 files changed

+513
-428
lines changed

Diff for: .Rbuildignore

+1
Original file line numberDiff line numberDiff line change
@@ -44,3 +44,4 @@ revdep/*
4444
^\.github$
4545
^clang-.*
4646
^gfortran.*
47+
^cran-comments\.md$

Diff for: NEWS.md

+2-27
Original file line numberDiff line numberDiff line change
@@ -5,41 +5,16 @@
55

66
# mlr 2.18.0
77

8-
- Internal changes only.
9-
10-
11-
# mlr 2.17.1.9006
12-
138
- Many praznik filters are now also able to deal with regression tasks (#2790, @bommert)
149
- `praznik_MRMR`: Remove handling of survival tasks (#2790, @bommert)
15-
16-
17-
# mlr 2.17.1.9005
18-
1910
- xgboost: update `objective` default from `reg:linear` (deprecated) to `reg:squarederror`
2011
- issue a warning if `blocking` was set in the Task but `blocking.cv` was not set within `makeResampleDesc()` (#2788)
21-
22-
# mlr 2.17.1.9003
23-
2412
- Fix order of learners in `generateLearningCurveData()` (#2768)
25-
26-
27-
# mlr 2.17.1.9002
28-
2913
- `getFeatureImportance()`: Account for feature importance weight of linear xgboost models
30-
31-
32-
# mlr 2.17.1.9001
33-
3414
- Fix learner note for learner glmnet (the default of param `s` did not match the learner note) (#2747)
35-
- Remove dep {hrbrthemes} used in `createSpatialResamplingPlots()`. The package caused issues on R-devel. In addition users should set custom themes by themselves.
15+
- Remove dependency {hrbrthemes} used in `createSpatialResamplingPlots()`. The package caused issues on R-devel. In addition users should set custom themes by themselves.
3616
- Explicitly return value in `getNestedTuneResultsOptPathDf()` (#2754)
3717

38-
39-
# mlr 2.17.1.9000
40-
41-
- Internal changes only
42-
4318
# mlr 2.17.1
4419

4520
## Learners - bugfixes
@@ -235,7 +210,7 @@ PR: #2638 (@pfistl)
235210
## filters - new
236211

237212
- Ensemble features are now supported. These filters combine multiple single filters to create a final ranking based on certain statistical operations. All new filters are listed in a dedicated section "ensemble filters" in the [tutorial](https://mlr.mlr-org.com/articles/tutorial/filter_methods.html).
238-
Tuning of simple features is not supported yet because of a [missing feature](https://github.com/berndbischl/ParamHelpers/pull/206) in _ParamHelpers_. (@pat-s, #2456)
213+
Tuning of simple features is not supported yet because of a [missing feature](https://github.com/mlr-org/ParamHelpers/pull/206) in _ParamHelpers_. (@pat-s, #2456)
239214

240215
# mlr 2.14.0
241216

Diff for: R/CostSensWeightedPairsWrapper.R

+6-3
Original file line numberDiff line numberDiff line change
@@ -23,21 +23,23 @@
2323
#' One-versus-one Binary Classification.
2424
#' In: Proceedings of the Sixth Asian Conference on Machine Learning.
2525
#' JMLR Workshop and Conference Proceedings, vol 39, pp. 371-386. JMLR W&CP (2014).
26-
#' <http://www.jmlr.org/proceedings/papers/v39/lin14.pdf>
26+
#' <https://www.jmlr.org/proceedings/papers/v39/lin14.pdf>
2727
#' @family costsens
2828
#' @aliases CostSensWeightedPairsWrapper CostSensWeightedPairsModel
2929
makeCostSensWeightedPairsWrapper = function(learner) {
3030
learner = checkLearner(learner, "classif", props = "weights")
3131
learner = setPredictType(learner, "response")
3232
id = stri_paste("costsens", learner$id, sep = ".")
33-
makeHomogeneousEnsemble(id, "costsens", learner, package = learner$package,
33+
makeHomogeneousEnsemble(id, "costsens", learner,
34+
package = learner$package,
3435
learner.subclass = "CostSensWeightedPairsWrapper", model.subclass = "CostSensWeightedPairsModel")
3536
}
3637

3738
#' @export
3839
trainLearner.CostSensWeightedPairsWrapper = function(.learner, .task, .subset = NULL, ...) {
3940

4041
# note that no hyperpars can be in ..., they would refer to the wrapper
42+
4143
.task = subsetTask(.task, subset = .subset)
4244
costs = getTaskCosts(.task)
4345
td = getTaskDesc(.task)
@@ -57,7 +59,8 @@ trainLearner.CostSensWeightedPairsWrapper = function(.learner, .task, .subset =
5759
models[[counter]] = y[1]
5860
} else {
5961
feats$..y.. = y
60-
task = makeClassifTask(data = feats, target = "..y..",
62+
task = makeClassifTask(
63+
data = feats, target = "..y..",
6164
check.data = FALSE, fixup.data = "quiet")
6265
w = abs(costs[, a1] - costs[, a2])
6366
models[[counter]] = train(.learner$next.learner, task, weights = w)

Diff for: R/MulticlassWrapper.R

+5-3
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
#' are generated is defined by an error-correcting-output-code (ECOC) code book.
1212
#' This also allows the simple and well-known one-vs-one and one-vs-rest
1313
#' approaches. Decoding is currently done via Hamming decoding, see
14-
#' e.g. here <http://jmlr.org/papers/volume11/escalera10a/escalera10a.pdf>.
14+
#' e.g. here <https://jmlr.org/papers/volume11/escalera10a/escalera10a.pdf>.
1515
#'
1616
#' Currently, the approach always operates on the discrete predicted labels
1717
#' of the binary base models (instead of their probabilities) and the created
@@ -41,7 +41,8 @@ makeMulticlassWrapper = function(learner, mcw.method = "onevsrest") {
4141
pv = list(mcw.method = mcw.method)
4242
id = stri_paste(learner$id, "multiclass", sep = ".")
4343

44-
x = makeHomogeneousEnsemble(id = id, type = "classif", next.learner = learner,
44+
x = makeHomogeneousEnsemble(
45+
id = id, type = "classif", next.learner = learner,
4546
package = learner$package, par.set = ps, par.vals = pv,
4647
learner.subclass = "MulticlassWrapper", model.subclass = "MulticlassModel")
4748
x = setPredictType(x, predict.type = "response")
@@ -58,7 +59,8 @@ trainLearner.MulticlassWrapper = function(.learner, .task, .subset = NULL, .weig
5859
args = list(x = x, learner = .learner, task = .task, weights = .weights)
5960
parallelLibrary("mlr", master = FALSE, level = "mlr.ensemble", show.info = FALSE)
6061
exportMlrOptions(level = "mlr.ensemble")
61-
models = parallelMap(i = seq_along(x$row.inds), doMulticlassTrainIteration,
62+
models = parallelMap(
63+
i = seq_along(x$row.inds), doMulticlassTrainIteration,
6264
more.args = args, level = "mlr.ensemble")
6365
m = makeHomChainModel(.learner, models)
6466
m$cm = cm

0 commit comments

Comments (0)