fix: internal tuning and parameter transformations #457

Merged 5 commits on Oct 21, 2024
7 changes: 6 additions & 1 deletion NEWS.md
@@ -1,6 +1,11 @@
# mlr3tuning (development version)

fix: The `as_data_table()` functions do not unnest the `x_domain` colum anymore by default.
* fix: The `as_data_table()` functions no longer unnest the `x_domain` column by default.
* fix: `to_tune(internal = TRUE)` now also works if non-internal tuning parameters have
  an `.extra_trafo`.
* feat: It is now possible to pass an `internal_search_space` manually.
  This makes it possible to use parameter transformations on the primary search space in combination with
  internal hyperparameter tuning (see the usage sketch after this diff).

# mlr3tuning 1.0.2

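To make the two `to_tune()`-related entries concrete, here is a minimal usage sketch, not taken from this PR: it assumes the `classif.xgboost` learner from mlr3learners with validation enabled, and the learner, task, and budget are illustrative choices. `eta` carries a `logscale` trafo while `nrounds` is tuned internally, the combination that previously errored.

```r
library(mlr3)
library(mlr3learners)
library(mlr3tuning)

# `eta` has a trafo (logscale), `nrounds` is tuned internally via early
# stopping: the combination that `to_tune(internal = TRUE)` now supports
learner = lrn("classif.xgboost",
  eta = to_tune(1e-4, 1, logscale = TRUE),
  nrounds = to_tune(upper = 500, internal = TRUE),
  early_stopping_rounds = 10,
  validate = "test"
)

instance = tune(
  tuner = tnr("random_search"),
  task = tsk("sonar"),
  learner = learner,
  resampling = rsmp("cv", folds = 3),
  measures = msr("classif.ce"),
  term_evals = 10
)

# internally tuned value (assumption: aggregated over the resampling folds)
instance$result_learner_param_vals$nrounds
```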
3 changes: 3 additions & 0 deletions R/AutoTuner.R
@@ -46,6 +46,7 @@
#' @template param_measure
#' @template param_terminator
#' @template param_search_space
#' @template param_internal_search_space
#' @template param_store_tuning_instance
#' @template param_store_benchmark_result
#' @template param_store_models
@@ -137,6 +138,7 @@ AutoTuner = R6Class("AutoTuner",
measure = NULL,
terminator,
search_space = NULL,
internal_search_space = NULL,
store_tuning_instance = TRUE,
store_benchmark_result = TRUE,
store_models = FALSE,
@@ -156,6 +158,7 @@ AutoTuner = R6Class("AutoTuner",
ia$resampling = assert_resampling(resampling)$clone()
if (!is.null(measure)) ia$measure = assert_measure(as_measure(measure), learner = learner)
if (!is.null(search_space)) ia$search_space = assert_param_set(as_search_space(search_space))$clone()
if (!is.null(internal_search_space)) ia$internal_search_space = assert_param_set(as_search_space(internal_search_space))$clone()
ia$terminator = assert_terminator(terminator)$clone()

ia$store_models = assert_flag(store_models)
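A hedged sketch of the new constructor argument in use. It assumes paradox's `p_int(aggr = ...)` for user-supplied internal search spaces and that `classif.xgboost` supports internal tuning of `nrounds`; neither is shown in this diff.

```r
library(mlr3)
library(mlr3learners)
library(mlr3tuning)
library(paradox)

at = AutoTuner$new(
  tuner = tnr("random_search"),
  learner = lrn("classif.xgboost",
    eta = to_tune(1e-4, 1, logscale = TRUE),
    early_stopping_rounds = 10,
    validate = "test"
  ),
  resampling = rsmp("cv", folds = 3),
  measure = msr("classif.ce"),
  terminator = trm("evals", n_evals = 10),
  # forwarded to the tuning instance (see `ia$internal_search_space` above)
  internal_search_space = ps(
    nrounds = p_int(upper = 500, aggr = function(x) as.integer(mean(unlist(x))))
  )
)
at$train(tsk("sonar"))
```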
50 changes: 40 additions & 10 deletions R/TuningInstanceAsyncMulticrit.R
@@ -16,6 +16,7 @@
#' @template param_measures
#' @template param_terminator
#' @template param_search_space
#' @template param_internal_search_space
#' @template param_store_benchmark_result
#' @template param_store_models
#' @template param_check_values
@@ -44,6 +45,7 @@ TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit",
measures,
terminator,
search_space = NULL,
internal_search_space = NULL,
store_benchmark_result = TRUE,
store_models = FALSE,
check_values = FALSE,
@@ -57,24 +59,52 @@ TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit",
if (!is.null(search_space) && length(learner$param_set$get_values(type = "only_token"))) {
stop("If the values of the ParamSet of the Learner contain TuneTokens you cannot supply a search_space.")
}
if (is.null(search_space)) {
search_space = as_search_space(learner)
learner$param_set$values = learner$param_set$get_values(type = "without_token")

search_space_from_tokens = is.null(search_space)

# convert tune token to search space
search_space = if (is.null(search_space)) {
learner$param_set$search_space()
} else {
search_space = as_search_space(search_space)
as_search_space(search_space)
}

# internal search space
internal_tune_ids = keep(names(search_space$tags), map_lgl(search_space$tags, function(tag) "internal_tuning" %in% tag))
# get ids of primary and internal hyperparameters
sids = search_space$ids()
internal_tune_ids = search_space$ids(any_tags = "internal_tuning")

# subset search space to primary hyperparameters
if (length(internal_tune_ids)) {
self$internal_search_space = search_space$subset(internal_tune_ids)
search_space = search_space$subset(setdiff(sids, internal_tune_ids))
}

if (self$internal_search_space$has_trafo) {
stopf("Inner tuning and parameter transformations are currently not supported.")
# get internal search space
self$internal_search_space = if (is.null(internal_search_space)) {
# We DO NOT subset the provided search space here, because the subset would carry over
# an extra_trafo, which is not allowed for the internal tuning search space
if (length(internal_tune_ids)) {
learner$param_set$subset(internal_tune_ids)$search_space()
}
} else {
if (length(internal_tune_ids)) {
stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.")
}
as_search_space(internal_search_space)
}

# set learner parameter values
if (search_space_from_tokens) {
learner$param_set$values = learner$param_set$get_values(type = "without_token")
}

search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids))
if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) {
stopf("Internal tuning and parameter transformations are currently not supported.
If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning',
please pass the latter separately via the argument `internal_search_space`.")
}

# set internal search space
if (!is.null(self$internal_search_space)) {
# the learner dictates how to interpret to_tune(..., internal = TRUE)
learner$param_set$set_values(.values = learner$param_set$convert_internal_search_space(self$internal_search_space))
}
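The refactor above swaps a manual filter over `$tags` for paradox's tag-based query `ids(any_tags = ...)`. A standalone illustration with hypothetical parameter names (`cp`, `max_iter`); the `aggr` argument is an assumption about how an internal tuning parameter would be declared:

```r
library(paradox)

search_space = ps(
  cp = p_dbl(1e-4, 1e-1),
  max_iter = p_int(1, 1000, tags = "internal_tuning",
    aggr = function(x) as.integer(mean(unlist(x))))
)

sids = search_space$ids()
internal_tune_ids = search_space$ids(any_tags = "internal_tuning")
internal_tune_ids
#> [1] "max_iter"

# subset to the primary hyperparameters, as in the constructor above
primary = search_space$subset(setdiff(sids, internal_tune_ids))
primary$ids()
#> [1] "cp"
```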
50 changes: 40 additions & 10 deletions R/TuningInstanceAsyncSingleCrit.R
@@ -25,6 +25,7 @@
#' @template param_measure
#' @template param_terminator
#' @template param_search_space
#' @template param_internal_search_space
#' @template param_store_benchmark_result
#' @template param_store_models
#' @template param_check_values
@@ -54,6 +55,7 @@ TuningInstanceAsyncSingleCrit = R6Class("TuningInstanceAsyncSingleCrit",
measure = NULL,
terminator,
search_space = NULL,
internal_search_space = NULL,
store_benchmark_result = TRUE,
store_models = FALSE,
check_values = FALSE,
@@ -67,24 +69,52 @@ TuningInstanceAsyncSingleCrit = R6Class("TuningInstanceAsyncSingleCrit",
if (!is.null(search_space) && length(learner$param_set$get_values(type = "only_token"))) {
stop("If the values of the ParamSet of the Learner contain TuneTokens you cannot supply a search_space.")
}
if (is.null(search_space)) {
search_space = as_search_space(learner)
learner$param_set$values = learner$param_set$get_values(type = "without_token")

search_space_from_tokens = is.null(search_space)

# convert tune token to search space
search_space = if (is.null(search_space)) {
learner$param_set$search_space()
} else {
search_space = as_search_space(search_space)
as_search_space(search_space)
}

# internal search space
internal_tune_ids = keep(names(search_space$tags), map_lgl(search_space$tags, function(tag) "internal_tuning" %in% tag))
# get ids of primary and internal hyperparameters
sids = search_space$ids()
internal_tune_ids = search_space$ids(any_tags = "internal_tuning")

# subset search space to primary hyperparameters
if (length(internal_tune_ids)) {
self$internal_search_space = search_space$subset(internal_tune_ids)
search_space = search_space$subset(setdiff(sids, internal_tune_ids))
}

if (self$internal_search_space$has_trafo) {
stopf("Inner tuning and parameter transformations are currently not supported.")
# get internal search space
self$internal_search_space = if (is.null(internal_search_space)) {
# We DO NOT subset the provided search space here, because the subset would carry over
# an extra_trafo, which is not allowed for the internal tuning search space
if (length(internal_tune_ids)) {
learner$param_set$subset(internal_tune_ids)$search_space()
}
} else {
if (length(internal_tune_ids)) {
stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.")
}
as_search_space(internal_search_space)
}

# set learner parameter values
if (search_space_from_tokens) {
learner$param_set$values = learner$param_set$get_values(type = "without_token")
}

search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids))
if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) {
stopf("Internal tuning and parameter transformations are currently not supported.
If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning',
please pass the latter separately via the argument `internal_search_space`.")
}

# set internal search space
if (!is.null(self$internal_search_space)) {
# the learner dictates how to interpret to_tune(..., internal = TRUE)
learner$param_set$set_values(.values = learner$param_set$convert_internal_search_space(self$internal_search_space))
}
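For context on the final step (`convert_internal_search_space`), a hedged sketch of what the conversion plausibly amounts to for an early-stopping learner. The resolved value is an assumption based on paradox's `in_tune_fn` mechanics, not verified output of this PR:

```r
library(mlr3)
library(mlr3learners)
library(paradox)

learner = lrn("classif.xgboost", early_stopping_rounds = 10, validate = "test")

internal_ss = ps(
  nrounds = p_int(upper = 500, aggr = function(x) as.integer(mean(unlist(x))))
)

# the learner's ParamSet resolves each internal parameter to a concrete
# training value; for xgboost, nrounds is presumably fixed at its upper
# bound so that early stopping can cut training short
vals = learner$param_set$convert_internal_search_space(internal_ss)
learner$param_set$set_values(.values = vals)
learner$param_set$values$nrounds  # assumption: 500
```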
50 changes: 40 additions & 10 deletions R/TuningInstanceBatchMulticrit.R
@@ -23,6 +23,7 @@
#' @template param_measures
#' @template param_terminator
#' @template param_search_space
#' @template param_internal_search_space
#' @template param_store_benchmark_result
#' @template param_store_models
#' @template param_check_values
@@ -81,6 +82,7 @@ TuningInstanceBatchMultiCrit = R6Class("TuningInstanceBatchMultiCrit",
measures,
terminator,
search_space = NULL,
internal_search_space = NULL,
store_benchmark_result = TRUE,
store_models = FALSE,
check_values = FALSE,
@@ -92,24 +94,52 @@ TuningInstanceBatchMultiCrit = R6Class("TuningInstanceBatchMultiCrit",
if (!is.null(search_space) && length(learner$param_set$get_values(type = "only_token"))) {
stop("If the values of the ParamSet of the Learner contain TuneTokens you cannot supply a search_space.")
}
if (is.null(search_space)) {
search_space = as_search_space(learner)
learner$param_set$values = learner$param_set$get_values(type = "without_token")

search_space_from_tokens = is.null(search_space)

# convert tune token to search space
search_space = if (is.null(search_space)) {
learner$param_set$search_space()
} else {
search_space = as_search_space(search_space)
as_search_space(search_space)
}

# internal search space
internal_tune_ids = keep(names(search_space$tags), map_lgl(search_space$tags, function(tag) "internal_tuning" %in% tag))
# get ids of primary and internal hyperparameters
sids = search_space$ids()
internal_tune_ids = search_space$ids(any_tags = "internal_tuning")

# subset search space to primary hyperparameters
if (length(internal_tune_ids)) {
self$internal_search_space = search_space$subset(internal_tune_ids)
search_space = search_space$subset(setdiff(sids, internal_tune_ids))
}

if (self$internal_search_space$has_trafo) {
stopf("Inner tuning and parameter transformations are currently not supported.")
# get internal search space
self$internal_search_space = if (is.null(internal_search_space)) {
# We DO NOT subset the provided search space here, because the subset would carry over
# an extra_trafo, which is not allowed for the internal tuning search space
if (length(internal_tune_ids)) {
learner$param_set$subset(internal_tune_ids)$search_space()
}
} else {
if (length(internal_tune_ids)) {
stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.")
}
as_search_space(internal_search_space)
}

# set learner parameter values
if (search_space_from_tokens) {
learner$param_set$values = learner$param_set$get_values(type = "without_token")
}

search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids))
if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) {
stopf("Internal tuning and parameter transformations are currently not supported.
If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning',
please pass the latter separately via the argument `internal_search_space`.")
}

# set internal search space
if (!is.null(self$internal_search_space)) {
# the learner dictates how to interpret to_tune(..., internal = TRUE)
learner$param_set$set_values(.values = learner$param_set$convert_internal_search_space(self$internal_search_space))
}
51 changes: 40 additions & 11 deletions R/TuningInstanceBatchSingleCrit.R
@@ -59,11 +59,11 @@
#' @template param_measure
#' @template param_terminator
#' @template param_search_space
#' @template param_internal_search_space
#' @template param_store_benchmark_result
#' @template param_store_models
#' @template param_check_values
#' @template param_callbacks
#' @template param_internal_search_space
#'
#' @template param_xdt
#' @template param_learner_param_vals
@@ -120,6 +120,7 @@ TuningInstanceBatchSingleCrit = R6Class("TuningInstanceBatchSingleCrit",
measure = NULL,
terminator,
search_space = NULL,
internal_search_space = NULL,
store_benchmark_result = TRUE,
store_models = FALSE,
check_values = FALSE,
@@ -131,24 +132,52 @@ TuningInstanceBatchSingleCrit = R6Class("TuningInstanceBatchSingleCrit",
if (!is.null(search_space) && length(learner$param_set$get_values(type = "only_token"))) {
stop("If the values of the ParamSet of the Learner contain TuneTokens you cannot supply a search_space.")
}
if (is.null(search_space)) {
search_space = as_search_space(learner)
learner$param_set$values = learner$param_set$get_values(type = "without_token")

search_space_from_tokens = is.null(search_space)

# convert tune token to search space
search_space = if (is.null(search_space)) {
learner$param_set$search_space()
} else {
search_space = as_search_space(search_space)
as_search_space(search_space)
}

# internal search space
internal_tune_ids = keep(names(search_space$tags), map_lgl(search_space$tags, function(tag) "internal_tuning" %in% tag))
# get ids of primary and internal hyperparameters
sids = search_space$ids()
internal_tune_ids = search_space$ids(any_tags = "internal_tuning")

# subset search space to primary hyperparameters
if (length(internal_tune_ids)) {
self$internal_search_space = search_space$subset(internal_tune_ids)
search_space = search_space$subset(setdiff(sids, internal_tune_ids))
}

if (self$internal_search_space$has_trafo) {
stopf("Inner tuning and parameter transformations are currently not supported.")
# get internal search space
self$internal_search_space = if (is.null(internal_search_space)) {
# We DO NOT subset the provided search space here, because the subset would carry over
# an extra_trafo, which is not allowed for the internal tuning search space
if (length(internal_tune_ids)) {
learner$param_set$subset(internal_tune_ids)$search_space()
}
} else {
if (length(internal_tune_ids)) {
stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.")
}
as_search_space(internal_search_space)
}

search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids))
# set learner parameter values
if (search_space_from_tokens) {
learner$param_set$values = learner$param_set$get_values(type = "without_token")
}

if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) {
stopf("Internal tuning and parameter transformations are currently not supported.
If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning',
please pass the latter separately via the argument `internal_search_space`.")
}

# set internal search space
if (!is.null(self$internal_search_space)) {
# the learner dictates how to interpret to_tune(..., internal = TRUE)
learner$param_set$set_values(.values = learner$param_set$convert_internal_search_space(self$internal_search_space))
}
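Finally, a hedged end-to-end sketch of the headline feature: an `.extra_trafo` on the primary search space combined with a manually supplied `internal_search_space`. It assumes the `ti()` sugar forwards the new argument to the instance constructors changed above; learner, task, and the `aggr` function are illustrative.

```r
library(mlr3)
library(mlr3learners)
library(mlr3tuning)
library(paradox)

learner = lrn("classif.xgboost", early_stopping_rounds = 10, validate = "test")

instance = ti(
  task = tsk("sonar"),
  learner = learner,
  resampling = rsmp("cv", folds = 3),
  measures = msr("classif.ce"),
  terminator = trm("evals", n_evals = 10),
  # primary space with an extra trafo: previously incompatible with
  # internal tuning, now allowed because the internal part is passed separately
  search_space = ps(
    eta = p_dbl(-4, 0),
    .extra_trafo = function(x, param_set) {
      x$eta = 10^x$eta
      x
    }
  ),
  internal_search_space = ps(
    nrounds = p_int(upper = 500, aggr = function(x) as.integer(mean(unlist(x))))
  )
)

tnr("random_search")$optimize(instance)
```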