diff --git a/NEWS.md b/NEWS.md index 2f11ce35..2489bb09 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,6 +1,11 @@ # mlr3tuning (development version) -fix: The `as_data_table()` functions do not unnest the `x_domain` colum anymore by default. +* fix: The `as_data_table()` functions do not unnest the `x_domain` column anymore by default. +* fix: `to_tune(internal = TRUE)` now also works if non-internal tuning parameters have + an `.extra_trafo`. +* feat: It is now possible to pass an `internal_search_space` manually. + This allows using parameter transformations on the primary search space in combination with + internal hyperparameter tuning. # mlr3tuning 1.0.2 diff --git a/R/AutoTuner.R b/R/AutoTuner.R index 8956c255..5f90f705 100644 --- a/R/AutoTuner.R +++ b/R/AutoTuner.R @@ -46,6 +46,7 @@ #' @template param_measure #' @template param_terminator #' @template param_search_space +#' @template param_internal_search_space #' @template param_store_tuning_instance #' @template param_store_benchmark_result #' @template param_store_models @@ -137,6 +138,7 @@ AutoTuner = R6Class("AutoTuner", measure = NULL, terminator, search_space = NULL, + internal_search_space = NULL, store_tuning_instance = TRUE, store_benchmark_result = TRUE, store_models = FALSE, @@ -156,6 +158,7 @@ AutoTuner = R6Class("AutoTuner", ia$resampling = assert_resampling(resampling)$clone() if (!is.null(measure)) ia$measure = assert_measure(as_measure(measure), learner = learner) if (!is.null(search_space)) ia$search_space = assert_param_set(as_search_space(search_space))$clone() + if (!is.null(internal_search_space)) ia$internal_search_space = assert_param_set(as_search_space(internal_search_space))$clone() ia$terminator = assert_terminator(terminator)$clone() ia$store_models = assert_flag(store_models) diff --git a/R/TuningInstanceAsyncMulticrit.R b/R/TuningInstanceAsyncMulticrit.R index 450b2971..1663b06d 100644 --- a/R/TuningInstanceAsyncMulticrit.R +++ b/R/TuningInstanceAsyncMulticrit.R @@ -16,6 +16,7 @@ #' @template param_measures #' @template param_terminator #' @template param_search_space +#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -44,6 +45,7 @@ TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit", measures, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -57,24 +59,52 @@ TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit", if (!is.null(search_space) && length(learner$param_set$get_values(type = "only_token"))) { stop("If the values of the ParamSet of the Learner contain TuneTokens you cannot supply a search_space.") } - if (is.null(search_space)) { - search_space = as_search_space(learner) - learner$param_set$values = learner$param_set$get_values(type = "without_token") + + search_space_from_tokens = is.null(search_space) + + # convert tune token to search space + search_space = if (is.null(search_space)) { + learner$param_set$search_space() } else { - search_space = as_search_space(search_space) + as_search_space(search_space) } - # internal search space - internal_tune_ids = keep(names(search_space$tags), map_lgl(search_space$tags, function(tag) "internal_tuning" %in% tag)) + # get ids of primary and internal hyperparameters + sids = search_space$ids() + internal_tune_ids = search_space$ids(any_tags = "internal_tuning") + + # subset search space to primary hyperparameters if 
(length(internal_tune_ids)) { - self$internal_search_space = search_space$subset(internal_tune_ids) + search_space = search_space$subset(setdiff(sids, internal_tune_ids)) + } - if (self$internal_search_space$has_trafo) { - stopf("Inner tuning and parameter transformations are currently not supported.") + # get internal search space + self$internal_search_space = if (is.null(internal_search_space)) { + # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed + # for the internal tuning search space + if (length(internal_tune_ids)) { + learner$param_set$subset(internal_tune_ids)$search_space() + } + } else { + if (length(internal_tune_ids)) { + stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.") } + as_search_space(internal_search_space) + } + + # set learner parameter values + if (search_space_from_tokens) { + learner$param_set$values = learner$param_set$get_values(type = "without_token") + } - search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids)) + if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) { + stopf("Internal tuning and parameter transformations are currently not supported. + If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning', + please pass the latter separately via the argument `internal_search_space`.") + } + # set internal search space + if (!is.null(self$internal_search_space)) { # the learner dictates how to interpret the to_tune(..., inner) learner$param_set$set_values(.values = learner$param_set$convert_internal_search_space(self$internal_search_space)) } diff --git a/R/TuningInstanceAsyncSingleCrit.R b/R/TuningInstanceAsyncSingleCrit.R index 351667e1..da173d2d 100644 --- a/R/TuningInstanceAsyncSingleCrit.R +++ b/R/TuningInstanceAsyncSingleCrit.R @@ -25,6 +25,7 @@ #' @template param_measure #' @template param_terminator #' @template param_search_space +#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -54,6 +55,7 @@ TuningInstanceAsyncSingleCrit = R6Class("TuningInstanceAsyncSingleCrit", measure = NULL, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -67,24 +69,52 @@ TuningInstanceAsyncSingleCrit = R6Class("TuningInstanceAsyncSingleCrit", if (!is.null(search_space) && length(learner$param_set$get_values(type = "only_token"))) { stop("If the values of the ParamSet of the Learner contain TuneTokens you cannot supply a search_space.") } - if (is.null(search_space)) { - search_space = as_search_space(learner) - learner$param_set$values = learner$param_set$get_values(type = "without_token") + + search_space_from_tokens = is.null(search_space) + + # convert tune token to search space + search_space = if (is.null(search_space)) { + learner$param_set$search_space() } else { - search_space = as_search_space(search_space) + as_search_space(search_space) } - # internal search space - internal_tune_ids = keep(names(search_space$tags), map_lgl(search_space$tags, function(tag) "internal_tuning" %in% tag)) + # get ids of primary and internal hyperparameters + sids = search_space$ids() + internal_tune_ids = search_space$ids(any_tags = "internal_tuning") + + # subset search space to primary hyperparameters if (length(internal_tune_ids)) { - self$internal_search_space = 
search_space$subset(internal_tune_ids) + search_space = search_space$subset(setdiff(sids, internal_tune_ids)) + } - if (self$internal_search_space$has_trafo) { - stopf("Inner tuning and parameter transformations are currently not supported.") + # get internal search space + self$internal_search_space = if (is.null(internal_search_space)) { + # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed + # for the internal tuning search space + if (length(internal_tune_ids)) { + learner$param_set$subset(internal_tune_ids)$search_space() } + } else { + if (length(internal_tune_ids)) { + stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.") + } + as_search_space(internal_search_space) + } + + # set learner parameter values + if (search_space_from_tokens) { + learner$param_set$values = learner$param_set$get_values(type = "without_token") + } - search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids)) + if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) { + stopf("Internal tuning and parameter transformations are currently not supported. + If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning', + please pass the latter separately via the argument `internal_search_space`.") + } + # set internal search space + if (!is.null(self$internal_search_space)) { # the learner dictates how to interpret the to_tune(..., inner) learner$param_set$set_values(.values = learner$param_set$convert_internal_search_space(self$internal_search_space)) } diff --git a/R/TuningInstanceBatchMulticrit.R b/R/TuningInstanceBatchMulticrit.R index b6079dc8..540a98b9 100644 --- a/R/TuningInstanceBatchMulticrit.R +++ b/R/TuningInstanceBatchMulticrit.R @@ -23,6 +23,7 @@ #' @template param_measures #' @template param_terminator #' @template param_search_space +#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -81,6 +82,7 @@ TuningInstanceBatchMultiCrit = R6Class("TuningInstanceBatchMultiCrit", measures, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -92,24 +94,52 @@ TuningInstanceBatchMultiCrit = R6Class("TuningInstanceBatchMultiCrit", if (!is.null(search_space) && length(learner$param_set$get_values(type = "only_token"))) { stop("If the values of the ParamSet of the Learner contain TuneTokens you cannot supply a search_space.") } - if (is.null(search_space)) { - search_space = as_search_space(learner) - learner$param_set$values = learner$param_set$get_values(type = "without_token") + + search_space_from_tokens = is.null(search_space) + + # convert tune token to search space + search_space = if (is.null(search_space)) { + learner$param_set$search_space() } else { - search_space = as_search_space(search_space) + as_search_space(search_space) } - # internal search space - internal_tune_ids = keep(names(search_space$tags), map_lgl(search_space$tags, function(tag) "internal_tuning" %in% tag)) + # get ids of primary and internal hyperparameters + sids = search_space$ids() + internal_tune_ids = search_space$ids(any_tags = "internal_tuning") + + # subset search space to primary hyperparameters if (length(internal_tune_ids)) { - self$internal_search_space = search_space$subset(internal_tune_ids) + search_space = search_space$subset(setdiff(sids, 
internal_tune_ids)) + } - if (self$internal_search_space$has_trafo) { - stopf("Inner tuning and parameter transformations are currently not supported.") + # get internal search space + self$internal_search_space = if (is.null(internal_search_space)) { + # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed + # for the internal tuning search space + if (length(internal_tune_ids)) { + learner$param_set$subset(internal_tune_ids)$search_space() } + } else { + if (length(internal_tune_ids)) { + stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.") + } + as_search_space(internal_search_space) + } + + # set learner parameter values + if (search_space_from_tokens) { + learner$param_set$values = learner$param_set$get_values(type = "without_token") + } - search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids)) + if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) { + stopf("Internal tuning and parameter transformations are currently not supported. + If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning', + please pass the latter separately via the argument `internal_search_space`.") + } + # set internal search space + if (!is.null(self$internal_search_space)) { # the learner dictates how to interpret the to_tune(..., inner) learner$param_set$set_values(.values = learner$param_set$convert_internal_search_space(self$internal_search_space)) } diff --git a/R/TuningInstanceBatchSingleCrit.R b/R/TuningInstanceBatchSingleCrit.R index 6343986e..91d3216c 100644 --- a/R/TuningInstanceBatchSingleCrit.R +++ b/R/TuningInstanceBatchSingleCrit.R @@ -59,11 +59,11 @@ #' @template param_measure #' @template param_terminator #' @template param_search_space +#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values #' @template param_callbacks -#' @template param_internal_search_space #' #' @template param_xdt #' @template param_learner_param_vals @@ -120,6 +120,7 @@ TuningInstanceBatchSingleCrit = R6Class("TuningInstanceBatchSingleCrit", measure = NULL, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -131,24 +132,52 @@ TuningInstanceBatchSingleCrit = R6Class("TuningInstanceBatchSingleCrit", if (!is.null(search_space) && length(learner$param_set$get_values(type = "only_token"))) { stop("If the values of the ParamSet of the Learner contain TuneTokens you cannot supply a search_space.") } - if (is.null(search_space)) { - search_space = as_search_space(learner) - learner$param_set$values = learner$param_set$get_values(type = "without_token") + + search_space_from_tokens = is.null(search_space) + + # convert tune token to search space + search_space = if (is.null(search_space)) { + learner$param_set$search_space() } else { - search_space = as_search_space(search_space) + as_search_space(search_space) } - # internal search space - internal_tune_ids = keep(names(search_space$tags), map_lgl(search_space$tags, function(tag) "internal_tuning" %in% tag)) + # get ids of primary and internal hyperparameters + sids = search_space$ids() + internal_tune_ids = search_space$ids(any_tags = "internal_tuning") + + # subset search space to primary hyperparameters if (length(internal_tune_ids)) { - self$internal_search_space = 
search_space$subset(internal_tune_ids) + search_space = search_space$subset(setdiff(sids, internal_tune_ids)) + } - if (self$internal_search_space$has_trafo) { - stopf("Inner tuning and parameter transformations are currently not supported.") + # get internal search space + self$internal_search_space = if (is.null(internal_search_space)) { + # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed + # for the internal tuning search space + if (length(internal_tune_ids)) { + learner$param_set$subset(internal_tune_ids)$search_space() } + } else { + if (length(internal_tune_ids)) { + stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.") + } + as_search_space(internal_search_space) + } - search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids)) + # set learner parameter values + if (search_space_from_tokens) { + learner$param_set$values = learner$param_set$get_values(type = "without_token") + } + + if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) { + stopf("Internal tuning and parameter transformations are currently not supported. + If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning', + please pass the latter separately via the argument `internal_search_space`.") + } + # set internal search space + if (!is.null(self$internal_search_space)) { # the learner dictates how to interpret the to_tune(..., inner) learner$param_set$set_values(.values = learner$param_set$convert_internal_search_space(self$internal_search_space)) } diff --git a/R/helper.R b/R/helper.R index 646d901b..dde0d77f 100644 --- a/R/helper.R +++ b/R/helper.R @@ -24,42 +24,3 @@ extract_inner_tuned_values = function(resample_result, internal_search_space) { internal_tuned_values = transpose_list(map(get_private(resample_result)$.data$learner_states(get_private(resample_result)$.view), "internal_tuned_values")) internal_search_space$aggr_internal_tuned_values(internal_tuned_values) } - -# init_internal_search_space = function(self, private, super, search_space, store_benchmark_result, learner, callbacks, batch) { -# assert_flag(store_benchmark_result) -# internal_search_space = NULL -# internal_tune_ids = keep(names(search_space$tags), map_lgl(search_space$tags, function(t) "internal_tuning" %in% t)) - -# if (length(internal_tune_ids)) { -# internal_search_space = search_space$subset(internal_tune_ids) -# if (internal_search_space$has_trafo) { -# stopf("Inner Tuning and Parameter Transformations are currently not supported.") -# } -# search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids)) - -# # the learner dictates how to interprete the to_tune(..., inner) - -# learner$param_set$set_values( -# .values = learner$param_set$convert_internal_search_space(internal_search_space) -# ) - -# # we need to use a callback to change how the Optimizer writes the result to the ArchiveTuning -# # This is because overwriting the Tuner's .assign_result method has no effect, as it is not called.helper -# callbacks = c(load_callback_internal_tuning(batch), callbacks) -# } - -# list( -# search_space = search_space, -# callbacks = callbacks, -# internal_search_space = internal_search_space %??% ps() -# ) -# } - -# init_internal_search_space_archive = function(self, private, super, search_space, internal_search_space) { -# if (!is.null(internal_search_space)) { -# private$.internal_search_space = 
as_search_space(internal_search_space) -# assert_disjunct(search_space$ids(), internal_search_space$ids()) -# } else { -# private$.internal_search_space = ps() -# } -# } diff --git a/R/sugar.R b/R/sugar.R index 8b517b0f..e09eb259 100644 --- a/R/sugar.R +++ b/R/sugar.R @@ -44,6 +44,7 @@ tnrs = function(.keys, ...) { #' @template param_resampling #' @template param_terminator #' @template param_search_space +#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -61,6 +62,7 @@ ti = function( measures = NULL, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -75,6 +77,7 @@ ti = function( measures, terminator = terminator, search_space = search_space, + internal_search_space = internal_search_space, store_benchmark_result = store_benchmark_result, store_models = store_models, check_values = check_values, @@ -95,6 +98,7 @@ ti = function( #' @template param_resampling #' @template param_terminator #' @template param_search_space +#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -113,6 +117,7 @@ ti_async = function( measures = NULL, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -128,6 +133,7 @@ ti_async = function( measures, terminator = terminator, search_space = search_space, + internal_search_space = internal_search_space, store_benchmark_result = store_benchmark_result, store_models = store_models, check_values = check_values, diff --git a/R/tune.R b/R/tune.R index 3a14b478..4516ae17 100644 --- a/R/tune.R +++ b/R/tune.R @@ -54,6 +54,7 @@ #' @template param_term_evals #' @template param_term_time #' @template param_search_space +#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -99,6 +100,7 @@ tune = function( terminator = NULL, search_space = NULL, store_benchmark_result = TRUE, + internal_search_space = NULL, store_models = FALSE, check_values = FALSE, callbacks = NULL, @@ -116,11 +118,13 @@ tune = function( measures, terminator = terminator, search_space = search_space, + internal_search_space = internal_search_space, store_benchmark_result = store_benchmark_result, store_models = store_models, check_values = check_values, callbacks = callbacks, - rush = rush) + rush = rush + ) } else { TuningInstance = if (is.null(measures) || inherits(measures, "Measure")) TuningInstanceBatchSingleCrit else TuningInstanceBatchMultiCrit TuningInstance$new( @@ -130,6 +134,7 @@ tune = function( measures, terminator = terminator, search_space = search_space, + internal_search_space = internal_search_space, store_benchmark_result = store_benchmark_result, store_models = store_models, check_values = check_values, diff --git a/man-roxygen/param_internal_search_space.R b/man-roxygen/param_internal_search_space.R index ef541af9..b7af7b49 100644 --- a/man-roxygen/param_internal_search_space.R +++ b/man-roxygen/param_internal_search_space.R @@ -1,3 +1,2 @@ #' @param internal_search_space ([paradox::ParamSet] or `NULL`)\cr -#' The internal search space of the tuner. This includes parameters that the learner can optimize internally -#' durign `$train()`, such as the number of epochs via early stopping. +#' The internal search space. 
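Usage note (editorial sketch, not part of the patch): `ti()`, `ti_async()`, and `tune()` above all gain an `internal_search_space` argument. The following minimal R sketch of passing it manually is modelled on the tests added at the end of this patch; the learner, task, resampling, and aggregation function are illustrative choices, not prescribed by the API.

library(mlr3)
library(mlr3tuning)
library(paradox)

# Tune `x` as usual, while the learner tunes `iter` internally via early stopping;
# `aggr` combines the internally tuned values across resampling iterations.
instance = tune(
  tuner = tnr("random_search"),
  task = tsk("iris"),
  learner = lrn("classif.debug", x = to_tune(), early_stopping = TRUE, validate = 0.2),
  resampling = rsmp("holdout"),
  internal_search_space = ps(
    iter = p_int(upper = 100, aggr = function(x) as.integer(mean(unlist(x))))
  ),
  term_evals = 2
)
instance$internal_search_space$ids()  # contains "iter"

Note that this path is mutually exclusive with tagging parameters in `search_space` with "internal_tuning": doing both triggers the new "Either tag parameters in the `search_space` ... OR provide an `internal_search_space`" error introduced above.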
diff --git a/man/ArchiveAsyncTuning.Rd b/man/ArchiveAsyncTuning.Rd index 00d40afc..4f9e4da2 100644 --- a/man/ArchiveAsyncTuning.Rd +++ b/man/ArchiveAsyncTuning.Rd @@ -128,8 +128,7 @@ Internally created from provided \link[mlr3:Measure]{mlr3::Measure}s.} If a rush instance is supplied, the tuning runs without batches.} \item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space of the tuner. This includes parameters that the learner can optimize internally -durign \verb{$train()}, such as the number of epochs via early stopping.} +The internal search space.} \item{\code{check_values}}{(\code{logical(1)})\cr If \code{TRUE} (default), hyperparameter configurations are check for validity.} diff --git a/man/ArchiveBatchTuning.Rd b/man/ArchiveBatchTuning.Rd index efda1e0c..b5e4d924 100644 --- a/man/ArchiveBatchTuning.Rd +++ b/man/ArchiveBatchTuning.Rd @@ -145,8 +145,7 @@ Internally created from provided \link[mlr3:Measure]{mlr3::Measure}s.} If \code{TRUE} (default), hyperparameter configurations are check for validity.} \item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space of the tuner. This includes parameters that the learner can optimize internally -durign \verb{$train()}, such as the number of epochs via early stopping.} +The internal search space.} } \if{html}{\out{}} } diff --git a/man/AutoTuner.Rd b/man/AutoTuner.Rd index 0798116e..47f8387e 100644 --- a/man/AutoTuner.Rd +++ b/man/AutoTuner.Rd @@ -209,6 +209,7 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. measure = NULL, terminator, search_space = NULL, + internal_search_space = NULL, store_tuning_instance = TRUE, store_benchmark_result = TRUE, store_models = FALSE, @@ -245,6 +246,9 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} +\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} + \item{\code{store_tuning_instance}}{(\code{logical(1)})\cr If \code{TRUE} (default), stores the internally created \link{TuningInstanceBatchSingleCrit} with all intermediate results in slot \verb{$tuning_instance}.} diff --git a/man/ObjectiveTuning.Rd b/man/ObjectiveTuning.Rd index 05f7d87a..6c16ce34 100644 --- a/man/ObjectiveTuning.Rd +++ b/man/ObjectiveTuning.Rd @@ -110,8 +110,7 @@ computational overhead is reduced.} List of callbacks.} \item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space of the tuner. This includes parameters that the learner can optimize internally -durign \verb{$train()}, such as the number of epochs via early stopping.} +The internal search space.} } \if{html}{\out{}} } diff --git a/man/ObjectiveTuningBatch.Rd b/man/ObjectiveTuningBatch.Rd index 9b87fc65..240fe8f7 100644 --- a/man/ObjectiveTuningBatch.Rd +++ b/man/ObjectiveTuningBatch.Rd @@ -98,8 +98,7 @@ If \code{NULL} (default), benchmark result and models cannot be stored.} List of callbacks.} \item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space of the tuner. 
This includes parameters that the learner can optimize internally -durign \verb{$train()}, such as the number of epochs via early stopping.} +The internal search space.} } \if{html}{\out{}} } diff --git a/man/TuningInstanceAsyncMultiCrit.Rd b/man/TuningInstanceAsyncMultiCrit.Rd index b4ebc0f8..dc2997d2 100644 --- a/man/TuningInstanceAsyncMultiCrit.Rd +++ b/man/TuningInstanceAsyncMultiCrit.Rd @@ -82,6 +82,7 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. measures, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -117,6 +118,9 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} +\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} + \item{\code{store_benchmark_result}}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/TuningInstanceAsyncSingleCrit.Rd b/man/TuningInstanceAsyncSingleCrit.Rd index 40808b71..7d0a7932 100644 --- a/man/TuningInstanceAsyncSingleCrit.Rd +++ b/man/TuningInstanceAsyncSingleCrit.Rd @@ -117,6 +117,7 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. measure = NULL, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -152,6 +153,12 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} +\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} + +\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} + \item{\code{store_benchmark_result}}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/TuningInstanceBatchMultiCrit.Rd b/man/TuningInstanceBatchMultiCrit.Rd index b38e7a14..ecf38ac6 100644 --- a/man/TuningInstanceBatchMultiCrit.Rd +++ b/man/TuningInstanceBatchMultiCrit.Rd @@ -123,6 +123,7 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. measures, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -157,6 +158,12 @@ Hyperparameter search space. 
If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} +\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} + +\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} + \item{\code{store_benchmark_result}}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/TuningInstanceBatchSingleCrit.Rd b/man/TuningInstanceBatchSingleCrit.Rd index 09511144..c3644516 100644 --- a/man/TuningInstanceBatchSingleCrit.Rd +++ b/man/TuningInstanceBatchSingleCrit.Rd @@ -164,7 +164,8 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, - callbacks = NULL + callbacks = NULL, + internal_search_space = NULL )}\if{html}{\out{}} } @@ -212,6 +213,9 @@ computational overhead is reduced.} \item{\code{callbacks}}{(list of \link[mlr3misc:Callback]{mlr3misc::Callback})\cr List of callbacks.} + +\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} } \if{html}{\out{}} } diff --git a/man/mlr_tuners_cmaes.Rd b/man/mlr_tuners_cmaes.Rd index 79a69f52..5df8f71c 100644 --- a/man/mlr_tuners_cmaes.Rd +++ b/man/mlr_tuners_cmaes.Rd @@ -11,7 +11,7 @@ Hansen N (2016). } \description{ Subclass for Covariance Matrix Adaptation Evolution Strategy (CMA-ES). -Calls \code{\link[adagio:cmaes]{adagio::pureCMAES()}} from package \CRANpkg{adagio}. +Calls \code{\link[adagio:pureCMAES]{adagio::pureCMAES()}} from package \CRANpkg{adagio}. } \section{Dictionary}{ @@ -29,7 +29,7 @@ Create \code{random} start values or based on \code{center} of search space? In the latter case, it is the center of the parameters before a trafo is applied.} } -For the meaning of the control parameters, see \code{\link[adagio:cmaes]{adagio::pureCMAES()}}. +For the meaning of the control parameters, see \code{\link[adagio:pureCMAES]{adagio::pureCMAES()}}. Note that we have removed all control parameters which refer to the termination of the algorithm and where our terminators allow to obtain the same behavior. } diff --git a/man/ti.Rd b/man/ti.Rd index e939a1b1..45827d70 100644 --- a/man/ti.Rd +++ b/man/ti.Rd @@ -11,6 +11,7 @@ ti( measures = NULL, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -43,6 +44,9 @@ Hyperparameter search space. 
If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} +\item{internal_search_space}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} + \item{store_benchmark_result}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/ti_async.Rd b/man/ti_async.Rd index 560d2e16..7986e7c6 100644 --- a/man/ti_async.Rd +++ b/man/ti_async.Rd @@ -11,6 +11,7 @@ ti_async( measures = NULL, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -44,6 +45,9 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} +\item{internal_search_space}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} + \item{store_benchmark_result}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/tune.Rd b/man/tune.Rd index f6f9507f..75cc2395 100644 --- a/man/tune.Rd +++ b/man/tune.Rd @@ -15,6 +15,7 @@ tune( terminator = NULL, search_space = NULL, store_benchmark_result = TRUE, + internal_search_space = NULL, store_models = FALSE, check_values = FALSE, callbacks = NULL, @@ -62,6 +63,9 @@ constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} +\item{internal_search_space}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} + \item{store_models}{(\code{logical(1)})\cr If \code{TRUE}, fitted models are stored in the benchmark result (\code{archive$benchmark_result}). 
If \code{store_benchmark_result = FALSE}, models diff --git a/tests/testthat/test_Tuner.R b/tests/testthat/test_Tuner.R index c08ef00d..7098a407 100644 --- a/tests/testthat/test_Tuner.R +++ b/tests/testthat/test_Tuner.R @@ -269,10 +269,9 @@ test_that("proper error when primary search space is empty", { test_that("internal tuning: branching", { skip_if_not_installed("mlr3pipelines") skip_if(packageVersion("mlr3pipelines") < "0.5.3") - requireNamespace("mlr3pipelines") # this case is special, because not all internally tuned parameters are present in every iteration, only those that # are in the active branch are - glrn = ppl("branch", graphs = list( + glrn = mlr3pipelines::ppl("branch", graphs = list( lrn("classif.debug", id = "lrn1", iter = to_tune(upper = 500, internal = TRUE, aggr = function(x) 1L), early_stopping = TRUE), lrn("classif.debug", id = "lrn2", iter = to_tune(upper = 1000, internal = TRUE, aggr = function(x) 2L), early_stopping = TRUE) )) @@ -324,3 +323,70 @@ test_that("internal tuning: error message when primary search space is empty", { ), "tnr('internal')", fixed = TRUE) }) +test_that("parameter transformations can be used with internal tuning", { + ti = tune( + tuner = tnr("random_search"), + learner = lrn("classif.debug", + iter = to_tune(upper = 1000, internal = TRUE), + x = to_tune(ps(a = p_dbl(0, 0.5), b = p_dbl(0, 0.5), .extra_trafo = function(x, param_set) { + list(x = x$a + x$b) + })), + early_stopping = TRUE, validate = 0.2), + task = tsk("iris"), + resampling = rsmp("holdout"), + term_evals = 2 + ) + expect_set_equal( + names(ti$result_learner_param_vals), + c("x", "iter", "early_stopping") + ) +}) + +test_that("either provide internal_search_space OR tag params with 'internal_tuning'", { + expect_error( + tune( + tuner = tnr("random_search"), + learner = lrn("classif.debug", + iter = to_tune(upper = 1000, internal = TRUE), + x = to_tune(), + early_stopping = TRUE, validate = 0.2), + task = tsk("iris"), + internal_search_space = ps(iter = p_int(upper = 100, aggr = function(x) round(mean(unlist(x))))), + resampling = rsmp("holdout"), + term_evals = 2 + ), + "Either tag parameters in the `search_space`" + ) +}) + +test_that("Can pass internal_search_space separately", { + # 1. primary search space is passed manually + ti = tune( + tuner = tnr("random_search"), + learner = lrn("classif.debug", + x = to_tune(), + early_stopping = TRUE, validate = 0.2), + task = tsk("iris"), + internal_search_space = ps(iter = p_int(upper = 100, aggr = function(x) as.integer(mean(unlist(x))))), + resampling = rsmp("holdout"), + term_evals = 2 + ) + expect_true("iter" %in% ti$internal_search_space$ids()) + expect_true(is.integer(ti$result$internal_tuned_values[[1]]$iter)) + expect_double(ti$result$x) + + # 2. primary search space is passed via to_tune + ti = tune( + tuner = tnr("random_search"), + learner = lrn("classif.debug", + early_stopping = TRUE, validate = 0.2), + task = tsk("iris"), + search_space = ps(x = p_dbl(0, 1)), + internal_search_space = ps(iter = p_int(upper = 100, aggr = function(x) as.integer(mean(unlist(x))))), + resampling = rsmp("holdout"), + term_evals = 2 + ) + expect_true("iter" %in% ti$internal_search_space$ids()) + expect_true(is.integer(ti$result$internal_tuned_values[[1]]$iter)) + expect_double(ti$result$x) +}) diff --git a/tests/testthat/test_TunerBatch.R b/tests/testthat/test_TunerBatch.R deleted file mode 100644 index e69de29b..00000000
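A second editorial sketch, mirroring the new test "parameter transformations can be used with internal tuning" above: it combines an `.extra_trafo` on the primary search space with internal tuning of `iter`, which the trafo handling fixed in this patch allows. The learner and task are the same illustrative choices as in the tests.

library(mlr3)
library(mlr3tuning)
library(paradox)

# `x` is optimised through a transformed search space (x = a + b), while `iter`
# is tuned internally by the learner's early stopping mechanism.
instance = tune(
  tuner = tnr("random_search"),
  task = tsk("iris"),
  learner = lrn("classif.debug",
    iter = to_tune(upper = 1000, internal = TRUE),
    x = to_tune(ps(
      a = p_dbl(0, 0.5), b = p_dbl(0, 0.5),
      .extra_trafo = function(x, param_set) list(x = x$a + x$b)
    )),
    early_stopping = TRUE, validate = 0.2
  ),
  resampling = rsmp("holdout"),
  term_evals = 2
)
instance$result_learner_param_vals[c("x", "iter", "early_stopping")]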