From b4d2504ea67e621da9299d49e7198b102bb12105 Mon Sep 17 00:00:00 2001
From: Sebastian Fischer
Date: Wed, 22 Jan 2025 15:28:23 +0100
Subject: [PATCH] BREAKING CHANGE: remove internal search space argument (#488)

The `internal_search_space` argument is removed from `AutoTuner`, the tuning
instance classes, and the sugar functions `auto_tuner()`, `ti()`, `ti_async()`
and `tune()`. Parameters that should be tuned internally are now declared
directly in the (primary) search space by tagging them with "internal_tuning",
e.g.

    learner = lrn("classif.debug", early_stopping = TRUE, validate = 0.2,
      iter = to_tune(upper = 1000, internal = TRUE), x = to_tune())

The new helper `split_internal_search_space()` separates the tagged parameters
from the primary search space in all four tuning instance classes.
---
 R/AutoTuner.R                            |  6 +-
 R/TuningInstanceAsyncMulticrit.R         | 39 +--------
 R/TuningInstanceAsyncSingleCrit.R        | 30 +------
 R/TuningInstanceBatchMulticrit.R         | 36 +--------
 R/TuningInstanceBatchSingleCrit.R        | 38 +--------
 R/auto_tuner.R                           |  3 -
 R/helper.R                               | 11 +++
 R/sugar.R                                |  6 --
 R/tune.R                                 |  4 -
 man/AutoTuner.Rd                         |  4 -
 man/TuningInstanceAsyncMultiCrit.Rd      |  4 -
 man/TuningInstanceAsyncSingleCrit.Rd     |  7 --
 man/TuningInstanceBatchMultiCrit.Rd      |  7 --
 man/TuningInstanceBatchSingleCrit.Rd     |  4 -
 man/auto_tuner.Rd                        |  4 -
 man/mlr_tuners_cmaes.Rd                  |  4 +-
 man/ti.Rd                                |  4 -
 man/ti_async.Rd                          |  4 -
 man/tune.Rd                              |  4 -
 tests/testthat/test_ArchiveBatchTuning.R |  2 +-
 tests/testthat/test_AutoTuner.R          | 18 -----
 tests/testthat/test_Tuner.R              | 79 +------------------
 .../test_TuningInstanceBatchSingleCrit.R  | 19 +----
 23 files changed, 30 insertions(+), 307 deletions(-)

diff --git a/R/AutoTuner.R b/R/AutoTuner.R
index 0981abe9..50060ad5 100644
--- a/R/AutoTuner.R
+++ b/R/AutoTuner.R
@@ -39,7 +39,6 @@
 #' @template param_measure
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_tuning_instance
 #' @template param_store_benchmark_result
 #' @template param_store_models
@@ -132,7 +131,6 @@ AutoTuner = R6Class("AutoTuner",
       measure = NULL,
       terminator,
       search_space = NULL,
-      internal_search_space = NULL,
       store_tuning_instance = TRUE,
       store_benchmark_result = TRUE,
       store_models = FALSE,
@@ -147,12 +145,11 @@ AutoTuner = R6Class("AutoTuner",
         stop("If the values of the ParamSet of the Learner contain TuneTokens you cannot supply a search_space.")
       }
 
       ia = list()
       self$tuner = assert_tuner(tuner)
       ia$learner = learner
       ia$resampling = assert_resampling(resampling)$clone()
       if (!is.null(measure)) ia$measure = assert_measure(as_measure(measure), learner = learner)
       if (!is.null(search_space)) ia$search_space = assert_param_set(as_search_space(search_space))$clone()
-      if (!is.null(internal_search_space)) ia$internal_search_space = assert_param_set(as_search_space(internal_search_space))$clone()
       ia$terminator = assert_terminator(terminator)$clone()
       ia$store_models = assert_flag(store_models)
diff --git a/R/TuningInstanceAsyncMulticrit.R b/R/TuningInstanceAsyncMulticrit.R
index 15385879..cbcd9c1e 100644
--- a/R/TuningInstanceAsyncMulticrit.R
+++ b/R/TuningInstanceAsyncMulticrit.R
@@ -16,7 +16,6 @@
 #' @template param_measures
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_benchmark_result
 #' @template param_store_models
 #' @template param_check_values
@@ -34,7 +33,6 @@ TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit",
   inherit = OptimInstanceAsyncMultiCrit,
   public = list(
-
     internal_search_space = NULL,
 
     #' @description
     #' Creates a new instance of this [R6][R6::R6Class] class.
@@ -46,7 +44,6 @@ TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit",
       measures,
       terminator,
       search_space = NULL,
-      internal_search_space = NULL,
       store_benchmark_result = TRUE,
       store_models = FALSE,
       check_values = FALSE,
@@ -71,39 +68,9 @@ TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit",
         as_search_space(search_space)
       }
 
-      # get ids of primary and internal hyperparameters
-      sids = search_space$ids()
-      internal_tune_ids = search_space$ids(any_tags = "internal_tuning")
"internal_tuning") - - # get internal search space - self$internal_search_space = if (is.null(internal_search_space)) { - # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed - # for the internal tuning search space - if (length(internal_tune_ids)) { - if (search_space_from_tokens) { - learner$param_set$subset(internal_tune_ids)$search_space() - } else { - search_space$subset(internal_tune_ids) - } - } - } else { - if (length(internal_tune_ids)) { - stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.") - } - as_search_space(internal_search_space) - } - - # subset search space to primary hyperparameters - if (length(internal_tune_ids)) { - search_space = search_space$subset(setdiff(sids, internal_tune_ids)) - } - - - if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) { - stopf("Internal tuning and parameter transformations are currently not supported. - If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning', - please pass the latter separately via the argument `internal_search_space`.") - } + tmp = split_internal_search_space(search_space) + search_space = tmp$search_space + self$internal_search_space = tmp$internal_search_space # set internal search space if (!is.null(self$internal_search_space)) { diff --git a/R/TuningInstanceAsyncSingleCrit.R b/R/TuningInstanceAsyncSingleCrit.R index 40435e40..52327973 100644 --- a/R/TuningInstanceAsyncSingleCrit.R +++ b/R/TuningInstanceAsyncSingleCrit.R @@ -25,7 +25,6 @@ #' @template param_measure #' @template param_terminator #' @template param_search_space -#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -56,7 +55,6 @@ TuningInstanceAsyncSingleCrit = R6Class("TuningInstanceAsyncSingleCrit", measure = NULL, terminator, search_space = NULL, - internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -82,31 +80,9 @@ TuningInstanceAsyncSingleCrit = R6Class("TuningInstanceAsyncSingleCrit", } # get ids of primary and internal hyperparameters - sids = search_space$ids() - internal_tune_ids = search_space$ids(any_tags = "internal_tuning") - - # get internal search space - self$internal_search_space = if (is.null(internal_search_space)) { - # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed - # for the internal tuning search space - if (length(internal_tune_ids)) { - if (search_space_from_tokens) { - learner$param_set$subset(internal_tune_ids)$search_space() - } else { - search_space$subset(internal_tune_ids) - } - } - } else { - if (length(internal_tune_ids)) { - stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.") - } - as_search_space(internal_search_space) - } - - # subset search space to primary hyperparameters - if (length(internal_tune_ids)) { - search_space = search_space$subset(setdiff(sids, internal_tune_ids)) - } + tmp = split_internal_search_space(search_space) + search_space = tmp$search_space + self$internal_search_space = tmp$internal_search_space if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) { stopf("Internal tuning and parameter transformations are currently not supported. 
diff --git a/R/TuningInstanceBatchMulticrit.R b/R/TuningInstanceBatchMulticrit.R
index f25b18cb..a6f715a4 100644
--- a/R/TuningInstanceBatchMulticrit.R
+++ b/R/TuningInstanceBatchMulticrit.R
@@ -17,7 +17,6 @@
 #' @template param_measures
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_benchmark_result
 #' @template param_store_models
 #' @template param_check_values
@@ -77,7 +76,6 @@ TuningInstanceBatchMultiCrit = R6Class("TuningInstanceBatchMultiCrit",
       measures,
       terminator,
       search_space = NULL,
-      internal_search_space = NULL,
       store_benchmark_result = TRUE,
       store_models = FALSE,
       check_values = FALSE,
@@ -101,37 +99,8 @@ TuningInstanceBatchMultiCrit = R6Class("TuningInstanceBatchMultiCrit",
       }
 
-      # get ids of primary and internal hyperparameters
-      sids = search_space$ids()
-      internal_tune_ids = search_space$ids(any_tags = "internal_tuning")
-
-      # get internal search space
-      self$internal_search_space = if (is.null(internal_search_space)) {
-        # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed
-        # for the internal tuning search space
-        if (length(internal_tune_ids)) {
-          if (search_space_from_tokens) {
-            learner$param_set$subset(internal_tune_ids)$search_space()
-          } else {
-            search_space$subset(internal_tune_ids)
-          }
-        }
-      } else {
-        if (length(internal_tune_ids)) {
-          stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.")
-        }
-        as_search_space(internal_search_space)
-      }
-
-      # subset search space to primary hyperparameters
-      if (length(internal_tune_ids)) {
-        search_space = search_space$subset(setdiff(sids, internal_tune_ids))
-      }
-
-      if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) {
-        stopf("Internal tuning and parameter transformations are currently not supported.
- If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning', - please pass the latter separately via the argument `internal_search_space`.") - } + tmp = split_internal_search_space(search_space) + search_space = tmp$search_space + self$internal_search_space = tmp$internal_search_space # set internal search space if (!is.null(self$internal_search_space)) { diff --git a/R/TuningInstanceBatchSingleCrit.R b/R/TuningInstanceBatchSingleCrit.R index 28507fe6..7ca75975 100644 --- a/R/TuningInstanceBatchSingleCrit.R +++ b/R/TuningInstanceBatchSingleCrit.R @@ -65,7 +65,6 @@ #' @template param_measure #' @template param_terminator #' @template param_search_space -#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -127,7 +126,6 @@ TuningInstanceBatchSingleCrit = R6Class("TuningInstanceBatchSingleCrit", measure = NULL, terminator, search_space = NULL, - internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -150,40 +148,10 @@ TuningInstanceBatchSingleCrit = R6Class("TuningInstanceBatchSingleCrit", as_search_space(search_space) } - # get ids of primary and internal hyperparameters - sids = search_space$ids() - internal_tune_ids = search_space$ids(any_tags = "internal_tuning") + tmp = split_internal_search_space(search_space) + search_space = tmp$search_space + self$internal_search_space = tmp$internal_search_space - # get internal search space - self$internal_search_space = if (is.null(internal_search_space)) { - # We DO NOT subset the search space because there we might keep an extra_trafo which is not allowed - # for the internal tuning search space - if (length(internal_tune_ids)) { - if (search_space_from_tokens) { - learner$param_set$subset(internal_tune_ids)$search_space() - } else { - search_space$subset(internal_tune_ids) - } - } - } else { - if (length(internal_tune_ids)) { - stopf("Either tag parameters in the `search_space` with 'internal_tuning' OR provide an `internal_search_space`.") - } - as_search_space(internal_search_space) - } - - # subset search space to primary hyperparameters - if (length(internal_tune_ids)) { - search_space = search_space$subset(setdiff(sids, internal_tune_ids)) - } - - if (!is.null(self$internal_search_space) && self$internal_search_space$has_trafo) { - stopf("Internal tuning and parameter transformations are currently not supported. 
-          If you manually provided a search space that has a trafo and parameters tagged with 'internal_tuning',
-          please pass the latter separately via the argument `internal_search_space`.")
-      }
-
-      # set internal search space
       if (!is.null(self$internal_search_space)) {
         # the learner dictates how to interpret the to_tune(..., inner)
         learner$param_set$set_values(.values = learner$param_set$convert_internal_search_space(self$internal_search_space))
diff --git a/R/auto_tuner.R b/R/auto_tuner.R
index b08f696f..fc2e1ac6 100644
--- a/R/auto_tuner.R
+++ b/R/auto_tuner.R
@@ -16,7 +16,6 @@
 #' @template param_term_time
 #' @template param_terminator
 #' @template param_search_space
-#' @template param_internal_search_space
 #' @template param_store_tuning_instance
 #' @template param_store_benchmark_result
 #' @template param_store_models
@@ -44,7 +43,6 @@ auto_tuner = function(
   term_time = NULL,
   terminator = NULL,
   search_space = NULL,
-  internal_search_space = NULL,
   store_tuning_instance = TRUE,
   store_benchmark_result = TRUE,
   store_models = FALSE,
@@ -62,7 +60,6 @@ auto_tuner = function(
     measure = measure,
     terminator = terminator,
     search_space = search_space,
-    internal_search_space = internal_search_space,
     store_tuning_instance = store_tuning_instance,
     store_benchmark_result = store_benchmark_result,
     store_models = store_models,
diff --git a/R/helper.R b/R/helper.R
index c2e45241..bdb78669 100644
--- a/R/helper.R
+++ b/R/helper.R
@@ -27,3 +27,18 @@ extract_inner_tuned_values = function(resample_result, internal_search_space) {
   internal_tuned_values = transpose_list(map(get_private(resample_result)$.data$learner_states(get_private(resample_result)$.view), "internal_tuned_values"))
   internal_search_space$aggr_internal_tuned_values(internal_tuned_values)
 }
+
+
+# Split a search space into the primary search space and the internal search
+# space, i.e. the subspace of parameters tagged with "internal_tuning".
+# Returns a list with elements `search_space` and `internal_search_space`;
+# the internal search space is NULL if no parameter is tagged.
+split_internal_search_space = function(search_space) {
+  internal_tune_ids = search_space$ids(any_tags = "internal_tuning")
+  if (length(internal_tune_ids)) {
+    internal_search_space = search_space$subset(internal_tune_ids)
+    search_space = search_space$subset(setdiff(search_space$ids(), internal_tune_ids))
+    return(list(search_space = search_space, internal_search_space = internal_search_space))
+  }
+  list(search_space = search_space, internal_search_space = NULL)
+}
diff --git a/R/sugar.R b/R/sugar.R
index e09eb259..8b517b0f 100644
--- a/R/sugar.R
+++ b/R/sugar.R
@@ -44,7 +44,6 @@ tnrs = function(.keys, ...) 
{ #' @template param_resampling #' @template param_terminator #' @template param_search_space -#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -62,7 +61,6 @@ ti = function( measures = NULL, terminator, search_space = NULL, - internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -77,7 +75,6 @@ ti = function( measures, terminator = terminator, search_space = search_space, - internal_search_space = internal_search_space, store_benchmark_result = store_benchmark_result, store_models = store_models, check_values = check_values, @@ -98,7 +95,6 @@ ti = function( #' @template param_resampling #' @template param_terminator #' @template param_search_space -#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -117,7 +113,6 @@ ti_async = function( measures = NULL, terminator, search_space = NULL, - internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -133,7 +128,6 @@ ti_async = function( measures, terminator = terminator, search_space = search_space, - internal_search_space = internal_search_space, store_benchmark_result = store_benchmark_result, store_models = store_models, check_values = check_values, diff --git a/R/tune.R b/R/tune.R index 12edf465..4de882d0 100644 --- a/R/tune.R +++ b/R/tune.R @@ -42,7 +42,6 @@ #' @template param_term_evals #' @template param_term_time #' @template param_search_space -#' @template param_internal_search_space #' @template param_store_benchmark_result #' @template param_store_models #' @template param_check_values @@ -88,7 +87,6 @@ tune = function( terminator = NULL, search_space = NULL, store_benchmark_result = TRUE, - internal_search_space = NULL, store_models = FALSE, check_values = FALSE, callbacks = NULL, @@ -106,7 +104,6 @@ tune = function( measures, terminator = terminator, search_space = search_space, - internal_search_space = internal_search_space, store_benchmark_result = store_benchmark_result, store_models = store_models, check_values = check_values, @@ -122,7 +119,6 @@ tune = function( measures, terminator = terminator, search_space = search_space, - internal_search_space = internal_search_space, store_benchmark_result = store_benchmark_result, store_models = store_models, check_values = check_values, diff --git a/man/AutoTuner.Rd b/man/AutoTuner.Rd index 6e40de4f..53fa236c 100644 --- a/man/AutoTuner.Rd +++ b/man/AutoTuner.Rd @@ -223,7 +223,6 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. measure = NULL, terminator, search_space = NULL, - internal_search_space = NULL, store_tuning_instance = TRUE, store_benchmark_result = TRUE, store_models = FALSE, @@ -261,9 +260,6 @@ Hyperparameter search space. 
If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} -\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - \item{\code{store_tuning_instance}}{(\code{logical(1)})\cr If \code{TRUE} (default), stores the internally created \link{TuningInstanceBatchSingleCrit} with all intermediate results in slot \verb{$tuning_instance}.} diff --git a/man/TuningInstanceAsyncMultiCrit.Rd b/man/TuningInstanceAsyncMultiCrit.Rd index 2caf366b..090ea277 100644 --- a/man/TuningInstanceAsyncMultiCrit.Rd +++ b/man/TuningInstanceAsyncMultiCrit.Rd @@ -98,7 +98,6 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. measures, terminator, search_space = NULL, - internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -134,9 +133,6 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} -\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - \item{\code{store_benchmark_result}}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/TuningInstanceAsyncSingleCrit.Rd b/man/TuningInstanceAsyncSingleCrit.Rd index d248786a..4eebc688 100644 --- a/man/TuningInstanceAsyncSingleCrit.Rd +++ b/man/TuningInstanceAsyncSingleCrit.Rd @@ -124,7 +124,6 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. measure = NULL, terminator, search_space = NULL, - internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -160,12 +159,6 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} -\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - -\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - \item{\code{store_benchmark_result}}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/TuningInstanceBatchMultiCrit.Rd b/man/TuningInstanceBatchMultiCrit.Rd index 4571fe10..e9be3820 100644 --- a/man/TuningInstanceBatchMultiCrit.Rd +++ b/man/TuningInstanceBatchMultiCrit.Rd @@ -138,7 +138,6 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. measures, terminator, search_space = NULL, - internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -173,12 +172,6 @@ Hyperparameter search space. 
If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} -\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - -\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - \item{\code{store_benchmark_result}}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/TuningInstanceBatchSingleCrit.Rd b/man/TuningInstanceBatchSingleCrit.Rd index f3cad63c..80afad73 100644 --- a/man/TuningInstanceBatchSingleCrit.Rd +++ b/man/TuningInstanceBatchSingleCrit.Rd @@ -167,7 +167,6 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. measure = NULL, terminator, search_space = NULL, - internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -202,9 +201,6 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} -\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - \item{\code{store_benchmark_result}}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/auto_tuner.Rd b/man/auto_tuner.Rd index 6425789e..ace941d4 100644 --- a/man/auto_tuner.Rd +++ b/man/auto_tuner.Rd @@ -13,7 +13,6 @@ auto_tuner( term_time = NULL, terminator = NULL, search_space = NULL, - internal_search_space = NULL, store_tuning_instance = TRUE, store_benchmark_result = TRUE, store_models = FALSE, @@ -56,9 +55,6 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} -\item{internal_search_space}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - \item{store_tuning_instance}{(\code{logical(1)})\cr If \code{TRUE} (default), stores the internally created \link{TuningInstanceBatchSingleCrit} with all intermediate results in slot \verb{$tuning_instance}.} diff --git a/man/mlr_tuners_cmaes.Rd b/man/mlr_tuners_cmaes.Rd index b4b8393c..1e50c815 100644 --- a/man/mlr_tuners_cmaes.Rd +++ b/man/mlr_tuners_cmaes.Rd @@ -11,7 +11,7 @@ Hansen N (2016). } \description{ Subclass for Covariance Matrix Adaptation Evolution Strategy (CMA-ES). -Calls \code{\link[adagio:pureCMAES]{adagio::pureCMAES()}} from package \CRANpkg{adagio}. +Calls \code{\link[adagio:cmaes]{adagio::pureCMAES()}} from package \CRANpkg{adagio}. } \section{Dictionary}{ @@ -29,7 +29,7 @@ Create \code{random} start values or based on \code{center} of search space? In the latter case, it is the center of the parameters before a trafo is applied.} } -For the meaning of the control parameters, see \code{\link[adagio:pureCMAES]{adagio::pureCMAES()}}. +For the meaning of the control parameters, see \code{\link[adagio:cmaes]{adagio::pureCMAES()}}. Note that we have removed all control parameters which refer to the termination of the algorithm and where our terminators allow to obtain the same behavior. 
} diff --git a/man/ti.Rd b/man/ti.Rd index c8b7b694..315717e7 100644 --- a/man/ti.Rd +++ b/man/ti.Rd @@ -11,7 +11,6 @@ ti( measures = NULL, terminator, search_space = NULL, - internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -44,9 +43,6 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} -\item{internal_search_space}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - \item{store_benchmark_result}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/ti_async.Rd b/man/ti_async.Rd index f0b44924..aedc0e3f 100644 --- a/man/ti_async.Rd +++ b/man/ti_async.Rd @@ -11,7 +11,6 @@ ti_async( measures = NULL, terminator, search_space = NULL, - internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, @@ -45,9 +44,6 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} -\item{internal_search_space}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - \item{store_benchmark_result}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} diff --git a/man/tune.Rd b/man/tune.Rd index db311ffd..12156461 100644 --- a/man/tune.Rd +++ b/man/tune.Rd @@ -15,7 +15,6 @@ tune( terminator = NULL, search_space = NULL, store_benchmark_result = TRUE, - internal_search_space = NULL, store_models = FALSE, check_values = FALSE, callbacks = NULL, @@ -63,9 +62,6 @@ constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} -\item{internal_search_space}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} - \item{store_models}{(\code{logical(1)})\cr If \code{TRUE}, fitted models are stored in the benchmark result (\code{archive$benchmark_result}). 
If \code{store_benchmark_result = FALSE}, models diff --git a/tests/testthat/test_ArchiveBatchTuning.R b/tests/testthat/test_ArchiveBatchTuning.R index 351115d9..fa961e85 100644 --- a/tests/testthat/test_ArchiveBatchTuning.R +++ b/tests/testthat/test_ArchiveBatchTuning.R @@ -263,7 +263,7 @@ test_that("ArchiveTuning as.data.table function works", { # Internal Tuning -------------------------------------------------------------- -test_that("ArchiveBatchTuning as.data.table function works internally tuned values", { +test_that("ArchiveBatchTuning as.data.table function works for internally tuned values", { instance = ti( task = tsk("pima"), learner = lrn("classif.debug", validate = 0.2, early_stopping = TRUE, iter = to_tune(upper = 1000, internal = TRUE, aggr = function(x) 99), diff --git a/tests/testthat/test_AutoTuner.R b/tests/testthat/test_AutoTuner.R index 66c85987..98a56ad0 100644 --- a/tests/testthat/test_AutoTuner.R +++ b/tests/testthat/test_AutoTuner.R @@ -679,24 +679,6 @@ test_that("AutoTuner works with internal tuning and validation", { expect_true(is.null(at$model$learner$state$internal_valid_task_ids)) }) -test_that("AutoTuner works when internal_search_space is passed separately", { - task = tsk("iris") - search_space = ps(x = p_dbl(0.2, 0.3)) - internal_search_space = ps(iter = p_int(upper = 1000L, aggr = function(x) length(x))) - at = auto_tuner( - tuner = tnr("random_search", batch_size = 2), - learner = lrn("classif.debug", early_stopping = TRUE, validate = "test"), - resampling = rsmp("cv", folds = 3), - search_space = search_space, - internal_search_space = internal_search_space, - measure = msr("classif.ce"), - term_evals = 4 - ) - at$train(task) - expect_equal(at$model$learner$param_set$values$iter, 3) - expect_false(at$model$learner$param_set$values$early_stopping) -}) - test_that("AutoTuner works when internal_search_space is part of primary search space", { task = tsk("iris") search_space = ps( diff --git a/tests/testthat/test_Tuner.R b/tests/testthat/test_Tuner.R index 33ad30f1..77010748 100644 --- a/tests/testthat/test_Tuner.R +++ b/tests/testthat/test_Tuner.R @@ -340,55 +340,6 @@ test_that("parameter transformations can be used with internal tuning", { ) }) -test_that("either provide internal_search_space OR tag params with 'internal_tuning'", { - expect_error( - tune( - tuner = tnr("random_search"), - learner = lrn("classif.debug", - iter = to_tune(upper = 1000, internal = TRUE), - x = to_tune(), - early_stopping = TRUE, validate = 0.2), - task = tsk("iris"), - internal_search_space = ps(iter = p_int(upper = 100, aggr = function(x) round(mean(unlist(x))))), - resampling = rsmp("holdout"), - term_evals = 2 - ), - "Either tag parameters in the `search_space`" - ) -}) - -test_that("Can pass internal_search_space separately", { - # 1. primary search space is passed manually - ti = tune( - tuner = tnr("random_search"), - learner = lrn("classif.debug", - x = to_tune(), - early_stopping = TRUE, validate = 0.2), - task = tsk("iris"), - internal_search_space = ps(iter = p_int(upper = 100, aggr = function(x) as.integer(mean(unlist(x))))), - resampling = rsmp("holdout"), - term_evals = 2 - ) - expect_true("iter" %in% ti$internal_search_space$ids()) - expect_true(is.integer(ti$result$internal_tuned_values[[1]]$iter)) - expect_double(ti$result$x) - - # 2. 
primary search space is passed via to_tune - ti = tune( - tuner = tnr("random_search"), - learner = lrn("classif.debug", - early_stopping = TRUE, validate = 0.2), - task = tsk("iris"), - search_space = ps(x = p_dbl(0, 1)), - internal_search_space = ps(iter = p_int(upper = 100, aggr = function(x) as.integer(mean(unlist(x))))), - resampling = rsmp("holdout"), - term_evals = 2 - ) - expect_true("iter" %in% ti$internal_search_space$ids()) - expect_true(is.integer(ti$result$internal_tuned_values[[1]]$iter)) - expect_double(ti$result$x) -}) - test_that("tag internal tune token manually in primary search space", { search_space = ps( x = p_dbl(0, 1), @@ -407,34 +358,6 @@ test_that("tag internal tune token manually in primary search space", { expect_double(ti$result$x) }) -test_that("Can only pass internal tune tokens one way", { - skip_if_not_installed("mlr3pipelines") - - l1 = lrn("classif.debug", early_stopping = TRUE) - l1$id = "l1" - l2 = l1$clone() - l2$id = "l2" - - l1$param_set$set_values( - iter = to_tune(upper = 100, internal = TRUE) - ) - l = mlr3pipelines::ppl("branch", list(l1 = l1, l2 = l2)) - l = as_learner(l) - set_validate(l, 0.2, ids = c("l1", "l2")) - - internal_search_space = ps( - l2.iter = p_int(upper = 100, aggr = function(x) as.integer(mean(unlist(x))), tags = "internal_tuning") - ) - expect_error(tune( - tuner = tnr("random_search"), - learner = l, - internal_search_space = internal_search_space, - task = tsk("iris"), - resampling = rsmp("holdout"), - term_evals = 2), - "Either tag parameters") -}) - test_that("Correct error when minimize is NA", { m = msr("classif.acc") m$minimize = NA @@ -443,6 +366,6 @@ test_that("Correct error when minimize is NA", { task = tsk("iris"), learner = lrn("classif.debug", x = to_tune()), resampling = rsmp("holdout"), - measure = m + measures = m ), "`minimize`") }) diff --git a/tests/testthat/test_TuningInstanceBatchSingleCrit.R b/tests/testthat/test_TuningInstanceBatchSingleCrit.R index 6e4b6979..65f8a86a 100644 --- a/tests/testthat/test_TuningInstanceBatchSingleCrit.R +++ b/tests/testthat/test_TuningInstanceBatchSingleCrit.R @@ -471,27 +471,10 @@ test_that("required parameter can be tuned internally without having a value set learner$param_set$set_values( early_stopping = TRUE, - iter = NULL + iter = to_tune(upper = 1000, internal = TRUE) ) learner$validate = "test" - internal_search_space = ps( - iter = p_int(upper = 1000, aggr = function(x) as.integer(mean(unlist(x)))) - ) - - - expect_error(tune( - task = tsk("iris"), - tuner = tnr("internal"), - learner = learner, - internal_search_space = internal_search_space, - resampling = rsmp("holdout"), - store_benchmark_result = TRUE - ), regexp = NA) - - learner$param_set$set_values( - iter = to_tune(upper = 1000, internal = TRUE) - ) expect_error(tune( task = tsk("iris"), tuner = tnr("internal"),