From 0d5e8f597d29ff20d87202bdcf89b1ad9ca3228f Mon Sep 17 00:00:00 2001
From: Marc Becker <33069354+be-marc@users.noreply.github.com>
Date: Sun, 27 Oct 2024 08:00:49 +0100
Subject: [PATCH] refactor: pass extra information of the result in the extra
 parameter (#458)

* refactor: pass extra information of the result in the extra parameter

* ...

* ...
---
 DESCRIPTION                          |  2 +-
 NEWS.md                              |  7 +++----
 R/TuningInstanceAsyncMulticrit.R     | 16 +++++++++++-----
 R/TuningInstanceAsyncSingleCrit.R    | 14 ++++++++++----
 R/TuningInstanceBatchMulticrit.R     | 16 +++++++++++-----
 R/TuningInstanceBatchSingleCrit.R    | 15 ++++++++++-----
 man-roxygen/param_extra.R            |  2 ++
 man/TuningInstanceAsyncMultiCrit.Rd  | 10 +++++++++-
 man/TuningInstanceAsyncSingleCrit.Rd | 12 ++++++++++--
 man/TuningInstanceBatchMultiCrit.Rd  | 12 ++++++++++--
 man/TuningInstanceBatchSingleCrit.Rd | 22 +++++++++++++++------
 man/mlr_tuners_cmaes.Rd              |  4 ++--
 12 files changed, 94 insertions(+), 38 deletions(-)
 create mode 100644 man-roxygen/param_extra.R

diff --git a/DESCRIPTION b/DESCRIPTION
index 9e97d88b..69fe7823 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -29,7 +29,7 @@ Depends:
     paradox (>= 1.0.1),
     R (>= 3.1.0)
 Imports:
-    bbotk (>= 1.1.1),
+    bbotk (>= 1.2.0),
     checkmate (>= 2.0.0),
     data.table,
     lgr,
diff --git a/NEWS.md b/NEWS.md
index 2489bb09..74094768 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -1,11 +1,10 @@
 # mlr3tuning (development version)
 
 * fix: The `as_data_table()` functions do not unnest the `x_domain` colum anymore by default.
-* fix: `to_tune(internal = TRUE)` now also works if non-internal tuning parameters require have
-  an `.extra_trafo`
+* fix: `to_tune(internal = TRUE)` now also works if non-internal tuning parameters require an `.extra_trafo`.
 * feat: It is now possible to pass an `internal_search_space` manually.
-  This allows to use parameter transformations on the primary search space in combination with
-  internal hyperparameter tuning.
+  This allows using parameter transformations on the primary search space in combination with internal hyperparameter tuning.
+* refactor: The `Tuner` now passes extra information about the result in the `extra` parameter.
 
 # mlr3tuning 1.0.2
 
diff --git a/R/TuningInstanceAsyncMulticrit.R b/R/TuningInstanceAsyncMulticrit.R
index 1663b06d..1b769e5c 100644
--- a/R/TuningInstanceAsyncMulticrit.R
+++ b/R/TuningInstanceAsyncMulticrit.R
@@ -26,6 +26,7 @@
 #' @template param_xdt
 #' @template param_learner_param_vals
 #' @template param_internal_tuned_values
+#' @template param_extra
 #'
 #' @template field_internal_search_space
 #'
@@ -147,13 +148,18 @@ TuningInstanceAsyncMultiCrit = R6Class("TuningInstanceAsyncMultiCrit",
     #' For internal use.
     #'
     #' @param ydt (`numeric(1)`)\cr
-    #'  Optimal outcomes, e.g. the Pareto front.
+    #'   Optimal outcomes, e.g. the Pareto front.
     #' @param xydt (`data.table::data.table()`)\cr
-    #'  Point, outcome, and additional information.
-    assign_result = function(xdt, ydt, learner_param_vals = NULL, xydt = NULL) {
+    #'   Point, outcome, and additional information.
+    #' @param ... (`any`)\cr
+    #'   ignored.
+    assign_result = function(xdt, ydt, learner_param_vals = NULL, extra = NULL, xydt = NULL, ...) 
{ + # workaround + extra = extra %??% xydt + # extract internal tuned values - if ("internal_tuned_values" %in% names(xydt)) { - set(xdt, j = "internal_tuned_values", value = list(xydt[["internal_tuned_values"]])) + if ("internal_tuned_values" %in% names(extra)) { + set(xdt, j = "internal_tuned_values", value = list(extra[["internal_tuned_values"]])) } # set the column with the learner param_vals that were not optimized over but set implicitly diff --git a/R/TuningInstanceAsyncSingleCrit.R b/R/TuningInstanceAsyncSingleCrit.R index da173d2d..823f2839 100644 --- a/R/TuningInstanceAsyncSingleCrit.R +++ b/R/TuningInstanceAsyncSingleCrit.R @@ -36,6 +36,7 @@ #' @template param_xdt #' @template param_learner_param_vals #' @template param_internal_tuned_values +#' @template param_extra #' #' @template field_internal_search_space #' @@ -159,14 +160,19 @@ TuningInstanceAsyncSingleCrit = R6Class("TuningInstanceAsyncSingleCrit", #' @param y (`numeric(1)`)\cr #' Optimal outcome. #' @param xydt (`data.table::data.table()`)\cr - #' Point, outcome, and additional information. - assign_result = function(xdt, y, learner_param_vals = NULL, xydt = NULL) { + #' Point, outcome, and additional information (Deprecated). + #' @param ... (`any`)\cr + #' ignored. + assign_result = function(xdt, y, learner_param_vals = NULL, extra = NULL, xydt = NULL, ...) { + # workaround + extra = extra %??% xydt + # set the column with the learner param_vals that were not optimized over but set implicitly assert_list(learner_param_vals, null.ok = TRUE, names = "named") # extract internal tuned values - if ("internal_tuned_values" %in% names(xydt)) { - set(xdt, j = "internal_tuned_values", value = list(xydt[["internal_tuned_values"]])) + if ("internal_tuned_values" %in% names(extra)) { + set(xdt, j = "internal_tuned_values", value = list(extra[["internal_tuned_values"]])) } if (is.null(learner_param_vals)) { diff --git a/R/TuningInstanceBatchMulticrit.R b/R/TuningInstanceBatchMulticrit.R index 540a98b9..163a16da 100644 --- a/R/TuningInstanceBatchMulticrit.R +++ b/R/TuningInstanceBatchMulticrit.R @@ -33,6 +33,7 @@ #' @template param_xdt #' @template param_learner_param_vals #' @template param_internal_tuned_values +#' @template param_extra #' #' @template field_internal_search_space #' @@ -181,13 +182,18 @@ TuningInstanceBatchMultiCrit = R6Class("TuningInstanceBatchMultiCrit", #' For internal use. #' #' @param ydt (`data.table::data.table()`)\cr - #' Optimal outcomes, e.g. the Pareto front. + #' Optimal outcomes, e.g. the Pareto front. #' @param xydt (`data.table::data.table()`)\cr - #' Point, outcome, and additional information. - assign_result = function(xdt, ydt, learner_param_vals = NULL, xydt = NULL) { + #' Point, outcome, and additional information (Deprecated). + #' @param ... (`any`)\cr + #' ignored. + assign_result = function(xdt, ydt, learner_param_vals = NULL, extra = NULL, xydt = NULL, ...) 
{ + # workaround + extra = extra %??% xydt + # extract internal tuned values - if ("internal_tuned_values" %in% names(xydt)) { - set(xdt, j = "internal_tuned_values", value = list(xydt[["internal_tuned_values"]])) + if ("internal_tuned_values" %in% names(extra)) { + set(xdt, j = "internal_tuned_values", value = list(extra[["internal_tuned_values"]])) } # set the column with the learner param_vals that were not optimized over but set implicitly diff --git a/R/TuningInstanceBatchSingleCrit.R b/R/TuningInstanceBatchSingleCrit.R index 91d3216c..2ccc004a 100644 --- a/R/TuningInstanceBatchSingleCrit.R +++ b/R/TuningInstanceBatchSingleCrit.R @@ -68,6 +68,7 @@ #' @template param_xdt #' @template param_learner_param_vals #' @template param_internal_tuned_values +#' @template param_extra #' #' @template field_internal_search_space #' @@ -219,17 +220,21 @@ TuningInstanceBatchSingleCrit = R6Class("TuningInstanceBatchSingleCrit", #' For internal use. #' #' @param y (`numeric(1)`)\cr - #' Optimal outcome. + #' Optimal outcome. #' @param xydt (`data.table::data.table()`)\cr - #' Point, outcome, and additional information. - assign_result = function(xdt, y, learner_param_vals = NULL, xydt = NULL) { + #' Point, outcome, and additional information (Deprecated). + #' @param ... (`any`)\cr + #' ignored. + assign_result = function(xdt, y, learner_param_vals = NULL, extra = NULL, xydt = NULL, ...) { + # workaround + extra = extra %??% xydt # set the column with the learner param_vals that were not optimized over but set implicitly assert_list(learner_param_vals, null.ok = TRUE, names = "named") # extract internal tuned values - if ("internal_tuned_values" %in% names(xydt)) { - set(xdt, j = "internal_tuned_values", value = list(xydt[["internal_tuned_values"]])) + if ("internal_tuned_values" %in% names(extra)) { + set(xdt, j = "internal_tuned_values", value = list(extra[["internal_tuned_values"]])) } # learner param values diff --git a/man-roxygen/param_extra.R b/man-roxygen/param_extra.R new file mode 100644 index 00000000..5283a67b --- /dev/null +++ b/man-roxygen/param_extra.R @@ -0,0 +1,2 @@ +#' @param extra (`data.table::data.table()`)\cr +#' Additional information. diff --git a/man/TuningInstanceAsyncMultiCrit.Rd b/man/TuningInstanceAsyncMultiCrit.Rd index dc2997d2..622e008c 100644 --- a/man/TuningInstanceAsyncMultiCrit.Rd +++ b/man/TuningInstanceAsyncMultiCrit.Rd @@ -156,7 +156,9 @@ For internal use. xdt, ydt, learner_param_vals = NULL, - xydt = NULL + extra = NULL, + xydt = NULL, + ... )}\if{html}{\out{}} } @@ -174,8 +176,14 @@ Optimal outcomes, e.g. the Pareto front.} \item{\code{learner_param_vals}}{(List of named \verb{list()s})\cr Fixed parameter values of the learner that are neither part of the} +\item{\code{extra}}{(\code{data.table::data.table()})\cr +Additional information.} + \item{\code{xydt}}{(\code{data.table::data.table()})\cr Point, outcome, and additional information.} + +\item{\code{...}}{(\code{any})\cr +ignored.} } \if{html}{\out{}} } diff --git a/man/TuningInstanceAsyncSingleCrit.Rd b/man/TuningInstanceAsyncSingleCrit.Rd index 7d0a7932..139b67cc 100644 --- a/man/TuningInstanceAsyncSingleCrit.Rd +++ b/man/TuningInstanceAsyncSingleCrit.Rd @@ -194,7 +194,9 @@ For internal use. xdt, y, learner_param_vals = NULL, - xydt = NULL + extra = NULL, + xydt = NULL, + ... 
)}\if{html}{\out{}} } @@ -212,8 +214,14 @@ Optimal outcome.} \item{\code{learner_param_vals}}{(List of named \verb{list()s})\cr Fixed parameter values of the learner that are neither part of the} +\item{\code{extra}}{(\code{data.table::data.table()})\cr +Additional information.} + \item{\code{xydt}}{(\code{data.table::data.table()})\cr -Point, outcome, and additional information.} +Point, outcome, and additional information (Deprecated).} + +\item{\code{...}}{(\code{any})\cr +ignored.} } \if{html}{\out{}} } diff --git a/man/TuningInstanceBatchMultiCrit.Rd b/man/TuningInstanceBatchMultiCrit.Rd index ecf38ac6..790696ea 100644 --- a/man/TuningInstanceBatchMultiCrit.Rd +++ b/man/TuningInstanceBatchMultiCrit.Rd @@ -196,7 +196,9 @@ For internal use. xdt, ydt, learner_param_vals = NULL, - xydt = NULL + extra = NULL, + xydt = NULL, + ... )}\if{html}{\out{}} } @@ -214,8 +216,14 @@ Optimal outcomes, e.g. the Pareto front.} \item{\code{learner_param_vals}}{(List of named \verb{list()s})\cr Fixed parameter values of the learner that are neither part of the} +\item{\code{extra}}{(\code{data.table::data.table()})\cr +Additional information.} + \item{\code{xydt}}{(\code{data.table::data.table()})\cr -Point, outcome, and additional information.} +Point, outcome, and additional information (Deprecated).} + +\item{\code{...}}{(\code{any})\cr +ignored.} } \if{html}{\out{}} } diff --git a/man/TuningInstanceBatchSingleCrit.Rd b/man/TuningInstanceBatchSingleCrit.Rd index c3644516..b1fb08c2 100644 --- a/man/TuningInstanceBatchSingleCrit.Rd +++ b/man/TuningInstanceBatchSingleCrit.Rd @@ -161,11 +161,11 @@ Creates a new instance of this \link[R6:R6Class]{R6} class. measure = NULL, terminator, search_space = NULL, + internal_search_space = NULL, store_benchmark_result = TRUE, store_models = FALSE, check_values = FALSE, - callbacks = NULL, - internal_search_space = NULL + callbacks = NULL )}\if{html}{\out{}} } @@ -196,6 +196,9 @@ Hyperparameter search space. If \code{NULL} (default), the search space is constructed from the \link[paradox:to_tune]{paradox::TuneToken} of the learner's parameter set (learner$param_set).} +\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr +The internal search space.} + \item{\code{store_benchmark_result}}{(\code{logical(1)})\cr If \code{TRUE} (default), store resample result of evaluated hyperparameter configurations in archive as \link[mlr3:BenchmarkResult]{mlr3::BenchmarkResult}.} @@ -213,9 +216,6 @@ computational overhead is reduced.} \item{\code{callbacks}}{(list of \link[mlr3misc:Callback]{mlr3misc::Callback})\cr List of callbacks.} - -\item{\code{internal_search_space}}{(\link[paradox:ParamSet]{paradox::ParamSet} or \code{NULL})\cr -The internal search space.} } \if{html}{\out{}} } @@ -231,7 +231,9 @@ For internal use. xdt, y, learner_param_vals = NULL, - xydt = NULL + extra = NULL, + xydt = NULL, + ... 
)}\if{html}{\out{}} } @@ -249,8 +251,14 @@ Optimal outcome.} \item{\code{learner_param_vals}}{(List of named \verb{list()s})\cr Fixed parameter values of the learner that are neither part of the} +\item{\code{extra}}{(\code{data.table::data.table()})\cr +Additional information.} + \item{\code{xydt}}{(\code{data.table::data.table()})\cr -Point, outcome, and additional information.} +Point, outcome, and additional information (Deprecated).} + +\item{\code{...}}{(\code{any})\cr +ignored.} } \if{html}{\out{}} } diff --git a/man/mlr_tuners_cmaes.Rd b/man/mlr_tuners_cmaes.Rd index 5df8f71c..79a69f52 100644 --- a/man/mlr_tuners_cmaes.Rd +++ b/man/mlr_tuners_cmaes.Rd @@ -11,7 +11,7 @@ Hansen N (2016). } \description{ Subclass for Covariance Matrix Adaptation Evolution Strategy (CMA-ES). -Calls \code{\link[adagio:pureCMAES]{adagio::pureCMAES()}} from package \CRANpkg{adagio}. +Calls \code{\link[adagio:cmaes]{adagio::pureCMAES()}} from package \CRANpkg{adagio}. } \section{Dictionary}{ @@ -29,7 +29,7 @@ Create \code{random} start values or based on \code{center} of search space? In the latter case, it is the center of the parameters before a trafo is applied.} } -For the meaning of the control parameters, see \code{\link[adagio:pureCMAES]{adagio::pureCMAES()}}. +For the meaning of the control parameters, see \code{\link[adagio:cmaes]{adagio::pureCMAES()}}. Note that we have removed all control parameters which refer to the termination of the algorithm and where our terminators allow to obtain the same behavior. }
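
Taken together, the four `assign_result()` methods now share one deprecation pattern: `extra` is the supported argument, the old `xydt` argument is kept as a fallback via `extra = extra %??% xydt`, and `...` absorbs anything further that newer bbotk versions (>= 1.2.0, per the DESCRIPTION bump) might pass. The standalone R sketch below reproduces that behavior outside the package; `assign_result_shim` and the toy values are hypothetical, while `%??%` mirrors mlr3misc's null-default operator and the `data.table::set()` call matches the patch:

```r
library(data.table)

# mlr3misc's null-default operator, inlined so the sketch is standalone:
# returns the right-hand side whenever the left-hand side is NULL
`%??%` = function(lhs, rhs) if (is.null(lhs)) rhs else lhs

# illustrative stand-in for the instances' assign_result() method
assign_result_shim = function(xdt, y, learner_param_vals = NULL, extra = NULL, xydt = NULL, ...) {
  # workaround: callers still using the deprecated `xydt` argument
  # are silently redirected to `extra`
  extra = extra %??% xydt

  # extract internal tuned values reported by the tuner, if any
  if ("internal_tuned_values" %in% names(extra)) {
    set(xdt, j = "internal_tuned_values", value = list(extra[["internal_tuned_values"]]))
  }
  xdt[]
}

extra = data.table(internal_tuned_values = list(list(nrounds = 100L)))

# new interface: extras are passed explicitly
assign_result_shim(data.table(cp = 0.01), c(classif.ce = 0.2), extra = extra)

# deprecated interface: old callers passing `xydt` get the same result
assign_result_shim(data.table(cp = 0.01), c(classif.ce = 0.2), xydt = extra)
```

Because `...` is documented as ignored, subclasses that only know the old signature keep working even if a newer bbotk `Optimizer` forwards additional named arguments.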
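
The new `man-roxygen/param_extra.R` file covers the documentation side of the change: roxygen2 resolves `@template param_extra` against the `man-roxygen/` directory and splices the template's tags into every block that references it, which is how the identical `extra` entry lands in all four regenerated `.Rd` files. A minimal sketch of the mechanism (the referencing block shown is illustrative):

```r
# man-roxygen/param_extra.R -- the template added by this patch
#' @param extra (`data.table::data.table()`)\cr
#'   Additional information.

# In each class file, a single tag reuses it; roxygen2 expands the template
# at document() time, so the four Rd files stay in sync automatically:
#' @template param_extra
```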