
Commit b1f932c
chore: sms_ego --> smsego; update DESCRIPTION, add references, add cphs, minimal doc updates
1 parent: 014ef9e

36 files changed, +126 −81 lines

DESCRIPTION

Lines changed: 19 additions & 10 deletions
@@ -1,6 +1,6 @@
 Type: Package
 Package: mlr3mbo
-Title: Flexible Bayesian Optimization in R
+Title: Flexible Bayesian Optimization
 Version: 0.1.1
 Authors@R: c(
     person("Lennart", "Schneider", , "[email protected]", role = c("cre", "aut"),
@@ -17,15 +17,24 @@ Authors@R: c(
       comment = c(ORCID = "0000-0001-8867-762X")),
     person("Martin", "Binder", , "[email protected]", role = "aut"),
     person("Sebastian", "Fischer", , "[email protected]", role = "aut",
-      comment = c(ORCID = "0000-0002-9609-3197")))
-Description: A modern and flexible approach to Bayesian Optimization /
-    Model Based Optimization in R building on the 'bbotk' package. 'mlr3mbo' is
-    a toolbox providing both ready-to-use optimization algorithms as well
-    as their fundamental building blocks. Single- and multi-objective
-    optimization is supported as well as mixed continuous, categorical and
-    conditional search spaces. Moreover, using 'mlr3mbo' for hyperparameter
-    optimization of machine learning models within the 'mlr3' ecosystem is
-    straightforward via 'mlr3tuning'.
+      comment = c(ORCID = "0000-0002-9609-3197")),
+    person("Michael H.", "Buselli", role = "cph"),
+    person("Wessel", "Dankers", role = "cph"),
+    person("Carlos", "Fonseca", role = "cph"),
+    person("Manuel", "Lopez-Ibanez", role = "cph"),
+    person("Luis", "Paquete", role = "cph"))
+Description: A modern and flexible approach to Bayesian Optimization / Model
+    Based Optimization building on the 'bbotk' package. 'mlr3mbo' is a toolbox
+    providing both ready-to-use optimization algorithms as well as their
+    fundamental building blocks allowing for straightforward implementation of
+    custom algorithms. Single- and multi-objective optimization is supported as
+    well as mixed continuous, categorical and conditional search spaces.
+    Moreover, using 'mlr3mbo' for hyperparameter optimization of machine
+    learning models within the 'mlr3' ecosystem is straightforward via
+    'mlr3tuning'. Examples of ready-to-use optimization algorithms include
+    Efficient Global Optimization by Jones et al. (1998)
+    <doi:10.1023/A:1008306431147>, ParEGO by Knowles (2006)
+    <doi:10.1109/TEVC.2005.851274> and SMS-EGO by Ponweiser et al. (2008)
+    <doi:10.1007/978-3-540-87700-4_78>.
 License: LGPL-3
 URL: https://mlr3mbo.mlr-org.com, https://github.com/mlr-org/mlr3mbo
 BugReports: https://github.com/mlr-org/mlr3mbo/issues
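
The "straightforward via 'mlr3tuning'" claim in the Description rests on TunerMbo (see R/TunerMbo.R below) being registered in mlr3tuning's tuner dictionary. A minimal sketch of MBO-based hyperparameter optimization with all defaults, assuming the dictionary key "mbo" and an mlr3tuning version providing TuningInstanceSingleCrit; task, learner, and budget are purely illustrative:

library(mlr3)
library(mlr3tuning)
library(mlr3mbo)  # registers TunerMbo and supplies the mbo_defaults

instance = TuningInstanceSingleCrit$new(
  task = tsk("sonar"),
  learner = lrn("classif.rpart", cp = to_tune(1e-4, 1e-1, logscale = TRUE)),
  resampling = rsmp("holdout"),
  measure = msr("classif.ce"),
  terminator = trm("evals", n_evals = 10)
)
# loop function, surrogate, acquisition function and acquisition function
# optimizer are all filled in by the defaults from R/mbo_defaults.R below
tnr("mbo")$optimize(instance)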

R/AcqFunctionSmsEgo.R

Lines changed: 5 additions & 5 deletions
@@ -1,7 +1,7 @@
 #' @title Acquisition Function SMS-EGO
 #'
 #' @include AcqFunction.R
-#' @name mlr_acqfunctions_sms_ego
+#' @name mlr_acqfunctions_smsego
 #'
 #' @description
 #' S-Metric Selection Evolutionary Multi-Objective Optimization Algorithm Acquisition Function.
@@ -54,7 +54,7 @@
 #'
 #'   surrogate = srlrnc(list(learner, learner$clone(deep = TRUE)), archive = instance$archive)
 #'
-#'   acq_function = acqf("sms_ego", surrogate = surrogate)
+#'   acq_function = acqf("smsego", surrogate = surrogate)
 #'
 #'   acq_function$surrogate$update()
 #'   acq_function$progress = 5 - 4 # n_evals = 5 and 4 points already evaluated
@@ -101,7 +101,7 @@ AcqFunctionSmsEgo = R6Class("AcqFunctionSmsEgo",
       constants$values$lambda = lambda
       constants$values$epsilon = epsilon

-      super$initialize("acq_sms_ego", constants = constants, surrogate = surrogate, direction = "minimize", label = "SMS-EGO", man = "mlr3mbo::mlr_acqfunctions_sms_ego") # indeed, we minimize, see comments below about C code
+      super$initialize("acq_smsego", constants = constants, surrogate = surrogate, direction = "minimize", label = "SMS-EGO", man = "mlr3mbo::mlr_acqfunctions_smsego") # indeed, we minimize, see comments below about C code
     },

     #' @description
@@ -155,10 +155,10 @@ AcqFunctionSmsEgo = R6Class("AcqFunctionSmsEgo",
       # allocate memory for adding points to front for HV calculation in C
       front2 = t(rbind(self$ys_front, 0))
       sms = .Call("c_sms_indicator", PACKAGE = "mlr3mbo", cbs, self$ys_front, front2, self$epsilon, self$ref_point) # note that the negative indicator is returned from C
-      data.table(acq_sms_ego = sms)
+      data.table(acq_smsego = sms)
     }
   )
 )

-mlr_acqfunctions$add("sms_ego", AcqFunctionSmsEgo)
+mlr_acqfunctions$add("smsego", AcqFunctionSmsEgo)
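Note that the commit renames the dictionary key itself, not just the help topic, so user code that looks the acquisition function up by key must follow suit. A short before/after sketch, assuming only that mlr_acqfunctions behaves like a standard mlr3misc Dictionary:

library(mlr3mbo)

"smsego" %in% mlr_acqfunctions$keys()  # TRUE after this commit
# acqf("sms_ego")                      # the old key no longer resolves

acq_function = acqf("smsego")  # an AcqFunctionSmsEgo
# its scores are now returned in a data.table column named "acq_smsego",
# matching the new id passed to super$initialize() above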

R/OptimizerMbo.R

Lines changed: 2 additions & 2 deletions
@@ -5,7 +5,7 @@
 #' @description
 #' `OptimizerMbo` class that implements Model Based Optimization (MBO).
 #' The implementation follows a modular layout relying on a [loop_function] determining the MBO flavor to be used, e.g.,
-#' [bayesopt_ego] for sequential single-objective Bayesian Optimization, a [Surrogate], an [AcqFunction], e.g., [AcqFunctionEI] for
+#' [bayesopt_ego] for sequential single-objective Bayesian Optimization, a [Surrogate], an [AcqFunction], e.g., [mlr_acqfunctions_ei] for
 #' Expected Improvement and an [AcqOptimizer].
 #'
 #' MBO algorithms are iterative optimization algorithms that make use of a continuously updated surrogate model built for the objective function.
@@ -23,7 +23,7 @@
 #'   Whether this point was already evaluated. Depends on the `skip_already_evaluated` parameter of the [AcqOptimizer].
 #' @export
 #' @examples
-#' \dontrun{
+#' \donttest{
 #' if (requireNamespace("mlr3learners") &
 #'   requireNamespace("DiceKriging") &
 #'   requireNamespace("rgenoud")) {
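
The modular layout means an OptimizerMbo is assembled from exactly the four parts named in the description. A construction-only sketch, assuming OptimizerMbo$new() accepts these components as arguments and that unset ones fall back to the mbo_defaults changed later in this commit:

library(bbotk)
library(mlr3mbo)

optimizer = OptimizerMbo$new(
  loop_function = bayesopt_ego,  # the MBO flavor
  acq_function = acqf("ei"),     # Expected Improvement
  acq_optimizer = acqo(
    optimizer = opt("random_search", batch_size = 1000L),
    terminator = trm("evals", n_evals = 10000L)
  )
  # surrogate omitted: a default is derived from the instance to be optimized
)
# optimizer$optimize(instance) then runs the loop on a bbotk::OptimInstance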

R/TunerMbo.R

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@
 #'
 #' @export
 #' @examples
-#' \dontrun{
+#' \donttest{
 #' if (requireNamespace("mlr3learners") &
 #'   requireNamespace("DiceKriging") &
 #'   requireNamespace("rgenoud")) {

R/bayesopt_ego.R

Lines changed: 2 additions & 2 deletions
@@ -4,7 +4,7 @@
 #' @name mlr_loop_functions_ego
 #'
 #' @description
-#' MBO loop function for sequential single-objective Bayesian Optimization.
+#' Loop function for sequential single-objective Bayesian Optimization.
 #' Normally used inside an [OptimizerMbo].
 #'
 #' In each iteration after the initial design, the surrogate and acquisition function are updated and the next candidate
@@ -46,7 +46,7 @@
 #' @family Loop Function
 #' @export
 #' @examples
-#' \dontrun{
+#' \donttest{
 #' if (requireNamespace("mlr3learners") &
 #'   requireNamespace("DiceKriging") &
 #'   requireNamespace("rgenoud")) {

R/bayesopt_mpcl.R

Lines changed: 2 additions & 2 deletions
@@ -4,7 +4,7 @@
 #' @name mlr_loop_functions_mpcl
 #'
 #' @description
-#' MBO loop function for single-objective Bayesian Optimization via multipoint constant liar.
+#' Loop function for single-objective Bayesian Optimization via multipoint constant liar.
 #' Normally used inside an [OptimizerMbo].
 #'
 #' In each iteration after the initial design, the surrogate and acquisition function are updated.
@@ -56,7 +56,7 @@
 #' @family Loop Function
 #' @export
 #' @examples
-#' \dontrun{
+#' \donttest{
 #' if (requireNamespace("mlr3learners") &
 #'   requireNamespace("DiceKriging") &
 #'   requireNamespace("rgenoud")) {

R/bayesopt_parego.R

Lines changed: 2 additions & 2 deletions
@@ -4,7 +4,7 @@
 #' @name mlr_loop_functions_parego
 #'
 #' @description
-#' MBO loop function for multi-objective Bayesian Optimization via ParEGO.
+#' Loop function for multi-objective Bayesian Optimization via ParEGO.
 #' Normally used inside an [OptimizerMbo].
 #'
 #' In each iteration after the initial design, the observed objective function values are normalized and `q` candidates are
@@ -59,7 +59,7 @@
 #' @family Loop Function
 #' @export
 #' @examples
-#' \dontrun{
+#' \donttest{
 #' if (requireNamespace("mlr3learners") &
 #'   requireNamespace("DiceKriging") &
 #'   requireNamespace("rgenoud")) {

R/bayesopt_smsego.R

Lines changed: 9 additions & 9 deletions
@@ -4,11 +4,11 @@
 #' @name mlr_loop_functions_smsego
 #'
 #' @description
-#' MBO loop function for sequential multi-objective Bayesian Optimization via SmsEGO.
+#' Loop function for sequential multi-objective Bayesian Optimization via SMS-EGO.
 #' Normally used inside an [OptimizerMbo].
 #'
-#' In each iteration after the initial design, the surrogate and acquisition function ([AcqFunctionSmsEgo]) are updated and the next candidate
-#' is chosen based on optimizing the acquisition function.
+#' In each iteration after the initial design, the surrogate and acquisition function ([mlr_acqfunctions_smsego]) are
+#' updated and the next candidate is chosen based on optimizing the acquisition function.
 #'
 #' @param instance ([bbotk::OptimInstanceMultiCrit])\cr
 #'   The [bbotk::OptimInstanceMultiCrit] to be optimized.
@@ -19,8 +19,8 @@
 #'   Points are drawn uniformly at random.
 #' @param surrogate ([SurrogateLearnerCollection])\cr
 #'   [SurrogateLearnerCollection] to be used as a surrogate.
-#' @param acq_function ([AcqFunctionSmsEgo])\cr
-#'   [AcqFunctionSmsEgo] to be used as acquisition function.
+#' @param acq_function ([mlr_acqfunctions_smsego])\cr
+#'   [mlr_acqfunctions_smsego] to be used as acquisition function.
 #' @param acq_optimizer ([AcqOptimizer])\cr
 #'   [AcqOptimizer] to be used as acquisition function optimizer.
 #' @param random_interleave_iter (`integer(1)`)\cr
@@ -34,7 +34,7 @@
 #' * The `acq_function$surrogate`, even if already populated, will always be overwritten by the `surrogate`.
 #' * The `acq_optimizer$acq_function`, even if already populated, will always be overwritten by `acq_function`.
 #' * The `surrogate$archive`, even if already populated, will always be overwritten by the [bbotk::Archive] of the [bbotk::OptimInstanceMultiCrit].
-#' * Due to the iterative computation of the epsilon within the [AcqFunctionSmsEgo], requires the [bbotk::Terminator] of
+#' * Due to the iterative computation of the epsilon within the [mlr_acqfunctions_smsego], requires the [bbotk::Terminator] of
 #'   the [bbotk::OptimInstanceMultiCrit] to be a [bbotk::TerminatorEvals].
 #'
 #' @return invisible(instance)\cr
@@ -47,7 +47,7 @@
 #' @family Loop Function
 #' @export
 #' @examples
-#' \dontrun{
+#' \donttest{
 #' if (requireNamespace("mlr3learners") &
 #'   requireNamespace("DiceKriging") &
 #'   requireNamespace("rgenoud")) {
@@ -69,7 +69,7 @@
 #'
 #'   surrogate = default_surrogate(instance)
 #'
-#'   acq_function = acqf("sms_ego")
+#'   acq_function = acqf("smsego")
 #'
 #'   acq_optimizer = acqo(
 #'     optimizer = opt("random_search"),
@@ -146,7 +146,7 @@ bayesopt_smsego = function(

 class(bayesopt_smsego) = "loop_function"
 attr(bayesopt_smsego, "id") = "bayesopt_smsego"
-attr(bayesopt_smsego, "label") = "SmsEGO"
+attr(bayesopt_smsego, "label") = "SMS-EGO"
 attr(bayesopt_smsego, "instance") = "multi-crit"
 attr(bayesopt_smsego, "man") = "mlr3mbo::mlr_loop_functions_smsego"
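
The TerminatorEvals requirement exists because AcqFunctionSmsEgo recomputes its epsilon from the remaining evaluation budget in every iteration (compare the progress field set manually in the R/AcqFunctionSmsEgo.R example above). A condensed sketch of the moving parts, with the objective construction elided; `objective` is assumed to be a two-criteria bbotk::Objective:

library(bbotk)
library(mlr3mbo)

instance = OptimInstanceMultiCrit$new(
  objective = objective,                   # assumed: 2-criteria bbotk::Objective
  terminator = trm("evals", n_evals = 20)  # must be a TerminatorEvals, see notes
)

bayesopt_smsego(
  instance,
  surrogate = default_surrogate(instance),  # one learner per objective
  acq_function = acqf("smsego"),
  acq_optimizer = acqo(
    optimizer = opt("random_search"),
    terminator = trm("evals", n_evals = 1000)
  )
)
# instance$archive now holds all evaluations, from which the Pareto front
# can be extracted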

R/loop_function.R

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@
 #' @name loop_function
 #'
 #' @description
-#' Loop functions determine the behavior of the BO algorithm on a global level.
+#' Loop functions determine the behavior of the Bayesian Optimization algorithm on a global level.
 #' For an overview of readily available loop functions, see `as.data.table(mlr_loop_functions)`.
 #'
 #' In general, a loop function is simply a decorated member of the S3 class `loop_function`.
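
The decoration the description refers to is exactly what the bottom of R/bayesopt_smsego.R above shows: a plain R function tagged with the S3 class and a handful of attributes. A minimal sketch for a custom flavor (my_bayesopt and its body are hypothetical; a real implementation would follow one of the bayesopt_* templates):

my_bayesopt = function(instance, surrogate, acq_function, acq_optimizer, init_design_size = NULL) {
  # evaluate an initial design, then loop: update the surrogate and the
  # acquisition function, optimize the acquisition function, evaluate the
  # candidate, until the terminator of the instance fires
  invisible(instance)
}
class(my_bayesopt) = "loop_function"
attr(my_bayesopt, "id") = "my_bayesopt"
attr(my_bayesopt, "label") = "My Custom Flavor"
attr(my_bayesopt, "instance") = "single-crit"
attr(my_bayesopt, "man") = ""  # no man page for an ad-hoc flavor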

R/mbo_defaults.R

Lines changed: 6 additions & 3 deletions
@@ -16,7 +16,7 @@ NULL
 #' @title Default Loop Function
 #'
 #' @description
-#' Chooses a default [loop_function], i.e. the MBO flavor to be used for optimization.
+#' Chooses a default [loop_function], i.e. the Bayesian Optimization flavor to be used for optimization.
 #' For single-objective optimization, defaults to [bayesopt_ego].
 #' For multi-objective optimization, defaults to [bayesopt_smsego].
 #'
@@ -155,6 +155,9 @@ default_surrogate = function(instance, learner = NULL, n_learner = NULL) {
 #'
 #' @description
 #' Chooses a default acquisition function, i.e. the criterion used to propose future points.
+#' For single-objective optimization, defaults to [mlr_acqfunctions_ei].
+#' For multi-objective optimization, defaults to [mlr_acqfunctions_smsego].
+#'
 #' @param instance ([bbotk::OptimInstance]).
 #' @return [AcqFunction]
 #' @family mbo_defaults
@@ -172,14 +175,14 @@ default_acqfun = function(instance) {
 #'
 #' @description
 #' Chooses a default acquisition function optimizer.
-#' Defaults to wrapping [bbotk::OptimizerRandomSearch] allowing 10000 function evaluations.
+#' Defaults to wrapping [bbotk::OptimizerRandomSearch] allowing 10000 function evaluations (with a batch size of 1000) via a [bbotk::TerminatorEvals].
 #'
 #' @param acq_function ([AcqFunction]).
 #' @return [AcqOptimizer]
 #' @family mbo_defaults
 #' @export
 default_acqopt = function(acq_function) {
   assert_r6(acq_function, classes = "AcqFunction")
-  AcqOptimizer$new(optimizer = opt("random_search", batch_size = 10000L), terminator = trm("evals", n_evals = 10000L)) # FIXME: what do we use
+  AcqOptimizer$new(optimizer = opt("random_search", batch_size = 1000L), terminator = trm("evals", n_evals = 10000L)) # FIXME: what do we use
 }
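
The batch_size change does not touch the total budget: the wrapped random search still stops after 10000 evaluations via the TerminatorEvals, it just proposes 1000 points per batch instead of all 10000 at once, which bounds how many candidate points are scored simultaneously. A sketch of how the defaults compose, assuming `instance` is an existing bbotk::OptimInstance:

library(mlr3mbo)

acq_function = default_acqfun(instance)       # EI (single-crit) or SMS-EGO (multi-crit)
acq_optimizer = default_acqopt(acq_function)
# equivalent to, after this commit:
# AcqOptimizer$new(
#   optimizer = opt("random_search", batch_size = 1000L),
#   terminator = trm("evals", n_evals = 10000L)
# )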
