Commit ba8297f

docs: remove vignette, point to book, update README (#158)
* docs: remove vignette, point to book, update README
* fix: drop timestamp and batch_nr from candidate points
Parent: 1b2f6fd

File tree

7 files changed (+114, -989 lines)


DESCRIPTION

Lines changed: 0 additions & 3 deletions
@@ -55,14 +55,12 @@ Suggests:
     DiceKriging,
     emoa,
     fastGHQuad,
-    knitr,
     lhs,
     mlr3learners (>= 0.5.4),
     mlr3pipelines (>= 0.4.2),
     nloptr,
     ranger,
     rgenoud,
-    rmarkdown,
     rpart,
     stringi,
     testthat (>= 3.0.0)
@@ -111,4 +109,3 @@ Collate:
     'mbo_defaults.R'
     'sugar.R'
     'zzz.R'
-VignetteBuilder: knitr

R/AcqOptimizer.R

Lines changed: 1 addition & 1 deletion
@@ -214,7 +214,7 @@ AcqOptimizer = R6Class("AcqOptimizer",
       # }
       # setcolorder(xdt, c(instance$archive$cols_x, "x_domain", instance$objective$id))
       #}
-      xdt
+      xdt[, -c("timestamp", "batch_nr")] # drop timestamp and batch_nr information from the candidates
     }
   ),
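
For context, the added line uses data.table's negative column selection. Below is a minimal, self-contained sketch of that idiom with a made-up candidate table; only `timestamp` and `batch_nr` correspond to real archive columns, the other names are illustrative:

``` r
library(data.table)

# hypothetical candidate table, roughly shaped like what an acquisition
# optimizer might return (x1, x2, acq_ei are illustrative column names)
xdt = data.table(
  x1 = c(1.2, 3.4),
  x2 = c(0.5, 7.8),
  acq_ei = c(0.02, 0.11),
  timestamp = Sys.time(),
  batch_nr = 1L
)

# -c(...) with a character vector selects every column except the named
# ones, so timestamp and batch_nr are dropped from the returned candidates
xdt[, -c("timestamp", "batch_nr")]
```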

README.Rmd

Lines changed: 48 additions & 39 deletions
@@ -22,7 +22,7 @@ A new R6 and much more modular implementation for single- and multi-objective Ba
 
 ## Get Started
 
-An overview and gentle introduction is given in [this vignette](https://mlr3mbo.mlr-org.com/dev/articles/mlr3mbo.html).
+The best entry point to get familiar with `mlr3mbo` is provided via the [Bayesian Optimization](https://mlr3book.mlr-org.com/chapters/chapter5/advanced_tuning_methods_and_black_box_optimization.html#sec-bayesian-optimization) chapter in the `mlr3book`.
 
 ## Design
 
@@ -32,7 +32,7 @@ An overview and gentle introduction is given in [this vignette](https://mlr3mbo.
 * `AcqFunction`: Acquisition Function
 * `AcqOptimizer`: Acquisition Function Optimizer
 
-Based on these, Bayesian Optimization loops can be written, see, e.g., `bayesopt_ego` for sequential single-objective BO.
+Based on these, Bayesian Optimization (BO) loops can be written, see, e.g., `bayesopt_ego` for sequential single-objective BO.
 
 `mlr3mbo` also provides an `OptimizerMbo` class behaving like any other `Optimizer` from the [bbotk](https://cran.r-project.org/package=bbotk) package as well as
 a `TunerMbo` class behaving like any other `Tuner` from the [mlr3tuning](https://cran.r-project.org/package=mlr3tuning) package.
@@ -42,66 +42,75 @@ See `?mbo_defaults` for more details.
 
 ## Simple Optimization Example
 
-Minimize `f(x) = x^2` via sequential single-objective BO using a GP as surrogate and EI optimized via random search as acquisition function:
+Minimize the two-dimensional Branin function via sequential BO using a GP as surrogate and EI as acquisition function optimized via a local search:
 
 ```{r, message = FALSE}
 library(bbotk)
 library(mlr3mbo)
 library(mlr3learners)
 set.seed(1)
 
-obfun = ObjectiveRFun$new(
-  fun = function(xs) list(y1 = xs$x ^ 2),
-  domain = ps(x = p_dbl(lower = -10, upper = 10)),
-  codomain = ps(y1 = p_dbl(tags = "minimize")))
+fun = function(xdt) {
+  y = branin(xdt[["x1"]], xdt[["x2"]])
+  data.table(y = y)
+}
+
+domain = ps(
+  x1 = p_dbl(-5, 10),
+  x2 = p_dbl(0, 15)
+)
+
+codomain = ps(
+  y = p_dbl(tags = "minimize")
+)
+
+objective = ObjectiveRFunDt$new(
+  fun = fun,
+  domain = domain,
+  codomain = codomain
+)
 
 instance = oi(
-  objective = obfun,
-  terminator = trm("evals", n_evals = 10))
+  objective = objective,
+  terminator = trm("evals", n_evals = 25)
+)
 
 surrogate = srlrn(lrn("regr.km", control = list(trace = FALSE)))
-acqfun = acqf("ei")
-acqopt = acqo(opt("random_search", batch_size = 100),
-  terminator = trm("evals", n_evals = 100))
+
+acq_function = acqf("ei")
+
+acq_optimizer = acqo(
+  opt("local_search", n_initial_points = 10, initial_random_sample_size = 1000, neighbors_per_point = 10),
+  terminator = trm("evals", n_evals = 3000)
+)
 
 optimizer = opt("mbo",
   loop_function = bayesopt_ego,
   surrogate = surrogate,
-  acq_function = acqfun,
-  acq_optimizer = acqopt)
+  acq_function = acq_function,
+  acq_optimizer = acq_optimizer
+)
 
 optimizer$optimize(instance)
 ```
 
-Note that you can also use `bb_optimize` as a shorthand:
+We can quickly visualize the contours of the objective function (on log scale) as well as the sampling behavior of our BO run (lighter blue colours indicating points that were evaluated in later stages of the optimization process; the first batch is given by the initial design).
 
-```{r, message = FALSE}
-library(bbotk)
-library(mlr3mbo)
-library(mlr3learners)
-set.seed(1)
-
-fun = function(xs) list(y1 = xs$x ^ 2)
-
-surrogate = srlrn(lrn("regr.km", control = list(trace = FALSE)))
-acqfun = acqf("ei")
-acqopt = acqo(opt("random_search", batch_size = 100),
-  terminator = trm("evals", n_evals = 100))
+```{r, eval = FALSE}
+library(ggplot2)
+grid = generate_design_grid(instance$search_space, resolution = 1000L)$data
+grid[, y := branin(x1 = x1, x2 = x2)]
 
-optimizer = opt("mbo",
-  loop_function = bayesopt_ego,
-  surrogate = surrogate,
-  acq_function = acqfun,
-  acq_optimizer = acqopt)
-
-result = bb_optimize(
-  fun,
-  method = optimizer,
-  lower = c(x = -10),
-  upper = c(x = 10),
-  max_evals = 10)
+ggplot(aes(x = x1, y = x2, z = log(y)), data = grid) +
+  geom_contour(colour = "black") +
+  geom_point(aes(x = x1, y = x2, colour = batch_nr), data = instance$archive$data) +
+  labs(x = expression(x[1]), y = expression(x[2])) +
+  theme_minimal() +
+  theme(legend.position = "bottom")
 ```
 
+Note that you can also use `bb_optimize` as a shorthand instead of constructing an optimization instance.
+
 ## Simple Tuning Example
 
 ```{r, message = FALSE}
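
For reference, `branin` above is the Branin test function shipped with bbotk. A sketch of it written out by hand in its common parameterization follows; the constants are the textbook ones, not copied from the package source, so compare with bbotk's own `branin()` before relying on them:

``` r
# the Branin function in its common parameterization (an assumption here)
branin_manual = function(x1, x2) {
  a = 1
  b = 5.1 / (4 * pi^2)
  cc = 5 / pi
  r = 6
  s = 10
  t = 1 / (8 * pi)
  a * (x2 - b * x1^2 + cc * x1 - r)^2 + s * (1 - t) * cos(x1) + s
}

branin_manual(pi, 2.275) # one of three global minima, value approx. 0.3979
```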

README.md

Lines changed: 65 additions & 49 deletions
@@ -17,20 +17,22 @@ multi-objective Bayesian Optimization.
 
 ## Get Started
 
-An overview and gentle introduction is given in [this
-vignette](https://mlr3mbo.mlr-org.com/dev/articles/mlr3mbo.html).
+The best entry point to get familiar with `mlr3mbo` is provided via the
+[Bayesian
+Optimization](https://mlr3book.mlr-org.com/chapters/chapter5/advanced_tuning_methods_and_black_box_optimization.html#sec-bayesian-optimization)
+chapter in the `mlr3book`.
 
 ## Design
 
 `mlr3mbo` is built modular relying on the following
 [R6](https://cran.r-project.org/package=R6) classes:
 
-  - `Surrogate`: Surrogate Model
-  - `AcqFunction`: Acquisition Function
-  - `AcqOptimizer`: Acquisition Function Optimizer
+- `Surrogate`: Surrogate Model
+- `AcqFunction`: Acquisition Function
+- `AcqOptimizer`: Acquisition Function Optimizer
 
-Based on these, Bayesian Optimization loops can be written, see, e.g.,
-`bayesopt_ego` for sequential single-objective BO.
+Based on these, Bayesian Optimization (BO) loops can be written, see,
+e.g., `bayesopt_ego` for sequential single-objective BO.
 
 `mlr3mbo` also provides an `OptimizerMbo` class behaving like any other
 `Optimizer` from the [bbotk](https://cran.r-project.org/package=bbotk)
@@ -44,71 +46,85 @@ more details.
 
 ## Simple Optimization Example
 
-Minimize `f(x) = x^2` via sequential single-objective BO using a GP as
-surrogate and EI optimized via random search as acquisition function:
+Minimize the two-dimensional Branin function via sequential BO using a
+GP as surrogate and EI as acquisition function optimized via a local
+search:
 
 ``` r
 library(bbotk)
 library(mlr3mbo)
 library(mlr3learners)
 set.seed(1)
 
-obfun = ObjectiveRFun$new(
-  fun = function(xs) list(y1 = xs$x ^ 2),
-  domain = ps(x = p_dbl(lower = -10, upper = 10)),
-  codomain = ps(y1 = p_dbl(tags = "minimize")))
+fun = function(xdt) {
+  y = branin(xdt[["x1"]], xdt[["x2"]])
+  data.table(y = y)
+}
+
+domain = ps(
+  x1 = p_dbl(-5, 10),
+  x2 = p_dbl(0, 15)
+)
+
+codomain = ps(
+  y = p_dbl(tags = "minimize")
+)
+
+objective = ObjectiveRFunDt$new(
+  fun = fun,
+  domain = domain,
+  codomain = codomain
+)
 
 instance = oi(
-  objective = obfun,
-  terminator = trm("evals", n_evals = 10))
+  objective = objective,
+  terminator = trm("evals", n_evals = 25)
+)
 
 surrogate = srlrn(lrn("regr.km", control = list(trace = FALSE)))
-acqfun = acqf("ei")
-acqopt = acqo(opt("random_search", batch_size = 100),
-  terminator = trm("evals", n_evals = 100))
+
+acq_function = acqf("ei")
+
+acq_optimizer = acqo(
+  opt("local_search", n_initial_points = 10, initial_random_sample_size = 1000, neighbors_per_point = 10),
+  terminator = trm("evals", n_evals = 3000)
+)
 
 optimizer = opt("mbo",
   loop_function = bayesopt_ego,
   surrogate = surrogate,
-  acq_function = acqfun,
-  acq_optimizer = acqopt)
+  acq_function = acq_function,
+  acq_optimizer = acq_optimizer
+)
 
 optimizer$optimize(instance)
 ```
 
-    ##            x  x_domain          y1
-    ##        <num>    <list>       <num>
-    ## 1: 0.03897209 <list[1]> 0.001518824
+    ##          x1       x2  x_domain         y
+    ##       <num>    <num>    <list>     <num>
+    ## 1: 3.090821 2.299709 <list[2]> 0.4104925
 
-Note that you can also use `bb_optimize` as a shorthand:
+We can quickly visualize the contours of the objective function (on log
+scale) as well as the sampling behavior of our BO run (lighter blue
+colours indicating points that were evaluated in later stages of the
+optimization process; the first batch is given by the initial design).
 
 ``` r
-library(bbotk)
-library(mlr3mbo)
-library(mlr3learners)
-set.seed(1)
-
-fun = function(xs) list(y1 = xs$x ^ 2)
-
-surrogate = srlrn(lrn("regr.km", control = list(trace = FALSE)))
-acqfun = acqf("ei")
-acqopt = acqo(opt("random_search", batch_size = 100),
-  terminator = trm("evals", n_evals = 100))
-
-optimizer = opt("mbo",
-  loop_function = bayesopt_ego,
-  surrogate = surrogate,
-  acq_function = acqfun,
-  acq_optimizer = acqopt)
-
-result = bb_optimize(
-  fun,
-  method = optimizer,
-  lower = c(x = -10),
-  upper = c(x = 10),
-  max_evals = 10)
+library(ggplot2)
+grid = generate_design_grid(instance$search_space, resolution = 1000L)$data
+grid[, y := branin(x1 = x1, x2 = x2)]
+
+ggplot(aes(x = x1, y = x2, z = log(y)), data = grid) +
+  geom_contour(colour = "black") +
+  geom_point(aes(x = x1, y = x2, colour = batch_nr), data = instance$archive$data) +
+  labs(x = expression(x[1]), y = expression(x[2])) +
+  theme_minimal() +
+  theme(legend.position = "bottom")
 ```
 
+Note that you can also use `bb_optimize` as a shorthand instead of
+constructing an optimization instance.
+
 ## Simple Tuning Example
 
 ``` r
@@ -135,4 +151,4 @@ instance$result
 
     ##           cp learner_param_vals  x_domain classif.ce
    ##        <num>             <list>    <list>      <num>
-    ## 1: -4.381681          <list[2]> <list[1]>  0.2070312
+    ## 1: -6.188733          <list[2]> <list[1]>  0.2382812
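
For readers looking for that shorthand, the removed example above gives its shape. A minimal sketch along the same lines, adapted to the Branin domain; the `bb_optimize` argument names are taken from the old README and have not been re-verified against the current bbotk API:

``` r
library(bbotk)
library(mlr3mbo)
library(mlr3learners)
set.seed(1)

# list-based objective as in the removed example, adapted to Branin
fun = function(xs) list(y = branin(xs$x1, xs$x2))

optimizer = opt("mbo",
  loop_function = bayesopt_ego,
  surrogate = srlrn(lrn("regr.km", control = list(trace = FALSE))),
  acq_function = acqf("ei"),
  acq_optimizer = acqo(opt("random_search", batch_size = 100),
    terminator = trm("evals", n_evals = 100))
)

# bb_optimize builds the optimization instance internally from the
# function plus box constraints, instead of an explicit oi() call
result = bb_optimize(
  fun,
  method = optimizer,
  lower = c(x1 = -5, x2 = 0),
  upper = c(x1 = 10, x2 = 15),
  max_evals = 25
)
```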

vignettes/.gitignore

Lines changed: 0 additions & 2 deletions
This file was deleted.
