     main = "Comparison of true and predicted random effects")
#--------------------Choosing tuning parameters using Bayesian optimization and the 'mlrMBO' R package ----------------
library(mlrMBO)
library(DiceKriging)
library(rgenoud)
source("https://raw.githubusercontent.com/fabsig/GPBoost/master/helpers/R_package_tune_pars_bayesian_optimization.R") # Load required function
# Define search space
# Note: if the best combination found below is close to the boundary for a parameter, you might want to extend the corresponding range
search_space <- list("learning_rate" = c(0.001, 10),
                     "min_data_in_leaf" = c(1, 1000),
                     "max_depth" = c(-1, -1), # -1 means no depth limit as we tune 'num_leaves'. Can also additionally tune 'max_depth', e.g., "max_depth" = c(-1, 1, 2, 3, 5, 10)
                     "num_leaves" = c(2, 2^10),
                     "lambda_l2" = c(0, 100),
                     "max_bin" = c(63, min(n, 10000)),
                     "line_search_step_length" = c(TRUE, FALSE))
metric = "mse" # Define metric
if (likelihood %in% c("bernoulli_probit", "bernoulli_logit")) {
  metric = "binary_logloss"
}
# Note: can also use metric = "test_neg_log_likelihood". For more options, see https://github.com/fabsig/GPBoost/blob/master/docs/Parameters.rst#metric-parameters
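# Illustrative sketch only (not from the original script): the sourced helper script is
# assumed to define a function 'tune.pars.bayesian.optimization' that runs cross-validated
# Bayesian optimization over 'search_space'. The object names 'dtrain' (a gpb.Dataset) and
# 'gp_model', and the exact argument names and defaults, are assumptions here; check the
# sourced helper file for its actual interface.
opt_params <- tune.pars.bayesian.optimization(search_space = search_space, n_iter = 100,
                                              data = dtrain, gp_model = gp_model,
                                              nrounds = 1000, early_stopping_rounds = 20,
                                              nfold = 4, metric = metric, cv_seed = 4,
                                              params = list(verbose = 0), # avoid trace information
                                              verbose_eval = 1)
print(opt_params) # best parameter combination and corresponding cross-validation score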
# Discrete candidate grids for the same tuning parameters (grid-search-based variant):
"max_depth" = c(-1), # -1 means no depth limit as we tune 'num_leaves'. Can also additionally tune 'max_depth', e.g., "max_depth" = c(-1, 1, 2, 3, 5, 10)
"num_leaves" = 2^(1:10),
"lambda_l2" = c(0, 1, 10, 100),
"max_bin" = c(250, 500, 1000, min(n, 10000)),
"line_search_step_length" = c(TRUE, FALSE))
other_params <- list(verbose = 0) # avoid trace information when training models
metric = "mse" # Define metric
if (likelihood %in% c("bernoulli_probit", "bernoulli_logit")) {
  metric = "binary_logloss"
}
# Note: can also use metric = "test_neg_log_likelihood". For more options, see https://github.com/fabsig/GPBoost/blob/master/docs/Parameters.rst#metric-parameters
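# Rough sketch, not the original script's code: assuming the candidate grids above are
# collected in a list named 'param_grid' (a hypothetical name here, since the assignment
# line is not part of this excerpt) and that 'dtrain' (a gpb.Dataset) and 'gp_model' exist,
# the gpboost function 'gpb.grid.search.tune.parameters' can run a (random) grid search;
# see ?gpb.grid.search.tune.parameters for its exact arguments.
opt_params <- gpb.grid.search.tune.parameters(param_grid = param_grid, data = dtrain,
                                              gp_model = gp_model, num_try_random = 100,
                                              nrounds = 1000, early_stopping_rounds = 20,
                                              nfold = 4, metric = metric,
                                              params = other_params, verbose_eval = 1,
                                              cv_seed = 4)
print(opt_params) # best parameter combination, number of boosting iterations, and score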