Skip to content

Commit 3c60e60

Browse files
Added tests using test_that
1 parent fe44bd8 commit 3c60e60

File tree

8 files changed

+401
-93
lines changed

8 files changed

+401
-93
lines changed
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
# Exhausting the parameter space of an integer-bounded input should trigger
# the early-stop path: once no unique parameter set can be found (even after
# noise is added), bayesOpt stops and returns the results gathered so far.
test_that(
  "Exhaust Parameter Space"
  , {
    skip_on_cran()
    set.seed(5)

    # Simple concave score function; maximum at x = 0.
    sf <- function(x) {
      y <- 1 - x^2
      return(y)
    }
    FUN <- function(x) {
      return(list(Score = sf(x)))
    }

    # Integer bounds give only 9 candidate values — far fewer than the
    # requested 300 iterations, so the process must halt early.
    bounds <- list(
      x = c(-4L, 4L)
    )

    Results <- bayesOpt(
      FUN = FUN
      , bounds = bounds
      , saveFile = NULL
      , initPoints = 4
      , iters.n = 300
      , iters.k = 1
      , otherHalting = list(timeLimit = Inf, minUtility = 0)
      , acq = "ucb"
      , kappa = 2.576
      , eps = 0.0
      , parallel = FALSE
      , gsPoints = 10
      , convThresh = 1e8
      , acqThresh = 1.000
      , plotProgress = TRUE
      , verbose = 1
    )

    # The stop status must carry the package's early-stop message for the
    # "could not find a unique parameter set" condition.
    expect_equal(
      Results$stopStatus
      , ParBayesianOptimization:::makeStopEarlyMessage(
        paste0(
          "Noise could not be added to find unique parameter set. "
          , "Stopping process and returning results so far."
        )
      )
    )
  }
)

tests/testthat/test-bayesOpt1D.R

Lines changed: 48 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -1,41 +1,50 @@
1-
# 1-parameter optimization: run a short bayesOpt, then extend it twice with
# addIterations and verify the score summary grows by the expected counts.
test_that(
  "1 Input, Different Specifications"
  , {
    skip_on_cran()
    set.seed(1991)

    # Concave score; maximum at x = 0.
    sf <- function(x) 100 - x^2
    FUN <- function(x) {
      return(list(Score = sf(x)))
    }
    bounds <- list(
      x = c(-2, 2)
    )

    optObj <- bayesOpt(
      FUN
      , bounds
      , initPoints = 4
      , iters.n = 2
      , verbose = 0
    )

    # 4 initial points + 2 optimization iterations = 6 scored rows.
    expect_equal(optObj$stopStatus, "OK")
    expect_equal(nrow(optObj$scoreSummary), 6)

    # Test adding Iterations
    optObj <- addIterations(
      optObj
      , iters.n = 2
      , verbose = 0
      , gsPoints = 10
    )

    # Test adding iterations with higher iters.k
    optObj <- addIterations(
      optObj
      , iters.n = 4
      , iters.k = 2
      , verbose = 0
      , gsPoints = 10
    )

    # Exercises the print method for bayesOpt objects.
    print(optObj)

    # 6 + 2 + 4 = 12 scored rows after both extensions.
    expect_equal(nrow(optObj$scoreSummary), 12)
  }
)

tests/testthat/test-bayesOpt2D.R

Lines changed: 41 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -1,42 +1,50 @@
1-
# 2-parameter optimization: run a short bayesOpt, then extend it twice with
# addIterations and verify the score summary grows by the expected counts.
testthat::test_that(
  "2 Dimension"
  , {
    skip_on_cran()
    set.seed(1991)

    # Concave surface; maximum at (x, y) = (5, -10).
    sf <- function(x, y) 1000 - (x - 5)^2 - (y + 10)^2
    FUN <- function(x, y) {
      return(list(Score = sf(x, y)))
    }
    bounds <- list(
      x = c(0, 15)
      , y = c(-20, 100)
    )

    optObj <- bayesOpt(
      FUN
      , bounds
      , initPoints = 4
      , iters.n = 2
      , verbose = 0
    )

    # 4 initial points + 2 optimization iterations = 6 scored rows.
    # expect_equal gives informative failure output vs expect_true(a == b).
    expect_equal(optObj$stopStatus, "OK")
    expect_equal(nrow(optObj$scoreSummary), 6)

    optObj <- addIterations(
      optObj
      , iters.n = 2
      , verbose = 0
      , gsPoints = 10
    )

    # Add 2 more iterations, sampled 2 at a time (iters.k = 2).
    optObj <- addIterations(
      optObj
      , iters.n = 2
      , iters.k = 2
      , verbose = 0
      , gsPoints = 10
    )

    # 6 + 2 + 2 = 10 scored rows after both extensions.
    expect_equal(nrow(optObj$scoreSummary), 10)
  }
)
Lines changed: 77 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,77 @@
1+
# NOTE(review): context() is deprecated in testthat 3e — kept for now to
# match the other test files; consider removing when migrating editions.
context('errorHandling')

# With errorHandling = "continue", errors thrown by FUN are recorded and
# the optimization keeps going, finishing with a normal "OK" status.
testthat::test_that(
  "continue"
  , {
    skip_on_cran()
    set.seed(10)
    sf <- function(x, y) 1000 - (x - 5)^2 - (y + 10)^2

    # FUN fails randomly roughly half the time to exercise error handling.
    FUN <- function(x, y) {
      if (runif(1) > 0.5) stop("You foo'd when you should have bar'd.")
      return(list(Score = sf(x, y)))
    }

    bounds <- list(
      x = c(0, 15)
      , y = c(-20, 100)
    )

    optObj <- bayesOpt(
      FUN
      , bounds
      , initPoints = 3
      , iters.n = 6
      , errorHandling = "continue"
      , verbose = 1
    )

    # Errors were tolerated, so the run should complete normally.
    expect_equal(
      optObj$stopStatus
      , "OK"
    )
  }
)
40+
41+
# With a numeric errorHandling, the run must stop early once FUN has
# errored more than the allowed number of times.
testthat::test_that(
  "Error Limit"
  , {
    skip_on_cran()
    set.seed(10)
    sf <- function(x, y) 1000 - (x - 5)^2 - (y + 10)^2

    # FUN fails randomly roughly half the time to exercise error handling.
    FUN <- function(x, y) {
      if (runif(1) > 0.5) stop("You foo'd when you should have bar'd.")
      return(list(Score = sf(x, y)))
    }

    bounds <- list(
      x = c(0, 15)
      , y = c(-20, 100)
    )

    optObj <- bayesOpt(
      FUN
      , bounds
      , initPoints = 3
      , iters.n = 8
      , errorHandling = 2  # stop after 2 errors from FUN
      , verbose = 1
    )

    # The stop status must carry the package's early-stop message for
    # exceeding the error limit.
    expect_equal(
      optObj$stopStatus
      , ParBayesianOptimization:::makeStopEarlyMessage("Errors from FUN exceeded errorHandling limit")
    )
  }
)
Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
# NOTE(review): context() is deprecated in testthat 3e — kept for now to
# match the other test files; consider removing when migrating editions.
context('Hyperparameter Tuning')

# End-to-end check: tune xgboost hyperparameters with bayesOpt on the
# bundled agaricus data and confirm the expected number of scored points.
testthat::test_that(
  "xgboost"
  , {
    skip_on_cran()
    # Guard the optional dependency so the suite degrades gracefully when
    # xgboost is unavailable.
    skip_if_not_installed("xgboost")
    library("xgboost")
    set.seed(0)

    data(agaricus.train, package = "xgboost")

    # Fixed 3-fold assignment so CV results are reproducible.
    Folds <- list(
      Fold1 = as.integer(seq(1, nrow(agaricus.train$data), by = 3))
      , Fold2 = as.integer(seq(2, nrow(agaricus.train$data), by = 3))
      , Fold3 = as.integer(seq(3, nrow(agaricus.train$data), by = 3))
    )

    # Scores one hyperparameter set: returns the best CV AUC as Score and
    # the best iteration count as extra info for the score summary.
    scoringFunction <- function(
      max_depth
      , max_leaves
      , min_child_weight
      , subsample
      , colsample_bytree
      , gamma
      , lambda
      , alpha
    ) {

      dtrain <- xgb.DMatrix(agaricus.train$data, label = agaricus.train$label)

      Pars <- list(
        booster = "gbtree"
        , eta = 0.001
        , max_depth = max_depth
        , max_leaves = max_leaves
        , min_child_weight = min_child_weight
        , subsample = subsample
        , colsample_bytree = colsample_bytree
        , gamma = gamma
        , lambda = lambda
        , alpha = alpha
        , objective = "binary:logistic"
        , eval_metric = "auc"
      )

      xgbcv <- xgb.cv(
        params = Pars
        , data = dtrain
        , nrounds = 100  # spelled out fully; was partial-matched "nround"
        , folds = Folds
        , early_stopping_rounds = 5
        , maximize = TRUE
        , verbose = 0
      )

      return(
        list(
          Score = max(xgbcv$evaluation_log$test_auc_mean)
          , nrounds = xgbcv$best_iteration
        )
      )
    }

    # Integer bounds (1L, 2L, ...) mark parameters xgboost requires to be
    # whole numbers; the rest are searched continuously.
    bounds <- list(
      max_depth = c(1L, 5L)
      , max_leaves = c(2L, 25L)
      , min_child_weight = c(0, 25)
      , subsample = c(0.25, 1)
      , colsample_bytree = c(0.1, 1)
      , gamma = c(0, 1)
      , lambda = c(0, 1)
      , alpha = c(0, 1)
    )

    optObj <- bayesOpt(
      FUN = scoringFunction
      , bounds = bounds
      , initPoints = 9
      , iters.n = 4
      , iters.k = 1
      , gsPoints = 10
    )

    # 9 initial points + 4 optimization iterations = 13 scored rows.
    expect_equal(nrow(optObj$scoreSummary), 13)
  }
)

0 commit comments

Comments
 (0)