-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtuningexample.r
More file actions
86 lines (57 loc) · 2.43 KB
/
tuningexample.r
File metadata and controls
86 lines (57 loc) · 2.43 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
library("mlr")
library("OpenML")
# SECURITY NOTE(review): the original hard-coded an OpenML API key in source,
# which is a credential leak once committed. Read it from the OPENML_APIKEY
# environment variable instead; the old literal is kept only as a fallback so
# existing behavior is unchanged — revoke that exposed key and set the env var.
saveOMLConfig(
  apikey = Sys.getenv("OPENML_APIKEY", "dce6d7b81d7eb26de554be95c812f0db"),
  overwrite = TRUE
)
# Download OpenML dataset 37 (presumably the diabetes dataset — TODO confirm).
dataset <- getOMLDataSet(did = 37)
# Top-level expression: auto-prints the underlying data frame for inspection.
dataset$data
# SVM classifier, configured to emit class probabilities.
lrn <- makeLearner("classif.svm", predict.type = "prob")

# SVM hyper-parameter search space: cost and gamma are searched on a log2
# scale — the trafo maps each sampled x in [-15, 15] to 2^x.
ps <- makeParamSet(
  makeNumericParam("cost", lower = -15, upper = 15, trafo = function(x) 2^x),
  makeNumericParam("gamma", lower = -15, upper = 15, trafo = function(x) 2^x)
)
# Specify the task: fetch OpenML task 37 and convert it into an mlr task
# plus its predefined resampling instance.
openml.task <- getOMLTask(task.id = 37)
obj <- convertOMLTaskToMlr(openml.task)

# Equivalent manual construction, kept for reference:
# mlr.task = makeClassifTask(data = dataset$data, target = "class")  # obj$mlr.task
# rdesc = makeResampleDesc("CV", iters = 10, stratify = TRUE)        # obj$mlr.rin

# Baseline (untuned) SVM performance under the task's own resampling,
# scored by accuracy and balanced error rate.
run <- resample(
  learner = lrn, task = obj$mlr.task, resampling = obj$mlr.rin,
  models = TRUE, measures = list(acc, ber)
)
# Random forest classifier, configured to emit class probabilities.
lrn2 <- makeLearner("classif.randomForest", predict.type = "prob")
# Random forest hyper-parameter search space (the original comment said "SVM",
# a copy-paste error): tune only the number of trees.
ps2 <- makeParamSet(
  makeIntegerParam("ntree", lower = 1L, upper = 500L)
)
# Combine both learners into a model multiplexer so that the choice of model
# (SVM vs. random forest) is itself a tunable hyper-parameter, with each
# learner's own search space attached.
learners <- makeModelMultiplexer(base.learners = list(lrn, lrn2))
paramsets <- makeModelMultiplexerParamSet(
  learners,
  classif.svm = ps,
  classif.randomForest = ps2
)
# 10-fold cross-validation, reused for both tuning and outer evaluation.
resample.desc <- makeResampleDesc("CV", iters = 10L)
# Evaluation budget: 120 configurations per strategy — deliberately small,
# just for testing. A real experiment would use something larger, e.g.:
# ctrl = makeTuneControlIrace(maxExperiments = 500L)
BUDGET <- 120

# Tuning control strategies ----
# Grid search: with 2 tuned dimensions, resolution = sqrt(BUDGET) keeps the
# total number of grid points near the budget.
ctrl.grid <- makeTuneControlGrid(resolution = round(sqrt(BUDGET)))
# Random search
ctrl.random <- makeTuneControlRandom(maxit = BUDGET)
# Iterated racing
ctrl.irace <- makeTuneControlIrace(maxExperiments = BUDGET)

# All tuning controls to compare
ctrls <- list(ctrl.grid, ctrl.random, ctrl.irace)
# Single plain tuning run, kept for reference:
# res = tuneParams(lrn, iris.task, rdesc, par.set = ps, control = ctrl)
# print(res)

# Run nested resampling once per tuning strategy: the tune wrapper performs
# the inner hyper-parameter search over the multiplexed learner, while the
# outer 10-fold CV estimates generalization accuracy of the tuned model.
openml.classif.task <- getOMLTask(task.id = 37)
mlr.classif.task <- convertOMLTaskToMlr(openml.classif.task)
aux <- lapply(ctrls, function(ct) {
  lrns <- makeTuneWrapper(
    learner = learners, resampling = resample.desc,
    par.set = paramsets, control = ct, show.info = FALSE
  )
  resample(
    learner = lrns, task = mlr.classif.task$mlr.task,
    extract = getTuneResult, resampling = resample.desc,
    models = TRUE, show.info = FALSE, measures = list(acc)
  )
})
# BUG FIX: the original called return(aux) at the top level of the script,
# which errors in R ("no function to return from"). Evaluating the object
# at top level prints the list of results instead.
aux
#git remote add origin git@github.com:hildafab/cashinr.git
#git push -u origin master