# There may be shorter solutions, but the following is not too hacky. We use a
# wrapper to capture each fitted model and save it to a list in the global
# environment. Alternatively, you can change that line to something more
# elaborate and save each model to your hard drive; this can be useful because
# models can become quite large.
# Grid-search tuning setup for an RBF SVM: a discrete parameter grid over
# cost (C) and kernel width (sigma), grid-search control, a simple holdout
# resampling scheme, and the base ksvm learner.
library(mlr)

ps <- makeParamSet(
  makeDiscreteParam("C", values = 2^(-2:2)),
  makeDiscreteParam("sigma", values = 2^(-2:2))
)
ctrl <- makeTuneControlGrid()
rdesc <- makeResampleDesc("Holdout")
lrn <- makeLearner("classif.ksvm")
# Wrap a learner so every model it fits can be intercepted by
# trainLearner.SaveWrapper.
#
# @param learner [Learner] the mlr learner to wrap.
# @return A Learner of subclass "SaveWrapper" that trains exactly like
#   `learner`; its S3 subclasses provide the train/predict hooks.
makeSaveWrapper <- function(learner) {
  mlr:::makeBaseWrapper(
    # BUG FIX: paste0() has no `sep` argument, so the original
    # paste0(learner$id, "save", sep = ".") swallowed "." into `...` and
    # produced ids like "classif.ksvmsave.". paste() with sep = "." yields
    # the intended "classif.ksvm.save".
    id = paste(learner$id, "save", sep = "."),
    type = learner$type,
    next.learner = learner,
    par.set = makeParamSet(),  # the wrapper adds no hyperparameters of its own
    par.vals = list(),
    learner.subclass = "SaveWrapper",
    model.subclass = "SaveModel")
}
# Train hook for the SaveWrapper learner.
#
# Fits the underlying learner on the given task/subset and, as a deliberate
# side effect, appends the fitted model to the global `stored.models` list
# so it survives the tuning run, then hands the model back to mlr.
trainLearner.SaveWrapper = function(.learner, .task, .subset, ...) {
  fitted <- train(.learner$next.learner, task = .task, subset = .subset)
  # Side effect: record the model in the global list.
  stored.models <<- append(stored.models, list(fitted))
  mlr:::makeChainModel(next.model = fitted, cl = "SaveModel")
}
# Predict hook for the SaveWrapper learner.
#
# Delegates to the next predictLearner method in the S3 chain (i.e. the
# wrapped learner's), forwarding the new data unchanged.
predictLearner.SaveWrapper = function(.learner, .model, .newdata, ...) {
  NextMethod(.newdata = .newdata)
}
# Run the tuning experiment and inspect the captured models.
stored.models <- list()  # global container filled by trainLearner.SaveWrapper
lrn.saver <- makeSaveWrapper(lrn)
res <- tuneParams(
  lrn.saver,
  task = iris.task,
  resampling = rdesc,
  par.set = ps,
  control = ctrl
)
# Examine the first model captured during tuning.
stored.models[[1]]                             # the wrapped mlr model object
stored.models[[1]]$learner.model               # the underlying fitted model
getLearnerParVals(stored.models[[1]]$learner)  # hyperparameter values used
stored.models[[1]]$subset                      # row indices used for training