# nolint start
library(mlexperiments)
library(mllrnrs)

See https://github.com/kapsner/mllrnrs/blob/main/R/learner_ranger.R for implementation details.
library(mlbench)
data("DNA")
dataset <- DNA |>
  data.table::as.data.table() |>
  na.omit()
feature_cols <- colnames(dataset)[160:180]
target_col <- "Class"

seed <- 123
if (isTRUE(as.logical(Sys.getenv("_R_CHECK_LIMIT_CORES_")))) {
  # on cran
  ncores <- 2L
} else {
  ncores <- ifelse(
    test = parallel::detectCores() > 4,
    yes = 4L,
    no = ifelse(
      test = parallel::detectCores() < 2L,
      yes = 1L,
      no = parallel::detectCores()
    )
  )
}
options("mlexperiments.bayesian.max_init" = 10L)

data_split <- splitTools::partition(
  y = dataset[, get(target_col)],
  p = c(train = 0.7, test = 0.3),
  type = "stratified",
  seed = seed
)
train_x <- model.matrix(
  ~ -1 + .,
  dataset[data_split$train, .SD, .SDcols = feature_cols]
)
train_y <- dataset[data_split$train, get(target_col)]
test_x <- model.matrix(
  ~ -1 + .,
  dataset[data_split$test, .SD, .SDcols = feature_cols]
)
test_y <- dataset[data_split$test, get(target_col)]

fold_list <- splitTools::create_folds(
  y = train_y,
  k = 3,
  type = "stratified",
  seed = seed
)

# required learner arguments, not optimized
learner_args <- list(probability = TRUE, classification = TRUE)
# set arguments for predict function and performance metric,
# required for mlexperiments::MLCrossValidation and
# mlexperiments::MLNestedCV
predict_args <- list(reshape = TRUE)
performance_metric <- metric("ACC")
performance_metric_args <- NULL
return_models <- FALSE
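The ACC metric selected above is plain classification accuracy, i.e. the share of correctly predicted observations. A minimal sketch of that computation is shown below; the helper acc_sketch is introduced here only for illustration and is not part of mlexperiments.

# illustrative sketch of classification accuracy; assumes `predicted` and
# `ground_truth` are vectors/factors of equal length (not part of mlexperiments)
acc_sketch <- function(predicted, ground_truth) {
  stopifnot(length(predicted) == length(ground_truth))
  mean(as.character(predicted) == as.character(ground_truth))
}
acc_sketch(c("ei", "n", "ie"), c("ei", "n", "n"))
#> [1] 0.6666667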
# required for grid search and initialization of bayesian optimization
parameter_grid <- expand.grid(
  num.trees = seq(500, 1000, 500),
  mtry = seq(2, 6, 2),
  min.node.size = seq(1, 9, 4),
  max.depth = seq(1, 9, 4),
  sample.fraction = seq(0.5, 0.8, 0.3)
)
# reduce to a maximum of 10 rows
if (nrow(parameter_grid) > 10) {
  set.seed(123)
  sample_rows <- sample(seq_len(nrow(parameter_grid)), 10, FALSE)
  parameter_grid <- kdry::mlh_subset(parameter_grid, sample_rows)
}
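kdry::mlh_subset() is used here simply to keep the sampled rows of the parameter grid. For a plain data.frame such as the one created by expand.grid(), a roughly equivalent base-R sketch would be the following; the helper name subset_rows is made up for illustration.

# base-R sketch of the row subsetting performed by kdry::mlh_subset() above
subset_rows <- function(grid, rows) {
  grid[rows, , drop = FALSE]
}
# e.g. subset_rows(parameter_grid, sample_rows) would yield the reduced grid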
# required for bayesian optimization
parameter_bounds <- list(
  num.trees = c(100L, 1000L),
  mtry = c(2L, 9L),
  min.node.size = c(1L, 20L),
  max.depth = c(1L, 40L),
  sample.fraction = c(0.3, 1.)
)
optim_args <- list(
  iters.n = ncores,
  kappa = 3.5,
  acq = "ucb"
)

tuner <- mlexperiments::MLTuneParameters$new(
  learner = mllrnrs::LearnerRanger$new(),
  strategy = "grid",
  ncores = ncores,
  seed = seed
)
tuner$parameter_grid <- parameter_grid
tuner$learner_args <- learner_args
tuner$split_type <- "stratified"
tuner$set_data(
  x = train_x,
  y = train_y
)
tuner_results_grid <- tuner$execute(k = 3)
#>
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [==================>-----------------------------------------------------------------------------] 2/10 ( 20%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [============================>-------------------------------------------------------------------] 3/10 ( 30%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=====================================>----------------------------------------------------------] 4/10 ( 40%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [===============================================>------------------------------------------------] 5/10 ( 50%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=========================================================>--------------------------------------] 6/10 ( 60%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [==================================================================>-----------------------------] 7/10 ( 70%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [============================================================================>-------------------] 8/10 ( 80%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=====================================================================================>----------] 9/10 ( 90%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [===============================================================================================] 10/10 (100%)
#> Classification: using 'classification error rate' as optimization metric.
head(tuner_results_grid)
#> setting_id metric_optim_mean num.trees mtry min.node.size max.depth sample.fraction probability classification
#> 1: 1 0.4786887 500 2 9 5 0.5 TRUE TRUE
#> 2: 2 0.4791386 500 2 5 5 0.8 TRUE TRUE
#> 3: 3 0.4419159 500 4 9 9 0.5 TRUE TRUE
#> 4: 4 0.4809325 1000 2 9 1 0.5 TRUE TRUE
#> 5: 5 0.4809325 500 2 9 1 0.8 TRUE TRUE
#> 6: 6 0.4329589 1000 6 1 9 0.5 TRUE TRUE

tuner <- mlexperiments::MLTuneParameters$new(
  learner = mllrnrs::LearnerRanger$new(),
  strategy = "bayesian",
  ncores = ncores,
  seed = seed
)
tuner$parameter_grid <- parameter_grid
tuner$parameter_bounds <- parameter_bounds
tuner$learner_args <- learner_args
tuner$optim_args <- optim_args
tuner$split_type <- "stratified"
tuner$set_data(
  x = train_x,
  y = train_y
)
tuner_results_bayesian <- tuner$execute(k = 3)
#>
#> Registering parallel backend using 4 cores.
head(tuner_results_bayesian)
#> Epoch setting_id num.trees mtry min.node.size max.depth sample.fraction gpUtility acqOptimum inBounds Elapsed Score
#> 1: 0 1 500 2 9 5 0.5 NA FALSE TRUE 1.597 -0.4791386
#> 2: 0 2 500 2 5 5 0.8 NA FALSE TRUE 1.641 -0.4786887
#> 3: 0 3 500 4 9 9 0.5 NA FALSE TRUE 2.161 -0.4392295
#> 4: 0 4 1000 2 9 1 0.5 NA FALSE TRUE 1.635 -0.4809325
#> 5: 0 5 500 2 9 1 0.8 NA FALSE TRUE 0.416 -0.4809325
#> 6: 0 6 1000 6 1 9 0.5 NA FALSE TRUE 3.373 -0.4378800
#> metric_optim_mean errorMessage probability classification
#> 1: 0.4791386 NA TRUE TRUE
#> 2: 0.4786887 NA TRUE TRUE
#> 3: 0.4392295 NA TRUE TRUE
#> 4: 0.4809325 NA TRUE TRUE
#> 5: 0.4809325 NA TRUE TRUE
#> 6: 0.4378800 NA TRUE TRUE

validator <- mlexperiments::MLCrossValidation$new(
  learner = mllrnrs::LearnerRanger$new(),
  fold_list = fold_list,
  ncores = ncores,
  seed = seed
)
validator$learner_args <- tuner$results$best.setting[-1]
validator$predict_args <- predict_args
validator$performance_metric <- performance_metric
validator$performance_metric_args <- performance_metric_args
validator$return_models <- return_models
validator$set_data(
  x = train_x,
  y = train_y
)
validator_results <- validator$execute()
#>
#> CV fold: Fold1
#>
#> CV fold: Fold2
#>
#> CV fold: Fold3
#> CV progress [========================================================================================================] 3/3 (100%)
#>
head(validator_results)
#> fold performance num.trees mtry min.node.size max.depth sample.fraction probability classification
#> 1: Fold1 0.5739247 500 4 9 9 0.8 TRUE TRUE
#> 2: Fold2 0.5452092 500 4 9 9 0.8 TRUE TRUE
#> 3: Fold3 0.5685484 500 4 9 9 0.8 TRUE TRUE

validator <- mlexperiments::MLNestedCV$new(
  learner = mllrnrs::LearnerRanger$new(),
  strategy = "grid",
  fold_list = fold_list,
  k_tuning = 3L,
  ncores = ncores,
  seed = seed
)
validator$parameter_grid <- parameter_grid
validator$learner_args <- learner_args
validator$split_type <- "stratified"
validator$predict_args <- predict_args
validator$performance_metric <- performance_metric
validator$performance_metric_args <- performance_metric_args
validator$return_models <- return_models
validator$set_data(
  x = train_x,
  y = train_y
)
validator_results <- validator$execute()
#>
#> CV fold: Fold1
#>
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [============================>-------------------------------------------------------------------] 3/10 ( 30%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=====================================>----------------------------------------------------------] 4/10 ( 40%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [===============================================>------------------------------------------------] 5/10 ( 50%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=========================================================>--------------------------------------] 6/10 ( 60%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [==================================================================>-----------------------------] 7/10 ( 70%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [============================================================================>-------------------] 8/10 ( 80%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=====================================================================================>----------] 9/10 ( 90%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [===============================================================================================] 10/10 (100%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> CV fold: Fold2
#> CV progress [====================================================================>-----------------------------------] 2/3 ( 67%)
#>
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [============================>-------------------------------------------------------------------] 3/10 ( 30%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=====================================>----------------------------------------------------------] 4/10 ( 40%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [===============================================>------------------------------------------------] 5/10 ( 50%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=========================================================>--------------------------------------] 6/10 ( 60%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [==================================================================>-----------------------------] 7/10 ( 70%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [============================================================================>-------------------] 8/10 ( 80%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=====================================================================================>----------] 9/10 ( 90%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [===============================================================================================] 10/10 (100%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> CV fold: Fold3
#> CV progress [========================================================================================================] 3/3 (100%)
#>
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [============================>-------------------------------------------------------------------] 3/10 ( 30%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=====================================>----------------------------------------------------------] 4/10 ( 40%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [===============================================>------------------------------------------------] 5/10 ( 50%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=========================================================>--------------------------------------] 6/10 ( 60%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [==================================================================>-----------------------------] 7/10 ( 70%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [============================================================================>-------------------] 8/10 ( 80%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [=====================================================================================>----------] 9/10 ( 90%)
#> Classification: using 'classification error rate' as optimization metric.
#>
#> Parameter settings [===============================================================================================] 10/10 (100%)
#> Classification: using 'classification error rate' as optimization metric.
head(validator_results)
#> fold performance num.trees mtry min.node.size max.depth sample.fraction probability classification
#> 1: Fold1 0.5846774 1000 6 1 9 0.5 TRUE TRUE
#> 2: Fold2 0.5452092 1000 6 1 9 0.5 TRUE TRUE
#> 3: Fold3 0.5712366 1000 6 1 9 0.5 TRUE TRUE

validator <- mlexperiments::MLNestedCV$new(
  learner = mllrnrs::LearnerRanger$new(),
  strategy = "bayesian",
  fold_list = fold_list,
  k_tuning = 3L,
  ncores = ncores,
  seed = 312
)
validator$parameter_grid <- parameter_grid
validator$learner_args <- learner_args
validator$split_type <- "stratified"
validator$parameter_bounds <- parameter_bounds
validator$optim_args <- optim_args
validator$predict_args <- predict_args
validator$performance_metric <- performance_metric
validator$performance_metric_args <- performance_metric_args
validator$return_models <- TRUE
validator$set_data(
  x = train_x,
  y = train_y
)
validator_results <- validator$execute()
#>
#> CV fold: Fold1
#>
#> Registering parallel backend using 4 cores.
#>
#> CV fold: Fold2
#> CV progress [====================================================================>-----------------------------------] 2/3 ( 67%)
#>
#> Registering parallel backend using 4 cores.
#>
#> CV fold: Fold3
#> CV progress [========================================================================================================] 3/3 (100%)
#>
#> Registering parallel backend using 4 cores.
head(validator_results)
#> fold performance num.trees mtry min.node.size max.depth sample.fraction probability classification
#> 1: Fold1 0.5793011 1000 6 1 9 0.5000000 TRUE TRUE
#> 2: Fold2 0.5411606 636 6 2 12 0.9378338 TRUE TRUE
#> 3: Fold3 0.5725806 388 6 5 14 0.7457303 TRUE TRUE

preds_ranger <- mlexperiments::predictions(
  object = validator,
  newdata = test_x
)

perf_ranger <- mlexperiments::performance(
  object = validator,
  prediction_results = preds_ranger,
  y_ground_truth = test_y
)
perf_ranger
#> model performance
#> 1: Fold1 0.5757576
#> 2: Fold2 0.5506792
#> 3: Fold3 0.5726228

Here, ranger's case.weights argument is used to rescale the case weights during training.
# define the target weights
y_weights <- ifelse(train_y == "n", 0.8, ifelse(train_y == "ei", 1.2, 1))
head(y_weights)
#> [1] 1.2 1.2 0.0 0.8 0.8 0.0

tuner_w_weights <- mlexperiments::MLTuneParameters$new(
  learner = mllrnrs::LearnerRanger$new(),
  strategy = "grid",
  ncores = ncores,
  seed = seed
)
tuner_w_weights$parameter_grid <- parameter_grid
tuner_w_weights$learner_args <- c(
  learner_args,
  list(case_weights = y_weights)
)
tuner_w_weights$split_type <- "stratified"
tuner_w_weights$set_data(
  x = train_x,
  y = train_y
)
tuner_results_grid <- tuner_w_weights$execute(k = 3)
#>
#> Parameter settings [============================>-------------------------------------------------------------------] 3/10 ( 30%)
#> Parameter settings [=====================================>----------------------------------------------------------] 4/10 ( 40%)
#> Parameter settings [===============================================>------------------------------------------------] 5/10 ( 50%)
#> Parameter settings [=========================================================>--------------------------------------] 6/10 ( 60%)
#> Parameter settings [==================================================================>-----------------------------] 7/10 ( 70%)
#> Parameter settings [============================================================================>-------------------] 8/10 ( 80%)
#> Parameter settings [=====================================================================================>----------] 9/10 ( 90%)
#> Parameter settings [===============================================================================================] 10/10 (100%)
head(tuner_results_grid)
#> setting_id metric_optim_mean num.trees mtry min.node.size max.depth sample.fraction probability classification
#> <int> <num> <num> <num> <num> <num> <num> <lgcl> <lgcl>
#> 1: 1 0.4665865 500 2 9 5 0.5 TRUE TRUE
#> 2: 2 0.4656941 500 2 5 5 0.8 TRUE TRUE
#> 3: 3 0.4486635 500 4 9 9 0.5 TRUE TRUE
#> 4: 4 0.4809325 1000 2 9 1 0.5 TRUE TRUE
#> 5: 5 0.4809325 500 2 9 1 0.8 TRUE TRUE
#> 6: 6 0.4544915 1000 6 1 9 0.5 TRUE TRUE

validator_w_weights <- mlexperiments::MLCrossValidation$new(
  learner = mllrnrs::LearnerRanger$new(),
  fold_list = fold_list,
  ncores = ncores,
  seed = seed
)
# append the optimized setting from above with the newly created weights
validator_w_weights$learner_args <- c(
  tuner_w_weights$results$best.setting[-1],
  list(case_weights = y_weights)
)
validator_w_weights$predict_args <- predict_args
validator_w_weights$performance_metric <- performance_metric
validator_w_weights$performance_metric_args <- performance_metric_args
validator_w_weights$return_models <- return_models
validator_w_weights$set_data(
  x = train_x,
  y = train_y
)
validator_results <- validator_w_weights$execute()
#>
#> CV fold: Fold1
#>
#> CV fold: Fold2
#>
#> CV fold: Fold3
#> CV progress [========================================================================================================] 3/3 (100%)
#>
head(validator_results)
#> fold performance num.trees mtry min.node.size max.depth sample.fraction probability classification
#> <char> <num> <num> <num> <num> <num> <num> <lgcl> <lgcl>
#> 1: Fold1 0.5752688 500 2 5 9 0.5 TRUE TRUE
#> 2: Fold2 0.5438596 500 2 5 9 0.5 TRUE TRUE
#> 3: Fold3 0.5564516 500 2 5 9 0.5 TRUE TRUE
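As a final step, the fold-wise accuracies of the weighted cross-validation can be aggregated into a single summary value and set against the unweighted results reported earlier (a sketch; validator_results holds the weighted cross-validation results from the last execute() call, perf_ranger the hold-out test-set performances of the unweighted nested-CV models).

# mean accuracy across the three folds of the weighted cross-validation (sketch)
mean(validator_results$performance)
# for reference: mean hold-out accuracy of the unweighted nested-CV models
mean(perf_ranger$performance)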