Unverified commit 7dcbb8cd, authored by James Lamb, committed by GitHub
Browse files

[R-package] limit number of threads used in tests and examples (fixes #5987) (#5988)

parent 7d4d8975
......@@ -24,7 +24,7 @@ CB_ENV <- R6::R6Class(
)
# Format the evaluation metric string
format.eval.string <- function(eval_res, eval_err) {
.format_eval_string <- function(eval_res, eval_err) {
# Check for empty evaluation string
if (is.null(eval_res) || length(eval_res) == 0L) {
......@@ -40,7 +40,7 @@ format.eval.string <- function(eval_res, eval_err) {
}
merge.eval.string <- function(env) {
.merge_eval_string <- function(env) {
# Check length of evaluation list
if (length(env$eval_list) <= 0L) {
......@@ -63,7 +63,7 @@ merge.eval.string <- function(env) {
}
# Set error message
msg <- c(msg, format.eval.string(eval_res = env$eval_list[[j]], eval_err = eval_err))
msg <- c(msg, .format_eval_string(eval_res = env$eval_list[[j]], eval_err = eval_err))
}
......@@ -86,11 +86,11 @@ cb_print_evaluation <- function(period) {
if ((i - 1L) %% period == 0L || is.element(i, c(env$begin_iteration, env$end_iteration))) {
# Merge evaluation string
msg <- merge.eval.string(env = env)
msg <- .merge_eval_string(env = env)
# Check if message is existing
if (nchar(msg) > 0L) {
print(merge.eval.string(env = env))
print(.merge_eval_string(env = env))
}
}
......@@ -270,7 +270,7 @@ cb_early_stop <- function(stopping_rounds, first_metric_only, verbose) {
# Prepare to print if verbose
if (verbose) {
best_msg[[i]] <<- as.character(merge.eval.string(env = env))
best_msg[[i]] <<- as.character(.merge_eval_string(env = env))
}
} else {
......
......@@ -928,6 +928,7 @@ NULL
#' , metric = "l2"
#' , min_data = 1L
#' , learning_rate = 1.0
#' , num_threads = 2L
#' )
#' valids <- list(test = dtest)
#' model <- lgb.train(
......@@ -1086,7 +1087,10 @@ predict.lgb.Booster <- function(object,
#' X <- as.matrix(mtcars[, -1L])
#' y <- mtcars[, 1L]
#' dtrain <- lgb.Dataset(X, label = y, params = list(max_bin = 5L))
#' params <- list(min_data_in_leaf = 2L)
#' params <- list(
#' min_data_in_leaf = 2L
#' , num_threads = 2L
#' )
#' model <- lgb.train(
#' params = params
#' , data = dtrain
......@@ -1231,6 +1235,7 @@ summary.lgb.Booster <- function(object, ...) {
#' , metric = "l2"
#' , min_data = 1L
#' , learning_rate = 1.0
#' , num_threads = 2L
#' )
#' valids <- list(test = dtest)
#' model <- lgb.train(
......@@ -1296,6 +1301,7 @@ lgb.load <- function(filename = NULL, model_str = NULL) {
#' , metric = "l2"
#' , min_data = 1L
#' , learning_rate = 1.0
#' , num_threads = 2L
#' )
#' valids <- list(test = dtest)
#' model <- lgb.train(
......@@ -1351,6 +1357,7 @@ lgb.save <- function(booster, filename, num_iteration = NULL) {
#' , metric = "l2"
#' , min_data = 1L
#' , learning_rate = 1.0
#' , num_threads = 2L
#' )
#' valids <- list(test = dtest)
#' model <- lgb.train(
......@@ -1401,6 +1408,7 @@ lgb.dump <- function(booster, num_iteration = NULL) {
#' , metric = "l2"
#' , min_data = 1L
#' , learning_rate = 1.0
#' , num_threads = 2L
#' )
#' valids <- list(test = dtest)
#' model <- lgb.train(
......
......@@ -59,6 +59,7 @@ CVBooster <- R6::R6Class(
#' , metric = "l2"
#' , min_data = 1L
#' , learning_rate = 1.0
#' , num_threads = 2L
#' )
#' model <- lgb.cv(
#' params = params
......
......@@ -24,6 +24,7 @@
#' , max_depth = -1L
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' , num_threads = 2L
#' )
#' model <- lgb.train(
#' params = params
......
......@@ -35,6 +35,7 @@
#' , max_depth = -1L
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' , num_threads = 2L
#' )
#' model <- lgb.train(
#' params = params
......
......@@ -40,6 +40,7 @@
#' , max_depth = -1L
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' , num_threads = 2L
#' )
#' model <- lgb.train(params, dtrain, 10L)
#'
......
......@@ -28,6 +28,7 @@
#' , learning_rate = 0.1
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' , num_threads = 2L
#' )
#'
#' model <- lgb.train(
......
......@@ -39,6 +39,7 @@
#' , max_depth = -1L
#' , min_data_in_leaf = 1L
#' , min_sum_hessian_in_leaf = 1.0
#' , num_threads = 2L
#' )
#' model <- lgb.train(
#' params = params
......
......@@ -23,7 +23,9 @@
#' , agaricus.train$label
#' , params = list(objective = "binary")
#' , nrounds = 5L
#' , verbose = 0)
#' , verbose = 0
#' , num_threads = 2L
#' )
#' fname <- tempfile(fileext = ".rds")
#' saveRDS(model, fname)
#'
......
......@@ -30,6 +30,7 @@
#' , metric = "l2"
#' , min_data = 1L
#' , learning_rate = 1.0
#' , num_threads = 2L
#' )
#' valids <- list(test = dtest)
#' model <- lgb.train(
......
......@@ -23,6 +23,7 @@
#' , metric = "l2"
#' , min_data = 1L
#' , learning_rate = 1.0
#' , num_threads = 2L
#' )
#' valids <- list(test = dtest)
#' model <- lgb.train(
......
......@@ -33,6 +33,7 @@
#' , metric = "l2"
#' , min_data = 1L
#' , learning_rate = 1.0
#' , num_threads = 2L
#' )
#' valids <- list(test = dtest)
#' model <- lgb.train(
......
......@@ -119,7 +119,10 @@ data(mtcars)
X <- as.matrix(mtcars[, -1L])
y <- mtcars[, 1L]
dtrain <- lgb.Dataset(X, label = y, params = list(max_bin = 5L))
params <- list(min_data_in_leaf = 2L)
params <- list(
min_data_in_leaf = 2L
, num_threads = 2L
)
model <- lgb.train(
params = params
, data = dtrain
......
......@@ -160,6 +160,7 @@ params <- list(
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, num_threads = 2L
)
model <- lgb.cv(
params = params
......
......@@ -31,6 +31,7 @@ params <- list(
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, num_threads = 2L
)
valids <- list(test = dtest)
model <- lgb.train(
......
......@@ -45,6 +45,7 @@ params <- list(
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, num_threads = 2L
)
valids <- list(test = dtest)
model <- lgb.train(
......
......@@ -35,6 +35,7 @@ params <- list(
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, num_threads = 2L
)
model <- lgb.train(
params = params
......
......@@ -48,6 +48,7 @@ params <- list(
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, num_threads = 2L
)
model <- lgb.train(
params = params
......
......@@ -31,6 +31,7 @@ params <- list(
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, num_threads = 2L
)
valids <- list(test = dtest)
model <- lgb.train(
......
......@@ -51,6 +51,7 @@ params <- list(
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, num_threads = 2L
)
model <- lgb.train(params, dtrain, 10L)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.