Unverified Commit 7dcbb8cd authored by James Lamb's avatar James Lamb Committed by GitHub
Browse files

[R-package] limit number of threads used in tests and examples (fixes #5987) (#5988)

parent 7d4d8975
......@@ -47,6 +47,7 @@ params <- list(
, learning_rate = 0.1
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, num_threads = 2L
)
model <- lgb.train(
......
......@@ -58,6 +58,7 @@ params <- list(
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, num_threads = 2L
)
model <- lgb.train(
params = params
......
......@@ -34,7 +34,9 @@ model <- lightgbm(
, agaricus.train$label
, params = list(objective = "binary")
, nrounds = 5L
, verbose = 0)
, verbose = 0
, num_threads = 2L
)
fname <- tempfile(fileext="rds")
saveRDS(model, fname)
......
......@@ -33,6 +33,7 @@ params <- list(
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, num_threads = 2L
)
valids <- list(test = dtest)
model <- lgb.train(
......
......@@ -141,6 +141,7 @@ params <- list(
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, num_threads = 2L
)
valids <- list(test = dtest)
model <- lgb.train(
......
......@@ -132,6 +132,7 @@ params <- list(
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, num_threads = 2L
)
valids <- list(test = dtest)
model <- lgb.train(
......
......@@ -34,6 +34,7 @@ params <- list(
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, num_threads = 2L
)
valids <- list(test = dtest)
model <- lgb.train(
......
......@@ -57,6 +57,7 @@ params <- list(
, metric = "l2"
, min_data = 1L
, learning_rate = 1.0
, num_threads = 2L
)
valids <- list(test = dtest)
model <- lgb.train(
......
# ref for this file:
#
# * https://r-pkgs.org/testing-design.html#testthat-setup-files
#
# LightGBM-internal fix to comply with the CRAN policy of using at most 2 threads in tests and examples.
#
# per https://cran.r-project.org/web/packages/policies.html
#
# > If running a package uses multiple threads/cores it must never use more than two simultaneously:
# > the check farm is a shared resource and will typically be running many checks simultaneously.
#
# Maximum thread count passed as `num_threads` to every training/prediction call in the test suite.
.LGB_MAX_THREADS <- 2L
......@@ -14,6 +14,7 @@ test_that("Predictor$finalize() should not fail", {
data = dtrain
, params = list(
objective = "regression"
, num_threads = .LGB_MAX_THREADS
)
, verbose = VERBOSITY
, nrounds = 3L
......@@ -42,6 +43,7 @@ test_that("predictions do not fail for integer input", {
data = dtrain
, params = list(
objective = "regression"
, num_threads = .LGB_MAX_THREADS
)
, verbose = VERBOSITY
, nrounds = 3L
......@@ -77,6 +79,7 @@ test_that("start_iteration works correctly", {
, learning_rate = 0.6
, objective = "binary"
, verbosity = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 50L
, valids = list("test" = dtest)
......@@ -126,7 +129,7 @@ test_that("Feature contributions from sparse inputs produce sparse outputs", {
, obj = "regression"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(min_data_in_leaf = 5L)
, params = list(min_data_in_leaf = 5L, num_threads = .LGB_MAX_THREADS)
)
pred_dense <- predict(bst, X, type = "contrib")
......@@ -157,7 +160,7 @@ test_that("Sparse feature contribution predictions do not take inputs with wrong
, obj = "regression"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(min_data_in_leaf = 5L)
, params = list(min_data_in_leaf = 5L, num_threads = .LGB_MAX_THREADS)
)
X_wrong <- X[, c(1L:10L, 1L:10L)]
......@@ -187,7 +190,7 @@ test_that("Feature contribution predictions do not take non-general CSR or CSC i
, obj = "regression"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(min_data_in_leaf = 5L)
, params = list(min_data_in_leaf = 5L, num_threads = .LGB_MAX_THREADS)
)
expect_error(predict(bst, SmatC, type = "contrib"))
......@@ -211,6 +214,7 @@ test_that("predict() params should override keyword argument for raw-score predi
objective = "binary"
, min_data_in_leaf = 1L
, seed = 708L
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 10L
, verbose = VERBOSITY
......@@ -262,6 +266,7 @@ test_that("predict() params should override keyword argument for leaf-index pred
objective = "regression"
, min_data_in_leaf = 1L
, seed = 708L
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 10L
, verbose = VERBOSITY
......@@ -315,6 +320,7 @@ test_that("predict() params should override keyword argument for feature contrib
objective = "regression"
, min_data_in_leaf = 1L
, seed = 708L
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 10L
, verbose = VERBOSITY
......@@ -426,7 +432,7 @@ test_that("predict() keeps row names from data (regression)", {
, obj = "regression"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(min_data_in_leaf = 1L)
, params = list(min_data_in_leaf = 1L, num_threads = .LGB_MAX_THREADS)
)
.check_all_row_name_expectations(bst, X)
})
......@@ -442,6 +448,7 @@ test_that("predict() keeps row names from data (binary classification)", {
, obj = "binary"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(num_threads = .LGB_MAX_THREADS)
)
.check_all_row_name_expectations(bst, X)
})
......@@ -455,7 +462,7 @@ test_that("predict() keeps row names from data (multi-class classification)", {
bst <- lgb.train(
data = dtrain
, obj = "multiclass"
, params = list(num_class = 3L)
, params = list(num_class = 3L, num_threads = .LGB_MAX_THREADS)
, nrounds = 5L
, verbose = VERBOSITY
)
......@@ -479,7 +486,7 @@ test_that("predictions for regression and binary classification are returned as
, obj = "regression"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(min_data_in_leaf = 1L)
, params = list(min_data_in_leaf = 1L, num_threads = .LGB_MAX_THREADS)
)
pred <- predict(model, X)
expect_true(is.vector(pred))
......@@ -497,6 +504,7 @@ test_that("predictions for regression and binary classification are returned as
, obj = "binary"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(num_threads = .LGB_MAX_THREADS)
)
pred <- predict(model, X)
expect_true(is.vector(pred))
......@@ -516,7 +524,7 @@ test_that("predictions for multiclass classification are returned as matrix", {
, obj = "multiclass"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(num_class = 3L)
, params = list(num_class = 3L, num_threads = .LGB_MAX_THREADS)
)
pred <- predict(model, X)
expect_true(is.matrix(pred))
......@@ -533,7 +541,7 @@ test_that("Single-row predictions are identical to multi-row ones", {
X <- as.matrix(mtcars[, -1L])
y <- mtcars[, 1L]
dtrain <- lgb.Dataset(X, label = y, params = list(max_bin = 5L))
params <- list(min_data_in_leaf = 2L)
params <- list(min_data_in_leaf = 2L, num_threads = .LGB_MAX_THREADS)
model <- lgb.train(
params = params
, data = dtrain
......@@ -594,7 +602,7 @@ test_that("Fast-predict configuration accepts non-default prediction types", {
X <- as.matrix(mtcars[, -1L])
y <- mtcars[, 1L]
dtrain <- lgb.Dataset(X, label = y, params = list(max_bin = 5L))
params <- list(min_data_in_leaf = 2L)
params <- list(min_data_in_leaf = 2L, num_threads = .LGB_MAX_THREADS)
model <- lgb.train(
params = params
, data = dtrain
......@@ -624,7 +632,7 @@ test_that("Fast-predict configuration does not block other prediction types", {
X <- as.matrix(mtcars[, -1L])
y <- mtcars[, 1L]
dtrain <- lgb.Dataset(X, label = y, params = list(max_bin = 5L))
params <- list(min_data_in_leaf = 2L)
params <- list(min_data_in_leaf = 2L, num_threads = .LGB_MAX_THREADS)
model <- lgb.train(
params = params
, data = dtrain
......@@ -661,6 +669,7 @@ test_that("predict type='class' returns predicted class for classification objec
, obj = "binary"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(num_threads = .LGB_MAX_THREADS)
)
pred <- predict(bst, X, type = "class")
expect_true(all(pred %in% c(0L, 1L)))
......@@ -674,7 +683,7 @@ test_that("predict type='class' returns predicted class for classification objec
, obj = "multiclass"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(num_class = 3L)
, params = list(num_class = 3L, num_threads = .LGB_MAX_THREADS)
)
pred <- predict(model, X, type = "class")
expect_true(all(pred %in% c(0L, 1L, 2L)))
......@@ -690,6 +699,7 @@ test_that("predict type='class' returns values in the target's range for regress
, obj = "regression"
, nrounds = 5L
, verbose = VERBOSITY
, params = list(num_threads = .LGB_MAX_THREADS)
)
pred <- predict(bst, X, type = "class")
expect_true(!any(pred %in% c(0.0, 1.0)))
......
This diff is collapsed.
......@@ -39,6 +39,7 @@ param <- list(
, objective = logregobj
, metric = "auc"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
num_round <- 10L
......@@ -54,6 +55,7 @@ test_that("using a custom objective, custom eval, and no other metrics works", {
num_leaves = 8L
, learning_rate = 1.0
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, data = dtrain
, nrounds = 4L
......
......@@ -133,7 +133,7 @@ test_that("Dataset$set_reference() updates categorical_feature, colnames, and pr
dtrain$construct()
bst <- Booster$new(
train_set = dtrain
, params = list(verbose = -1L)
, params = list(verbose = -1L, num_threads = .LGB_MAX_THREADS)
)
dtrain$.__enclos_env__$private$predictor <- bst$to_predictor()
......@@ -394,6 +394,7 @@ test_that("lgb.Dataset: should be able to run lgb.train() immediately after usin
, num_leaves = 5L
, learning_rate = 1.0
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
# should be able to train right away
......@@ -429,6 +430,7 @@ test_that("lgb.Dataset: should be able to run lgb.cv() immediately after using l
, learning_rate = 1.0
, num_iterations = 5L
, verbosity = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
# should be able to train right away
......
......@@ -27,6 +27,7 @@ test_that("learning-to-rank with lgb.train() works as expected", {
, lambdarank_truncation_level = 3L
, learning_rate = 0.001
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
model <- lgb.train(
params = params
......@@ -91,6 +92,7 @@ test_that("learning-to-rank with lgb.cv() works as expected", {
, min_data = 1L
, learning_rate = 0.01
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
nfold <- 4L
nrounds <- 10L
......
......@@ -13,6 +13,7 @@ test_that("Booster$finalize() should not fail", {
data = dtrain
, params = list(
objective = "regression"
, num_threads = .LGB_MAX_THREADS
)
, verbose = VERBOSITY
, nrounds = 3L
......@@ -66,6 +67,7 @@ test_that("lgb.get.eval.result() should throw an informative error for incorrect
, min_data = 1L
, learning_rate = 1.0
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, data = dtrain
, nrounds = 5L
......@@ -185,6 +187,7 @@ test_that("Loading a Booster from a text file works", {
, learning_rate = 1.0
, objective = "binary"
, verbosity = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
bst <- lightgbm(
data = as.matrix(train$data)
......@@ -227,6 +230,7 @@ test_that("boosters with linear models at leaves can be written to text file and
, metric = "mse"
, seed = 0L
, num_leaves = 2L
, num_threads = .LGB_MAX_THREADS
)
bst <- lgb.train(
......@@ -268,6 +272,7 @@ test_that("Loading a Booster from a string works", {
, learning_rate = 1.0
, objective = "binary"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 2L
)
......@@ -299,6 +304,7 @@ test_that("Saving a large model to string should work", {
num_leaves = 100L
, learning_rate = 0.01
, objective = "binary"
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 500L
, verbose = VERBOSITY
......@@ -342,6 +348,7 @@ test_that("Saving a large model to JSON should work", {
num_leaves = 100L
, learning_rate = 0.01
, objective = "binary"
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 200L
, verbose = VERBOSITY
......@@ -372,6 +379,7 @@ test_that("If a string and a file are both passed to lgb.load() the file is used
, learning_rate = 1.0
, objective = "binary"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 2L
)
......@@ -406,6 +414,7 @@ test_that("Creating a Booster from a Dataset should work", {
params = list(
objective = "binary"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
),
train_set = dtrain
)
......@@ -427,6 +436,7 @@ test_that("Creating a Booster from a Dataset with an existing predictor should w
, learning_rate = 1.0
, objective = "binary"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, nrounds = nrounds
)
......@@ -440,6 +450,7 @@ test_that("Creating a Booster from a Dataset with an existing predictor should w
train_set = dtest
, params = list(
verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
)
expect_true(lgb.is.Booster(bst))
......@@ -463,6 +474,7 @@ test_that("Booster$eval() should work on a Dataset stored in a binary file", {
, metric = "l2"
, num_leaves = 4L
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, data = dtrain
, nrounds = 2L
......@@ -492,7 +504,7 @@ test_that("Booster$eval() should work on a Dataset stored in a binary file", {
eval_from_file <- bst$eval(
data = lgb.Dataset(
data = test_file
, params = list(verbose = VERBOSITY)
, params = list(verbose = VERBOSITY, num_threads = .LGB_MAX_THREADS)
)$construct()
, name = "test"
)
......@@ -521,6 +533,7 @@ test_that("Booster$rollback_one_iter() should work as expected", {
, learning_rate = 1.0
, objective = "binary"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, nrounds = nrounds
)
......@@ -555,6 +568,7 @@ test_that("Booster$update() passing a train_set works as expected", {
, learning_rate = 1.0
, objective = "binary"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, nrounds = nrounds
)
......@@ -579,6 +593,7 @@ test_that("Booster$update() passing a train_set works as expected", {
, learning_rate = 1.0
, objective = "binary"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, nrounds = nrounds + 1L
)
......@@ -604,6 +619,7 @@ test_that("Booster$update() throws an informative error if you provide a non-Dat
, learning_rate = 1.0
, objective = "binary"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, nrounds = nrounds
)
......@@ -631,6 +647,7 @@ test_that("Booster should store parameters and Booster$reset_parameter() should
, boosting = "gbdt"
, num_class = 5L
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
bst <- Booster$new(
params = params
......@@ -658,6 +675,7 @@ test_that("Booster$params should include dataset params, before and after Booste
, max_depth = 4L
, bagging_fraction = 0.8
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
bst <- Booster$new(
params = params
......@@ -670,6 +688,7 @@ test_that("Booster$params should include dataset params, before and after Booste
, max_depth = 4L
, bagging_fraction = 0.8
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
, max_bin = 17L
)
)
......@@ -681,6 +700,7 @@ test_that("Booster$params should include dataset params, before and after Booste
, max_depth = 4L
, bagging_fraction = 0.9
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
, max_bin = 17L
)
expect_identical(ret_bst$params, expected_params)
......@@ -699,6 +719,7 @@ test_that("Saving a model with different feature importance types works", {
, learning_rate = 1.0
, objective = "binary"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 2L
)
......@@ -754,6 +775,7 @@ test_that("Saving a model with unknown importance type fails", {
, learning_rate = 1.0
, objective = "binary"
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 2L
)
......@@ -789,6 +811,7 @@ test_that("all parameters are stored correctly with save_model_to_string()", {
params = list(
objective = "regression"
, metric = "l2"
, num_threads = .LGB_MAX_THREADS
)
, data = dtrain
, nrounds = nrounds
......@@ -845,6 +868,7 @@ test_that("early_stopping, num_iterations are stored correctly in model string e
, n_iter = n_iter
, early_stopping_round = early_stopping_round
, n_iter_no_change = n_iter_no_change
, num_threads = .LGB_MAX_THREADS
)
bst <- lgb.train(
......@@ -884,6 +908,7 @@ test_that("Booster: method calls Booster with a null handle should raise an info
objective = "regression"
, metric = "l2"
, num_leaves = 8L
, num_threads = .LGB_MAX_THREADS
)
, data = dtrain
, verbose = VERBOSITY
......@@ -1074,6 +1099,7 @@ test_that("lgb.cv() correctly handles passing through params to the model file",
, early_stopping_round = early_stopping_round
, n_iter_no_change = n_iter_no_change
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
cv_bst <- lgb.cv(
......@@ -1118,6 +1144,7 @@ test_that("params (including dataset params) should be stored in .rds file for B
, max_depth = 4L
, bagging_fraction = 0.8
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
bst <- Booster$new(
params = params
......@@ -1134,6 +1161,7 @@ test_that("params (including dataset params) should be stored in .rds file for B
, max_depth = 4L
, bagging_fraction = 0.8
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
, max_bin = 17L
)
)
......@@ -1153,6 +1181,7 @@ test_that("params (including dataset params) should be stored in .rds file for B
, max_depth = 4L
, bagging_fraction = 0.8
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
bst <- Booster$new(
params = params
......@@ -1169,6 +1198,7 @@ test_that("params (including dataset params) should be stored in .rds file for B
, max_depth = 4L
, bagging_fraction = 0.8
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
, max_bin = 17L
)
)
......@@ -1184,6 +1214,7 @@ test_that("Handle is automatically restored when calling predict", {
, params = list(
verbose = VERBOSITY
)
, num_threads = .LGB_MAX_THREADS
)
bst_file <- tempfile(fileext = ".rds")
saveRDS(bst, file = bst_file)
......@@ -1209,6 +1240,7 @@ test_that("boosters with linear models at leaves work with saveRDS.lgb.Booster a
, metric = "mse"
, seed = 0L
, num_leaves = 2L
, num_threads = .LGB_MAX_THREADS
)
bst <- lgb.train(
......@@ -1248,6 +1280,7 @@ test_that("boosters with linear models at leaves can be written to RDS and re-lo
, metric = "mse"
, seed = 0L
, num_leaves = 2L
, num_threads = .LGB_MAX_THREADS
)
bst <- lgb.train(
......@@ -1344,6 +1377,7 @@ test_that("Booster's print, show, and summary work correctly", {
params = list(
objective = "regression"
, min_data_in_leaf = 1L
, num_threads = .LGB_MAX_THREADS
)
, data = lgb.Dataset(
as.matrix(mtcars[, -1L])
......@@ -1359,7 +1393,7 @@ test_that("Booster's print, show, and summary work correctly", {
data("iris")
model <- lgb.train(
params = list(objective = "multiclass", num_class = 3L)
params = list(objective = "multiclass", num_class = 3L, num_threads = .LGB_MAX_THREADS)
, data = lgb.Dataset(
as.matrix(iris[, -5L])
, label = as.numeric(factor(iris$Species)) - 1.0
......@@ -1399,6 +1433,7 @@ test_that("Booster's print, show, and summary work correctly", {
, eval = .evalerror
, verbose = VERBOSITY
, nrounds = 5L
, params = list(num_threads = .LGB_MAX_THREADS)
)
.check_methods_work(model)
......@@ -1410,6 +1445,7 @@ test_that("LGBM_BoosterGetNumFeature_R returns correct outputs", {
params = list(
objective = "regression"
, min_data_in_leaf = 1L
, num_threads = .LGB_MAX_THREADS
)
, data = lgb.Dataset(
as.matrix(mtcars[, -1L])
......
......@@ -31,6 +31,7 @@ test_that("lgb.intereprete works as expected for binary classification", {
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
model <- lgb.train(
params = params
......@@ -83,6 +84,7 @@ test_that("lgb.intereprete works as expected for multiclass classification", {
, learning_rate = 0.00001
, min_data = 1L
, verbose = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
model <- lgb.train(
params = params
......
......@@ -14,6 +14,7 @@ test_that("lgb.plot.importance() should run without error for well-formed inputs
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, verbosity = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
model <- lgb.train(params, dtrain, 3L)
tree_imp <- lgb.importance(model, percentage = TRUE)
......
......@@ -31,6 +31,7 @@ test_that("lgb.plot.interepretation works as expected for binary classification"
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
, verbosity = VERBOSITY
, num_threads = .LGB_MAX_THREADS
)
model <- lgb.train(
params = params
......@@ -80,6 +81,7 @@ test_that("lgb.plot.interepretation works as expected for multiclass classificat
, num_class = 3L
, learning_rate = 0.00001
, min_data = 1L
, num_threads = .LGB_MAX_THREADS
)
model <- lgb.train(
params = params
......
......@@ -20,6 +20,7 @@ test_that("Feature penalties work properly", {
, objective = "binary"
, feature_penalty = paste0(feature_penalties, collapse = ",")
, metric = "binary_error"
, num_threads = .LGB_MAX_THREADS
)
, nrounds = 5L
, verbose = -1L
......@@ -97,6 +98,7 @@ test_that("training should warn if you use 'dart' boosting, specified with 'boos
, learning_rate = 0.05
, objective = "binary"
, metric = "binary_error"
, num_threads = .LGB_MAX_THREADS
)
params[[boosting_param]] <- "dart"
expect_warning({
......
......@@ -9,7 +9,7 @@ test_that("Gamma regression reacts on 'weight'", {
y <- X[, 1L] + X[, 2L] + runif(n)
X_pred <- X[1L:5L, ]
params <- list(objective = "gamma")
params <- list(objective = "gamma", num_threads = .LGB_MAX_THREADS)
# Unweighted
dtrain <- lgb.Dataset(X, label = y)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment