Unverified Commit 85b28439 authored by James Lamb's avatar James Lamb Committed by GitHub
Browse files

[R-package] [ci] cut nrounds in unit tests (#3169)

parent fa2de89b
......@@ -8,6 +8,7 @@ test_that("predictions do not fail for integer input", {
data = dtrain
, objective = "regression"
, verbose = -1L
, nrounds = 3L
)
X_double <- X[c(1L, 51L, 101L), , drop = FALSE]
X_integer <- X_double
......
......@@ -32,7 +32,7 @@ test_that("lgb.intereprete works as expected for binary classification", {
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 3L
)
num_trees <- 5L
tree_interpretation <- lgb.interprete(
......@@ -82,7 +82,7 @@ test_that("lgb.intereprete works as expected for multiclass classification", {
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 3L
, min_data = 1L
)
num_trees <- 5L
......
......@@ -12,7 +12,7 @@ test_that("lgb.plot.importance() should run without error for well-formed inputs
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
)
model <- lgb.train(params, dtrain, 10L)
model <- lgb.train(params, dtrain, 3L)
tree_imp <- lgb.importance(model, percentage = TRUE)
# Check that there are no plots present before plotting
......
......@@ -32,7 +32,7 @@ test_that("lgb.plot.interepretation works as expected for binary classification"
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 3L
)
num_trees <- 5L
tree_interpretation <- lgb.interprete(
......@@ -80,7 +80,7 @@ test_that("lgb.plot.interepretation works as expected for multiclass classificat
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 3L
, min_data = 1L
)
num_trees <- 5L
......
......@@ -19,7 +19,7 @@ test_that("Feature penalties work properly", {
, label = train$label
, num_leaves = 5L
, learning_rate = 0.05
, nrounds = 20L
, nrounds = 5L
, objective = "binary"
, feature_penalty = paste0(feature_penalties, collapse = ",")
, metric = "binary_error"
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment