Unverified Commit 18161674 authored by James Lamb, committed by GitHub

[R-package] [docs] Simplified examples to cut example run time (fixes #2988) (#2989)

* [R-package] [docs] Simplified examples to cut example run time (fixes #2988)

* updated learning rates
parent 151bf070
@@ -22,7 +22,6 @@ constructed dataset
Construct validation data according to training data
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
......
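For context, a minimal sketch of how this example is typically completed (not part of the diff; assumes the agaricus test set bundled with the package):

data(agaricus.test, package = "lightgbm")
test <- agaricus.test
# build validation data that shares dtrain's bin mappings
dtest <- lgb.Dataset.create.valid(dtrain, test$data, label = test$label)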
@@ -19,7 +19,6 @@ Please note that \code{init_score} is not saved in the binary file.
If you need it, please set it again after loading the Dataset.
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
......
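A one-line sketch of the note above (not part of the diff): since init_score does not survive a round trip through the binary file, set it again with setinfo() on the reloaded Dataset.

# re-attach init_score after loading; the constant used here is illustrative
setinfo(dtrain, "init_score", rep(0.0, length(train$label)))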
@@ -21,7 +21,6 @@ Set the categorical features of an \code{lgb.Dataset} object. Use this function
to tell LightGBM which features should be treated as categorical.
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
......
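For reference, a sketch of the call this example leads up to (not part of the diff; the column choice is arbitrary):

# treat the first two columns of dtrain as categorical features
lgb.Dataset.set.categorical(dtrain, c(1L, 2L))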
@@ -18,7 +18,6 @@ passed dataset
If you want to use validation data, you should set reference to training data
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
......
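A short sketch of the reference-setting step described above (not part of the diff; assumes the bundled agaricus test set):

data(agaricus.test, package = "lightgbm")
dtest <- lgb.Dataset(agaricus.test$data, label = agaricus.test$label)
# validation data should reuse the bin mappings of the training data
lgb.Dataset.set.reference(dtest, dtrain)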
@@ -100,7 +100,6 @@ a trained model \code{lgb.CVBooster}.
Cross validation logic used by LightGBM
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -108,10 +107,9 @@ params <- list(objective = "regression", metric = "l2")
model <- lgb.cv(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, nfold = 3L
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
)
}
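As a usage note (not part of the diff), the returned lgb.CVBooster can be inspected afterwards; treating best_iter as a field of that class is an assumption here:

# iteration selected by early stopping across the folds
print(model$best_iter)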
@@ -18,6 +18,7 @@ json format of model
Dump LightGBM model to json
}
\examples{
\donttest{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
......@@ -37,5 +38,5 @@ model <- lgb.train(
, early_stopping_rounds = 5L
)
json_model <- lgb.dump(model)
}
}
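For context (not part of the diff): the JSON string returned by lgb.dump() can be parsed with jsonlite, which is assumed here purely for illustration:

# parse the dumped model into nested R lists
parsed <- jsonlite::fromJSON(json_model)
names(parsed)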
@@ -33,7 +33,6 @@ Given a \code{lgb.Booster}, return evaluation results for a
}
\examples{
# train a regression model
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -45,11 +44,10 @@ valids <- list(test = dtest)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, valids = valids
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
)
# Examine valid data_name values
......
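The elided lines are not reproduced here; instead, a hedged sketch of the typical follow-up, assuming the "test" valids name and "l2" metric configured above:

# pull the recorded l2 values for the validation data by name
lgb.get.eval.result(model, data_name = "test", eval_name = "l2")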
@@ -24,20 +24,22 @@ For a tree model, a \code{data.table} with the following columns:
Creates a \code{data.table} of feature importances in a model.
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
params <- list(
objective = "binary"
, learning_rate = 0.01
, num_leaves = 63L
, learning_rate = 0.1
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
)
model <- lgb.train(params, dtrain, 10L)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 5L
)
tree_imp1 <- lgb.importance(model, percentage = TRUE)
tree_imp2 <- lgb.importance(model, percentage = FALSE)
......
@@ -29,7 +29,6 @@ For regression, binary classification and lambdarank model, a \code{list} of \co
Computes feature contribution components of rawscore prediction.
}
\examples{
Sigmoid <- function(x) 1.0 / (1.0 + exp(-x))
Logit <- function(x) log(x / (1.0 - x))
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
@@ -40,13 +39,16 @@ test <- agaricus.test
params <- list(
objective = "binary"
, learning_rate = 0.01
, num_leaves = 63L
, learning_rate = 0.1
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
)
model <- lgb.train(params, dtrain, 10L)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 3L
)
tree_interpretation <- lgb.interprete(model, test$data, 1L:5L)
......
@@ -19,7 +19,7 @@ Load LightGBM takes in either a file path or a model string.
If both are provided, it will default to loading from the file.
}
\examples{
library(lightgbm)
\donttest{
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -31,15 +31,15 @@ valids <- list(test = dtest)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, valids = valids
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
, early_stopping_rounds = 3L
)
lgb.save(model, "model.txt")
load_booster <- lgb.load(filename = "model.txt")
model_string <- model$save_model_to_string(NULL) # saves best iteration
load_booster_from_str <- lgb.load(model_str = model_string)
}
}
@@ -43,15 +43,17 @@ dtrain <- lgb.Dataset(train$data, label = train$label)
params <- list(
objective = "binary"
, learning_rate = 0.01
, num_leaves = 63L
, max_depth = -1L
, learning_rate = 0.1
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
)
model <- lgb.train(params, dtrain, 10L)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 5L
)
tree_imp <- lgb.importance(model, percentage = TRUE)
lgb.plot.importance(tree_imp, top_n = 10L, measure = "Gain")
lgb.plot.importance(tree_imp, top_n = 5L, measure = "Gain")
}
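As a usage note (not part of the diff), measure also accepts the other importance types reported by lgb.importance:

# rank features by split frequency instead of gain
lgb.plot.importance(tree_imp, top_n = 5L, measure = "Frequency")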
@@ -34,26 +34,41 @@ The graph represents each feature as a horizontal bar of length proportional to
contribution of a feature. Features are shown ranked in a decreasing contribution order.
}
\examples{
library(lightgbm)
Sigmoid <- function(x) {1.0 / (1.0 + exp(-x))}
Logit <- function(x) {log(x / (1.0 - x))}
\donttest{
Logit <- function(x) {
log(x / (1.0 - x))
}
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
setinfo(dtrain, "init_score", rep(Logit(mean(train$label)), length(train$label)))
labels <- agaricus.train$label
dtrain <- lgb.Dataset(
agaricus.train$data
, label = labels
)
setinfo(dtrain, "init_score", rep(Logit(mean(labels)), length(labels)))
data(agaricus.test, package = "lightgbm")
test <- agaricus.test
params <- list(
objective = "binary"
, learning_rate = 0.01
, num_leaves = 63L
, learning_rate = 0.1
, max_depth = -1L
, min_data_in_leaf = 1L
, min_sum_hessian_in_leaf = 1.0
)
model <- lgb.train(params, dtrain, 10L)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 5L
)
tree_interpretation <- lgb.interprete(model, test$data, 1L:5L)
lgb.plot.interpretation(tree_interpretation[[1L]], top_n = 10L)
tree_interpretation <- lgb.interprete(
model = model
, data = agaricus.test$data
, idxset = 1L:5L
)
lgb.plot.interpretation(
tree_interpretation_dt = tree_interpretation[[1L]]
, top_n = 5L
)
}
}
@@ -19,7 +19,6 @@ Attempts to prepare a clean dataset to put in a \code{lgb.Dataset}. Use
\code{\link{lgb.prepare_rules}} if you want to apply this transformation to other datasets.
}
\examples{
library(lightgbm)
data(iris)
str(iris)
......
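For context, a sketch of the conversion this example demonstrates (not part of the diff; lgb.prepare shipped with the package at the time of this commit):

# factor and character columns are converted to numeric
str(lgb.prepare(data = iris))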
@@ -22,7 +22,6 @@ Attempts to prepare a clean dataset to put in a \code{lgb.Dataset}.
input. Consider this a half-memory technique, which is dangerous, especially for LightGBM.
}
\examples{
library(lightgbm)
data(iris)
str(iris)
......
@@ -22,7 +22,6 @@ Attempts to prepare a clean dataset to put in a \code{lgb.Dataset}.
so you can convert other datasets using this converter.
}
\examples{
library(lightgbm)
data(iris)
str(iris)
......
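A minimal sketch of the converter described above (not part of the diff):

# returns the converted data together with the rules for converting new datasets
new_iris <- lgb.prepare_rules(data = iris)
str(new_iris$rules)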
@@ -25,7 +25,6 @@ Attempts to prepare a clean dataset to put in a \code{lgb.Dataset}.
Consider this a half-memory technique, which is dangerous, especially for LightGBM.
}
\examples{
library(lightgbm)
data(iris)
str(iris)
......
@@ -20,6 +20,7 @@ lgb.Booster
Save LightGBM model
}
\examples{
\donttest{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
@@ -40,3 +41,4 @@ model <- lgb.train(
)
lgb.save(model, "model.txt")
}
}
@@ -83,7 +83,6 @@ a trained booster model \code{lgb.Booster}.
Logic to train with LightGBM
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -95,10 +94,10 @@ valids <- list(test = dtest)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, valids = valids
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
, early_stopping_rounds = 3L
)
}
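A hedged follow-up (not part of the diff; assumes the bundled agaricus test set): scoring new data with the trained booster.

# predictions from the trained booster on held-out data
data(agaricus.test, package = "lightgbm")
preds <- predict(model, agaricus.test$data)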
@@ -26,7 +26,6 @@ Attempts to unload LightGBM packages so you can remove objects cleanly without
apparent reason and you do not want to restart R to fix the lost object.
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
@@ -38,11 +37,10 @@ valids <- list(test = dtest)
model <- lgb.train(
params = params
, data = dtrain
, nrounds = 10L
, nrounds = 5L
, valids = valids
, min_data = 1L
, learning_rate = 1.0
, early_stopping_rounds = 5L
)
\dontrun{
......
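The contents of the \dontrun block are elided above and are not reconstructed here; as a separate illustration, lgb.unloader() exposes restore, wipe, and envir arguments:

# detach the package and wipe its objects from the global environment
lgb.unloader(restore = FALSE, wipe = TRUE, envir = .GlobalEnv)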
@@ -4,7 +4,8 @@
\alias{lgb_shared_params}
\title{Shared parameter docs}
\arguments{
\item{callbacks}{List of callback functions that are applied at each iteration.}
\item{callbacks}{list of callback functions
List of callback functions that are applied at each iteration.}
\item{data}{a \code{lgb.Dataset} object, used for training. Some functions, such as \code{\link{lgb.cv}},
may allow you to pass other types of data like \code{matrix} and then separately supply
......