Commit 029bcc42 authored by James Lamb, committed by Laurae

[R-package] updated examples and removed dontrun guards on them in roxygen (#1626)

parent abd73765
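
These .Rd files are generated by roxygen2, so the change itself lives in the #' comments of the R source files, not in the man/ files shown below. A sketch of what that source-level change looks like, reconstructed from the first generated diff below rather than copied from the actual R file:

Before:

#' @examples
#' \dontrun{
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#' lgb.Dataset.construct(dtrain)
#' }

After:

#' @examples
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#' lgb.Dataset.construct(dtrain)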
R-package/man/lgb.Dataset.construct.Rd
@@ -13,12 +13,10 @@ lgb.Dataset.construct(dataset)
 Construct Dataset explicitly
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
 dtrain <- lgb.Dataset(train$data, label = train$label)
 lgb.Dataset.construct(dtrain)
-}
 }
R-package/man/lgb.Dataset.create.valid.Rd
@@ -22,7 +22,6 @@ constructed dataset
 Construct validation data according to training data
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
@@ -30,6 +29,5 @@ dtrain <- lgb.Dataset(train$data, label = train$label)
 data(agaricus.test, package = "lightgbm")
 test <- agaricus.test
 dtest <- lgb.Dataset.create.valid(dtrain, test$data, label = test$label)
-}
 }
R-package/man/lgb.Dataset.save.Rd
@@ -19,12 +19,10 @@ Save \code{lgb.Dataset} to a binary file
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
 dtrain <- lgb.Dataset(train$data, label = train$label)
 lgb.Dataset.save(dtrain, "data.bin")
-}
 }
R-package/man/lgb.Dataset.set.categorical.Rd
@@ -18,7 +18,6 @@ passed dataset
 Set categorical feature of \code{lgb.Dataset}
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
@@ -26,6 +25,5 @@ dtrain <- lgb.Dataset(train$data, label = train$label)
 lgb.Dataset.save(dtrain, "lgb.Dataset.data")
 dtrain <- lgb.Dataset("lgb.Dataset.data")
 lgb.Dataset.set.categorical(dtrain, 1:2)
-}
 }
R-package/man/lgb.Dataset.set.reference.Rd
@@ -18,7 +18,6 @@ passed dataset
 If you want to use validation data, you should set reference to training data
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
@@ -27,6 +26,5 @@ data(agaricus.test, package = "lightgbm")
 test <- agaricus.test
 dtest <- lgb.Dataset(test$data, label = test$label)
 lgb.Dataset.set.reference(dtest, dtrain)
-}
 }
R-package/man/lgb.cv.Rd
@@ -80,7 +80,6 @@ a trained model \code{lgb.CVBooster}.
 Cross validation logic used by LightGBM
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
@@ -94,4 +93,3 @@ model <- lgb.cv(params,
                 learning_rate = 1,
                 early_stopping_rounds = 10)
-}
 }
R-package/man/lgb.dump.Rd
@@ -18,7 +18,6 @@ json format of model
 Dump LightGBM model to json
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
@@ -36,6 +35,5 @@ model <- lgb.train(params,
                    learning_rate = 1,
                    early_stopping_rounds = 10)
 json_model <- lgb.dump(model)
-}
 }
R-package/man/lgb.get.eval.result.Rd
@@ -25,7 +25,6 @@ vector of evaluation result
 Get record evaluation result from booster
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
@@ -43,6 +42,5 @@ model <- lgb.train(params,
                    learning_rate = 1,
                    early_stopping_rounds = 10)
 lgb.get.eval.result(model, "test", "l2")
-}
 }
R-package/man/lgb.importance.Rd
@@ -24,13 +24,12 @@ For a tree model, a \code{data.table} with the following columns:
 Creates a \code{data.table} of feature importances in a model.
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
 dtrain <- lgb.Dataset(train$data, label = train$label)
-params = list(objective = "binary",
+params <- list(objective = "binary",
               learning_rate = 0.01, num_leaves = 63, max_depth = -1,
               min_data_in_leaf = 1, min_sum_hessian_in_leaf = 1)
 model <- lgb.train(params, dtrain, 20)
@@ -38,6 +37,5 @@ model <- lgb.train(params, dtrain, 20)
 tree_imp1 <- lgb.importance(model, percentage = TRUE)
 tree_imp2 <- lgb.importance(model, percentage = FALSE)
-}
 }
R-package/man/lgb.interprete.Rd
@@ -27,8 +27,6 @@ For multiclass classification, a \code{list} of \code{data.table} with the Feature column and Contribution columns to each class.
 Computes feature contribution components of raw score prediction.
 }
 \examples{
-\dontrun{
 library(lightgbm)
 Sigmoid <- function(x) 1 / (1 + exp(-x))
 Logit <- function(x) log(x / (1 - x))
 data(agaricus.train, package = "lightgbm")
@@ -38,13 +36,16 @@ setinfo(dtrain, "init_score", rep(Logit(mean(train$label)), length(train$label)))
 data(agaricus.test, package = "lightgbm")
 test <- agaricus.test
-params = list(objective = "binary",
-              learning_rate = 0.01, num_leaves = 63, max_depth = -1,
-              min_data_in_leaf = 1, min_sum_hessian_in_leaf = 1)
-model <- lgb.train(params, dtrain, 20)
+params <- list(
+    objective = "binary"
+    , learning_rate = 0.01
+    , num_leaves = 63
+    , max_depth = -1
+    , min_data_in_leaf = 1
+    , min_sum_hessian_in_leaf = 1
+)
+model <- lgb.train(params, dtrain, 20)
 tree_interpretation <- lgb.interprete(model, test$data, 1:5)
-}
 }
R-package/man/lgb.load.Rd
@@ -20,7 +20,6 @@ Load LightGBM takes in either a file path or model string.
 If both are provided, Load will default to loading from file
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
@@ -41,6 +40,5 @@ lgb.save(model, "model.txt")
 load_booster <- lgb.load(filename = "model.txt")
 model_string <- model$save_model_to_string(NULL)  # saves best iteration
 load_booster_from_str <- lgb.load(model_str = model_string)
-}
 }
R-package/man/lgb.model.dt.tree.Rd
@@ -39,20 +39,17 @@ The columns of the \code{data.table} are:
 Parse a LightGBM model json dump into a \code{data.table} structure.
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
 dtrain <- lgb.Dataset(train$data, label = train$label)
-params = list(objective = "binary",
+params <- list(objective = "binary",
               learning_rate = 0.01, num_leaves = 63, max_depth = -1,
               min_data_in_leaf = 1, min_sum_hessian_in_leaf = 1)
-model <- lgb.train(params, dtrain, 20)
 model <- lgb.train(params, dtrain, 20)
 tree_dt <- lgb.model.dt.tree(model)
-}
 }
R-package/man/lgb.plot.importance.Rd
@@ -29,19 +29,3 @@ Plot previously calculated feature importance: Gain, Cover and Frequency, as a bar graph.
 The graph represents each feature as a horizontal bar of length proportional to the defined importance of a feature.
 Features are shown ranked in a decreasing importance order.
 }
-\examples{
-\dontrun{
-data(agaricus.train, package = "lightgbm")
-train <- agaricus.train
-dtrain <- lgb.Dataset(train$data, label = train$label)
-params = list(objective = "binary",
-              learning_rate = 0.01, num_leaves = 63, max_depth = -1,
-              min_data_in_leaf = 1, min_sum_hessian_in_leaf = 1)
-model <- lgb.train(params, dtrain, 20)
-model <- lgb.train(params, dtrain, 20)
-tree_imp <- lgb.importance(model, percentage = TRUE)
-lgb.plot.importance(tree_imp, top_n = 10, measure = "Gain")
-}
-}
R-package/man/lgb.plot.interpretation.Rd
@@ -29,7 +29,6 @@ The graph represents each feature as a horizontal bar of length proportional to the defined contribution of a feature.
 Features are shown ranked in a decreasing contribution order.
 }
 \examples{
-\dontrun{
 library(lightgbm)
 Sigmoid <- function(x) {1 / (1 + exp(-x))}
 Logit <- function(x) {log(x / (1 - x))}
@@ -40,7 +39,7 @@ setinfo(dtrain, "init_score", rep(Logit(mean(train$label)), length(train$label)))
 data(agaricus.test, package = "lightgbm")
 test <- agaricus.test
-params = list(objective = "binary",
+params <- list(objective = "binary",
               learning_rate = 0.01, num_leaves = 63, max_depth = -1,
               min_data_in_leaf = 1, min_sum_hessian_in_leaf = 1)
 model <- lgb.train(params, dtrain, 20)
@@ -49,4 +48,3 @@ model <- lgb.train(params, dtrain, 20)
 tree_interpretation <- lgb.interprete(model, test$data, 1:5)
 lgb.plot.interpretation(tree_interpretation[[1]], top_n = 10)
-}
 }
R-package/man/lgb.prepare.Rd
@@ -16,7 +16,6 @@ The cleaned dataset. It must be converted to a matrix format (\code{as.matrix}) for input in \code{lgb.Dataset}.
 Attempts to prepare a clean dataset to put into an \code{lgb.Dataset}. Factors and characters are converted to numeric (not integer). Please use \code{lgb.prepare_rules} if you want to apply this transformation to other datasets.
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(iris)
@@ -46,6 +45,5 @@ str(lightgbm::lgb.prepare(data = iris))
 # $ Petal.Length: num  1.4 1.4 1.3 1.5 1.4 1.7 1.4 1.5 1.4 1.5 ...
 # $ Petal.Width : num  0.2 0.2 0.2 0.2 0.2 0.4 0.3 0.2 0.2 0.1 ...
 # $ Species     : num  1 1 1 1 1 1 1 1 1 1 ...
-}
 }
R-package/man/lgb.prepare2.Rd
@@ -16,7 +16,6 @@ The cleaned dataset. It must be converted to a matrix format (\code{as.matrix}) for input in \code{lgb.Dataset}.
 Attempts to prepare a clean dataset to put into an \code{lgb.Dataset}. Factors and characters are converted to numeric (specifically: integer). Please use \code{lgb.prepare_rules2} if you want to apply this transformation to other datasets. This is useful if you have a specific need for an integer dataset instead of a numeric one. Note that there are programs which do not support integer-only input. Consider this a half-memory technique which is dangerous, especially for LightGBM.
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(iris)
@@ -28,7 +27,8 @@ str(iris)
 # $ Petal.Width : num  0.2 0.2 0.2 0.2 0.2 0.4 0.3 0.2 0.2 0.1 ...
 # $ Species     : Factor w/ 3 levels "setosa","versicolor",..: 1 1 1 1 ...
-str(lgb.prepare2(data = iris)) # Convert all factors/chars to integer
+# Convert all factors/chars to integer
+str(lgb.prepare2(data = iris))
 # 'data.frame': 150 obs. of 5 variables:
 # $ Sepal.Length: num  5.1 4.9 4.7 4.6 5 5.4 4.6 5 4.4 4.9 ...
 # $ Sepal.Width : num  3.5 3 3.2 3.1 3.6 3.9 3.4 3.4 2.9 3.1 ...
@@ -48,5 +48,3 @@ str(lightgbm::lgb.prepare2(data = iris))
 # $ Species     : int  1 1 1 1 1 1 1 1 1 1 ...
-}
 }
R-package/man/lgb.prepare_rules.Rd
@@ -18,7 +18,6 @@ A list with the cleaned dataset (\code{data}) and the rules (\code{rules}). The data must be converted to a matrix format (\code{as.matrix}) for input in \code{lgb.Dataset}.
 Attempts to prepare a clean dataset to put into an \code{lgb.Dataset}. Factors and characters are converted to numeric. In addition, the rules created during conversion are kept so you can convert other datasets with the same converter.
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(iris)
@@ -77,5 +76,3 @@ str(newest_iris$data) # SUCCESS!
 # $ Species     : num  3 3 3 3 3 3 3 3 3 3 ...
-}
 }
R-package/man/lgb.prepare_rules2.Rd
@@ -18,7 +18,6 @@ A list with the cleaned dataset (\code{data}) and the rules (\code{rules}). The data must be converted to a matrix format (\code{as.matrix}) for input in \code{lgb.Dataset}.
 Attempts to prepare a clean dataset to put into an \code{lgb.Dataset}. Factors and characters are converted to numeric (specifically: integer). In addition, the rules created during conversion are kept so you can convert other datasets with the same converter. This is useful if you have a specific need for an integer dataset instead of a numeric one. Note that there are programs which do not support integer-only input. Consider this a half-memory technique which is dangerous, especially for LightGBM.
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(iris)
@@ -77,5 +76,3 @@ str(newest_iris$data) # SUCCESS!
 # $ Species     : int  3 3 3 3 3 3 3 3 3 3 ...
-}
 }
R-package/man/lgb.save.Rd
@@ -20,7 +20,6 @@ lgb.Booster
 Save LightGBM model
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
@@ -38,6 +37,5 @@ model <- lgb.train(params,
                    learning_rate = 1,
                    early_stopping_rounds = 10)
 lgb.save(model, "model.txt")
-}
 }
R-package/man/lgb.train.Rd
@@ -69,7 +69,6 @@ a trained booster model \code{lgb.Booster}.
 Logic to train with LightGBM
 }
 \examples{
-\dontrun{
 library(lightgbm)
 data(agaricus.train, package = "lightgbm")
 train <- agaricus.train
@@ -86,6 +85,5 @@ model <- lgb.train(params,
                    min_data = 1,
                    learning_rate = 1,
                    early_stopping_rounds = 10)
-}
 }
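
Removing the \dontrun guards means these examples are now executed by R CMD check, so breakage in the documented API surfaces during checks instead of going unnoticed. A minimal sketch of exercising one of the now-runnable examples (assumes an installed copy of lightgbm; not part of the commit):

library(lightgbm)
# example() skips \dontrun{} blocks by default, so before this commit the
# call below would have executed nothing; now it runs the full example.
example("lgb.Dataset.construct", package = "lightgbm")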