Unverified Commit faba6caf authored by Nikita Titov's avatar Nikita Titov Committed by GitHub
Browse files

[docs][R] added R-package docs generation routines (#2176)

* added R-package docs generation routines

* change theme to be more consistent with sphinx_rtd_theme on main site in terms of color scheme

* placed man folder with old Rd files back

* specify full path to conda and make script more readable by one line - one pkg

* removed commented lines from build_r_site script

* made one line - one argument in build_reference() call

* pin R package versions

* fixed conflict
parent 317b1bfb
version: 2
formats:
- pdf
python:
version: 3
install:
- requirements: docs/requirements.txt
sphinx:
builder: html
configuration: docs/conf.py
fail_on_warning: true
^build_package.R$
\.gitkeep$
^docs$
^_pkgdown\.yml$
# Objects created by compilation
\.o$
......
......@@ -18,6 +18,7 @@ Description: Tree based algorithms can be improved by introducing boosting frame
5. Capable of handling large-scale data.
In recognition of these advantages, LightGBM has been widely used in many winning solutions of machine learning competitions.
Comparison experiments on public datasets suggest that LightGBM can outperform existing boosting frameworks on both efficiency and accuracy, with significantly lower memory consumption. In addition, parallel experiments suggest that in certain circumstances, LightGBM can achieve a linear speed-up in training time by using multiple machines.
Encoding: UTF-8
License: MIT + file LICENSE
URL: https://github.com/Microsoft/LightGBM
BugReports: https://github.com/Microsoft/LightGBM/issues
......
......@@ -644,11 +644,11 @@ Booster <- R6::R6Class(
#' valids <- list(test = dtest)
#' model <- lgb.train(params,
#' dtrain,
#' 100,
#' 10,
#' valids,
#' min_data = 1,
#' learning_rate = 1,
#' early_stopping_rounds = 10)
#' early_stopping_rounds = 5)
#' preds <- predict(model, test$data)
#'
#' @rdname predict.lgb.Booster
......@@ -701,11 +701,11 @@ predict.lgb.Booster <- function(object,
#' valids <- list(test = dtest)
#' model <- lgb.train(params,
#' dtrain,
#' 100,
#' 10,
#' valids,
#' min_data = 1,
#' learning_rate = 1,
#' early_stopping_rounds = 10)
#' early_stopping_rounds = 5)
#' lgb.save(model, "model.txt")
#' load_booster <- lgb.load(filename = "model.txt")
#' model_string <- model$save_model_to_string(NULL) # saves best iteration
......@@ -759,11 +759,11 @@ lgb.load <- function(filename = NULL, model_str = NULL){
#' valids <- list(test = dtest)
#' model <- lgb.train(params,
#' dtrain,
#' 100,
#' 10,
#' valids,
#' min_data = 1,
#' learning_rate = 1,
#' early_stopping_rounds = 10)
#' early_stopping_rounds = 5)
#' lgb.save(model, "model.txt")
#'
#' @rdname lgb.save
......@@ -806,11 +806,11 @@ lgb.save <- function(booster, filename, num_iteration = NULL){
#' valids <- list(test = dtest)
#' model <- lgb.train(params,
#' dtrain,
#' 100,
#' 10,
#' valids,
#' min_data = 1,
#' learning_rate = 1,
#' early_stopping_rounds = 10)
#' early_stopping_rounds = 5)
#' json_model <- lgb.dump(model)
#'
#' @rdname lgb.dump
......@@ -850,13 +850,12 @@ lgb.dump <- function(booster, num_iteration = NULL){
#' valids <- list(test = dtest)
#' model <- lgb.train(params,
#' dtrain,
#' 100,
#' 10,
#' valids,
#' min_data = 1,
#' learning_rate = 1,
#' early_stopping_rounds = 10)
#' early_stopping_rounds = 5)
#' lgb.get.eval.result(model, "test", "l2")
#'
#' @rdname lgb.get.eval.result
#' @export
lgb.get.eval.result <- function(booster, data_name, eval_name, iters = NULL, is_err = FALSE) {
......
#' @importFrom methods is
#' @importFrom R6 R6Class
Dataset <- R6::R6Class(
......@@ -1057,7 +1056,6 @@ lgb.Dataset.set.reference <- function(dataset, reference) {
#' @return passed dataset
#'
#' @examples
#'
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
......
#' @importFrom methods is
#' @importFrom R6 R6Class
Predictor <- R6::R6Class(
......
......@@ -64,10 +64,10 @@ CVBooster <- R6::R6Class(
#' model <- lgb.cv(params,
#' dtrain,
#' 10,
#' nfold = 5,
#' nfold = 3,
#' min_data = 1,
#' learning_rate = 1,
#' early_stopping_rounds = 10)
#' early_stopping_rounds = 5)
#' @export
lgb.cv <- function(params = list(),
data,
......
......@@ -24,8 +24,7 @@
#' params <- list(objective = "binary",
#' learning_rate = 0.01, num_leaves = 63, max_depth = -1,
#' min_data_in_leaf = 1, min_sum_hessian_in_leaf = 1)
#' model <- lgb.train(params, dtrain, 20)
#' model <- lgb.train(params, dtrain, 20)
#' model <- lgb.train(params, dtrain, 10)
#'
#' tree_imp1 <- lgb.importance(model, percentage = TRUE)
#' tree_imp2 <- lgb.importance(model, percentage = FALSE)
......
......@@ -34,7 +34,7 @@
#' , min_data_in_leaf = 1
#' , min_sum_hessian_in_leaf = 1
#' )
#' model <- lgb.train(params, dtrain, 20)
#' model <- lgb.train(params, dtrain, 10)
#'
#' tree_interpretation <- lgb.interprete(model, test$data, 1:5)
#'
......
......@@ -38,8 +38,7 @@
#' params <- list(objective = "binary",
#' learning_rate = 0.01, num_leaves = 63, max_depth = -1,
#' min_data_in_leaf = 1, min_sum_hessian_in_leaf = 1)
#' model <- lgb.train(params, dtrain, 20)
#' model <- lgb.train(params, dtrain, 20)
#' model <- lgb.train(params, dtrain, 10)
#'
#' tree_dt <- lgb.model.dt.tree(model)
#'
......
......@@ -30,7 +30,7 @@
#' , min_sum_hessian_in_leaf = 1
#' )
#'
#' model <- lgb.train(params, dtrain, 20)
#' model <- lgb.train(params, dtrain, 10)
#'
#' tree_imp <- lgb.importance(model, percentage = TRUE)
#' lgb.plot.importance(tree_imp, top_n = 10, measure = "Gain")
......
......@@ -29,8 +29,7 @@
#' params <- list(objective = "binary",
#' learning_rate = 0.01, num_leaves = 63, max_depth = -1,
#' min_data_in_leaf = 1, min_sum_hessian_in_leaf = 1)
#' model <- lgb.train(params, dtrain, 20)
#' model <- lgb.train(params, dtrain, 20)
#' model <- lgb.train(params, dtrain, 10)
#'
#' tree_interpretation <- lgb.interprete(model, test$data, 1:5)
#' lgb.plot.interpretation(tree_interpretation[[1]], top_n = 10)
......
......@@ -26,6 +26,7 @@
#' # $ Petal.Width : num 0.2 0.2 0.2 0.2 0.2 0.4 0.3 0.2 0.2 0.1 ...
#' # $ Species : num 1 1 1 1 1 1 1 1 1 1 ...
#'
#' \dontrun{
#' # When lightgbm package is installed, and you do not want to load it
#' # You can still use the function!
#' lgb.unloader()
......@@ -36,6 +37,7 @@
#' # $ Petal.Length: num 1.4 1.4 1.3 1.5 1.4 1.7 1.4 1.5 1.4 1.5 ...
#' # $ Petal.Width : num 0.2 0.2 0.2 0.2 0.2 0.4 0.3 0.2 0.2 0.1 ...
#' # $ Species : num 1 1 1 1 1 1 1 1 1 1 ...
#' }
#'
#' @export
lgb.prepare <- function(data) {
......
......@@ -27,6 +27,7 @@
#' # $ Petal.Width : num 0.2 0.2 0.2 0.2 0.2 0.4 0.3 0.2 0.2 0.1 ...
#' # $ Species : int 1 1 1 1 1 1 1 1 1 1 ...
#'
#' \dontrun{
#' # When lightgbm package is installed, and you do not want to load it
#' # You can still use the function!
#' lgb.unloader()
......@@ -37,6 +38,7 @@
#' # $ Petal.Length: num 1.4 1.4 1.3 1.5 1.4 1.7 1.4 1.5 1.4 1.5 ...
#' # $ Petal.Width : num 0.2 0.2 0.2 0.2 0.2 0.4 0.3 0.2 0.2 0.1 ...
#' # $ Species : int 1 1 1 1 1 1 1 1 1 1 ...
#' }
#'
#' @export
lgb.prepare2 <- function(data) {
......
......@@ -39,12 +39,11 @@
#' valids <- list(test = dtest)
#' model <- lgb.train(params,
#' dtrain,
#' 100,
#' 10,
#' valids,
#' min_data = 1,
#' learning_rate = 1,
#' early_stopping_rounds = 10)
#'
#' early_stopping_rounds = 5)
#' @export
lgb.train <- function(params = list(),
data,
......
......@@ -20,17 +20,20 @@
#' valids <- list(test = dtest)
#' model <- lgb.train(params,
#' dtrain,
#' 100,
#' 10,
#' valids,
#' min_data = 1,
#' learning_rate = 1,
#' early_stopping_rounds = 10)
#' early_stopping_rounds = 5)
#'
#' \dontrun{
#' lgb.unloader(restore = FALSE, wipe = FALSE, envir = .GlobalEnv)
#' rm(model, dtrain, dtest) # Not needed if wipe = TRUE
#' gc() # Not needed if wipe = TRUE
#'
#' library(lightgbm)
#' # Do whatever you want again with LightGBM without object clashing
#' }
#'
#' @export
lgb.unloader <- function(restore = TRUE, wipe = FALSE, envir = .GlobalEnv) {
......
#' @name lgb_shared_params
#' @title Shared parameter docs
#' @description Parameter docs shared by \code{lgb.train}, \code{lgb.cv}, and \code{lightgbm}
......
......@@ -19,11 +19,11 @@
#' valids <- list(test = dtest)
#' model <- lgb.train(params,
#' dtrain,
#' 100,
#' 10,
#' valids,
#' min_data = 1,
#' learning_rate = 1,
#' early_stopping_rounds = 10)
#' early_stopping_rounds = 5)
#' saveRDS.lgb.Booster(model, "model.rds")
#' new_model <- readRDS.lgb.Booster("model.rds")
#'
......
......@@ -25,11 +25,11 @@
#' model <- lgb.train(
#' params
#' , dtrain
#' , 100
#' , 10
#' , valids
#' , min_data = 1
#' , learning_rate = 1
#' , early_stopping_rounds = 10
#' , early_stopping_rounds = 5
#' )
#' saveRDS.lgb.Booster(model, "model.rds")
#' @export
......
......@@ -116,12 +116,12 @@ You may also read [Microsoft/LightGBM#912](https://github.com/microsoft/LightGBM
Examples
--------
Please visit [demo](demo):
* [Basic walkthrough of wrappers](demo/basic_walkthrough.R)
* [Boosting from existing prediction](demo/boost_from_prediction.R)
* [Early Stopping](demo/early_stopping.R)
* [Cross Validation](demo/cross_validation.R)
* [Multiclass Training/Prediction](demo/multiclass.R)
* [Leaf (in)Stability](demo/leaf_stability.R)
* [Weight-Parameter Adjustment Relationship](demo/weight_param.R)
Please visit [demo](https://github.com/microsoft/LightGBM/tree/master/R-package/demo):
* [Basic walkthrough of wrappers](https://github.com/microsoft/LightGBM/blob/master/R-package/demo/basic_walkthrough.R)
* [Boosting from existing prediction](https://github.com/microsoft/LightGBM/blob/master/R-package/demo/boost_from_prediction.R)
* [Early Stopping](https://github.com/microsoft/LightGBM/blob/master/R-package/demo/early_stopping.R)
* [Cross Validation](https://github.com/microsoft/LightGBM/blob/master/R-package/demo/cross_validation.R)
* [Multiclass Training/Prediction](https://github.com/microsoft/LightGBM/blob/master/R-package/demo/multiclass.R)
* [Leaf (in)Stability](https://github.com/microsoft/LightGBM/blob/master/R-package/demo/leaf_stability.R)
* [Weight-Parameter Adjustment Relationship](https://github.com/microsoft/LightGBM/blob/master/R-package/demo/weight_param.R)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment