Commit e7431447 authored by Tyler Hunt, committed by Guolin Ke

added a little documentation (#210)

parent 2e100eeb
...@@ -26,7 +26,14 @@ CVBooster <- R6Class(
#' @param nfold the original dataset is randomly partitioned into \code{nfold} equal size subsamples.
#' @param label vector of response values. Should be provided only when data is an R-matrix.
#' @param weight vector of weights. If not NULL, will be set on the dataset
#' @param obj objective function, can be character or custom objective function. Examples include
#' \code{regression}, \code{regression_l1}, \code{huber},
#' \code{binary}, \code{lambdarank}, \code{multiclass}
#' @param boosting boosting type, can be \code{gbdt} or \code{dart}
#' @param num_leaves number of leaves in one tree, defaults to 127
#' @param max_depth Limit the max depth of the tree model. This is used to deal with overfitting when #data is small.
#' The tree still grows leaf-wise.
#' @param num_threads Number of threads for LightGBM. For the best speed, set this to the number of real CPU cores, not the number of threads (most CPUs use hyper-threading to generate 2 threads per CPU core).
#' @param eval evaluation function, can be (a list of) character or custom eval function
#' @param verbose verbosity for output; if <= 0, printing of evaluation during training is also disabled
#' @param record Boolean, TRUE will record iteration messages to \code{booster$record_evals}
...
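For readers skimming the diff, here is a minimal sketch of how the newly documented lgb.cv() parameters fit together. The agaricus data bundled with the package is used purely for illustration; the parameter values are arbitrary examples, not recommendations.

```r
library(lightgbm)

# Example dataset shipped with the package
data(agaricus.train, package = "lightgbm")
dtrain <- lgb.Dataset(agaricus.train$data, label = agaricus.train$label)

# Cross-validation with a character objective and the tree parameters
# described above (illustrative values only)
cv_model <- lgb.cv(
  params = list(
    boosting = "gbdt",   # boosting type: gbdt or dart
    num_leaves = 127,    # number of leaves in one tree
    max_depth = -1,      # no depth limit; trees still grow leaf-wise
    num_threads = 2      # ideally the number of real CPU cores
  ),
  data = dtrain,
  nrounds = 10,
  nfold = 3,
  obj = "binary",        # objective given as a character string
  eval = "auc",
  verbose = 1
)
```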
...@@ -4,7 +4,14 @@
#' @param data a \code{lgb.Dataset} object, used for training
#' @param nrounds number of training rounds
#' @param valids a list of \code{lgb.Dataset} objects, used for validation
#' @param obj objective function, can be character or custom objective function. Examples include
#' \code{regression}, \code{regression_l1}, \code{huber},
#' \code{binary}, \code{lambdarank}, \code{multiclass}
#' @param boosting boosting type, can be \code{gbdt} or \code{dart}
#' @param num_leaves number of leaves in one tree, defaults to 127
#' @param max_depth Limit the max depth of the tree model. This is used to deal with overfitting when #data is small.
#' The tree still grows leaf-wise.
#' @param num_threads Number of threads for LightGBM. For the best speed, set this to the number of real CPU cores, not the number of threads (most CPUs use hyper-threading to generate 2 threads per CPU core).
#' @param eval evaluation function, can be (a list of) character or custom eval function
#' @param verbose verbosity for output; if <= 0, printing of evaluation during training is also disabled
#' @param record Boolean, TRUE will record iteration messages to \code{booster$record_evals}
...
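A corresponding sketch for lgb.train(), again using the bundled agaricus data with illustrative values; lgb.Dataset.create.valid() builds the validation set referenced by valids.

```r
library(lightgbm)

data(agaricus.train, package = "lightgbm")
data(agaricus.test, package = "lightgbm")

dtrain <- lgb.Dataset(agaricus.train$data, label = agaricus.train$label)
dvalid <- lgb.Dataset.create.valid(dtrain, agaricus.test$data, label = agaricus.test$label)

# Training with a validation set; record = TRUE keeps the per-iteration
# evaluation results in booster$record_evals
bst <- lgb.train(
  params = list(num_leaves = 127, num_threads = 2),
  data = dtrain,
  nrounds = 10,
  valids = list(valid = dvalid),
  obj = "binary",
  eval = "auc",
  verbose = 1,
  record = TRUE,
  eval_freq = 1L
)
```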
...@@ -7,13 +7,13 @@
\title{Main CV logic for LightGBM}
\usage{
lgb.cv(params = list(), data, nrounds = 10, nfold = 3, label = NULL,
  weight = NULL, obj = NULL, eval = NULL, verbose = 1, record = TRUE,
  eval_freq = 1L, showsd = TRUE, stratified = TRUE, folds = NULL,
  init_model = NULL, colnames = NULL, categorical_feature = NULL,
  early_stopping_rounds = NULL, callbacks = list(), ...)

lgb.train(params = list(), data, nrounds = 10, valids = list(),
  obj = NULL, eval = NULL, verbose = 1, record = TRUE, eval_freq = 1L,
  init_model = NULL, colnames = NULL, categorical_feature = NULL,
  early_stopping_rounds = NULL, callbacks = list(), ...)
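Both usage blocks accept obj and eval as R functions as well as character names. Below is a hedged sketch of that custom-function form; the names logregobj and binary_error are made up for illustration, and the (preds, dtrain) signatures follow the package's convention of returning list(grad, hess) for objectives and list(name, value, higher_better) for evaluation metrics.

```r
# Custom objective: logistic loss, returning gradient and hessian
logregobj <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  preds <- 1 / (1 + exp(-preds))
  list(grad = preds - labels, hess = preds * (1 - preds))
}

# Custom evaluation metric: binary classification error
binary_error <- function(preds, dtrain) {
  labels <- getinfo(dtrain, "label")
  list(name = "error",
       value = mean(as.numeric(preds > 0) != labels),
       higher_better = FALSE)
}

# dtrain is the lgb.Dataset from the previous sketch; evaluating on the
# training data here just so the custom metric has something to report
bst_custom <- lgb.train(
  params = list(num_leaves = 31),
  data = dtrain,
  nrounds = 10,
  valids = list(train = dtrain),
  obj = logregobj,     # custom objective function
  eval = binary_error  # custom eval function
)
```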
...@@ -35,14 +35,17 @@ lightgbm(data, label = NULL, weight = NULL, params = list(),
\item{weight}{vector of weights. If not NULL, will be set on the dataset}
\item{obj}{objective function, can be character or custom objective function. Examples include
\code{regression}, \code{regression_l1}, \code{huber},
\code{binary}, \code{lambdarank}, \code{multiclass}}
\item{eval}{evaluation function, can be (a list of) character or custom eval function}
\item{verbose}{verbosity for output; if <= 0, printing of evaluation during training is also disabled}
\item{record}{Boolean, TRUE will record iteration messages to \code{booster$record_evals}}
\item{eval_freq}{evaluation output frequency, only has an effect when verbose > 0}
\item{showsd}{\code{boolean}, whether to show standard deviation of cross validation}
...@@ -75,20 +78,41 @@ List of callback functions that are applied at each iteration.}
\item{valids}{a list of \code{lgb.Dataset} objects, used for validation}
\item{boosting}{boosting type, can be \code{gbdt} or \code{dart}}
\item{num_leaves}{number of leaves in one tree, defaults to 127}
\item{max_depth}{Limit the max depth of the tree model. This is used to deal with overfitting when #data is small.
The tree still grows leaf-wise.}
\item{num_threads}{Number of threads for LightGBM. For the best speed, set this to the number of real CPU cores, not the number of threads (most CPUs use hyper-threading to generate 2 threads per CPU core).}
\item{params}{List of parameters}
\item{data}{a \code{lgb.Dataset} object, used for training}
\item{nrounds}{number of training rounds}
\item{obj}{objective function, can be character or custom objective function. Examples include
\code{regression}, \code{regression_l1}, \code{huber},
\code{binary}, \code{lambdarank}, \code{multiclass}}
\item{boosting}{boosting type, can be \code{gbdt} or \code{dart}}
\item{num_leaves}{number of leaves in one tree, defaults to 127}
\item{max_depth}{Limit the max depth of the tree model. This is used to deal with overfitting when #data is small.
The tree still grows leaf-wise.}
\item{num_threads}{Number of threads for LightGBM. For the best speed, set this to the number of real CPU cores, not the number of threads (most CPUs use hyper-threading to generate 2 threads per CPU core).}
\item{eval}{evaluation function, can be (a list of) character or custom eval function}
\item{verbose}{verbosity for output; if <= 0, printing of evaluation during training is also disabled}
\item{record}{Boolean, TRUE will record iteration messages to \code{booster$record_evals}}
\item{eval_freq}{evaluation output frequency, only has an effect when verbose > 0}
\item{init_model}{path of model file of \code{lgb.Booster} object, will continue training from this model}
...@@ -111,7 +135,7 @@ List of callback functions that are applied at each iteration.}
\item{...}{other parameters, see parameters.md for more information}
}
\value{
a trained model \code{lgb.CVBooster}.

a trained booster model \code{lgb.Booster}.
}
...
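Finally, a short sketch of the return values documented above: lgb.cv() now returns an lgb.CVBooster while lgb.train() returns an lgb.Booster, and with record = TRUE the iteration messages end up in $record_evals (objects taken from the earlier sketches).

```r
class(cv_model)   # expected to include "lgb.CVBooster"
class(bst)        # expected to include "lgb.Booster"

# Per-iteration evaluation results recorded because record = TRUE
str(bst$record_evals, max.level = 2)
```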