% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/lgb.cv.R
\name{lgb.cv}
\alias{lgb.cv}
\title{Main CV logic for LightGBM}
\usage{
lgb.cv(params = list(), data, nrounds = 10, nfold = 3, label = NULL,
  weight = NULL, obj = NULL, eval = NULL, verbose = 1, record = TRUE,
  eval_freq = 1L, showsd = TRUE, stratified = TRUE, folds = NULL,
  init_model = NULL, colnames = NULL, categorical_feature = NULL,
  early_stopping_rounds = NULL, callbacks = list(), ...)
}
\arguments{
\item{params}{List of parameters}

\item{data}{a \code{lgb.Dataset} object, used for training}

\item{nrounds}{number of training rounds}

\item{nfold}{the original dataset is randomly partitioned into \code{nfold} equal-size subsamples}

\item{label}{vector of response values. Should be provided only when \code{data} is an R matrix.}

\item{weight}{vector of weights for each record. If not NULL, it will be set in the dataset.}

\item{obj}{objective function, can be character or custom objective function. Examples include
\code{regression}, \code{regression_l1}, \code{huber},
\code{binary}, \code{lambdarank}, \code{multiclass}}

\item{eval}{evaluation function; can be a (list of) character or a custom eval function}

\item{verbose}{verbosity for output; if <= 0, printing of the evaluation during training is also disabled}

\item{record}{Boolean; if TRUE, iteration messages will be recorded in \code{booster$record_evals}}

\item{eval_freq}{evaluation output frequency; only has an effect when verbose > 0}

\item{showsd}{\code{boolean}, whether to show the standard deviation of cross-validation metrics}

\item{stratified}{a \code{boolean} indicating whether sampling of folds should be stratified
by the values of outcome labels.}

\item{folds}{\code{list} of pre-defined CV folds to use (each element must be a vector of
the test fold's indices). When folds are supplied, the \code{nfold} and
\code{stratified} parameters are ignored; see the second example below.}

\item{init_model}{path of model file or \code{lgb.Booster} object; will continue training from this model}

\item{colnames}{feature names; if not NULL, will be used to overwrite the names in the dataset}

\item{categorical_feature}{list of str or int.
Type int represents column index;
type str represents feature names.}

\item{early_stopping_rounds}{int
Activates early stopping.
Requires at least one validation dataset and one metric.
If there is more than one, all of them will be checked except the training data.
Returns the model with (best_iter + early_stopping_rounds) rounds.
If early stopping occurs, the model will have a 'best_iter' field.}

\item{callbacks}{list of callback functions that are applied at each iteration}

\item{...}{other parameters, see Parameters.rst for more information. A few key parameters:
\itemize{
    \item{boosting}{Boosting type. \code{"gbdt"} or \code{"dart"}}
    \item{num_leaves}{Number of leaves in one tree. Defaults to 127.}
    \item{max_depth}{Limit the max depth of the tree model. This is used to deal with
                     overfitting when #data is small. Trees still grow leaf-wise.}
    \item{num_threads}{Number of threads for LightGBM. For the best speed, set this to
                       the number of real CPU cores, not the number of threads (most
                       CPUs use hyper-threading to generate 2 threads per CPU core).}
}}
}
\value{
a trained \code{lgb.CVBooster} model.
}
\description{
Cross-validation logic used by LightGBM
}
\examples{
library(lightgbm)
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
params <- list(objective = "regression", metric = "l2")
model <- lgb.cv(params = params,
                data = dtrain,
                nrounds = 10,
                nfold = 5,
                min_data = 1,
                learning_rate = 1,
                early_stopping_rounds = 10)
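
# --- Additional illustrative sketches, not part of the original example ---

# Pre-defined CV folds can be passed via `folds`; each element is a vector of
# test-fold indices, and `nfold` / `stratified` are then ignored. The fold
# assignment below is a simple non-random split, purely for illustration.
folds <- split(seq_len(nrow(train$data)), rep(1:5, length.out = nrow(train$data)))
model_folds <- lgb.cv(params = params,
                      data = dtrain,
                      nrounds = 10,
                      folds = folds,
                      min_data = 1,
                      learning_rate = 1)

# With record = TRUE (the default), iteration results are stored in
# `record_evals`, and `best_iter` is set when early stopping occurs.
print(model$best_iter)
str(model$record_evals)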
}