lgb.train.Rd 2.5 KB
Newer Older
Guolin Ke's avatar
Guolin Ke committed
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/lgb.train.R, R/lightgbm.R
\name{lgb.train}
\alias{lgb.train}
\alias{lightgbm}
\title{Main training logic for LightGBM}
\usage{
lgb.train(params = list(), data, nrounds = 10, valids = list(),
  obj = NULL, eval = NULL, verbose = 1, eval_freq = 1L,
  init_model = NULL, colnames = NULL, categorical_feature = NULL,
  early_stopping_rounds = NULL, callbacks = list(), ...)

lightgbm(data, label = NULL, weight = NULL, params = list(),
  nrounds = 10, verbose = 1, eval_freq = 1L,
  early_stopping_rounds = NULL, save_name = "lightgbm.model",
  init_model = NULL, callbacks = list(), ...)
}
\arguments{
\item{params}{List of parameters}

\item{data}{a \code{lgb.Dataset} object, used for training}

\item{nrounds}{number of training rounds}

\item{valids}{a list of \code{lgb.Dataset} objects, used for validation}

\item{obj}{objective function, can be character or custom objective function}

\item{eval}{evaluation function, can be (list of) character or custom eval function}

\item{verbose}{verbosity for output;
if verbose > 0, iteration messages will also be recorded in booster$record_evals}

\item{eval_freq}{evaluation output frequency}

\item{init_model}{path of model file or \code{lgb.Booster} object, training will continue from this model}

\item{colnames}{feature names, if not null, will use this to overwrite the names in dataset}

\item{categorical_feature}{list of str or int
type int represents index,
type str represents feature names}

\item{early_stopping_rounds}{int
Activates early stopping.
Requires at least one validation dataset and one metric.
If there is more than one, all of them will be checked.
Returns the model with (best_iter + early_stopping_rounds).
If early stopping occurs, the model will have a 'best_iter' field}

\item{callbacks}{list of callback functions
that are applied at each iteration}

\item{...}{other parameters, see parameters.md for more information}
}
\value{
a trained booster model \code{lgb.Booster}.
}
\description{
Main training logic for LightGBM
}
\examples{
library(lightgbm)
data(agaricus.train, package='lightgbm')
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label=train$label)
data(agaricus.test, package='lightgbm')
test <- agaricus.test
dtest <- lgb.Dataset.create.valid(dtrain, test$data, label=test$label)
params <- list(objective="regression", metric="l2")
valids <- list(test=dtest)
model <- lgb.train(params, dtrain, 100, valids, min_data=1, learning_rate=1, early_stopping_rounds=10)

}