% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/lgb.Booster.R
\name{lgb.dump}
\alias{lgb.dump}
\title{Dump LightGBM model to json}
\usage{
lgb.dump(booster, num_iteration = NULL, start_iteration = 1L)
}
\arguments{
\item{booster}{Object of class \code{lgb.Booster}}

\item{num_iteration}{Number of iterations to be dumped. NULL or <= 0 means use best iteration}

\item{start_iteration}{Index (1-based) of the first boosting round to dump.
For example, passing \code{start_iteration=5, num_iteration=3} for a regression model
means "dump the fifth, sixth, and seventh tree"}
}
\value{
json format of model
}
\description{
Dump LightGBM model to json
}
\examples{
\donttest{
library(lightgbm)
\dontshow{setLGBMthreads(2L)}
\dontshow{data.table::setDTthreads(1L)}
data(agaricus.train, package = "lightgbm")
train <- agaricus.train
dtrain <- lgb.Dataset(train$data, label = train$label)
data(agaricus.test, package = "lightgbm")
test <- agaricus.test
dtest <- lgb.Dataset.create.valid(dtrain, test$data, label = test$label)
params <- list(
  objective = "regression"
  , metric = "l2"
  , min_data = 1L
  , learning_rate = 1.0
  , num_threads = 2L
)
valids <- list(test = dtest)
model <- lgb.train(
  params = params
  , data = dtrain
  , nrounds = 10L
  , valids = valids
  , early_stopping_rounds = 5L
)
json_model <- lgb.dump(model)
}
}