Commit 3ec345e2 authored by Laurae's avatar Laurae Committed by Guolin Ke
Browse files

Fix [R-package] Prevent remembering parameters (#799)

* Revert "[R-package] Prevent remembering parameters (#796)"

This reverts commit c795e2c8e12e8fe989bde0ab4ee59c2301325814.

* Use model load/unload trick for free-ing up memory

* Missing comma

* Hand copy manual update (1/2)

* Hand copy manual update (2/2)
parent 4e9b589b
Dataset <- R6Class( Dataset <- R6Class(
classname = "lgb.Dataset", classname = "lgb.Dataset",
cloneable = TRUE, cloneable = FALSE,
public = list( public = list(
# Logical to check whether a dataset can be used re-modeled in-memory as another Dataset or not
remodel = TRUE,
# Finalize will free up the handles # Finalize will free up the handles
finalize = function() { finalize = function() {
...@@ -279,9 +276,6 @@ Dataset <- R6Class( ...@@ -279,9 +276,6 @@ Dataset <- R6Class(
stop("lgb.Dataset.construct: label should be set") stop("lgb.Dataset.construct: label should be set")
} }
# Forcefully block construction
self$remodel <- FALSE
# Return self # Return self
return(invisible(self)) return(invisible(self))
......
...@@ -27,6 +27,7 @@ ...@@ -27,6 +27,7 @@
#' If there's more than one, will check all of them #' If there's more than one, will check all of them
#' Returns the model with (best_iter + early_stopping_rounds) #' Returns the model with (best_iter + early_stopping_rounds)
#' If early stopping occurs, the model will have 'best_iter' field #' If early stopping occurs, the model will have 'best_iter' field
#' @param reset_data Boolean, setting it to TRUE (not the default value) will transform the booster model into a predictor model which frees up memory and the original datasets
#' @param callbacks list of callback functions #' @param callbacks list of callback functions
#' List of callback functions that are applied at each iteration. #' List of callback functions that are applied at each iteration.
#' @param ... other parameters, see parameters.md for more information #' @param ... other parameters, see parameters.md for more information
...@@ -70,6 +71,7 @@ lgb.train <- function(params = list(), ...@@ -70,6 +71,7 @@ lgb.train <- function(params = list(),
categorical_feature = NULL, categorical_feature = NULL,
early_stopping_rounds = NULL, early_stopping_rounds = NULL,
callbacks = list(), callbacks = list(),
reset_data = FALSE,
...) { ...) {
# Setup temporary variables # Setup temporary variables
...@@ -153,10 +155,7 @@ lgb.train <- function(params = list(), ...@@ -153,10 +155,7 @@ lgb.train <- function(params = list(),
} }
# Construct datasets, if needed # Construct datasets, if needed
if (data$remodel == TRUE) { data$construct()
data <- data$clone(deep = FALSE)
data$construct()
}
vaild_contain_train <- FALSE vaild_contain_train <- FALSE
train_data_name <- "train" train_data_name <- "train"
reduced_valid_sets <- list() reduced_valid_sets <- list()
...@@ -218,7 +217,7 @@ lgb.train <- function(params = list(), ...@@ -218,7 +217,7 @@ lgb.train <- function(params = list(),
env$model <- booster env$model <- booster
env$begin_iteration <- begin_iteration env$begin_iteration <- begin_iteration
env$end_iteration <- end_iteration env$end_iteration <- end_iteration
# Start training model using number of iterations to start and end with # Start training model using number of iterations to start and end with
for (i in seq(from = begin_iteration, to = end_iteration)) { for (i in seq(from = begin_iteration, to = end_iteration)) {
...@@ -262,6 +261,22 @@ lgb.train <- function(params = list(), ...@@ -262,6 +261,22 @@ lgb.train <- function(params = list(),
} }
# Check for booster model conversion to predictor model
if (reset_data) {
# Store temporarily model data elsewhere
booster_old <- list(best_iter = booster$best_iter,
best_score = booster$best_score,
record_evals = booster$record_evals)
# Reload model
booster <- lgb.load(model_str = booster$save_model_to_string())
booster$best_iter <- booster_old$best_iter
booster$best_score <- booster_old$best_score
booster$record_evals <- booster_old$record_evals
}
# Return booster # Return booster
return(booster) return(booster)
......
...@@ -15,7 +15,8 @@ lgb.cv(params = list(), data, nrounds = 10, nfold = 3, label = NULL, ...@@ -15,7 +15,8 @@ lgb.cv(params = list(), data, nrounds = 10, nfold = 3, label = NULL,
lgb.train(params = list(), data, nrounds = 10, valids = list(), lgb.train(params = list(), data, nrounds = 10, valids = list(),
obj = NULL, eval = NULL, verbose = 1, record = TRUE, eval_freq = 1L, obj = NULL, eval = NULL, verbose = 1, record = TRUE, eval_freq = 1L,
init_model = NULL, colnames = NULL, categorical_feature = NULL, init_model = NULL, colnames = NULL, categorical_feature = NULL,
early_stopping_rounds = NULL, callbacks = list(), ...) early_stopping_rounds = NULL, callbacks = list(), reset_data = FALSE,
...)
lightgbm(data, label = NULL, weight = NULL, params = list(), lightgbm(data, label = NULL, weight = NULL, params = list(),
nrounds = 10, verbose = 1, eval_freq = 1L, nrounds = 10, verbose = 1, eval_freq = 1L,
...@@ -78,6 +79,8 @@ List of callback functions that are applied at each iteration.} ...@@ -78,6 +79,8 @@ List of callback functions that are applied at each iteration.}
\item{valids}{a list of \code{lgb.Dataset} objects, used for validation} \item{valids}{a list of \code{lgb.Dataset} objects, used for validation}
\item{reset_data}{Boolean, setting it to TRUE (not the default value) will transform the booster model into a predictor model which frees up memory and the original datasets}
\item{boosting}{boosting type. \code{gbdt}, \code{dart}} \item{boosting}{boosting type. \code{gbdt}, \code{dart}}
\item{num_leaves}{number of leaves in one tree. defaults to 127} \item{num_leaves}{number of leaves in one tree. defaults to 127}
......
...@@ -5,7 +5,8 @@ ...@@ -5,7 +5,8 @@
\title{Predict method for LightGBM model} \title{Predict method for LightGBM model}
\usage{ \usage{
\method{predict}{lgb.Booster}(object, data, num_iteration = NULL, \method{predict}{lgb.Booster}(object, data, num_iteration = NULL,
rawscore = FALSE, predleaf = FALSE, header = FALSE, reshape = FALSE) rawscore = FALSE, predleaf = FALSE, header = FALSE, reshape = FALSE,
...)
} }
\arguments{ \arguments{
\item{object}{Object of class \code{lgb.Booster}} \item{object}{Object of class \code{lgb.Booster}}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment