lgb.Booster.R 46 KB
Newer Older
James Lamb's avatar
James Lamb committed
1
#' @importFrom R6 R6Class
2
#' @importFrom utils modifyList
James Lamb's avatar
James Lamb committed
3
Booster <- R6::R6Class(
  classname = "lgb.Booster",
  cloneable = FALSE,
  public = list(

    # Index of the best iteration found during training (-1L until set)
    best_iter = -1L,
    # Score achieved at best_iter (NA_real_ until evaluation sets it)
    best_score = NA_real_,
    # Named list of the parameters this Booster was built with
    params = list(),
    # Nested list of evaluation results recorded during training
    record_evals = list(),
    # NOTE(review): not referenced in this file; appears to hold an external
    # post-processing helper for predictions -- confirm against callers
    data_processor = NULL,

    # Finalize will free up the handles
    finalize = function() {
      # Release the native booster handle on the C++ side; handle is NULL-ed
      # afterwards so repeated finalization is harmless
      .Call(
        LGBM_BoosterFree_R
        , private$handle
      )
      private$handle <- NULL
      return(invisible(NULL))
    },
23

24
25
    # Initialize will create a starter booster
    # Create a Booster from exactly one of: a training Dataset, a saved model
    # file on disk, or an in-memory model string / raw vector.
    #
    # params:    named list of training parameters
    # train_set: lgb.Dataset to train from
    # modelfile: character path to a saved model text file
    # model_str: model serialized as a character string or raw vector
    initialize = function(params = list(),
                          train_set = NULL,
                          modelfile = NULL,
                          model_str = NULL) {

      handle <- NULL

      if (!is.null(train_set)) {

        if (!.is_Dataset(train_set)) {
          stop("lgb.Booster: Can only use lgb.Dataset as training data")
        }
        train_set_handle <- train_set$.__enclos_env__$private$get_handle()
        # Dataset-level parameters override those passed to this constructor
        # (modifyList keeps `params` entries not present in get_params())
        params <- utils::modifyList(params, train_set$get_params())
        params_str <- .params2str(params = params)
        # Store booster handle
        handle <- .Call(
          LGBM_BoosterCreate_R
          , train_set_handle
          , params_str
        )

        # Create private booster information
        private$train_set <- train_set
        private$train_set_version <- train_set$.__enclos_env__$private$version
        private$num_dataset <- 1L
        private$init_predictor <- train_set$.__enclos_env__$private$predictor

        if (!is.null(private$init_predictor)) {

          # Merge the init predictor's model into the freshly created booster
          .Call(
            LGBM_BoosterMerge_R
            , handle
            , private$init_predictor$.__enclos_env__$private$handle
          )

        }

        # Check current iteration: no predictions cached yet for this dataset
        private$is_predicted_cur_iter <- c(private$is_predicted_cur_iter, FALSE)

      } else if (!is.null(modelfile)) {

        # Do we have a model file as character?
        if (!is.character(modelfile)) {
          stop("lgb.Booster: Can only use a string as model file path")
        }

        modelfile <- path.expand(modelfile)

        # Create booster from model
        handle <- .Call(
          LGBM_BoosterCreateFromModelfile_R
          , modelfile
        )
        # Recover the parameter list stored inside the model file
        params <- private$get_loaded_param(handle)

      } else if (!is.null(model_str)) {

        # Do we have a model_str as character/raw?
        if (!is.raw(model_str) && !is.character(model_str)) {
          stop("lgb.Booster: Can only use a character/raw vector as model_str")
        }

        # Create booster from model
        handle <- .Call(
          LGBM_BoosterLoadModelFromString_R
          , model_str
        )

      } else {

        # Booster non existent
        stop(
          "lgb.Booster: Need at least either training dataset, "
          , "model file, or model_str to create booster instance"
        )

      }

      class(handle) <- "lgb.Booster.handle"
      private$handle <- handle
      private$num_class <- 1L
      # Filled in-place by the native call
      .Call(
        LGBM_BoosterGetNumClasses_R
        , private$handle
        , private$num_class
      )

      self$params <- params

      return(invisible(NULL))

    },
120

121
    # Assign a display name to the training Dataset (shown in evaluation logs)
    set_train_data_name = function(name) {
      private$name_train_set <- name
      return(invisible(self))
    },
129

130
    # Register an additional validation Dataset under the supplied name.
    #
    # data: lgb.Dataset to evaluate against during training
    # name: character label used for this dataset in evaluation output
    # Returns the Booster invisibly (chainable).
    add_valid = function(data, name) {

      if (!.is_Dataset(data)) {
        stop("lgb.Booster.add_valid: Can only use lgb.Dataset as validation data")
      }

      # Validation data must have been constructed against the same predictor
      # the training data used, otherwise feature mapping would not line up
      if (!identical(data$.__enclos_env__$private$predictor, private$init_predictor)) {
        stop(
          "lgb.Booster.add_valid: Failed to add validation data; "
          , "you should use the same predictor for these data"
        )
      }

      if (!is.character(name)) {
        stop("lgb.Booster.add_valid: Can only use characters as data name")
      }

      # Add validation data to booster (native side)
      .Call(
        LGBM_BoosterAddValidData_R
        , private$handle
        , data$.__enclos_env__$private$get_handle()
      )

      # Mirror the registration on the R side; the new dataset has no cached
      # predictions yet
      private$valid_sets <- c(private$valid_sets, data)
      private$name_valid_sets <- c(private$name_valid_sets, name)
      private$num_dataset <- private$num_dataset + 1L
      private$is_predicted_cur_iter <- c(private$is_predicted_cur_iter, FALSE)

      return(invisible(self))

    },
163

164
    # Reset the Booster's training parameters on the native side.
    #
    # params: named list of parameter overrides; merged on top of the
    #         parameters currently stored in self$params.
    # Returns the Booster invisibly (chainable).
    reset_parameter = function(params) {

      # is.list() is the idiomatic check here; methods::is(x, "list") was
      # equivalent for plain lists but needlessly indirect
      if (is.list(self$params)) {
        params <- utils::modifyList(self$params, params)
      }

      params_str <- .params2str(params = params)

      # Re-materialize the native handle if the model currently only exists
      # as serialized raw bytes
      self$restore_handle()

      .Call(
        LGBM_BoosterResetParameter_R
        , private$handle
        , params_str
      )
      self$params <- params

      return(invisible(self))

    },
184

185
    # Perform one boosting update iteration.
    #
    # train_set: optional lgb.Dataset to (re)bind as training data; when NULL,
    #            the stored training set is re-bound only if its version
    #            changed since the last binding.
    # fobj:      optional custom objective function(preds, dataset) that must
    #            return list(grad = ..., hess = ...) with one entry per
    #            prediction.
    # Returns the Booster invisibly (chainable).
    update = function(train_set = NULL, fobj = NULL) {

      # Re-bind the stored Dataset if it was modified since the last update
      if (is.null(train_set)) {
        if (private$train_set$.__enclos_env__$private$version != private$train_set_version) {
          train_set <- private$train_set
        }
      }

      if (!is.null(train_set)) {

        if (!.is_Dataset(train_set)) {
          stop("lgb.Booster.update: Only can use lgb.Dataset as training data")
        }

        # FIX: read the Dataset's private predictor field, as initialize() and
        # add_valid() do. `train_set$predictor` is not a public field on
        # lgb.Dataset and always evaluated to NULL, making this consistency
        # check pass vacuously whenever init_predictor was NULL.
        if (!identical(train_set$.__enclos_env__$private$predictor, private$init_predictor)) {
          stop("lgb.Booster.update: Change train_set failed, you should use the same predictor for these data")
        }

        .Call(
          LGBM_BoosterResetTrainingData_R
          , private$handle
          , train_set$.__enclos_env__$private$get_handle()
        )

        private$train_set <- train_set
        private$train_set_version <- train_set$.__enclos_env__$private$version

      }

      # Check if objective is empty
      if (is.null(fobj)) {

        # Once the objective was set to "none" for a custom objective, the
        # built-in update path can no longer be used
        if (private$set_objective_to_none) {
          stop("lgb.Booster.update: cannot update due to null objective function")
        }
        # Boost iteration from known objective
        .Call(
          LGBM_BoosterUpdateOneIter_R
          , private$handle
        )

      } else {

        if (!is.function(fobj)) {
          stop("lgb.Booster.update: fobj should be a function")
        }
        # A custom objective requires disabling the built-in objective once
        if (!private$set_objective_to_none) {
          self$reset_parameter(params = list(objective = "none"))
          private$set_objective_to_none <- TRUE
        }
        # Perform objective calculation on the current training predictions
        preds <- private$inner_predict(1L)
        gpair <- fobj(preds, private$train_set)

        # Check for gradient and hessian as list
        if (is.null(gpair$grad) || is.null(gpair$hess)) {
          stop("lgb.Booster.update: custom objective should
            return a list with attributes (hess, grad)")
        }

        # Check grad and hess have the right shape
        n_grad <- length(gpair$grad)
        n_hess <- length(gpair$hess)
        n_preds <- length(preds)
        if (n_grad != n_preds) {
          stop(sprintf("Expected custom objective function to return grad with length %d, got %d.", n_preds, n_grad))
        }
        if (n_hess != n_preds) {
          stop(sprintf("Expected custom objective function to return hess with length %d, got %d.", n_preds, n_hess))
        }

        # Return custom boosting gradient/hessian
        .Call(
          LGBM_BoosterUpdateOneIterCustom_R
          , private$handle
          , gpair$grad
          , gpair$hess
          , n_preds
        )

      }

      # Invalidate cached predictions for every bound dataset: they refer to
      # the pre-update model
      for (i in seq_along(private$is_predicted_cur_iter)) {
        private$is_predicted_cur_iter[[i]] <- FALSE
      }

      return(invisible(self))

    },
275

276
    # Roll the model back by one boosting iteration
    rollback_one_iter = function() {

      # The handle may have been invalidated by serialization; restore first
      self$restore_handle()

      .Call(
        LGBM_BoosterRollbackOneIter_R
        , private$handle
      )

      # Cached predictions refer to the rolled-back state; drop them all
      private$is_predicted_cur_iter <- lapply(
        private$is_predicted_cur_iter
        , function(flag) FALSE
      )

      return(invisible(self))
    },
294

295
    # Number of boosting iterations currently in the model
    current_iter = function() {

      self$restore_handle()

      # Filled in-place by the native call
      iter_count <- 0L
      .Call(
        LGBM_BoosterGetCurrentIteration_R
        , private$handle
        , iter_count
      )
      return(iter_count)
    },
309

310
    # Upper bound of the model's output values
    upper_bound = function() {

      self$restore_handle()

      # Filled in-place by the native call
      bound <- 0.0
      .Call(
        LGBM_BoosterGetUpperBoundValue_R
        , private$handle
        , bound
      )
      return(bound)
    },

    # Lower bound of the model's output values
    lower_bound = function() {

      self$restore_handle()

      # Filled in-place by the native call
      bound <- 0.0
      .Call(
        LGBM_BoosterGetLowerBoundValue_R
        , private$handle
        , bound
      )
      return(bound)
    },

340
    # Evaluate a Dataset on the configured metrics (plus an optional feval)
    eval = function(data, name, feval = NULL) {

      if (!.is_Dataset(data)) {
        stop("lgb.Booster.eval: Can only use lgb.Dataset to eval")
      }

      # Locate `data` among the datasets already bound to this Booster:
      # index 1 is the training set, indices 2.. are validation sets
      data_idx <- 0L
      if (identical(data, private$train_set)) {
        data_idx <- 1L
      } else if (length(private$valid_sets) > 0L) {
        match_pos <- Position(function(v) identical(data, v), private$valid_sets)
        if (!is.na(match_pos)) {
          data_idx <- match_pos + 1L
        }
      }

      # Unknown dataset: register it as a new validation set under `name`
      if (data_idx == 0L) {
        self$add_valid(data, name)
        data_idx <- private$num_dataset
      }

      return(
        private$inner_eval(
          data_name = name
          , data_idx = data_idx
          , feval = feval
        )
      )

    },
392

393
    # Evaluate the training data on all configured metrics
    eval_train = function(feval = NULL) {
      return(private$inner_eval(data_name = private$name_train_set, data_idx = 1L, feval = feval))
    },
397

398
    # Evaluate every registered validation set; returns a flat list of
    # metric records (empty list when no validation sets are registered)
    eval_valid = function(feval = NULL) {

      if (length(private$valid_sets) <= 0L) {
        return(list())
      }

      results_per_set <- lapply(
        seq_along(private$valid_sets)
        , function(set_idx) {
          private$inner_eval(private$name_valid_sets[[set_idx]], set_idx + 1L, feval)
        }
      )

      # Each element is itself a list of metric records; flatten one level
      return(do.call(c, results_per_set))

    },
417

418
    # Write the model to a text file; returns the Booster invisibly
    save_model = function(filename, num_iteration = NULL, feature_importance_type = 0L) {

      self$restore_handle()

      # Default to the best iteration recorded during training
      if (is.null(num_iteration)) {
        num_iteration <- self$best_iter
      }

      # Expand "~" and similar shorthands before handing the path to C
      out_path <- path.expand(filename)

      .Call(
        LGBM_BoosterSaveModel_R
        , private$handle
        , as.integer(num_iteration)
        , as.integer(feature_importance_type)
        , out_path
      )

      return(invisible(self))
    },
439

440
441
442
    # Serialize the model in memory; returns raw bytes, or a single character
    # string when as_char = TRUE
    save_model_to_string = function(num_iteration = NULL, feature_importance_type = 0L, as_char = TRUE) {

      self$restore_handle()

      # Default to the best iteration recorded during training
      if (is.null(num_iteration)) {
        num_iteration <- self$best_iter
      }

      model_bytes <- .Call(
          LGBM_BoosterSaveModelToString_R
          , private$handle
          , as.integer(num_iteration)
          , as.integer(feature_importance_type)
      )

      if (as_char) {
        return(rawToChar(model_bytes))
      }

      return(model_bytes)

    },
462

463
    # Dump the model in memory and return the result of the native dump call
    dump_model = function(num_iteration = NULL, feature_importance_type = 0L) {

      self$restore_handle()

      # Default to the best iteration recorded during training
      if (is.null(num_iteration)) {
        num_iteration <- self$best_iter
      }

      dumped <- .Call(
        LGBM_BoosterDumpModel_R
        , private$handle
        , as.integer(num_iteration)
        , as.integer(feature_importance_type)
      )

      return(dumped)

    },
482

483
    # Predict on new data.
    #
    # data:            input features (matrix/sparse/path; validated downstream
    #                  by Predictor$predict)
    # start_iteration: first iteration to use (NULL -> 0L, i.e. from the start)
    # num_iteration:   iteration limit (NULL -> self$best_iter)
    # rawscore/predleaf/predcontrib: prediction-type switches, possibly
    #                  overridden by equivalent entries in `params`
    # header:          only used for prediction from a text file
    # params:          additional prediction parameters
    predict = function(data,
                       start_iteration = NULL,
                       num_iteration = NULL,
                       rawscore = FALSE,
                       predleaf = FALSE,
                       predcontrib = FALSE,
                       header = FALSE,
                       params = list()) {

      self$restore_handle()

      if (is.null(num_iteration)) {
        num_iteration <- self$best_iter
      }

      if (is.null(start_iteration)) {
        start_iteration <- 0L
      }

      # possibly override keyword arguments with parameters
      #
      # NOTE: this length() check minimizes the latency introduced by these checks,
      #       for the common case where params is empty
      #
      # NOTE: doing this here instead of in Predictor$predict() to keep
      #       Predictor$predict() as fast as possible
      if (length(params) > 0L) {
        params <- .check_wrapper_param(
          main_param_name = "predict_raw_score"
          , params = params
          , alternative_kwarg_value = rawscore
        )
        params <- .check_wrapper_param(
          main_param_name = "predict_leaf_index"
          , params = params
          , alternative_kwarg_value = predleaf
        )
        params <- .check_wrapper_param(
          main_param_name = "predict_contrib"
          , params = params
          , alternative_kwarg_value = predcontrib
        )
        # params take precedence over the keyword arguments
        rawscore <- params[["predict_raw_score"]]
        predleaf <- params[["predict_leaf_index"]]
        predcontrib <- params[["predict_contrib"]]
      }

      # Predict on new data; the fast-predict configuration (if any) is passed
      # along so Predictor can take the pre-configured single-row path
      predictor <- Predictor$new(
        modelfile = private$handle
        , params = params
        , fast_predict_config = private$fast_predict_config
      )
      return(
        predictor$predict(
          data = data
          , start_iteration = start_iteration
          , num_iteration = num_iteration
          , rawscore = rawscore
          , predleaf = predleaf
          , predcontrib = predcontrib
          , header = header
        )
      )

    },
550

551
552
    # Wrap the live native handle in a Predictor object (no model copy)
    to_predictor = function() {
      return(Predictor$new(modelfile = private$handle))
    },
555

556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
    # Pre-configure the native fast single-row prediction path.
    #
    # csr: TRUE to configure for sparse (CSR) single-row inputs, FALSE for
    #      dense matrix rows. Remaining arguments mirror predict().
    # The resulting configuration is stored privately and picked up by
    # predict() via fast_predict_config.
    configure_fast_predict = function(csr = FALSE,
                                      start_iteration = NULL,
                                      num_iteration = NULL,
                                      rawscore = FALSE,
                                      predleaf = FALSE,
                                      predcontrib = FALSE,
                                      params = list()) {

      self$restore_handle()
      ncols <- .Call(LGBM_BoosterGetNumFeature_R, private$handle)

      # -1L is the native sentinel for "use all iterations"
      if (is.null(num_iteration)) {
        num_iteration <- -1L
      }
      if (is.null(start_iteration)) {
        start_iteration <- 0L
      }

      # Pick the dense or sparse fast-init entry point
      if (!csr) {
        fun <- LGBM_BoosterPredictForMatSingleRowFastInit_R
      } else {
        fun <- LGBM_BoosterPredictForCSRSingleRowFastInit_R
      }

      fast_handle <- .Call(
        fun
        , private$handle
        , ncols
        , rawscore
        , predleaf
        , predcontrib
        , start_iteration
        , num_iteration
        , .params2str(params = params)
      )

      # Remember the full configuration so predict() can verify its arguments
      # match before taking the fast path
      private$fast_predict_config <- list(
        handle = fast_handle
        , csr = as.logical(csr)
        , ncols = ncols
        , start_iteration = start_iteration
        , num_iteration = num_iteration
        , rawscore = as.logical(rawscore)
        , predleaf = as.logical(predleaf)
        , predcontrib = as.logical(predcontrib)
        , params = params
      )

      return(invisible(NULL))
    },

607
608
    # Used for serialization: raw model bytes, or NULL when not cached
    raw = NULL,

    # Cache the serialized model bytes on the object (no-op if already cached)
    save_raw = function() {
      if (is.null(self$raw)) {
        self$raw <- self$save_model_to_string(num_iteration = NULL, as_char = FALSE)
      }
      return(invisible(NULL))
    },
618

619
620
    # Discard the cached serialized model bytes
    drop_raw = function() {
      self$raw <- NULL
      return(invisible(NULL))
    },
623

624
    # TRUE when the native handle is absent/invalid (e.g. after the object
    # was deserialized and the handle not yet restored)
    check_null_handle = function() {
      return(.is_null_handle(private$handle))
    },

    # Re-create the native handle from the cached raw bytes if it has been
    # invalidated; raises the native null-handle error when there are no
    # serialized bytes to restore from
    restore_handle = function() {
      if (self$check_null_handle()) {
        if (is.null(self$raw)) {
          .Call(LGBM_NullBoosterHandleError_R)
        }
        private$handle <- .Call(LGBM_BoosterLoadModelFromString_R, self$raw)
      }
      return(invisible(NULL))
    },

    # Expose the native booster handle (NULL when freed or not yet restored)
    get_handle = function() {
      return(private$handle)
    }
641

Guolin Ke's avatar
Guolin Ke committed
642
643
  ),
  private = list(
    # Native booster handle (external pointer), NULL when freed
    handle = NULL,
    # lgb.Dataset currently bound as training data
    train_set = NULL,
    # Display name for the training set in evaluation output
    name_train_set = "training",
    # Registered validation lgb.Dataset objects, parallel to name_valid_sets
    valid_sets = list(),
    # Display names for the validation sets
    name_valid_sets = list(),
    # Per-dataset-name cache of prediction vectors (see inner_predict)
    predict_buffer = list(),
    # Per-dataset flags: TRUE when predict_buffer holds current-iteration preds
    is_predicted_cur_iter = list(),
    # Number of classes, filled from the native booster in initialize()
    num_class = 1L,
    # Count of bound datasets (training + validation)
    num_dataset = 0L,
    # Predictor the training Dataset was built against (continued training)
    init_predictor = NULL,
    # Cached metric names from the native booster (see get_eval_info)
    eval_names = NULL,
    # Per-metric "higher is better" flags, parallel to eval_names
    higher_better_inner_eval = NULL,
    # TRUE once objective was set to "none" for custom-objective updates
    set_objective_to_none = FALSE,
    # Version of the training Dataset at last binding (re-bind when it changes)
    train_set_version = 0L,
    # Fast single-row prediction configuration (see configure_fast_predict)
    fast_predict_config = list(),
659
660
    # Return (and cache) the model's predictions on the idx-th bound dataset:
    # idx == 1L is the training set, idx > 1L indexes the validation sets.
    inner_predict = function(idx) {

      # FIX: validate the range before using idx to index name_valid_sets;
      # previously an out-of-range idx died with a "subscript out of bounds"
      # error from the name lookup instead of this intended message
      if (idx > private$num_dataset) {
        stop("data_idx should not be greater than num_dataset")
      }

      # Store data name
      data_name <- private$name_train_set
      if (idx > 1L) {
        data_name <- private$name_valid_sets[[idx - 1L]]
      }

      # Check for prediction buffer; allocate one sized to the number of
      # predictions the native booster reports for this dataset
      if (is.null(private$predict_buffer[[data_name]])) {

        # Filled in-place by the native call
        npred <- 0L
        .Call(
          LGBM_BoosterGetNumPredict_R
          , private$handle
          , as.integer(idx - 1L)
          , npred
        )
        private$predict_buffer[[data_name]] <- numeric(npred)

      }

      # Refresh the cached buffer only if the current iteration has not been
      # predicted yet for this dataset
      if (!private$is_predicted_cur_iter[[idx]]) {

        .Call(
          LGBM_BoosterGetPredict_R
          , private$handle
          , as.integer(idx - 1L)
          , private$predict_buffer[[data_name]]
        )
        private$is_predicted_cur_iter[[idx]] <- TRUE
      }

      return(private$predict_buffer[[data_name]])
    },
704

705
    # Lazily fetch (and cache) the metric names configured on the native
    # booster, along with each metric's "higher is better" flag.
    # Returns the cached character vector of metric names (NULL if none).
    get_eval_info = function() {

      if (is.null(private$eval_names)) {
        eval_names <- .Call(
          LGBM_BoosterGetEvalNames_R
          , private$handle
        )

        if (length(eval_names) > 0L) {

          # Parse and store privately names
          private$eval_names <- eval_names

          # some metrics don't map cleanly to metric names, for example "ndcg@1" is just the
          # ndcg metric evaluated at the first "query result" in learning-to-rank
          metric_names <- gsub("@.*", "", eval_names)
          private$higher_better_inner_eval <- .METRICS_HIGHER_BETTER()[metric_names]

        }

      }

      return(private$eval_names)

    },
731

732
733
734
735
736
737
738
739
740
741
742
743
744
745
    # Recover the parameter list stored inside a loaded model (JSON on the
    # native side), converting interaction constraints to R's 1-based indices
    get_loaded_param = function(handle) {
      params_json <- .Call(
        LGBM_BoosterGetLoadedParam_R
        , handle
      )
      loaded_params <- jsonlite::fromJSON(params_json)
      if ("interaction_constraints" %in% names(loaded_params)) {
        loaded_params[["interaction_constraints"]] <- lapply(
          loaded_params[["interaction_constraints"]]
          , function(feature_idx) feature_idx + 1L
        )
      }

      return(loaded_params)

    },

Guolin Ke's avatar
Guolin Ke committed
746
    # Evaluate the data_idx-th bound dataset on all built-in metrics and,
    # optionally, a custom feval function.
    #
    # data_name: label attached to each result record
    # data_idx:  1L for the training set, 2L.. for validation sets
    # feval:     optional function(preds, dataset) returning
    #            list(name, value, higher_better)
    # Returns a list of result records, each with data_name, name, value,
    # and higher_better fields.
    inner_eval = function(data_name, data_idx, feval = NULL) {

      # Check for unknown dataset (over the maximum provided range)
      if (data_idx > private$num_dataset) {
        stop("data_idx should not be greater than num_dataset")
      }

      self$restore_handle()

      # Ensure metric names / higher-better flags are cached
      private$get_eval_info()

      ret <- list()

      if (length(private$eval_names) > 0L) {

        # Create evaluation values; filled in-place by the native call
        tmp_vals <- numeric(length(private$eval_names))
        .Call(
          LGBM_BoosterGetEval_R
          , private$handle
          , as.integer(data_idx - 1L)
          , tmp_vals
        )

        for (i in seq_along(private$eval_names)) {

          # Store evaluation and append to return
          res <- list()
          res$data_name <- data_name
          res$name <- private$eval_names[i]
          res$value <- tmp_vals[i]
          res$higher_better <- private$higher_better_inner_eval[i]
          ret <- append(ret, list(res))

        }

      }

      # Check if there are evaluation metrics
      if (!is.null(feval)) {

        # Check if evaluation metric is a function
        if (!is.function(feval)) {
          stop("lgb.Booster.eval: feval should be a function")
        }

        data <- private$train_set

        # Check if data to assess is existing differently
        if (data_idx > 1L) {
          data <- private$valid_sets[[data_idx - 1L]]
        }

        # Perform function evaluation on this dataset's cached predictions
        res <- feval(private$inner_predict(data_idx), data)

        if (is.null(res$name) || is.null(res$value) ||  is.null(res$higher_better)) {
          stop(
            "lgb.Booster.eval: custom eval function should return a list with attribute (name, value, higher_better)"
          )
        }

        # Append names and evaluation
        res$data_name <- data_name
        ret <- append(ret, list(res))
      }

      return(ret)

    }
816

Guolin Ke's avatar
Guolin Ke committed
817
818
819
  )
)

820
#' @name lgb_predict_shared_params
821
822
823
824
825
#' @param type Type of prediction to output. Allowed types are:\itemize{
#'             \item \code{"response"}: will output the predicted score according to the objective function being
#'                   optimized (depending on the link function that the objective uses), after applying any necessary
#'                   transformations - for example, for \code{objective="binary"}, it will output class probabilities.
#'             \item \code{"class"}: for classification objectives, will output the class with the highest predicted
826
827
828
#'                   probability. For other objectives, will output the same as "response". Note that \code{"class"} is
#'                   not a supported type for \link{lgb.configure_fast_predict} (see the documentation of that function
#'                   for more details).
829
830
831
832
833
834
835
#'             \item \code{"raw"}: will output the non-transformed numbers (sum of predictions from boosting iterations'
#'                   results) from which the "response" number is produced for a given objective function - for example,
#'                   for \code{objective="binary"}, this corresponds to log-odds. For many objectives such as
#'                   "regression", since no transformation is applied, the output will be the same as for "response".
#'             \item \code{"leaf"}: will output the index of the terminal node / leaf at which each observation falls
#'                   in each tree in the model, outputted as integers, with one column per tree.
#'             \item \code{"contrib"}: will return the per-feature contributions for each prediction, including an
836
#'                   intercept (each feature will produce one column).
837
838
839
840
#'             }
#'
#'             Note that, if using custom objectives, types "class" and "response" will not be available and will
#'             default towards using "raw" instead.
841
842
843
844
845
#'
#'             If the model was fit through function \link{lightgbm} and it was passed a factor as labels,
#'             passing the prediction type through \code{params} instead of through this argument might
#'             result in factor levels for classification objectives not being applied correctly to the
#'             resulting output.
846
847
848
#'
#'             \emph{New in version 4.0.0}
#'
849
850
851
852
853
854
855
856
#' @param start_iteration int or None, optional (default=None)
#'                        Start index of the iteration to predict.
#'                        If None or <= 0, starts from the first iteration.
#' @param num_iteration int or None, optional (default=None)
#'                      Limit number of iterations in the prediction.
#'                      If None, if the best iteration exists and start_iteration is None or <= 0, the
#'                      best iteration is used; otherwise, all iterations from start_iteration are used.
#'                      If <= 0, all iterations from start_iteration are used (no limits).
857
858
859
#' @param params a list of additional named parameters. See
#'               \href{https://lightgbm.readthedocs.io/en/latest/Parameters.html#predict-parameters}{
#'               the "Predict Parameters" section of the documentation} for a list of parameters and
860
861
#'               valid values. Where these conflict with the values of keyword arguments to this function,
#'               the values in \code{params} take precedence.
862
863
864
865
866
NULL

#' @name predict.lgb.Booster
#' @title Predict method for LightGBM model
#' @description Predicted values based on class \code{lgb.Booster}
867
868
869
#'
#'              \emph{New in version 4.0.0}
#'
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
#' @details If the model object has been configured for fast single-row predictions through
#'          \link{lgb.configure_fast_predict}, this function will use the prediction parameters
#'          that were configured for it - as such, extra prediction parameters should not be passed
#'          here, otherwise the configuration will be ignored and the slow route will be taken.
#' @inheritParams lgb_predict_shared_params
#' @param object Object of class \code{lgb.Booster}
#' @param newdata a \code{matrix} object, a \code{dgCMatrix}, a \code{dgRMatrix} object, a \code{dsparseVector} object,
#'                or a character representing a path to a text file (CSV, TSV, or LibSVM).
#'
#'                For sparse inputs, if predictions are only going to be made for a single row, it will be faster to
#'                use CSR format, in which case the data may be passed as either a single-row CSR matrix (class
#'                \code{dgRMatrix} from package \code{Matrix}) or as a sparse numeric vector (class
#'                \code{dsparseVector} from package \code{Matrix}).
#'
#'                If single-row predictions are going to be performed frequently, it is recommended to
#'                pre-configure the model object for fast single-row sparse predictions through function
#'                \link{lgb.configure_fast_predict}.
887
888
889
#'
#'                \emph{Changed from 'data', in version 4.0.0}
#'
890
#' @param header only used for prediction for text file. True if text file has header
891
#' @param ... ignored
892
#' @return For prediction types that are meant to always return one output per observation (e.g. when predicting
893
894
#'         \code{type="response"} or \code{type="raw"} on a binary classification or regression objective), will
#'         return a vector with one element per row in \code{newdata}.
895
#'
896
#'         For prediction types that are meant to return more than one output per observation (e.g. when predicting
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
#'         \code{type="response"} or \code{type="raw"} on a multi-class objective, or when predicting
#'         \code{type="leaf"}, regardless of objective), will return a matrix with one row per observation in
#'         \code{newdata} and one column per output.
#'
#'         For \code{type="leaf"} predictions, will return a matrix with one row per observation in \code{newdata}
#'         and one column per tree. Note that for multiclass objectives, LightGBM trains one tree per class at each
#'         boosting iteration. That means that, for example, for a multiclass model with 3 classes, the leaf
#'         predictions for the first class can be found in columns 1, 4, 7, 10, etc.
#'
#'         For \code{type="contrib"}, will return a matrix of SHAP values with one row per observation in
#'         \code{newdata} and columns corresponding to features. For regression, ranking, cross-entropy, and binary
#'         classification objectives, this matrix contains one column per feature plus a final column containing the
#'         Shapley base value. For multiclass objectives, this matrix will represent \code{num_classes} such matrices,
#'         in the order "feature contributions for first class, feature contributions for second class, feature
#'         contributions for third class, etc.".
912
#'
913
914
915
916
917
#'         If the model was fit through function \link{lightgbm} and it was passed a factor as labels, predictions
#'         returned from this function will retain the factor levels (either as values for \code{type="class"}, or
#'         as column names for \code{type="response"} and \code{type="raw"} for multi-class objectives). Note that
#'         passing the requested prediction type under \code{params} instead of through \code{type} might result in
#'         the factor levels not being present in the output.
Guolin Ke's avatar
Guolin Ke committed
918
#' @examples
919
#' \donttest{
920
921
#' \dontshow{setLGBMthreads(2L)}
#' \dontshow{data.table::setDTthreads(1L)}
922
923
924
925
926
927
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#' data(agaricus.test, package = "lightgbm")
#' test <- agaricus.test
#' dtest <- lgb.Dataset.create.valid(dtrain, test$data, label = test$label)
928
929
930
931
932
#' params <- list(
#'   objective = "regression"
#'   , metric = "l2"
#'   , min_data = 1L
#'   , learning_rate = 1.0
933
#'   , num_threads = 2L
934
#' )
935
#' valids <- list(test = dtest)
936
937
938
#' model <- lgb.train(
#'   params = params
#'   , data = dtrain
939
#'   , nrounds = 5L
940
941
#'   , valids = valids
#' )
942
#' preds <- predict(model, test$data)
943
944
#'
#' # pass other prediction parameters
945
#' preds <- predict(
946
947
948
949
950
951
#'     model,
#'     test$data,
#'     params = list(
#'         predict_disable_shape_check = TRUE
#'    )
#' )
952
#' }
953
#' @importFrom utils modifyList
Guolin Ke's avatar
Guolin Ke committed
954
#' @export
James Lamb's avatar
James Lamb committed
955
predict.lgb.Booster <- function(object,
                                newdata,
                                type = "response",
                                start_iteration = NULL,
                                num_iteration = NULL,
                                header = FALSE,
                                params = list(),
                                ...) {

  if (!.is_Booster(x = object)) {
    stop("predict.lgb.Booster: object should be an ", sQuote("lgb.Booster"))
  }

  # Anything passed through '...' is either a removed legacy argument
  # (rejected with a pointer to its replacement) or ignored with a warning.
  additional_params <- list(...)
  if (length(additional_params) > 0L) {
    additional_params_names <- names(additional_params)
    if ("reshape" %in% additional_params_names) {
      stop("'reshape' argument is no longer supported.")
    }

    # Legacy boolean flags were replaced by the single 'type' argument
    old_args_for_type <- list(
      "rawscore" = "raw"
      , "predleaf" = "leaf"
      , "predcontrib" = "contrib"
    )
    for (arg in names(old_args_for_type)) {
      if (arg %in% additional_params_names) {
        stop(sprintf("Argument '%s' is no longer supported. Use type='%s' instead."
                     , arg
                     , old_args_for_type[[arg]]))
      }
    }

    warning(paste0(
      "predict.lgb.Booster: Found the following passed through '...': "
      , toString(names(additional_params))
      , ". These are ignored. Use argument 'params' instead."
    ))
  }

  # Custom objectives (objective == "none") have no built-in link function,
  # so 'response'/'class' predictions are undefined - fall back to raw scores.
  if (!is.null(object$params$objective) && object$params$objective == "none" && type %in% c("class", "response")) {
    warning("Prediction types 'class' and 'response' are not supported for custom objectives.")
    type <- "raw"
  }

  # Map the user-facing 'type' onto the booster's internal boolean flags.
  # type == "class" intentionally leaves all flags FALSE: class labels are
  # derived below from the default ("response") predictions.
  rawscore <- FALSE
  predleaf <- FALSE
  predcontrib <- FALSE
  if (type == "raw") {
    rawscore <- TRUE
  } else if (type == "leaf") {
    predleaf <- TRUE
  } else if (type == "contrib") {
    predcontrib <- TRUE
  }

  pred <- object$predict(
    data = newdata
    , start_iteration = start_iteration
    , num_iteration = num_iteration
    , rawscore = rawscore
    , predleaf =  predleaf
    , predcontrib =  predcontrib
    , header = header
    , params = params
  )

  # Class labels are computed here from predicted probabilities:
  # thresholded at 0.5 for binary objectives, 0-based argmax across
  # columns for multiclass objectives.
  if (type == "class") {
    if (object$params$objective %in% .BINARY_OBJECTIVES()) {
      pred <- as.integer(pred >= 0.5)
    } else if (object$params$objective %in% .MULTICLASS_OBJECTIVES()) {
      pred <- max.col(pred) - 1L
    }
  }

  # If the model was fit through lightgbm() with factor labels, the stored
  # data_processor restores those factor levels on the predictions.
  if (!is.null(object$data_processor)) {
    pred <- object$data_processor$process_predictions(
      pred = pred
      , type = type
    )
  }
  return(pred)
}

1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
#' @title Configure Fast Single-Row Predictions
#' @description Pre-configures a LightGBM model object to produce fast single-row predictions
#'              for a given input data type, prediction type, and parameters.
#' @details Calling this function multiple times with different parameters might not override
#'          the previous configuration and might trigger undefined behavior.
#'
#'          Any saved configuration for fast predictions might be lost after making a single-row
#'          prediction of a different type than what was configured (except for types "response" and
#'          "class", which can be switched between each other at any time without losing the configuration).
#'
#'          In some situations, setting a fast prediction configuration for one type of prediction
#'          might cause the prediction function to keep using that configuration for single-row
#'          predictions even if the requested type of prediction is different from what was configured.
#'
#'          Note that this function will not accept argument \code{type="class"} - for such cases, one
#'          can pass \code{type="response"} to this function and then \code{type="class"} to the
#'          \code{predict} function - the fast configuration will not be lost or altered if the switch
#'          is between "response" and "class".
#'
#'          The configuration does not survive de-serializations, so it has to be generated
#'          anew in every R process that is going to use it (e.g. if loading a model object
#'          through \code{readRDS}, whatever configuration was there previously will be lost).
#'
#'          Requesting a different prediction type or passing parameters to \link{predict.lgb.Booster}
#'          will cause it to ignore the fast-predict configuration and take the slow route instead
#'          (but be aware that an existing configuration might not always be overridden by supplying
#'          different parameters or prediction type, so make sure to check that the output is what
#'          was expected when a prediction is to be made on a single row for something different than
#'          what is configured).
#'
#'          Note that, if configuring a non-default prediction type (such as leaf indices),
#'          then that type must also be passed in the call to \link{predict.lgb.Booster} in
#'          order for it to use the configuration. This also applies for \code{start_iteration}
#'          and \code{num_iteration}, but \bold{the \code{params} list must be empty} in the call to \code{predict}.
#'
#'          Predictions about feature contributions do not allow a fast route for CSR inputs,
#'          and as such, this function will produce an error if passing \code{csr=TRUE} and
#'          \code{type = "contrib"} together.
#' @inheritParams lgb_predict_shared_params
#' @param model LightGBM model object (class \code{lgb.Booster}).
#'
#'              \bold{The object will be modified in-place}.
#' @param csr Whether the prediction function is going to be called on sparse CSR inputs.
#'            If \code{FALSE}, will be assumed that predictions are going to be called on single-row
#'            regular R matrices.
#' @return The same \code{model} that was passed as input, invisibly, with the desired
#'         configuration stored inside it and available to be used in future calls to
#'         \link{predict.lgb.Booster}.
#' @examples
#' \donttest{
1087
1088
#' \dontshow{setLGBMthreads(2L)}
#' \dontshow{data.table::setDTthreads(1L)}
1089
1090
1091
1092
1093
#' library(lightgbm)
#' data(mtcars)
#' X <- as.matrix(mtcars[, -1L])
#' y <- mtcars[, 1L]
#' dtrain <- lgb.Dataset(X, label = y, params = list(max_bin = 5L))
1094
1095
1096
1097
#' params <- list(
#'   min_data_in_leaf = 2L
#'   , num_threads = 2L
#' )
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
#' model <- lgb.train(
#'   params = params
#'  , data = dtrain
#'  , obj = "regression"
#'  , nrounds = 5L
#'  , verbose = -1L
#' )
#' lgb.configure_fast_predict(model)
#'
#' x_single <- X[11L, , drop = FALSE]
#' predict(model, x_single)
#'
#' # Will not use it if the prediction to be made
#' # is different from what was configured
#' predict(model, x_single, type = "leaf")
#' }
#' @export
lgb.configure_fast_predict <- function(model,
                                       csr = FALSE,
                                       start_iteration = NULL,
                                       num_iteration = NULL,
                                       type = "response",
                                       params = list()) {

  # Validate arguments before touching the model object
  if (!.is_Booster(x = model)) {
    stop("lgb.configure_fast_predict: model should be an ", sQuote("lgb.Booster"))
  }
  if (type == "class") {
    stop("type='class' is not supported for 'lgb.configure_fast_predict'. Use 'response' instead.")
  }

  # Translate the user-facing prediction type into the internal boolean flags
  wants_raw <- type == "raw"
  wants_leaf <- type == "leaf"
  wants_contrib <- type == "contrib"

  # Feature contributions have no fast path for CSR inputs
  if (csr && wants_contrib) {
    stop("'lgb.configure_fast_predict' does not support feature contributions for CSR data.")
  }

  # The model object is modified in-place and returned invisibly
  model$configure_fast_predict(
    csr = csr
    , start_iteration = start_iteration
    , num_iteration = num_iteration
    , rawscore = wants_raw
    , predleaf = wants_leaf
    , predcontrib = wants_contrib
    , params = params
  )
  return(invisible(model))
}

1154
1155
1156
#' @name print.lgb.Booster
#' @title Print method for LightGBM model
#' @description Show summary information about a LightGBM model object (same as \code{summary}).
1157
1158
1159
#'
#'              \emph{New in version 4.0.0}
#'
1160
1161
#' @param x Object of class \code{lgb.Booster}
#' @param ... Not used
1162
#' @return The same input \code{x}, returned as invisible.
1163
1164
1165
1166
#' @export
print.lgb.Booster <- function(x, ...) {
  # nolint start
  handle <- x$.__enclos_env__$private$handle

  if (.is_null_handle(handle)) {

    # A freed (or failed-to-restore) booster cannot be queried for details
    cat("LightGBM Model\n")
    cat("(Booster handle is invalid)\n")

  } else {

    # Header line with the number of fitted trees
    num_trees <- x$current_iter()
    if (num_trees == 1L) {
      cat("LightGBM Model (1 tree)\n")
    } else {
      cat(sprintf("LightGBM Model (%d trees)\n", num_trees))
    }

    # Objective line; "none" marks a custom objective function
    objective <- x$params$objective
    if (objective == "none") {
      objective <- "custom"
    }
    n_classes <- x$.__enclos_env__$private$num_class
    if (n_classes == 1L) {
      cat(sprintf("Objective: %s\n", objective))
    } else {
      cat(sprintf("Objective: %s (%d classes)\n"
          , objective
          , n_classes))
    }

    # Number of features the model was trained on
    n_features <- .Call(LGBM_BoosterGetNumFeature_R, handle)
    cat(sprintf("Fitted to dataset with %d columns\n", n_features))
  }
  # nolint end

  return(invisible(x))
}

#' @name summary.lgb.Booster
#' @title Summary method for LightGBM model
#' @description Show summary information about a LightGBM model object (same as \code{print}).
1209
1210
1211
#'
#'              \emph{New in version 4.0.0}
#'
1212
1213
#' @param object Object of class \code{lgb.Booster}
#' @param ... Not used
1214
#' @return The same input \code{object}, returned as invisible.
1215
1216
1217
1218
1219
#' @export
summary.lgb.Booster <- function(object, ...) {
  # Delegate to the print method; the input is returned invisibly
  return(invisible(print(object)))
}

1220
1221
#' @name lgb.load
#' @title Load LightGBM model
1222
1223
#' @description Load LightGBM takes in either a file path or model string.
#'              If both are provided, Load will default to loading from file
Guolin Ke's avatar
Guolin Ke committed
1224
#' @param filename path of model file
1225
#' @param model_str a str containing the model (as a \code{character} or \code{raw} vector)
1226
#'
1227
#' @return lgb.Booster
1228
#'
Guolin Ke's avatar
Guolin Ke committed
1229
#' @examples
1230
#' \donttest{
1231
1232
#' \dontshow{setLGBMthreads(2L)}
#' \dontshow{data.table::setDTthreads(1L)}
1233
1234
1235
1236
1237
1238
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#' data(agaricus.test, package = "lightgbm")
#' test <- agaricus.test
#' dtest <- lgb.Dataset.create.valid(dtrain, test$data, label = test$label)
1239
1240
1241
1242
1243
#' params <- list(
#'   objective = "regression"
#'   , metric = "l2"
#'   , min_data = 1L
#'   , learning_rate = 1.0
1244
#'   , num_threads = 2L
1245
#' )
1246
#' valids <- list(test = dtest)
1247
1248
1249
#' model <- lgb.train(
#'   params = params
#'   , data = dtrain
1250
#'   , nrounds = 5L
1251
#'   , valids = valids
1252
#'   , early_stopping_rounds = 3L
1253
#' )
1254
1255
1256
#' model_file <- tempfile(fileext = ".txt")
#' lgb.save(model, model_file)
#' load_booster <- lgb.load(filename = model_file)
1257
1258
#' model_string <- model$save_model_to_string(NULL) # saves best iteration
#' load_booster_from_str <- lgb.load(model_str = model_string)
1259
#' }
Guolin Ke's avatar
Guolin Ke committed
1260
#' @export
1261
lgb.load <- function(filename = NULL, model_str = NULL) {

  filename_provided <- !is.null(filename)
  model_str_provided <- !is.null(model_str)

  # Loading from file takes precedence when both inputs are given
  if (filename_provided) {
    # Require a single path: a longer character vector would make
    # file.exists() return a vector and break the scalar 'if' below.
    # This also matches the validation done by lgb.save().
    if (!(is.character(filename) && length(filename) == 1L)) {
      stop("lgb.load: filename should be a string")
    }
    filename <- path.expand(filename)
    if (!file.exists(filename)) {
      stop(sprintf("lgb.load: file '%s' passed to filename does not exist", filename))
    }
    return(invisible(Booster$new(modelfile = filename)))
  }

  if (model_str_provided) {
    if (!is.raw(model_str) && !is.character(model_str)) {
      stop("lgb.load: model_str should be a character/raw vector")
    }
    return(invisible(Booster$new(model_str = model_str)))
  }

  # Neither input was provided
  stop("lgb.load: either filename or model_str must be given")
}

1287
1288
1289
#' @name lgb.save
#' @title Save LightGBM model
#' @description Save LightGBM model
Guolin Ke's avatar
Guolin Ke committed
1290
1291
1292
#' @param booster Object of class \code{lgb.Booster}
#' @param filename saved filename
#' @param num_iteration number of iteration want to predict with, NULL or <= 0 means use best iteration
1293
#'
1294
#' @return lgb.Booster
1295
#'
Guolin Ke's avatar
Guolin Ke committed
1296
#' @examples
1297
#' \donttest{
1298
1299
#' \dontshow{setLGBMthreads(2L)}
#' \dontshow{data.table::setDTthreads(1L)}
1300
1301
1302
1303
1304
1305
1306
#' library(lightgbm)
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#' data(agaricus.test, package = "lightgbm")
#' test <- agaricus.test
#' dtest <- lgb.Dataset.create.valid(dtrain, test$data, label = test$label)
1307
1308
1309
1310
1311
#' params <- list(
#'   objective = "regression"
#'   , metric = "l2"
#'   , min_data = 1L
#'   , learning_rate = 1.0
1312
#'   , num_threads = 2L
1313
#' )
1314
#' valids <- list(test = dtest)
1315
1316
1317
#' model <- lgb.train(
#'   params = params
#'   , data = dtrain
1318
#'   , nrounds = 10L
1319
#'   , valids = valids
1320
#'   , early_stopping_rounds = 5L
1321
#' )
1322
#' lgb.save(model, tempfile(fileext = ".txt"))
1323
#' }
Guolin Ke's avatar
Guolin Ke committed
1324
#' @export
1325
lgb.save <- function(booster, filename, num_iteration = NULL) {

  # Validate arguments up front
  if (!.is_Booster(x = booster)) {
    stop("lgb.save: booster should be an ", sQuote("lgb.Booster"))
  }
  is_single_path <- is.character(filename) && length(filename) == 1L
  if (!is_single_path) {
    stop("lgb.save: filename should be a string")
  }

  # Write the model to disk and return the booster invisibly
  out_path <- path.expand(filename)
  saved_booster <- booster$save_model(
    filename = out_path
    , num_iteration = num_iteration
  )
  return(invisible(saved_booster))
}

1346
1347
1348
#' @name lgb.dump
#' @title Dump LightGBM model to json
#' @description Dump LightGBM model to json
Guolin Ke's avatar
Guolin Ke committed
1349
1350
#' @param booster Object of class \code{lgb.Booster}
#' @param num_iteration number of iteration want to predict with, NULL or <= 0 means use best iteration
1351
#'
Guolin Ke's avatar
Guolin Ke committed
1352
#' @return json format of model
1353
#'
Guolin Ke's avatar
Guolin Ke committed
1354
#' @examples
1355
#' \donttest{
1356
#' library(lightgbm)
1357
1358
#' \dontshow{setLGBMthreads(2L)}
#' \dontshow{data.table::setDTthreads(1L)}
1359
1360
1361
1362
1363
1364
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#' data(agaricus.test, package = "lightgbm")
#' test <- agaricus.test
#' dtest <- lgb.Dataset.create.valid(dtrain, test$data, label = test$label)
1365
1366
1367
1368
1369
#' params <- list(
#'   objective = "regression"
#'   , metric = "l2"
#'   , min_data = 1L
#'   , learning_rate = 1.0
1370
#'   , num_threads = 2L
1371
#' )
1372
#' valids <- list(test = dtest)
1373
1374
1375
#' model <- lgb.train(
#'   params = params
#'   , data = dtrain
1376
#'   , nrounds = 10L
1377
#'   , valids = valids
1378
#'   , early_stopping_rounds = 5L
1379
#' )
1380
#' json_model <- lgb.dump(model)
1381
#' }
Guolin Ke's avatar
Guolin Ke committed
1382
#' @export
1383
lgb.dump <- function(booster, num_iteration = NULL) {

  # Only lgb.Booster objects can be dumped
  if (!.is_Booster(x = booster)) {
    stop("lgb.dump: booster should be an ", sQuote("lgb.Booster"))
  }

  # Delegate to the booster's JSON dump, honoring the iteration limit
  json_model <- booster$dump_model(num_iteration = num_iteration)
  return(json_model)
}

1394
1395
#' @name lgb.get.eval.result
#' @title Get record evaluation result from booster
1396
1397
#' @description Given a \code{lgb.Booster}, return evaluation results for a
#'              particular metric on a particular dataset.
Guolin Ke's avatar
Guolin Ke committed
1398
#' @param booster Object of class \code{lgb.Booster}
1399
1400
1401
1402
#' @param data_name Name of the dataset to return evaluation results for.
#' @param eval_name Name of the evaluation metric to return results for.
#' @param iters An integer vector of iterations you want to get evaluation results for. If NULL
#'              (the default), evaluation results for all iterations will be returned.
Guolin Ke's avatar
Guolin Ke committed
1403
#' @param is_err TRUE will return evaluation error instead
1404
#'
1405
#' @return numeric vector of evaluation result
1406
#'
1407
#' @examples
1408
#' \donttest{
1409
1410
#' \dontshow{setLGBMthreads(2L)}
#' \dontshow{data.table::setDTthreads(1L)}
1411
#' # train a regression model
1412
1413
1414
1415
1416
1417
#' data(agaricus.train, package = "lightgbm")
#' train <- agaricus.train
#' dtrain <- lgb.Dataset(train$data, label = train$label)
#' data(agaricus.test, package = "lightgbm")
#' test <- agaricus.test
#' dtest <- lgb.Dataset.create.valid(dtrain, test$data, label = test$label)
1418
1419
1420
1421
1422
#' params <- list(
#'   objective = "regression"
#'   , metric = "l2"
#'   , min_data = 1L
#'   , learning_rate = 1.0
1423
#'   , num_threads = 2L
1424
#' )
1425
#' valids <- list(test = dtest)
1426
1427
1428
#' model <- lgb.train(
#'   params = params
#'   , data = dtrain
1429
#'   , nrounds = 5L
1430
1431
#'   , valids = valids
#' )
1432
1433
1434
1435
1436
1437
1438
1439
#'
#' # Examine valid data_name values
#' print(setdiff(names(model$record_evals), "start_iter"))
#'
#' # Examine valid eval_name values for dataset "test"
#' print(names(model$record_evals[["test"]]))
#'
#' # Get L2 values for "test" dataset
1440
#' lgb.get.eval.result(model, "test", "l2")
1441
#' }
Guolin Ke's avatar
Guolin Ke committed
1442
#' @export
1443
lgb.get.eval.result <- function(booster, data_name, eval_name, iters = NULL, is_err = FALSE) {

  # Input validation
  if (!.is_Booster(x = booster)) {
    stop("lgb.get.eval.result: Can only use ", sQuote("lgb.Booster"), " to get eval result")
  }
  if (!is.character(data_name) || !is.character(eval_name)) {
    stop("lgb.get.eval.result: data_name and eval_name should be characters")
  }

  record_evals <- booster$record_evals

  # "start_iter" is bookkeeping inside record_evals, not a dataset name
  valid_data_names <- setdiff(names(record_evals), "start_iter")
  if (!(data_name %in% valid_data_names)) {
    stop(paste0(
      "lgb.get.eval.result: data_name "
      , shQuote(data_name)
      , " not found. Only the following datasets exist in record evals: ["
      , toString(valid_data_names)
      , "]"
    ))
  }

  # The requested metric must have been recorded for that dataset
  valid_eval_names <- names(record_evals[[data_name]])
  if (!(eval_name %in% valid_eval_names)) {
    stop(paste0(
      "lgb.get.eval.result: eval_name "
      , shQuote(eval_name)
      , " not found. Only the following eval_names exist for dataset "
      , shQuote(data_name)
      , ": ["
      , toString(valid_eval_names)
      , "]"
    ))
  }

  # Pull either the metric values or the evaluation errors
  metric_key <- if (is_err) .EVAL_ERR_KEY() else .EVAL_KEY()
  result <- record_evals[[data_name]][[eval_name]][[metric_key]]

  # Without an 'iters' filter, return results for all iterations
  if (is.null(iters)) {
    return(as.numeric(result))
  }

  # Shift the requested iteration numbers so they index into 'result',
  # whose first element corresponds to record_evals$start_iter
  offset <- record_evals$start_iter - 1.0
  requested_positions <- as.integer(iters) - offset

  return(as.numeric(result[requested_positions]))
}