/*!
 * Copyright (c) 2016 Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 */
#include "gbdt.h"

#include <LightGBM/metric.h>
Guolin Ke's avatar
Guolin Ke committed
8
#include <LightGBM/network.h>
9
10
11
12
#include <LightGBM/objective_function.h>
#include <LightGBM/prediction_early_stop.h>
#include <LightGBM/utils/common.h>
#include <LightGBM/utils/openmp_wrapper.h>
Guolin Ke's avatar
Guolin Ke committed
13

14
15
16
17
#include <chrono>
#include <ctime>
#include <sstream>

Guolin Ke's avatar
Guolin Ke committed
18
19
namespace LightGBM {

20
21
22
// Process-wide device/learner selection flags declared in LGBM_config_.
// Both default to CPU; GBDT::Init() switches the learner to CUDA when the
// config requests device_type == "cuda".
int LGBM_config_::current_device = lgbm_device_cpu;
int LGBM_config_::current_learner = use_cpu_learner;

23
24
25
/*!
 * \brief Default-construct a GBDT in an un-initialized state.
 *
 * All counters start at zero and all owned pointers at nullptr; Init() must
 * be called before the booster can train or predict. shrinkage_rate_ gets a
 * placeholder of 0.1 that Init() overwrites with config->learning_rate.
 */
GBDT::GBDT()
    : iter_(0),
      train_data_(nullptr),
      config_(nullptr),
      objective_function_(nullptr),
      early_stopping_round_(0),
      es_first_metric_only_(false),
      max_feature_idx_(0),
      num_tree_per_iteration_(1),
      num_class_(1),
      num_iteration_for_pred_(0),
      shrinkage_rate_(0.1f),
      num_init_iteration_(0),
      need_re_bagging_(false),
      balanced_bagging_(false),
      bagging_runner_(0, bagging_rand_block_) {
  average_output_ = false;
  tree_learner_ = nullptr;
}

// Empty destructor: every owned resource is held by a smart pointer or
// standard container member, so no manual cleanup is needed here.
GBDT::~GBDT() {
}

Guolin Ke's avatar
Guolin Ke committed
46
void GBDT::Init(const Config* config, const Dataset* train_data, const ObjectiveFunction* objective_function,
47
                const std::vector<const Metric*>& training_metrics) {
Nikita Titov's avatar
Nikita Titov committed
48
  CHECK_NOTNULL(train_data);
49
  train_data_ = train_data;
50
  if (!config->monotone_constraints.empty()) {
Nikita Titov's avatar
Nikita Titov committed
51
    CHECK_EQ(static_cast<size_t>(train_data_->num_total_features()), config->monotone_constraints.size());
Nikita Titov's avatar
Nikita Titov committed
52
  }
53
  if (!config->feature_contri.empty()) {
Nikita Titov's avatar
Nikita Titov committed
54
    CHECK_EQ(static_cast<size_t>(train_data_->num_total_features()), config->feature_contri.size());
55
  }
56
  iter_ = 0;
wxchan's avatar
wxchan committed
57
  num_iteration_for_pred_ = 0;
58
  max_feature_idx_ = 0;
wxchan's avatar
wxchan committed
59
  num_class_ = config->num_class;
Guolin Ke's avatar
Guolin Ke committed
60
61
  config_ = std::unique_ptr<Config>(new Config(*config));
  early_stopping_round_ = config_->early_stopping_round;
62
  es_first_metric_only_ = config_->first_metric_only;
Guolin Ke's avatar
Guolin Ke committed
63
  shrinkage_rate_ = config_->learning_rate;
64

65
66
67
68
  if (config_->device_type == std::string("cuda")) {
    LGBM_config_::current_learner = use_cuda_learner;
  }

69
  // load forced_splits file
70
71
72
73
74
  if (!config->forcedsplits_filename.empty()) {
    std::ifstream forced_splits_file(config->forcedsplits_filename.c_str());
    std::stringstream buffer;
    buffer << forced_splits_file.rdbuf();
    std::string err;
Guolin Ke's avatar
Guolin Ke committed
75
    forced_splits_json_ = Json::parse(buffer.str(), &err);
76
77
  }

78
79
80
  objective_function_ = objective_function;
  num_tree_per_iteration_ = num_class_;
  if (objective_function_ != nullptr) {
Guolin Ke's avatar
Guolin Ke committed
81
    num_tree_per_iteration_ = objective_function_->NumModelPerIteration();
82
83
  }

84
85
  is_constant_hessian_ = GetIsConstHessian(objective_function);

Guolin Ke's avatar
Guolin Ke committed
86
  tree_learner_ = std::unique_ptr<TreeLearner>(TreeLearner::CreateTreeLearner(config_->tree_learner, config_->device_type, config_.get()));
87
88
89

  // init tree learner
  tree_learner_->Init(train_data_, is_constant_hessian_);
90
  tree_learner_->SetForcedSplit(&forced_splits_json_);
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114

  // push training metrics
  training_metrics_.clear();
  for (const auto& metric : training_metrics) {
    training_metrics_.push_back(metric);
  }
  training_metrics_.shrink_to_fit();

  train_score_updater_.reset(new ScoreUpdater(train_data_, num_tree_per_iteration_));

  num_data_ = train_data_->num_data();
  // create buffer for gradients and hessians
  if (objective_function_ != nullptr) {
    size_t total_size = static_cast<size_t>(num_data_) * num_tree_per_iteration_;
    gradients_.resize(total_size);
    hessians_.resize(total_size);
  }
  // get max feature index
  max_feature_idx_ = train_data_->num_total_features() - 1;
  // get label index
  label_idx_ = train_data_->label_idx();
  // get feature names
  feature_names_ = train_data_->feature_names();
  feature_infos_ = train_data_->feature_infos();
115
  monotone_constraints_ = config->monotone_constraints;
116
117

  // if need bagging, create buffer
Guolin Ke's avatar
Guolin Ke committed
118
  ResetBaggingConfig(config_.get(), true);
119
120
121

  class_need_train_ = std::vector<bool>(num_tree_per_iteration_, true);
  if (objective_function_ != nullptr && objective_function_->SkipEmptyClass()) {
Nikita Titov's avatar
Nikita Titov committed
122
    CHECK_EQ(num_tree_per_iteration_, num_class_);
123
124
    for (int i = 0; i < num_class_; ++i) {
      class_need_train_[i] = objective_function_->ClassNeedTrain(i);
125
126
    }
  }
wxchan's avatar
wxchan committed
127
128
129
}

void GBDT::AddValidDataset(const Dataset* valid_data,
130
                           const std::vector<const Metric*>& valid_metrics) {
wxchan's avatar
wxchan committed
131
  if (!train_data_->CheckAlign(*valid_data)) {
132
    Log::Fatal("Cannot add validation data, since it has different bin mappers with training data");
133
  }
Guolin Ke's avatar
Guolin Ke committed
134
  // for a validation dataset, we need its score and metric
135
  auto new_score_updater = std::unique_ptr<ScoreUpdater>(new ScoreUpdater(valid_data, num_tree_per_iteration_));
wxchan's avatar
wxchan committed
136
137
  // update score
  for (int i = 0; i < iter_; ++i) {
138
139
140
    for (int cur_tree_id = 0; cur_tree_id < num_tree_per_iteration_; ++cur_tree_id) {
      auto curr_tree = (i + num_init_iteration_) * num_tree_per_iteration_ + cur_tree_id;
      new_score_updater->AddScore(models_[curr_tree].get(), cur_tree_id);
wxchan's avatar
wxchan committed
141
142
    }
  }
Guolin Ke's avatar
Guolin Ke committed
143
  valid_score_updater_.push_back(std::move(new_score_updater));
Guolin Ke's avatar
Guolin Ke committed
144
145
146
147
  valid_metrics_.emplace_back();
  for (const auto& metric : valid_metrics) {
    valid_metrics_.back().push_back(metric);
  }
Guolin Ke's avatar
Guolin Ke committed
148
  valid_metrics_.back().shrink_to_fit();
149

150
151
152
153
154
155
156
  if (early_stopping_round_ > 0) {
    auto num_metrics = valid_metrics.size();
    if (es_first_metric_only_) { num_metrics = 1; }
    best_iter_.emplace_back(num_metrics, 0);
    best_score_.emplace_back(num_metrics, kMinScore);
    best_msg_.emplace_back(num_metrics);
  }
Guolin Ke's avatar
Guolin Ke committed
157
158
}

Guolin Ke's avatar
Guolin Ke committed
159
void GBDT::Boosting() {
160
  Common::FunctionTimer fun_timer("GBDT::Boosting", global_timer);
Guolin Ke's avatar
Guolin Ke committed
161
162
163
164
165
166
167
168
169
  if (objective_function_ == nullptr) {
    Log::Fatal("No object function provided");
  }
  // objective function will calculate gradients and hessians
  int64_t num_score = 0;
  objective_function_->
    GetGradients(GetTrainingScore(&num_score), gradients_.data(), hessians_.data());
}

170
/*!
 * \brief Partition the data indices [start, start+cnt) into in-bag / out-of-bag.
 * \param start  First data index of this chunk.
 * \param cnt    Number of indices in this chunk.
 * \param buffer Output: in-bag indices at the front, out-of-bag filled from
 *               the back (so the chunk forms a two-sided partition).
 * \return Number of in-bag (sampled) indices written to the front of buffer.
 */
data_size_t GBDT::BaggingHelper(data_size_t start, data_size_t cnt, data_size_t* buffer) {
  if (cnt <= 0) {
    return 0;
  }
  data_size_t cur_left_cnt = 0;
  data_size_t cur_right_pos = cnt;
  // random bagging, minimal unit is one record
  for (data_size_t i = 0; i < cnt; ++i) {
    auto cur_idx = start + i;
    if (bagging_rands_[cur_idx / bagging_rand_block_].NextFloat() < config_->bagging_fraction) {
      buffer[cur_left_cnt++] = cur_idx;
    } else {
      buffer[--cur_right_pos] = cur_idx;
    }
  }
  return cur_left_cnt;
}
Guolin Ke's avatar
Guolin Ke committed
187

188
189
/*!
 * \brief Like BaggingHelper, but samples positives and negatives with separate
 *        fractions (pos_bagging_fraction / neg_bagging_fraction).
 * \param start  First data index of this chunk.
 * \param cnt    Number of indices in this chunk.
 * \param buffer Output: in-bag indices at the front, out-of-bag from the back.
 * \return Number of in-bag indices written to the front of buffer.
 */
data_size_t GBDT::BalancedBaggingHelper(data_size_t start, data_size_t cnt,
                                        data_size_t* buffer) {
  if (cnt <= 0) {
    return 0;
  }
  auto label_ptr = train_data_->metadata().label();
  data_size_t cur_left_cnt = 0;
  data_size_t cur_right_pos = cnt;
  // random bagging, minimal unit is one record
  for (data_size_t i = 0; i < cnt; ++i) {
    auto cur_idx = start + i;
    // label > 0 is treated as the positive class
    bool is_pos = label_ptr[start + i] > 0;
    bool is_in_bag = false;
    if (is_pos) {
      is_in_bag = bagging_rands_[cur_idx / bagging_rand_block_].NextFloat() <
                  config_->pos_bagging_fraction;
    } else {
      is_in_bag = bagging_rands_[cur_idx / bagging_rand_block_].NextFloat() <
                  config_->neg_bagging_fraction;
    }
    if (is_in_bag) {
      buffer[cur_left_cnt++] = cur_idx;
    } else {
      buffer[--cur_right_pos] = cur_idx;
    }
  }
  return cur_left_cnt;
}

217
void GBDT::Bagging(int iter) {
218
  Common::FunctionTimer fun_timer("GBDT::Bagging", global_timer);
Guolin Ke's avatar
Guolin Ke committed
219
  // if need bagging
Guolin Ke's avatar
Guolin Ke committed
220
221
  if ((bag_data_cnt_ < num_data_ && iter % config_->bagging_freq == 0) ||
      need_re_bagging_) {
Guolin Ke's avatar
Guolin Ke committed
222
    need_re_bagging_ = false;
223
224
225
226
227
228
229
230
    auto left_cnt = bagging_runner_.Run<true>(
        num_data_,
        [=](int, data_size_t cur_start, data_size_t cur_cnt, data_size_t* left,
            data_size_t*) {
          data_size_t cur_left_count = 0;
          if (balanced_bagging_) {
            cur_left_count =
                BalancedBaggingHelper(cur_start, cur_cnt, left);
Guolin Ke's avatar
Guolin Ke committed
231
          } else {
232
            cur_left_count = BaggingHelper(cur_start, cur_cnt, left);
Guolin Ke's avatar
Guolin Ke committed
233
          }
234
235
236
          return cur_left_count;
        },
        bag_data_indices_.data());
Guolin Ke's avatar
Guolin Ke committed
237
    bag_data_cnt_ = left_cnt;
Guolin Ke's avatar
Guolin Ke committed
238
    Log::Debug("Re-bagging, using %d data to train", bag_data_cnt_);
Guolin Ke's avatar
Guolin Ke committed
239
    // set bagging data to tree learner
Guolin Ke's avatar
Guolin Ke committed
240
    if (!is_use_subset_) {
241
      tree_learner_->SetBaggingData(nullptr, bag_data_indices_.data(), bag_data_cnt_);
Guolin Ke's avatar
Guolin Ke committed
242
243
    } else {
      // get subset
Guolin Ke's avatar
Guolin Ke committed
244
      tmp_subset_->ReSize(bag_data_cnt_);
245
      tmp_subset_->CopySubrow(train_data_, bag_data_indices_.data(),
Guolin Ke's avatar
Guolin Ke committed
246
                              bag_data_cnt_, false);
247
248
      tree_learner_->SetBaggingData(tmp_subset_.get(), bag_data_indices_.data(),
                                    bag_data_cnt_);
Guolin Ke's avatar
Guolin Ke committed
249
    }
Guolin Ke's avatar
Guolin Ke committed
250
251
252
  }
}

Guolin Ke's avatar
Guolin Ke committed
253
void GBDT::Train(int snapshot_freq, const std::string& model_output_path) {
254
  Common::FunctionTimer fun_timer("GBDT::Train", global_timer);
Guolin Ke's avatar
Guolin Ke committed
255
256
  bool is_finished = false;
  auto start_time = std::chrono::steady_clock::now();
Guolin Ke's avatar
Guolin Ke committed
257
  for (int iter = 0; iter < config_->num_iterations && !is_finished; ++iter) {
Guolin Ke's avatar
Guolin Ke committed
258
259
260
261
    is_finished = TrainOneIter(nullptr, nullptr);
    if (!is_finished) {
      is_finished = EvalAndCheckEarlyStopping();
    }
Guolin Ke's avatar
Guolin Ke committed
262
263
264
265
266
267
268
    auto end_time = std::chrono::steady_clock::now();
    // output used time per iteration
    Log::Info("%f seconds elapsed, finished iteration %d", std::chrono::duration<double,
              std::milli>(end_time - start_time) * 1e-3, iter + 1);
    if (snapshot_freq > 0
        && (iter + 1) % snapshot_freq == 0) {
      std::string snapshot_out = model_output_path + ".snapshot_iter_" + std::to_string(iter + 1);
269
      SaveModelToFile(0, -1, config_->saved_feature_importance_type, snapshot_out.c_str());
Guolin Ke's avatar
Guolin Ke committed
270
271
272
273
    }
  }
}

274
void GBDT::RefitTree(const std::vector<std::vector<int>>& tree_leaf_prediction) {
275
276
277
  CHECK_GT(tree_leaf_prediction.size(), 0);
  CHECK_EQ(static_cast<size_t>(num_data_), tree_leaf_prediction.size());
  CHECK_EQ(static_cast<size_t>(models_.size()), tree_leaf_prediction[0].size());
278
279
280
281
282
283
284
285
286
  int num_iterations = static_cast<int>(models_.size() / num_tree_per_iteration_);
  std::vector<int> leaf_pred(num_data_);
  for (int iter = 0; iter < num_iterations; ++iter) {
    Boosting();
    for (int tree_id = 0; tree_id < num_tree_per_iteration_; ++tree_id) {
      int model_index = iter * num_tree_per_iteration_ + tree_id;
      #pragma omp parallel for schedule(static)
      for (int i = 0; i < num_data_; ++i) {
        leaf_pred[i] = tree_leaf_prediction[i][model_index];
Nikita Titov's avatar
Nikita Titov committed
287
        CHECK_LT(leaf_pred[i], models_[model_index]->num_leaves());
288
      }
289
290
291
      size_t offset = static_cast<size_t>(tree_id) * num_data_;
      auto grad = gradients_.data() + offset;
      auto hess = hessians_.data() + offset;
292
293
294
295
296
297
298
      auto new_tree = tree_learner_->FitByExistingTree(models_[model_index].get(), leaf_pred, grad, hess);
      train_score_updater_->AddScore(tree_learner_.get(), new_tree, tree_id);
      models_[model_index].reset(new_tree);
    }
  }
}

299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
/* If the custom "average" is implemented it will be used inplace of the label average (if enabled)
*
* An improvement to this is to have options to explicitly choose
* (i) standard average
* (ii) custom average if available
* (iii) any user defined scalar bias (e.g. using a new option "init_score" that overrides (i) and (ii) )
*
* (i) and (ii) could be selected as say "auto_init_score" = 0 or 1 etc..
*
*/
/*!
 * \brief Compute the initial (boost-from-average) score for one class.
 * \param fobj     Objective function providing BoostFromScore; may be null,
 *                 in which case 0.0 is used.
 * \param class_id Class index passed to the objective.
 * \return The initial score, averaged across machines in distributed mode.
 */
double ObtainAutomaticInitialScore(const ObjectiveFunction* fobj, int class_id) {
  double init_score = 0.0;
  if (fobj != nullptr) {
    init_score = fobj->BoostFromScore(class_id);
  }
  // in distributed training, all machines must agree on the initial score
  if (Network::num_machines() > 1) {
    init_score = Network::GlobalSyncUpByMean(init_score);
  }
  return init_score;
}

Guolin Ke's avatar
Guolin Ke committed
320
double GBDT::BoostFromAverage(int class_id, bool update_scorer) {
321
  Common::FunctionTimer fun_timer("GBDT::BoostFromAverage", global_timer);
322
  // boosting from average label; or customized "average" if implemented for the current objective
323
324
325
  if (models_.empty() && !train_score_updater_->has_init_score() && objective_function_ != nullptr) {
    if (config_->boost_from_average || (train_data_ != nullptr && train_data_->num_features() == 0)) {
      double init_score = ObtainAutomaticInitialScore(objective_function_, class_id);
326
      if (std::fabs(init_score) > kEpsilon) {
Guolin Ke's avatar
Guolin Ke committed
327
328
329
330
331
        if (update_scorer) {
          train_score_updater_->AddScore(init_score, class_id);
          for (auto& score_updater : valid_score_updater_) {
            score_updater->AddScore(init_score, class_id);
          }
332
333
334
        }
        Log::Info("Start training from score %lf", init_score);
        return init_score;
Guolin Ke's avatar
Guolin Ke committed
335
      }
336
337
338
    } else if (std::string(objective_function_->GetName()) == std::string("regression_l1")
               || std::string(objective_function_->GetName()) == std::string("quantile")
               || std::string(objective_function_->GetName()) == std::string("mape")) {
339
      Log::Warning("Disabling boost_from_average in %s may cause the slow convergence", objective_function_->GetName());
340
    }
341
  }
Guolin Ke's avatar
Guolin Ke committed
342
343
  return 0.0f;
}
Guolin Ke's avatar
Guolin Ke committed
344

Guolin Ke's avatar
Guolin Ke committed
345
bool GBDT::TrainOneIter(const score_t* gradients, const score_t* hessians) {
346
  Common::FunctionTimer fun_timer("GBDT::TrainOneIter", global_timer);
347
  std::vector<double> init_scores(num_tree_per_iteration_, 0.0);
Guolin Ke's avatar
Guolin Ke committed
348
  // boosting first
Guolin Ke's avatar
Guolin Ke committed
349
  if (gradients == nullptr || hessians == nullptr) {
350
    for (int cur_tree_id = 0; cur_tree_id < num_tree_per_iteration_; ++cur_tree_id) {
Guolin Ke's avatar
Guolin Ke committed
351
      init_scores[cur_tree_id] = BoostFromAverage(cur_tree_id, true);
352
    }
Guolin Ke's avatar
Guolin Ke committed
353
    Boosting();
Guolin Ke's avatar
Guolin Ke committed
354
355
    gradients = gradients_.data();
    hessians = hessians_.data();
Guolin Ke's avatar
Guolin Ke committed
356
  }
357
358
  // bagging logic
  Bagging(iter_);
Guolin Ke's avatar
Guolin Ke committed
359

Guolin Ke's avatar
Guolin Ke committed
360
  bool should_continue = false;
361
  for (int cur_tree_id = 0; cur_tree_id < num_tree_per_iteration_; ++cur_tree_id) {
362
    const size_t offset = static_cast<size_t>(cur_tree_id) * num_data_;
363
    std::unique_ptr<Tree> new_tree(new Tree(2, false));
364
    if (class_need_train_[cur_tree_id] && train_data_->num_features() > 0) {
365
366
      auto grad = gradients + offset;
      auto hess = hessians + offset;
Guolin Ke's avatar
Guolin Ke committed
367
368
369
      // need to copy gradients for bagging subset.
      if (is_use_subset_ && bag_data_cnt_ < num_data_) {
        for (int i = 0; i < bag_data_cnt_; ++i) {
370
371
          gradients_[offset + i] = grad[bag_data_indices_[i]];
          hessians_[offset + i] = hess[bag_data_indices_[i]];
Guolin Ke's avatar
Guolin Ke committed
372
        }
373
374
        grad = gradients_.data() + offset;
        hess = hessians_.data() + offset;
Guolin Ke's avatar
Guolin Ke committed
375
      }
376
      new_tree.reset(tree_learner_->Train(grad, hess));
377
    }
Guolin Ke's avatar
Guolin Ke committed
378

Guolin Ke's avatar
Guolin Ke committed
379
    if (new_tree->num_leaves() > 1) {
Guolin Ke's avatar
Guolin Ke committed
380
      should_continue = true;
381
      auto score_ptr = train_score_updater_->score() + offset;
382
383
      auto residual_getter = [score_ptr](const label_t* label, int i) {return static_cast<double>(label[i]) - score_ptr[i]; };
      tree_learner_->RenewTreeOutput(new_tree.get(), objective_function_, residual_getter,
384
                                     num_data_, bag_data_indices_.data(), bag_data_cnt_);
Guolin Ke's avatar
Guolin Ke committed
385
386
387
      // shrinkage by learning rate
      new_tree->Shrinkage(shrinkage_rate_);
      // update score
388
      UpdateScore(new_tree.get(), cur_tree_id);
389
390
      if (std::fabs(init_scores[cur_tree_id]) > kEpsilon) {
        new_tree->AddBias(init_scores[cur_tree_id]);
Guolin Ke's avatar
Guolin Ke committed
391
      }
392
393
    } else {
      // only add default score one-time
394
395
396
397
398
399
400
401
402
      if (models_.size() < static_cast<size_t>(num_tree_per_iteration_)) {
        double output = 0.0;
        if (!class_need_train_[cur_tree_id]) {
          if (objective_function_ != nullptr) {
            output = objective_function_->BoostFromScore(cur_tree_id);
          }
        } else {
          output = init_scores[cur_tree_id];
        }
403
        new_tree->AsConstantTree(output);
Guolin Ke's avatar
Guolin Ke committed
404
        // updates scores
405
        train_score_updater_->AddScore(output, cur_tree_id);
406
        for (auto& score_updater : valid_score_updater_) {
407
          score_updater->AddScore(output, cur_tree_id);
408
409
410
        }
      }
    }
Guolin Ke's avatar
Guolin Ke committed
411
412
413
    // add model
    models_.push_back(std::move(new_tree));
  }
Guolin Ke's avatar
Guolin Ke committed
414

Guolin Ke's avatar
Guolin Ke committed
415
  if (!should_continue) {
416
    Log::Warning("Stopped training because there are no more leaves that meet the split requirements");
417
418
419
420
    if (models_.size() > static_cast<size_t>(num_tree_per_iteration_)) {
      for (int cur_tree_id = 0; cur_tree_id < num_tree_per_iteration_; ++cur_tree_id) {
        models_.pop_back();
      }
Guolin Ke's avatar
Guolin Ke committed
421
422
423
    }
    return true;
  }
424

Guolin Ke's avatar
Guolin Ke committed
425
426
  ++iter_;
  return false;
Guolin Ke's avatar
Guolin Ke committed
427
}
428

wxchan's avatar
wxchan committed
429
void GBDT::RollbackOneIter() {
430
  if (iter_ <= 0) { return; }
wxchan's avatar
wxchan committed
431
  // reset score
432
  for (int cur_tree_id = 0; cur_tree_id < num_tree_per_iteration_; ++cur_tree_id) {
Guolin Ke's avatar
Guolin Ke committed
433
    auto curr_tree = models_.size() - num_tree_per_iteration_ + cur_tree_id;
wxchan's avatar
wxchan committed
434
    models_[curr_tree]->Shrinkage(-1.0);
435
    train_score_updater_->AddScore(models_[curr_tree].get(), cur_tree_id);
wxchan's avatar
wxchan committed
436
    for (auto& score_updater : valid_score_updater_) {
437
      score_updater->AddScore(models_[curr_tree].get(), cur_tree_id);
wxchan's avatar
wxchan committed
438
439
440
    }
  }
  // remove model
441
  for (int cur_tree_id = 0; cur_tree_id < num_tree_per_iteration_; ++cur_tree_id) {
wxchan's avatar
wxchan committed
442
443
444
445
446
    models_.pop_back();
  }
  --iter_;
}

Guolin Ke's avatar
Guolin Ke committed
447
bool GBDT::EvalAndCheckEarlyStopping() {
448
449
  bool is_met_early_stopping = false;
  // print message for metric
Guolin Ke's avatar
Guolin Ke committed
450
  auto best_msg = OutputMetric(iter_);
Guolin Ke's avatar
Guolin Ke committed
451
452


Guolin Ke's avatar
Guolin Ke committed
453
  is_met_early_stopping = !best_msg.empty();
454
455
  if (is_met_early_stopping) {
    Log::Info("Early stopping at iteration %d, the best iteration round is %d",
456
              iter_, iter_ - early_stopping_round_);
Guolin Ke's avatar
Guolin Ke committed
457
    Log::Info("Output of best iteration round:\n%s", best_msg.c_str());
458
    // pop last early_stopping_round_ models
459
    for (int i = 0; i < early_stopping_round_ * num_tree_per_iteration_; ++i) {
460
461
462
463
      models_.pop_back();
    }
  }
  return is_met_early_stopping;
Guolin Ke's avatar
Guolin Ke committed
464
465
}

466
void GBDT::UpdateScore(const Tree* tree, const int cur_tree_id) {
467
  Common::FunctionTimer fun_timer("GBDT::UpdateScore", global_timer);
Guolin Ke's avatar
Guolin Ke committed
468
  // update training score
Guolin Ke's avatar
Guolin Ke committed
469
  if (!is_use_subset_) {
470
    train_score_updater_->AddScore(tree_learner_.get(), tree, cur_tree_id);
Guolin Ke's avatar
Guolin Ke committed
471
472
473
474
475
476

    // we need to predict out-of-bag scores of data for boosting
    if (num_data_ - bag_data_cnt_ > 0) {
      train_score_updater_->AddScore(tree, bag_data_indices_.data() + bag_data_cnt_, num_data_ - bag_data_cnt_, cur_tree_id);
    }

Guolin Ke's avatar
Guolin Ke committed
477
  } else {
478
    train_score_updater_->AddScore(tree, cur_tree_id);
Guolin Ke's avatar
Guolin Ke committed
479
  }
Guolin Ke's avatar
Guolin Ke committed
480
481


Guolin Ke's avatar
Guolin Ke committed
482
  // update validation score
Guolin Ke's avatar
Guolin Ke committed
483
  for (auto& score_updater : valid_score_updater_) {
484
    score_updater->AddScore(tree, cur_tree_id);
Guolin Ke's avatar
Guolin Ke committed
485
486
487
  }
}

Guolin Ke's avatar
Guolin Ke committed
488
489
490
491
// Evaluate one metric over the given raw score array. The objective function
// is passed through so the metric can transform raw scores when it needs to.
std::vector<double> GBDT::EvalOneMetric(const Metric* metric, const double* score) const {
  return metric->Eval(score, objective_function_);
}

Guolin Ke's avatar
Guolin Ke committed
492
std::string GBDT::OutputMetric(int iter) {
Guolin Ke's avatar
Guolin Ke committed
493
  bool need_output = (iter % config_->metric_freq) == 0;
Guolin Ke's avatar
Guolin Ke committed
494
495
  std::string ret = "";
  std::stringstream msg_buf;
496
  std::vector<std::pair<size_t, size_t>> meet_early_stopping_pairs;
Guolin Ke's avatar
Guolin Ke committed
497
  // print training metric
Guolin Ke's avatar
Guolin Ke committed
498
  if (need_output) {
499
500
    for (auto& sub_metric : training_metrics_) {
      auto name = sub_metric->GetName();
Guolin Ke's avatar
Guolin Ke committed
501
      auto scores = EvalOneMetric(sub_metric, train_score_updater_->score());
Guolin Ke's avatar
Guolin Ke committed
502
      for (size_t k = 0; k < name.size(); ++k) {
Guolin Ke's avatar
Guolin Ke committed
503
504
505
506
507
508
        std::stringstream tmp_buf;
        tmp_buf << "Iteration:" << iter
          << ", training " << name[k]
          << " : " << scores[k];
        Log::Info(tmp_buf.str().c_str());
        if (early_stopping_round_ > 0) {
509
          msg_buf << tmp_buf.str() << '\n';
Guolin Ke's avatar
Guolin Ke committed
510
        }
511
      }
512
    }
Guolin Ke's avatar
Guolin Ke committed
513
514
  }
  // print validation metric
Guolin Ke's avatar
Guolin Ke committed
515
  if (need_output || early_stopping_round_ > 0) {
516
517
    for (size_t i = 0; i < valid_metrics_.size(); ++i) {
      for (size_t j = 0; j < valid_metrics_[i].size(); ++j) {
Guolin Ke's avatar
Guolin Ke committed
518
        auto test_scores = EvalOneMetric(valid_metrics_[i][j], valid_score_updater_[i]->score());
Guolin Ke's avatar
Guolin Ke committed
519
520
521
522
523
524
525
526
527
528
        auto name = valid_metrics_[i][j]->GetName();
        for (size_t k = 0; k < name.size(); ++k) {
          std::stringstream tmp_buf;
          tmp_buf << "Iteration:" << iter
            << ", valid_" << i + 1 << " " << name[k]
            << " : " << test_scores[k];
          if (need_output) {
            Log::Info(tmp_buf.str().c_str());
          }
          if (early_stopping_round_ > 0) {
529
            msg_buf << tmp_buf.str() << '\n';
530
          }
wxchan's avatar
wxchan committed
531
        }
532
        if (es_first_metric_only_ && j > 0) { continue; }
Guolin Ke's avatar
Guolin Ke committed
533
        if (ret.empty() && early_stopping_round_ > 0) {
534
535
536
          auto cur_score = valid_metrics_[i][j]->factor_to_bigger_better() * test_scores.back();
          if (cur_score > best_score_[i][j]) {
            best_score_[i][j] = cur_score;
537
            best_iter_[i][j] = iter;
Guolin Ke's avatar
Guolin Ke committed
538
            meet_early_stopping_pairs.emplace_back(i, j);
539
          } else {
Guolin Ke's avatar
Guolin Ke committed
540
            if (iter - best_iter_[i][j] >= early_stopping_round_) { ret = best_msg_[i][j]; }
541
          }
wxchan's avatar
wxchan committed
542
543
        }
      }
Guolin Ke's avatar
Guolin Ke committed
544
545
    }
  }
Guolin Ke's avatar
Guolin Ke committed
546
547
548
  for (auto& pair : meet_early_stopping_pairs) {
    best_msg_[pair.first][pair.second] = msg_buf.str();
  }
wxchan's avatar
wxchan committed
549
  return ret;
Guolin Ke's avatar
Guolin Ke committed
550
551
}

552
/*! \brief Get eval result */
553
std::vector<double> GBDT::GetEvalAt(int data_idx) const {
Guolin Ke's avatar
Guolin Ke committed
554
  CHECK(data_idx >= 0 && data_idx <= static_cast<int>(valid_score_updater_.size()));
555
556
  std::vector<double> ret;
  if (data_idx == 0) {
557
    for (auto& sub_metric : training_metrics_) {
Guolin Ke's avatar
Guolin Ke committed
558
      auto scores = EvalOneMetric(sub_metric, train_score_updater_->score());
559
560
561
      for (auto score : scores) {
        ret.push_back(score);
      }
562
    }
563
  } else {
564
565
    auto used_idx = data_idx - 1;
    for (size_t j = 0; j < valid_metrics_[used_idx].size(); ++j) {
Guolin Ke's avatar
Guolin Ke committed
566
      auto test_scores = EvalOneMetric(valid_metrics_[used_idx][j], valid_score_updater_[used_idx]->score());
567
568
569
      for (auto score : test_scores) {
        ret.push_back(score);
      }
570
571
572
573
574
    }
  }
  return ret;
}

Guolin Ke's avatar
Guolin Ke committed
575
/*! \brief Get training scores result */
576
const double* GBDT::GetTrainingScore(int64_t* out_len) {
577
  *out_len = static_cast<int64_t>(train_score_updater_->num_data()) * num_class_;
Guolin Ke's avatar
Guolin Ke committed
578
  return train_score_updater_->score();
579
580
}

581
void GBDT::PredictContrib(const double* features, double* output) const {
582
  // set zero
Guolin Ke's avatar
Guolin Ke committed
583
584
  const int num_features = max_feature_idx_ + 1;
  std::memset(output, 0, sizeof(double) * num_tree_per_iteration_ * (num_features + 1));
585
586
  const int end_iteration_for_pred = start_iteration_for_pred_ + num_iteration_for_pred_;
  for (int i = start_iteration_for_pred_; i < end_iteration_for_pred; ++i) {
587
588
    // predict all the trees for one iteration
    for (int k = 0; k < num_tree_per_iteration_; ++k) {
Guolin Ke's avatar
Guolin Ke committed
589
      models_[i * num_tree_per_iteration_ + k]->PredictContrib(features, num_features, output + k*(num_features + 1));
590
    }
591
592
593
594
595
596
  }
}

void GBDT::PredictContribByMap(const std::unordered_map<int, double>& features,
                               std::vector<std::unordered_map<int, double>>* output) const {
  const int num_features = max_feature_idx_ + 1;
597
598
  const int end_iteration_for_pred = start_iteration_for_pred_ + num_iteration_for_pred_;
  for (int i = start_iteration_for_pred_; i < end_iteration_for_pred; ++i) {
599
600
601
    // predict all the trees for one iteration
    for (int k = 0; k < num_tree_per_iteration_; ++k) {
      models_[i * num_tree_per_iteration_ + k]->PredictContribByMap(features, num_features, &((*output)[k]));
602
603
604
605
    }
  }
}

Guolin Ke's avatar
Guolin Ke committed
606
607
void GBDT::GetPredictAt(int data_idx, double* out_result, int64_t* out_len) {
  CHECK(data_idx >= 0 && data_idx <= static_cast<int>(valid_score_updater_.size()));
Guolin Ke's avatar
Guolin Ke committed
608

609
  const double* raw_scores = nullptr;
Guolin Ke's avatar
Guolin Ke committed
610
611
  data_size_t num_data = 0;
  if (data_idx == 0) {
wxchan's avatar
wxchan committed
612
    raw_scores = GetTrainingScore(out_len);
Guolin Ke's avatar
Guolin Ke committed
613
614
615
616
617
    num_data = train_score_updater_->num_data();
  } else {
    auto used_idx = data_idx - 1;
    raw_scores = valid_score_updater_[used_idx]->score();
    num_data = valid_score_updater_[used_idx]->num_data();
618
    *out_len = static_cast<int64_t>(num_data) * num_class_;
Guolin Ke's avatar
Guolin Ke committed
619
  }
Guolin Ke's avatar
Guolin Ke committed
620
  if (objective_function_ != nullptr) {
Guolin Ke's avatar
Guolin Ke committed
621
622
    #pragma omp parallel for schedule(static)
    for (data_size_t i = 0; i < num_data; ++i) {
Guolin Ke's avatar
Guolin Ke committed
623
      std::vector<double> tree_pred(num_tree_per_iteration_);
624
      for (int j = 0; j < num_tree_per_iteration_; ++j) {
Guolin Ke's avatar
Guolin Ke committed
625
        tree_pred[j] = raw_scores[j * num_data + i];
626
      }
Guolin Ke's avatar
Guolin Ke committed
627
628
      std::vector<double> tmp_result(num_class_);
      objective_function_->ConvertOutput(tree_pred.data(), tmp_result.data());
Guolin Ke's avatar
Guolin Ke committed
629
      for (int j = 0; j < num_class_; ++j) {
630
        out_result[j * num_data + i] = static_cast<double>(tmp_result[j]);
Guolin Ke's avatar
Guolin Ke committed
631
632
      }
    }
633
  } else {
Guolin Ke's avatar
Guolin Ke committed
634
    #pragma omp parallel for schedule(static)
Guolin Ke's avatar
Guolin Ke committed
635
    for (data_size_t i = 0; i < num_data; ++i) {
636
      for (int j = 0; j < num_tree_per_iteration_; ++j) {
Guolin Ke's avatar
Guolin Ke committed
637
        out_result[j * num_data + i] = static_cast<double>(raw_scores[j * num_data + i]);
Guolin Ke's avatar
Guolin Ke committed
638
639
640
641
642
      }
    }
  }
}

643
644
double GBDT::GetUpperBoundValue() const {
  double max_value = 0.0;
Nikita Titov's avatar
Nikita Titov committed
645
  for (const auto &tree : models_) {
646
647
648
649
650
651
652
    max_value += tree->GetUpperBoundValue();
  }
  return max_value;
}

double GBDT::GetLowerBoundValue() const {
  double min_value = 0.0;
Nikita Titov's avatar
Nikita Titov committed
653
  for (const auto &tree : models_) {
654
655
656
657
658
    min_value += tree->GetLowerBoundValue();
  }
  return min_value;
}

Guolin Ke's avatar
Guolin Ke committed
659
660
661
/*!
* \brief Swap in a new training dataset / objective / metrics without
*        discarding the trees trained so far.
* \param train_data new training data; must share bin mappers with the
*        current dataset (checked via CheckAlign)
* \param objective_function new objective, may be nullptr
* \param training_metrics metrics evaluated on the training set
* Fatal if the new dataset's bin mappers differ from the current one.
*/
void GBDT::ResetTrainingData(const Dataset* train_data, const ObjectiveFunction* objective_function,
                             const std::vector<const Metric*>& training_metrics) {
  // a different dataset is only acceptable if it was binned identically
  if (train_data != train_data_ && !train_data_->CheckAlign(*train_data)) {
    Log::Fatal("Cannot reset training data, since new training data has different bin mappers");
  }

  objective_function_ = objective_function;
  if (objective_function_ != nullptr) {
    // the objective dictates how many trees are grown per boosting iteration
    CHECK_EQ(num_tree_per_iteration_, objective_function_->NumModelPerIteration());
  }
  is_constant_hessian_ = GetIsConstHessian(objective_function);

  // push training metrics
  training_metrics_.clear();
  for (const auto& metric : training_metrics) {
    training_metrics_.push_back(metric);
  }
  training_metrics_.shrink_to_fit();

  if (train_data != train_data_) {
    train_data_ = train_data;
    // not same training data, need reset score and others
    // create score tracker
    train_score_updater_.reset(new ScoreUpdater(train_data_, num_tree_per_iteration_));

    // update score
    // replay all existing trees so the fresh score tracker reflects the
    // current model state on the new data
    for (int i = 0; i < iter_; ++i) {
      for (int cur_tree_id = 0; cur_tree_id < num_tree_per_iteration_; ++cur_tree_id) {
        auto curr_tree = (i + num_init_iteration_) * num_tree_per_iteration_ + cur_tree_id;
        train_score_updater_->AddScore(models_[curr_tree].get(), cur_tree_id);
      }
    }

    num_data_ = train_data_->num_data();

    // create buffer for gradients and hessians
    if (objective_function_ != nullptr) {
      size_t total_size = static_cast<size_t>(num_data_) * num_tree_per_iteration_;
      gradients_.resize(total_size);
      hessians_.resize(total_size);
    }

    // cache dataset metadata used when serializing / naming features
    max_feature_idx_ = train_data_->num_total_features() - 1;
    label_idx_ = train_data_->label_idx();
    feature_names_ = train_data_->feature_names();
    feature_infos_ = train_data_->feature_infos();

    tree_learner_->ResetTrainingData(train_data, is_constant_hessian_);
    // dataset changed, so bagging buffers must be rebuilt from scratch
    ResetBaggingConfig(config_.get(), true);
  } else {
    // same dataset: only the hessian-constness flag may have changed
    tree_learner_->ResetIsConstantHessian(is_constant_hessian_);
  }
}

Guolin Ke's avatar
Guolin Ke committed
713
714
void GBDT::ResetConfig(const Config* config) {
  auto new_config = std::unique_ptr<Config>(new Config(*config));
715
  if (!config->monotone_constraints.empty()) {
Nikita Titov's avatar
Nikita Titov committed
716
    CHECK_EQ(static_cast<size_t>(train_data_->num_total_features()), config->monotone_constraints.size());
717
718
  }
  if (!config->feature_contri.empty()) {
Nikita Titov's avatar
Nikita Titov committed
719
    CHECK_EQ(static_cast<size_t>(train_data_->num_total_features()), config->feature_contri.size());
720
  }
Guolin Ke's avatar
Guolin Ke committed
721
722
723
  early_stopping_round_ = new_config->early_stopping_round;
  shrinkage_rate_ = new_config->learning_rate;
  if (tree_learner_ != nullptr) {
Guolin Ke's avatar
Guolin Ke committed
724
    tree_learner_->ResetConfig(new_config.get());
725
  }
Guolin Ke's avatar
Guolin Ke committed
726
727
  if (train_data_ != nullptr) {
    ResetBaggingConfig(new_config.get(), false);
728
  }
729
  if (config_.get() != nullptr && config_->forcedsplits_filename != new_config->forcedsplits_filename) {
730
731
732
733
734
735
736
    // load forced_splits file
    if (!new_config->forcedsplits_filename.empty()) {
      std::ifstream forced_splits_file(
          new_config->forcedsplits_filename.c_str());
      std::stringstream buffer;
      buffer << forced_splits_file.rdbuf();
      std::string err;
Guolin Ke's avatar
Guolin Ke committed
737
      forced_splits_json_ = Json::parse(buffer.str(), &err);
738
739
740
741
742
743
      tree_learner_->SetForcedSplit(&forced_splits_json_);
    } else {
      forced_splits_json_ = Json();
      tree_learner_->SetForcedSplit(nullptr);
    }
  }
Guolin Ke's avatar
Guolin Ke committed
744
  config_.reset(new_config.release());
Guolin Ke's avatar
Guolin Ke committed
745
746
}

Guolin Ke's avatar
Guolin Ke committed
747
void GBDT::ResetBaggingConfig(const Config* config, bool is_change_dataset) {
Guolin Ke's avatar
Guolin Ke committed
748
  // if need bagging, create buffer
Guolin Ke's avatar
Guolin Ke committed
749
750
751
752
753
754
  data_size_t num_pos_data = 0;
  if (objective_function_ != nullptr) {
    num_pos_data = objective_function_->NumPositiveData();
  }
  bool balance_bagging_cond = (config->pos_bagging_fraction < 1.0 || config->neg_bagging_fraction < 1.0) && (num_pos_data > 0);
  if ((config->bagging_fraction < 1.0 || balance_bagging_cond) && config->bagging_freq > 0) {
755
756
    need_re_bagging_ = false;
    if (!is_change_dataset &&
Guolin Ke's avatar
Guolin Ke committed
757
758
      config_.get() != nullptr && config_->bagging_fraction == config->bagging_fraction && config_->bagging_freq == config->bagging_freq
      && config_->pos_bagging_fraction == config->pos_bagging_fraction && config_->neg_bagging_fraction == config->neg_bagging_fraction) {
759
760
      return;
    }
Guolin Ke's avatar
Guolin Ke committed
761
762
    if (balance_bagging_cond) {
      balanced_bagging_ = true;
763
      bag_data_cnt_ = static_cast<data_size_t>(num_pos_data * config->pos_bagging_fraction)
Guolin Ke's avatar
Guolin Ke committed
764
765
766
767
                      + static_cast<data_size_t>((num_data_ - num_pos_data) * config->neg_bagging_fraction);
    } else {
      bag_data_cnt_ = static_cast<data_size_t>(config->bagging_fraction * num_data_);
    }
Guolin Ke's avatar
Guolin Ke committed
768
    bag_data_indices_.resize(num_data_);
769
770
771
772
773
774
    bagging_runner_.ReSize(num_data_);
    bagging_rands_.clear();
    for (int i = 0;
         i < (num_data_ + bagging_rand_block_ - 1) / bagging_rand_block_; ++i) {
      bagging_rands_.emplace_back(config_->bagging_seed + i);
    }
775

776
777
    double average_bag_rate =
        (static_cast<double>(bag_data_cnt_) / num_data_) / config->bagging_freq;
Guolin Ke's avatar
Guolin Ke committed
778
779
    is_use_subset_ = false;
    const int group_threshold_usesubset = 100;
780
    if (average_bag_rate <= 0.5
781
        && (train_data_->num_feature_groups() < group_threshold_usesubset)) {
Guolin Ke's avatar
Guolin Ke committed
782
783
784
785
786
      if (tmp_subset_ == nullptr || is_change_dataset) {
        tmp_subset_.reset(new Dataset(bag_data_cnt_));
        tmp_subset_->CopyFeatureMapperFrom(train_data_);
      }
      is_use_subset_ = true;
787
      Log::Debug("Use subset for bagging");
Guolin Ke's avatar
Guolin Ke committed
788
789
    }

790
    need_re_bagging_ = true;
791

Guolin Ke's avatar
Guolin Ke committed
792
793
794
795
796
    if (is_use_subset_ && bag_data_cnt_ < num_data_) {
      if (objective_function_ == nullptr) {
        size_t total_size = static_cast<size_t>(num_data_) * num_tree_per_iteration_;
        gradients_.resize(total_size);
        hessians_.resize(total_size);
797
      }
798
    }
799
  } else {
Guolin Ke's avatar
Guolin Ke committed
800
801
    bag_data_cnt_ = num_data_;
    bag_data_indices_.clear();
802
    bagging_runner_.ReSize(0);
Guolin Ke's avatar
Guolin Ke committed
803
    is_use_subset_ = false;
804
  }
wxchan's avatar
wxchan committed
805
806
}

Guolin Ke's avatar
Guolin Ke committed
807
}  // namespace LightGBM