"src/vscode:/vscode.git/clone" did not exist on "2051223b826cce4b6d9574c6f33dc380091ddfd7"
feature_histogram.hpp 43.6 KB
Newer Older
1
2
/*!
 * Copyright (c) 2016 Microsoft Corporation. All rights reserved.
3
4
 * Licensed under the MIT License. See LICENSE file in the project root for
 * license information.
5
 */
Guolin Ke's avatar
Guolin Ke committed
6
7
8
#ifndef LIGHTGBM_TREELEARNER_FEATURE_HISTOGRAM_HPP_
#define LIGHTGBM_TREELEARNER_FEATURE_HISTOGRAM_HPP_

9
#include <LightGBM/bin.h>
Guolin Ke's avatar
Guolin Ke committed
10
#include <LightGBM/dataset.h>
11
#include <LightGBM/utils/array_args.h>
Guolin Ke's avatar
Guolin Ke committed
12

13
#include <algorithm>
14
#include <cmath>
15
16
17
18
19
#include <cstring>
#include <memory>
#include <utility>
#include <vector>

20
#include "monotone_constraints.hpp"
Nikita Titov's avatar
Nikita Titov committed
21
#include "split_info.hpp"
Guolin Ke's avatar
Guolin Ke committed
22

23
namespace LightGBM {
Guolin Ke's avatar
Guolin Ke committed
24

Guolin Ke's avatar
Guolin Ke committed
25
class FeatureMetainfo {
 public:
  /*! \brief number of bins of this feature */
  int num_bin;
  /*! \brief how missing values are encoded for this feature (None/Zero/NaN) */
  MissingType missing_type;
  /*! \brief number of leading bins not stored in the histogram buffer
   *  (histogram size is num_bin - offset) — NOTE(review): presumed; confirm
   *  against histogram construction code */
  int8_t offset = 0;
  /*! \brief bin index of the feature's default value (skipped when zero is
   *  treated as missing) */
  uint32_t default_bin;
  /*! \brief monotone constraint: 0 = none, >0 increasing, <0 decreasing */
  int8_t monotone_type = 0;
  /*! \brief multiplicative penalty applied to this feature's split gain */
  double penalty = 1.0;
  /*! \brief pointer of tree config */
  const Config* config;
  /*! \brief numerical or categorical binning for this feature */
  BinType bin_type;
  /*! \brief random number generator for extremely randomized trees */
  mutable Random rand;
};
Guolin Ke's avatar
Guolin Ke committed
39
/*!
40
41
42
 * \brief FeatureHistogram is used to construct and store a histogram for a
 * feature.
 */
Guolin Ke's avatar
Guolin Ke committed
43
class FeatureHistogram {
44
 public:
45
  FeatureHistogram() { data_ = nullptr; }
Guolin Ke's avatar
Guolin Ke committed
46

47
  ~FeatureHistogram() {}
Guolin Ke's avatar
Guolin Ke committed
48

Guolin Ke's avatar
Guolin Ke committed
49
50
51
52
53
  /*! \brief Disable copy */
  FeatureHistogram& operator=(const FeatureHistogram&) = delete;
  /*! \brief Disable copy */
  FeatureHistogram(const FeatureHistogram&) = delete;

Guolin Ke's avatar
Guolin Ke committed
54
  /*!
55
56
57
58
   * \brief Init the feature histogram
   * \param feature the feature data for this histogram
   * \param min_num_data_one_leaf minimal number of data in one leaf
   */
59
  /*!
   * \brief Init the feature histogram
   * \param data raw histogram buffer this object will operate on
   * \param meta per-feature metadata (bin count, missing type, config, ...)
   */
  void Init(hist_t* data, const FeatureMetainfo* meta) {
    data_ = data;
    meta_ = meta;
    // Pick the split-finding implementation matching this feature's bin type.
    ResetFunc();
  }

  void ResetFunc() {
66
    if (meta_->bin_type == BinType::NumericalBin) {
67
      FuncForNumrical();
68
    } else {
69
      FuncForCategorical();
70
    }
Guolin Ke's avatar
Guolin Ke committed
71
72
  }

73
  /*! \brief Raw pointer to the underlying histogram buffer. */
  hist_t* RawData() { return data_; }
74

Guolin Ke's avatar
Guolin Ke committed
75
  /*!
76
77
78
   * \brief Subtract current histograms with other
   * \param other The histogram that want to subtract
   */
Guolin Ke's avatar
Guolin Ke committed
79
  void Subtract(const FeatureHistogram& other) {
80
81
    for (int i = 0; i < (meta_->num_bin - meta_->offset) * 2; ++i) {
      data_[i] -= other.data_[i];
Guolin Ke's avatar
Guolin Ke committed
82
83
    }
  }
84

85
86
87
88
  void FindBestThreshold(double sum_gradient, double sum_hessian,
                         data_size_t num_data,
                         const ConstraintEntry& constraints,
                         SplitInfo* output) {
Guolin Ke's avatar
Guolin Ke committed
89
    output->default_left = true;
Guolin Ke's avatar
Guolin Ke committed
90
    output->gain = kMinScore;
91
92
    find_best_threshold_fun_(sum_gradient, sum_hessian + 2 * kEpsilon, num_data,
                             constraints, output);
Guolin Ke's avatar
Guolin Ke committed
93
    output->gain *= meta_->penalty;
94
95
  }

96
97
98
  template <bool USE_RAND, bool USE_L1, bool USE_MAX_OUTPUT>
  double BeforeNumercal(double sum_gradient, double sum_hessian,
                        SplitInfo* output, int* rand_threshold) {
Guolin Ke's avatar
Guolin Ke committed
99
    is_splittable_ = false;
100
101
102
103
104
105
106
107
108
    output->monotone_type = meta_->monotone_type;
    double gain_shift = GetLeafGain<USE_L1, USE_MAX_OUTPUT>(
        sum_gradient, sum_hessian, meta_->config->lambda_l1,
        meta_->config->lambda_l2, meta_->config->max_delta_step);
    *rand_threshold = 0;
    if (USE_RAND) {
      if (meta_->num_bin - 2 > 0) {
        *rand_threshold = meta_->rand.NextInt(0, meta_->num_bin - 2);
      }
109
    }
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
    return gain_shift + meta_->config->min_gain_to_split;
  }

  /*! \brief Select numerical split-finding variant: extra-trees on/off,
   *  monotone constraints on/off (template flags fixed at setup time). */
  void FuncForNumrical() {
    const bool no_monotone = meta_->config->monotone_constraints.empty();
    if (meta_->config->extra_trees) {
      no_monotone ? FuncForNumricalL1<true, false>()
                  : FuncForNumricalL1<true, true>();
    } else {
      no_monotone ? FuncForNumricalL1<false, false>()
                  : FuncForNumricalL1<false, true>();
    }
  }
  /*! \brief Second dispatch stage: bake L1 regularization and max_delta_step
   *  usage into the template flags. */
  template <bool USE_RAND, bool USE_MC>
  void FuncForNumricalL1() {
    const bool with_l1 = meta_->config->lambda_l1 > 0;
    const bool with_max_output = meta_->config->max_delta_step > 0;
    if (with_l1) {
      with_max_output ? FuncForNumricalL2<USE_RAND, USE_MC, true, true>()
                      : FuncForNumricalL2<USE_RAND, USE_MC, true, false>();
    } else {
      with_max_output ? FuncForNumricalL2<USE_RAND, USE_MC, false, true>()
                      : FuncForNumricalL2<USE_RAND, USE_MC, false, false>();
    }
  }

  template <bool USE_RAND, bool USE_MC, bool USE_L1, bool USE_MAX_OUTPUT>
  void FuncForNumricalL2() {
147
148
149
150
151
152
153
154
155
#define TEMPLATE_PREFIX USE_RAND, USE_MC, USE_L1, USE_MAX_OUTPUT
#define LAMBDA_ARGUMENTS                                         \
  double sum_gradient, double sum_hessian, data_size_t num_data, \
      const ConstraintEntry &constraints, SplitInfo *output
#define BEFORE_ARGUMENTS sum_gradient, sum_hessian, output, &rand_threshold
#define FUNC_ARGUMENTS                                                      \
  sum_gradient, sum_hessian, num_data, constraints, min_gain_shift, output, \
      rand_threshold

Guolin Ke's avatar
Guolin Ke committed
156
157
    if (meta_->num_bin > 2 && meta_->missing_type != MissingType::None) {
      if (meta_->missing_type == MissingType::Zero) {
158
159
160
161
162
163
164
165
166
167
        find_best_threshold_fun_ = [=](LAMBDA_ARGUMENTS) {
          int rand_threshold = 0;
          double min_gain_shift =
              BeforeNumercal<USE_RAND, USE_L1, USE_MAX_OUTPUT>(
                  BEFORE_ARGUMENTS);
          FindBestThresholdSequentially<TEMPLATE_PREFIX, true, true, false>(
              FUNC_ARGUMENTS);
          FindBestThresholdSequentially<TEMPLATE_PREFIX, false, true, false>(
              FUNC_ARGUMENTS);
        };
Guolin Ke's avatar
Guolin Ke committed
168
      } else {
169
170
171
172
173
174
175
176
177
178
        find_best_threshold_fun_ = [=](LAMBDA_ARGUMENTS) {
          int rand_threshold = 0;
          double min_gain_shift =
              BeforeNumercal<USE_RAND, USE_L1, USE_MAX_OUTPUT>(
                  BEFORE_ARGUMENTS);
          FindBestThresholdSequentially<TEMPLATE_PREFIX, true, false, true>(
              FUNC_ARGUMENTS);
          FindBestThresholdSequentially<TEMPLATE_PREFIX, false, false, true>(
              FUNC_ARGUMENTS);
        };
Guolin Ke's avatar
Guolin Ke committed
179
      }
180
    } else {
181
      if (meta_->missing_type != MissingType::NaN) {
182
183
184
185
186
187
188
189
        find_best_threshold_fun_ = [=](LAMBDA_ARGUMENTS) {
          int rand_threshold = 0;
          double min_gain_shift =
              BeforeNumercal<USE_RAND, USE_L1, USE_MAX_OUTPUT>(
                  BEFORE_ARGUMENTS);
          FindBestThresholdSequentially<TEMPLATE_PREFIX, true, false, false>(
              FUNC_ARGUMENTS);
        };
190
      } else {
191
192
193
194
195
196
197
198
199
200
        find_best_threshold_fun_ = [=](LAMBDA_ARGUMENTS) {
          int rand_threshold = 0;
          double min_gain_shift =
              BeforeNumercal<USE_RAND, USE_L1, USE_MAX_OUTPUT>(
                  BEFORE_ARGUMENTS);
          FindBestThresholdSequentially<USE_RAND, USE_MC, USE_L1,
                                        USE_MAX_OUTPUT, true, false, false>(
              FUNC_ARGUMENTS);
          output->default_left = false;
        };
Guolin Ke's avatar
Guolin Ke committed
201
      }
Guolin Ke's avatar
Guolin Ke committed
202
    }
203
204
205
206
#undef TEMPLATE_PREFIX
#undef LAMBDA_ARGUMENTS
#undef BEFORE_ARGUMENTS
#undef FUNC_ARGURMENTS
Guolin Ke's avatar
Guolin Ke committed
207
  }
208

209
  /*! \brief Select categorical split-finding variant: extra-trees on/off,
   *  monotone constraints on/off (template flags fixed at setup time). */
  void FuncForCategorical() {
    const bool no_monotone = meta_->config->monotone_constraints.empty();
    if (meta_->config->extra_trees) {
      no_monotone ? FuncForCategoricalL1<true, false>()
                  : FuncForCategoricalL1<true, true>();
    } else {
      no_monotone ? FuncForCategoricalL1<false, false>()
                  : FuncForCategoricalL1<false, true>();
    }
  }
224

225
226
  template <bool USE_RAND, bool USE_MC>
  void FuncForCategoricalL1() {
227
228
229
#define ARGUMENTS                                                      \
  std::placeholders::_1, std::placeholders::_2, std::placeholders::_3, \
      std::placeholders::_4, std::placeholders::_5
230
231
232
233
234
    if (meta_->config->lambda_l1 > 0) {
      if (meta_->config->max_delta_step > 0) {
        find_best_threshold_fun_ =
            std::bind(&FeatureHistogram::FindBestThresholdCategoricalInner<
                          USE_RAND, USE_MC, true, true>,
235
                      this, ARGUMENTS);
236
237
238
239
      } else {
        find_best_threshold_fun_ =
            std::bind(&FeatureHistogram::FindBestThresholdCategoricalInner<
                          USE_RAND, USE_MC, true, false>,
240
                      this, ARGUMENTS);
241
      }
242
    } else {
243
244
245
246
      if (meta_->config->max_delta_step > 0) {
        find_best_threshold_fun_ =
            std::bind(&FeatureHistogram::FindBestThresholdCategoricalInner<
                          USE_RAND, USE_MC, false, true>,
247
                      this, ARGUMENTS);
248
249
250
251
      } else {
        find_best_threshold_fun_ =
            std::bind(&FeatureHistogram::FindBestThresholdCategoricalInner<
                          USE_RAND, USE_MC, false, false>,
252
                      this, ARGUMENTS);
253
      }
254
    }
255
#undef ARGUMENTS
256
257
  }

258
259
260
261
262
263
264
  /*!
   * \brief Search the best categorical split for one leaf.
   *
   * Two strategies:
   *  - one-hot (num_bin <= max_cat_to_onehot): test each category alone vs.
   *    the rest;
   *  - sorted many-vs-many: keep categories with enough hessian mass, sort
   *    them by gradient/hessian ratio (smoothed by cat_smooth), then scan
   *    prefixes from both ends.
   * On success fills `output` with leaf outputs, counts, sums, gain, and the
   * chosen category set; otherwise leaves is_splittable_ false.
   * \param sum_gradient sum of gradients on the leaf
   * \param sum_hessian sum of hessians on the leaf (epsilon-stabilized)
   * \param num_data number of data points on the leaf
   * \param constraints monotone-constraint bounds
   * \param output best split found, if any
   */
  template <bool USE_RAND, bool USE_MC, bool USE_L1, bool USE_MAX_OUTPUT>
  void FindBestThresholdCategoricalInner(double sum_gradient,
                                         double sum_hessian,
                                         data_size_t num_data,
                                         const ConstraintEntry& constraints,
                                         SplitInfo* output) {
    is_splittable_ = false;
    output->default_left = false;
    double best_gain = kMinScore;
    data_size_t best_left_count = 0;
    double best_sum_left_gradient = 0;
    double best_sum_left_hessian = 0;
    // Gain of keeping the leaf unsplit; a split must beat this plus
    // min_gain_to_split.
    double gain_shift = GetLeafGain<USE_L1, USE_MAX_OUTPUT>(
        sum_gradient, sum_hessian, meta_->config->lambda_l1,
        meta_->config->lambda_l2, meta_->config->max_delta_step);

    double min_gain_shift = gain_shift + meta_->config->min_gain_to_split;
    bool is_full_categorical = meta_->missing_type == MissingType::None;
    int used_bin = meta_->num_bin - 1 + is_full_categorical;

    std::vector<int> sorted_idx;
    double l2 = meta_->config->lambda_l2;
    bool use_onehot = meta_->num_bin <= meta_->config->max_cat_to_onehot;
    int best_threshold = -1;
    int best_dir = 1;
    // Converts hessian mass back to an (approximate) data count.
    const double cnt_factor = num_data / sum_hessian;
    int rand_threshold = 0;
    if (use_onehot) {
      if (USE_RAND) {
        if (used_bin > 0) {
          rand_threshold = meta_->rand.NextInt(0, used_bin);
        }
      }
      // Try each category alone on the left, the rest on the right.
      for (int t = 0; t < used_bin; ++t) {
        const auto grad = GET_GRAD(data_, t);
        const auto hess = GET_HESS(data_, t);
        data_size_t cnt =
            static_cast<data_size_t>(Common::RoundInt(hess * cnt_factor));
        // if data not enough, or sum hessian too small
        if (cnt < meta_->config->min_data_in_leaf ||
            hess < meta_->config->min_sum_hessian_in_leaf) {
          continue;
        }
        data_size_t other_count = num_data - cnt;
        // if data not enough
        if (other_count < meta_->config->min_data_in_leaf) {
          continue;
        }

        double sum_other_hessian = sum_hessian - hess - kEpsilon;
        // if sum hessian too small
        if (sum_other_hessian < meta_->config->min_sum_hessian_in_leaf) {
          continue;
        }

        double sum_other_gradient = sum_gradient - grad;
        if (USE_RAND) {
          // extra-trees: only the randomly chosen category is evaluated
          if (t != rand_threshold) {
            continue;
          }
        }
        // current split gain
        double current_gain = GetSplitGains<USE_MC, USE_L1, USE_MAX_OUTPUT>(
            sum_other_gradient, sum_other_hessian, grad, hess + kEpsilon,
            meta_->config->lambda_l1, l2, meta_->config->max_delta_step,
            constraints, 0);
        // gain with split is worse than without split
        if (current_gain <= min_gain_shift) {
          continue;
        }

        // mark to is splittable
        is_splittable_ = true;
        // better split point
        if (current_gain > best_gain) {
          best_threshold = t;
          best_sum_left_gradient = grad;
          best_sum_left_hessian = hess + kEpsilon;
          best_left_count = cnt;
          best_gain = current_gain;
        }
      }
    } else {
      // Many-vs-many: keep only categories with enough (smoothed) mass.
      for (int i = 0; i < used_bin; ++i) {
        if (Common::RoundInt(GET_HESS(data_, i) * cnt_factor) >=
            meta_->config->cat_smooth) {
          sorted_idx.push_back(i);
        }
      }
      used_bin = static_cast<int>(sorted_idx.size());

      l2 += meta_->config->cat_l2;

      // Smoothed gradient/hessian ratio used as the sort key.
      auto ctr_fun = [this](double sum_grad, double sum_hess) {
        return (sum_grad) / (sum_hess + meta_->config->cat_smooth);
      };
      std::sort(sorted_idx.begin(), sorted_idx.end(),
                [this, &ctr_fun](int i, int j) {
                  return ctr_fun(GET_GRAD(data_, i), GET_HESS(data_, i)) <
                         ctr_fun(GET_GRAD(data_, j), GET_HESS(data_, j));
                });

      // Scan prefixes from both ends of the sorted order.
      std::vector<int> find_direction(1, 1);
      std::vector<int> start_position(1, 0);
      find_direction.push_back(-1);
      start_position.push_back(used_bin - 1);
      const int max_num_cat =
          std::min(meta_->config->max_cat_threshold, (used_bin + 1) / 2);
      int max_threshold = std::max(std::min(max_num_cat, used_bin) - 1, 0);
      if (USE_RAND) {
        if (max_threshold > 0) {
          rand_threshold = meta_->rand.NextInt(0, max_threshold);
        }
      }

      is_splittable_ = false;
      for (size_t out_i = 0; out_i < find_direction.size(); ++out_i) {
        auto dir = find_direction[out_i];
        auto start_pos = start_position[out_i];
        data_size_t min_data_per_group = meta_->config->min_data_per_group;
        data_size_t cnt_cur_group = 0;
        double sum_left_gradient = 0.0f;
        double sum_left_hessian = kEpsilon;
        data_size_t left_count = 0;
        for (int i = 0; i < used_bin && i < max_num_cat; ++i) {
          auto t = sorted_idx[start_pos];
          start_pos += dir;
          const auto grad = GET_GRAD(data_, t);
          const auto hess = GET_HESS(data_, t);
          data_size_t cnt =
              static_cast<data_size_t>(Common::RoundInt(hess * cnt_factor));

          // Grow the left (prefix) side by one category.
          sum_left_gradient += grad;
          sum_left_hessian += hess;
          left_count += cnt;
          cnt_cur_group += cnt;

          if (left_count < meta_->config->min_data_in_leaf ||
              sum_left_hessian < meta_->config->min_sum_hessian_in_leaf) {
            continue;
          }
          data_size_t right_count = num_data - left_count;
          if (right_count < meta_->config->min_data_in_leaf ||
              right_count < min_data_per_group) {
            break;
          }

          double sum_right_hessian = sum_hessian - sum_left_hessian;
          if (sum_right_hessian < meta_->config->min_sum_hessian_in_leaf) {
            break;
          }

          // Only evaluate once the current group accumulated enough data.
          if (cnt_cur_group < min_data_per_group) {
            continue;
          }

          cnt_cur_group = 0;

          double sum_right_gradient = sum_gradient - sum_left_gradient;
          if (USE_RAND) {
            if (i != rand_threshold) {
              continue;
            }
          }
          double current_gain = GetSplitGains<USE_MC, USE_L1, USE_MAX_OUTPUT>(
              sum_left_gradient, sum_left_hessian, sum_right_gradient,
              sum_right_hessian, meta_->config->lambda_l1, l2,
              meta_->config->max_delta_step, constraints, 0);
          if (current_gain <= min_gain_shift) {
            continue;
          }
          is_splittable_ = true;
          if (current_gain > best_gain) {
            best_left_count = left_count;
            best_sum_left_gradient = sum_left_gradient;
            best_sum_left_hessian = sum_left_hessian;
            // best_threshold is a prefix length into sorted_idx here.
            best_threshold = i;
            best_gain = current_gain;
            best_dir = dir;
          }
        }
      }
    }

    if (is_splittable_) {
      output->left_output =
          CalculateSplittedLeafOutput<USE_MC, USE_L1, USE_MAX_OUTPUT>(
              best_sum_left_gradient, best_sum_left_hessian,
              meta_->config->lambda_l1, l2, meta_->config->max_delta_step,
              constraints);
      output->left_count = best_left_count;
      output->left_sum_gradient = best_sum_left_gradient;
      output->left_sum_hessian = best_sum_left_hessian - kEpsilon;
      output->right_output =
          CalculateSplittedLeafOutput<USE_MC, USE_L1, USE_MAX_OUTPUT>(
              sum_gradient - best_sum_left_gradient,
              sum_hessian - best_sum_left_hessian, meta_->config->lambda_l1, l2,
              meta_->config->max_delta_step, constraints);
      output->right_count = num_data - best_left_count;
      output->right_sum_gradient = sum_gradient - best_sum_left_gradient;
      output->right_sum_hessian =
          sum_hessian - best_sum_left_hessian - kEpsilon;
      output->gain = best_gain - min_gain_shift;
      if (use_onehot) {
        output->num_cat_threshold = 1;
        output->cat_threshold =
            std::vector<uint32_t>(1, static_cast<uint32_t>(best_threshold));
      } else {
        // Materialize the winning prefix (in scan order) as the category set.
        output->num_cat_threshold = best_threshold + 1;
        output->cat_threshold =
            std::vector<uint32_t>(output->num_cat_threshold);
        if (best_dir == 1) {
          for (int i = 0; i < output->num_cat_threshold; ++i) {
            auto t = sorted_idx[i];
            output->cat_threshold[i] = t;
          }
        } else {
          for (int i = 0; i < output->num_cat_threshold; ++i) {
            auto t = sorted_idx[used_bin - 1 - i];
            output->cat_threshold[i] = t;
          }
        }
      }
      // Categorical splits never carry a monotone direction.
      output->monotone_type = 0;
    }
  }

485
  void GatherInfoForThreshold(double sum_gradient, double sum_hessian,
486
487
                              uint32_t threshold, data_size_t num_data,
                              SplitInfo* output) {
488
    if (meta_->bin_type == BinType::NumericalBin) {
489
490
      GatherInfoForThresholdNumerical(sum_gradient, sum_hessian, threshold,
                                      num_data, output);
491
    } else {
492
493
      GatherInfoForThresholdCategorical(sum_gradient, sum_hessian, threshold,
                                        num_data, output);
494
495
496
497
    }
  }

  /*!
   * \brief Fill SplitInfo for a forced numerical threshold.
   *
   * Accumulates the right-side gradient/hessian by walking bins from the
   * highest bin down to the threshold, derives the left side by subtraction,
   * and rejects the split (gain = kMinScore, with a warning) if it is not
   * better than leaving the leaf unsplit.
   * \param sum_gradient sum of gradients on the leaf
   * \param sum_hessian sum of hessians on the leaf
   * \param threshold the forced split bin
   * \param num_data number of data points on the leaf
   * \param output filled with the resulting split information
   */
  void GatherInfoForThresholdNumerical(double sum_gradient, double sum_hessian,
                                       uint32_t threshold, data_size_t num_data,
                                       SplitInfo* output) {
    // Gain of the unsplit leaf; the forced split must beat this margin.
    double gain_shift = GetLeafGain<true, true>(
        sum_gradient, sum_hessian, meta_->config->lambda_l1,
        meta_->config->lambda_l2, meta_->config->max_delta_step);
    double min_gain_shift = gain_shift + meta_->config->min_gain_to_split;

    const int8_t offset = meta_->offset;

    double sum_right_gradient = 0.0f;
    double sum_right_hessian = kEpsilon;
    data_size_t right_count = 0;

    // Decide how missing values affect the scan bounds.
    bool use_na_as_missing = false;
    bool skip_default_bin = false;
    if (meta_->missing_type == MissingType::Zero) {
      skip_default_bin = true;
    } else if (meta_->missing_type == MissingType::NaN) {
      use_na_as_missing = true;
    }

    int t = meta_->num_bin - 1 - offset - use_na_as_missing;
    const int t_end = 1 - offset;
    // Converts hessian mass back to an (approximate) data count.
    const double cnt_factor = num_data / sum_hessian;
    // from right to left, and we don't need data in bin0
    for (; t >= t_end; --t) {
      // Stop once we cross below the forced threshold.
      if (static_cast<uint32_t>(t + offset) < threshold) {
        break;
      }

      // need to skip default bin
      if (skip_default_bin &&
          (t + offset) == static_cast<int>(meta_->default_bin)) {
        continue;
      }
      const auto grad = GET_GRAD(data_, t);
      const auto hess = GET_HESS(data_, t);
      data_size_t cnt =
          static_cast<data_size_t>(Common::RoundInt(hess * cnt_factor));
      sum_right_gradient += grad;
      sum_right_hessian += hess;
      right_count += cnt;
    }
    // Left side is everything not accumulated on the right.
    double sum_left_gradient = sum_gradient - sum_right_gradient;
    double sum_left_hessian = sum_hessian - sum_right_hessian;
    data_size_t left_count = num_data - right_count;
    double current_gain =
        GetLeafGain<true, true>(
            sum_left_gradient, sum_left_hessian, meta_->config->lambda_l1,
            meta_->config->lambda_l2, meta_->config->max_delta_step) +
        GetLeafGain<true, true>(
            sum_right_gradient, sum_right_hessian, meta_->config->lambda_l1,
            meta_->config->lambda_l2, meta_->config->max_delta_step);

    // gain with split is worse than without split
    if (std::isnan(current_gain) || current_gain <= min_gain_shift) {
      output->gain = kMinScore;
      Log::Warning(
          "'Forced Split' will be ignored since the gain getting worse.");
      return;
    }

    // update split information
    output->threshold = threshold;
    output->left_output = CalculateSplittedLeafOutput<true, true>(
        sum_left_gradient, sum_left_hessian, meta_->config->lambda_l1,
        meta_->config->lambda_l2, meta_->config->max_delta_step);
    output->left_count = left_count;
    output->left_sum_gradient = sum_left_gradient;
    output->left_sum_hessian = sum_left_hessian - kEpsilon;
    output->right_output = CalculateSplittedLeafOutput<true, true>(
        sum_gradient - sum_left_gradient, sum_hessian - sum_left_hessian,
        meta_->config->lambda_l1, meta_->config->lambda_l2,
        meta_->config->max_delta_step);
    output->right_count = num_data - left_count;
    output->right_sum_gradient = sum_gradient - sum_left_gradient;
    output->right_sum_hessian = sum_hessian - sum_left_hessian - kEpsilon;
    output->gain = current_gain - min_gain_shift;
    output->default_left = true;
  }

581
582
583
584
  /*!
   * \brief Fill SplitInfo for a forced one-hot categorical split.
   *
   * The chosen category (bin `threshold`) goes left, everything else right.
   * Rejects the split (gain = kMinScore, with a warning) if the threshold is
   * out of range or the gain does not beat the unsplit leaf.
   * \param sum_gradient sum of gradients on the leaf
   * \param sum_hessian sum of hessians on the leaf
   * \param threshold the forced category (bin index)
   * \param num_data number of data points on the leaf
   * \param output filled with the resulting split information
   */
  void GatherInfoForThresholdCategorical(double sum_gradient,
                                         double sum_hessian, uint32_t threshold,
                                         data_size_t num_data,
                                         SplitInfo* output) {
    // get SplitInfo for a given one-hot categorical split.
    output->default_left = false;
    double gain_shift = GetLeafGain<true, true>(
        sum_gradient, sum_hessian, meta_->config->lambda_l1,
        meta_->config->lambda_l2, meta_->config->max_delta_step);
    double min_gain_shift = gain_shift + meta_->config->min_gain_to_split;
    bool is_full_categorical = meta_->missing_type == MissingType::None;
    int used_bin = meta_->num_bin - 1 + is_full_categorical;
    if (threshold >= static_cast<uint32_t>(used_bin)) {
      output->gain = kMinScore;
      Log::Warning("Invalid categorical threshold split");
      return;
    }
    // Converts hessian mass back to an (approximate) data count.
    const double cnt_factor = num_data / sum_hessian;
    const auto grad = GET_GRAD(data_, threshold);
    const auto hess = GET_HESS(data_, threshold);
    data_size_t cnt =
        static_cast<data_size_t>(Common::RoundInt(hess * cnt_factor));

    double l2 = meta_->config->lambda_l2;
    data_size_t left_count = cnt;
    data_size_t right_count = num_data - left_count;
    double sum_left_hessian = hess + kEpsilon;
    double sum_right_hessian = sum_hessian - sum_left_hessian;
    double sum_left_gradient = grad;
    double sum_right_gradient = sum_gradient - sum_left_gradient;
    // current split gain
    double current_gain =
        GetLeafGain<true, true>(sum_right_gradient, sum_right_hessian,
                                meta_->config->lambda_l1, l2,
                                meta_->config->max_delta_step) +
        GetLeafGain<true, true>(sum_left_gradient, sum_left_hessian,
                                meta_->config->lambda_l1, l2,
                                meta_->config->max_delta_step);
    // Reject if the forced split does not improve on the unsplit leaf.
    if (std::isnan(current_gain) || current_gain <= min_gain_shift) {
      output->gain = kMinScore;
      Log::Warning(
          "'Forced Split' will be ignored since the gain getting worse.");
      return;
    }

    output->left_output = CalculateSplittedLeafOutput<true, true>(
        sum_left_gradient, sum_left_hessian, meta_->config->lambda_l1, l2,
        meta_->config->max_delta_step);
    output->left_count = left_count;
    output->left_sum_gradient = sum_left_gradient;
    output->left_sum_hessian = sum_left_hessian - kEpsilon;
    output->right_output = CalculateSplittedLeafOutput<true, true>(
        sum_right_gradient, sum_right_hessian, meta_->config->lambda_l1, l2,
        meta_->config->max_delta_step);
    output->right_count = right_count;
    output->right_sum_gradient = sum_gradient - sum_left_gradient;
    output->right_sum_hessian = sum_right_hessian - kEpsilon;
    output->gain = current_gain - min_gain_shift;
    output->num_cat_threshold = 1;
    output->cat_threshold = std::vector<uint32_t>(1, threshold);
  }

Guolin Ke's avatar
Guolin Ke committed
643
  /*!
644
645
   * \brief Binary size of this histogram
   */
Guolin Ke's avatar
Guolin Ke committed
646
  int SizeOfHistgram() const {
647
    return (meta_->num_bin - meta_->offset) * kHistEntrySize;
Guolin Ke's avatar
Guolin Ke committed
648
649
650
  }

  /*!
651
652
   * \brief Restore histogram from memory
   */
Guolin Ke's avatar
Guolin Ke committed
653
  void FromMemory(char* memory_data) {
654
655
    std::memcpy(data_, memory_data,
                (meta_->num_bin - meta_->offset) * kHistEntrySize);
Guolin Ke's avatar
Guolin Ke committed
656
657
658
  }

  /*!
   * \brief True if the most recent search on this histogram found a valid split
   */
  bool is_splittable() { return is_splittable_; }

  /*!
   * \brief Set the splittable flag for this histogram
   */
  void set_is_splittable(bool val) { is_splittable_ = val; }

668
669
670
671
672
  /*! \brief L1 soft-thresholding: shrink |s| by l1, clipping at zero,
   *  while preserving the sign of s. */
  static double ThresholdL1(double s, double l1) {
    const double shrunk = std::fabs(s) - l1;
    if (shrunk <= 0.0) {
      return 0.0;
    }
    return Common::Sign(s) * shrunk;
  }

673
674
675
676
677
678
679
680
681
682
683
  /*!
   * \brief Optimal leaf output -g/(h + l2), with optional L1 shrinkage of the
   *        gradient sum and optional clamping to max_delta_step.
   */
  template <bool USE_L1, bool USE_MAX_OUTPUT>
  static double CalculateSplittedLeafOutput(double sum_gradients,
                                            double sum_hessians, double l1,
                                            double l2, double max_delta_step) {
    // With L1 the gradient sum is soft-thresholded before the division;
    // both branches share one clamp below.
    const double numerator =
        USE_L1 ? ThresholdL1(sum_gradients, l1) : sum_gradients;
    const double output = -numerator / (sum_hessians + l2);
    if (USE_MAX_OUTPUT) {
      if (max_delta_step > 0 && std::fabs(output) > max_delta_step) {
        return Common::Sign(output) * max_delta_step;
      }
    }
    return output;
  }

696
697
698
699
700
701
702
703
704
705
706
707
708
709
  /*!
   * \brief Optimal leaf output, additionally clipped into the monotone
   *        constraint interval [constraints.min, constraints.max] when USE_MC.
   */
  template <bool USE_MC, bool USE_L1, bool USE_MAX_OUTPUT>
  static double CalculateSplittedLeafOutput(
      double sum_gradients, double sum_hessians, double l1, double l2,
      double max_delta_step, const ConstraintEntry& constraints) {
    double output = CalculateSplittedLeafOutput<USE_L1, USE_MAX_OUTPUT>(
        sum_gradients, sum_hessians, l1, l2, max_delta_step);
    if (USE_MC) {
      // Clip the unconstrained output into the allowed interval.
      if (output < constraints.min) {
        output = constraints.min;
      } else if (output > constraints.max) {
        output = constraints.max;
      }
    }
    return output;
  }

712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
 private:
  /*!
   * \brief Total gain of a candidate split (left leaf gain + right leaf gain).
   *
   * Without monotone constraints this is the closed-form leaf gain on each
   * side. With constraints, the constrained leaf outputs are computed first;
   * if they violate the requested monotone direction the split is worthless
   * (gain 0), otherwise the gain is evaluated at those clipped outputs.
   * \param monotone_constraint 0 = none, >0 left<=right required,
   *        <0 left>=right required
   */
  template <bool USE_MC, bool USE_L1, bool USE_MAX_OUTPUT>
  static double GetSplitGains(double sum_left_gradients,
                              double sum_left_hessians,
                              double sum_right_gradients,
                              double sum_right_hessians, double l1, double l2,
                              double max_delta_step,
                              const ConstraintEntry& constraints,
                              int8_t monotone_constraint) {
    if (!USE_MC) {
      return GetLeafGain<USE_L1, USE_MAX_OUTPUT>(sum_left_gradients,
                                                 sum_left_hessians, l1, l2,
                                                 max_delta_step) +
             GetLeafGain<USE_L1, USE_MAX_OUTPUT>(sum_right_gradients,
                                                 sum_right_hessians, l1, l2,
                                                 max_delta_step);
    } else {
      double left_output =
          CalculateSplittedLeafOutput<USE_MC, USE_L1, USE_MAX_OUTPUT>(
              sum_left_gradients, sum_left_hessians, l1, l2, max_delta_step,
              constraints);
      double right_output =
          CalculateSplittedLeafOutput<USE_MC, USE_L1, USE_MAX_OUTPUT>(
              sum_right_gradients, sum_right_hessians, l1, l2, max_delta_step,
              constraints);
      // A split whose leaf outputs break the monotone direction has no value.
      if (((monotone_constraint > 0) && (left_output > right_output)) ||
          ((monotone_constraint < 0) && (left_output < right_output))) {
        return 0;
      }
      // Gains must be evaluated at the clipped outputs, not the optima.
      return GetLeafGainGivenOutput<USE_L1>(
                 sum_left_gradients, sum_left_hessians, l1, l2, left_output) +
             GetLeafGainGivenOutput<USE_L1>(
                 sum_right_gradients, sum_right_hessians, l1, l2, right_output);
    }
  }
Guolin Ke's avatar
Guolin Ke committed
747

748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
  template <bool USE_L1, bool USE_MAX_OUTPUT>
  static double GetLeafGain(double sum_gradients, double sum_hessians,
                            double l1, double l2, double max_delta_step) {
    if (!USE_MAX_OUTPUT) {
      if (USE_L1) {
        const double sg_l1 = ThresholdL1(sum_gradients, l1);
        return (sg_l1 * sg_l1) / (sum_hessians + l2);
      } else {
        return (sum_gradients * sum_gradients) / (sum_hessians + l2);
      }
    } else {
      double output = CalculateSplittedLeafOutput<USE_L1, USE_MAX_OUTPUT>(
          sum_gradients, sum_hessians, l1, l2, max_delta_step);
      return GetLeafGainGivenOutput<USE_L1>(sum_gradients, sum_hessians, l1, l2,
                                            output);
    }
Guolin Ke's avatar
Guolin Ke committed
764
765
  }

766
767
768
769
770
771
772
773
774
775
776
  template <bool USE_L1>
  static double GetLeafGainGivenOutput(double sum_gradients,
                                       double sum_hessians, double l1,
                                       double l2, double output) {
    if (USE_L1) {
      const double sg_l1 = ThresholdL1(sum_gradients, l1);
      return -(2.0 * sg_l1 * output + (sum_hessians + l2) * output * output);
    } else {
      return -(2.0 * sum_gradients * output +
               (sum_hessians + l2) * output * output);
    }
Guolin Ke's avatar
Guolin Ke committed
777
  }
  /*!
   * \brief Scan the histogram bins sequentially to find the best numerical
   * split threshold, updating *output when a better split is found.
   *
   * Template flags select the scan variant at compile time:
   * - REVERSE: scan bins right-to-left (missing/default values go left);
   *   otherwise left-to-right (missing/default values go right).
   * - SKIP_DEFAULT_BIN: skip the feature's default bin during the scan.
   * - NA_AS_MISSING: treat NA as missing (affects scan bounds / prefix sums).
   * - USE_RAND: only evaluate the single pre-drawn rand_threshold
   *   (extra-trees mode).
   * - USE_MC / USE_L1 / USE_MAX_OUTPUT: forwarded to the gain/output helpers.
   *
   * \param sum_gradient   Total gradient sum of the leaf
   * \param sum_hessian    Total hessian sum of the leaf
   * \param num_data       Number of data rows in the leaf
   * \param constraints    Monotone output bounds (used only if USE_MC)
   * \param min_gain_shift Gain of not splitting plus min_split_gain offset
   * \param output         Best split found so far; overwritten if beaten
   * \param rand_threshold Pre-drawn bin index (used only if USE_RAND)
   */
  template <bool USE_RAND, bool USE_MC, bool USE_L1, bool USE_MAX_OUTPUT,
            bool REVERSE, bool SKIP_DEFAULT_BIN, bool NA_AS_MISSING>
  void FindBestThresholdSequentially(double sum_gradient, double sum_hessian,
                                     data_size_t num_data,
                                     const ConstraintEntry& constraints,
                                     double min_gain_shift, SplitInfo* output,
                                     int rand_threshold) {
    const int8_t offset = meta_->offset;
    double best_sum_left_gradient = NAN;
    double best_sum_left_hessian = NAN;
    double best_gain = kMinScore;
    data_size_t best_left_count = 0;
    uint32_t best_threshold = static_cast<uint32_t>(meta_->num_bin);
    // Counts are reconstructed from hessians: hess * (num_data / sum_hessian).
    const double cnt_factor = num_data / sum_hessian;
    if (REVERSE) {
      // Accumulate the right child from the highest bin downwards; the left
      // child statistics are derived by subtraction from the totals.
      double sum_right_gradient = 0.0f;
      double sum_right_hessian = kEpsilon;  // kEpsilon avoids division by zero
      data_size_t right_count = 0;

      int t = meta_->num_bin - 1 - offset - NA_AS_MISSING;
      const int t_end = 1 - offset;

      // from right to left, and we don't need data in bin0
      for (; t >= t_end; --t) {
        // need to skip default bin
        if (SKIP_DEFAULT_BIN) {
          if ((t + offset) == static_cast<int>(meta_->default_bin)) {
            continue;
          }
        }
        const auto grad = GET_GRAD(data_, t);
        const auto hess = GET_HESS(data_, t);
        data_size_t cnt =
            static_cast<data_size_t>(Common::RoundInt(hess * cnt_factor));
        sum_right_gradient += grad;
        sum_right_hessian += hess;
        right_count += cnt;
        // if data not enough, or sum hessian too small
        if (right_count < meta_->config->min_data_in_leaf ||
            sum_right_hessian < meta_->config->min_sum_hessian_in_leaf) {
          continue;
        }
        data_size_t left_count = num_data - right_count;
        // if data not enough (left side only shrinks from here on: break)
        if (left_count < meta_->config->min_data_in_leaf) {
          break;
        }

        double sum_left_hessian = sum_hessian - sum_right_hessian;
        // if sum hessian too small
        if (sum_left_hessian < meta_->config->min_sum_hessian_in_leaf) {
          break;
        }

        double sum_left_gradient = sum_gradient - sum_right_gradient;
        if (USE_RAND) {
          // Extra-trees: only the one randomly chosen threshold is evaluated.
          if (t - 1 + offset != rand_threshold) {
            continue;
          }
        }
        // current split gain
        double current_gain = GetSplitGains<USE_MC, USE_L1, USE_MAX_OUTPUT>(
            sum_left_gradient, sum_left_hessian, sum_right_gradient,
            sum_right_hessian, meta_->config->lambda_l1,
            meta_->config->lambda_l2, meta_->config->max_delta_step,
            constraints, meta_->monotone_type);
        // gain with split is worse than without split
        if (current_gain <= min_gain_shift) {
          continue;
        }

        // mark to is splittable
        is_splittable_ = true;
        // better split point
        if (current_gain > best_gain) {
          best_left_count = left_count;
          best_sum_left_gradient = sum_left_gradient;
          best_sum_left_hessian = sum_left_hessian;
          // left is <= threshold, right is > threshold.  so this is t-1
          best_threshold = static_cast<uint32_t>(t - 1 + offset);
          best_gain = current_gain;
        }
      }
    } else {
      // Forward scan: accumulate the left child from the lowest bin upwards.
      double sum_left_gradient = 0.0f;
      double sum_left_hessian = kEpsilon;  // kEpsilon avoids division by zero
      data_size_t left_count = 0;

      int t = 0;
      const int t_end = meta_->num_bin - 2 - offset;

      if (NA_AS_MISSING) {
        if (offset == 1) {
          // Start from the totals and subtract every stored bin so that the
          // remainder (the implicit most-frequent bin 0) starts on the left;
          // t = -1 makes the main loop begin before the first stored bin.
          sum_left_gradient = sum_gradient;
          sum_left_hessian = sum_hessian - kEpsilon;
          left_count = num_data;
          for (int i = 0; i < meta_->num_bin - offset; ++i) {
            const auto grad = GET_GRAD(data_, i);
            const auto hess = GET_HESS(data_, i);
            data_size_t cnt =
                static_cast<data_size_t>(Common::RoundInt(hess * cnt_factor));
            sum_left_gradient -= grad;
            sum_left_hessian -= hess;
            left_count -= cnt;
          }
          t = -1;
        }
      }

      for (; t <= t_end; ++t) {
        // need to skip default bin
        if (SKIP_DEFAULT_BIN) {
          if ((t + offset) == static_cast<int>(meta_->default_bin)) {
            continue;
          }
        }
        if (t >= 0) {
          sum_left_gradient += GET_GRAD(data_, t);
          sum_left_hessian += GET_HESS(data_, t);
          left_count += static_cast<data_size_t>(
              Common::RoundInt(GET_HESS(data_, t) * cnt_factor));
        }
        // if data not enough, or sum hessian too small
        if (left_count < meta_->config->min_data_in_leaf ||
            sum_left_hessian < meta_->config->min_sum_hessian_in_leaf) {
          continue;
        }
        data_size_t right_count = num_data - left_count;
        // if data not enough (right side only shrinks from here on: break)
        if (right_count < meta_->config->min_data_in_leaf) {
          break;
        }

        double sum_right_hessian = sum_hessian - sum_left_hessian;
        // if sum hessian too small
        if (sum_right_hessian < meta_->config->min_sum_hessian_in_leaf) {
          break;
        }

        double sum_right_gradient = sum_gradient - sum_left_gradient;
        if (USE_RAND) {
          // Extra-trees: only the one randomly chosen threshold is evaluated.
          if (t + offset != rand_threshold) {
            continue;
          }
        }
        // current split gain
        double current_gain = GetSplitGains<USE_MC, USE_L1, USE_MAX_OUTPUT>(
            sum_left_gradient, sum_left_hessian, sum_right_gradient,
            sum_right_hessian, meta_->config->lambda_l1,
            meta_->config->lambda_l2, meta_->config->max_delta_step,
            constraints, meta_->monotone_type);
        // gain with split is worse than without split
        if (current_gain <= min_gain_shift) {
          continue;
        }

        // mark to is splittable
        is_splittable_ = true;
        // better split point
        if (current_gain > best_gain) {
          best_left_count = left_count;
          best_sum_left_gradient = sum_left_gradient;
          best_sum_left_hessian = sum_left_hessian;
          best_threshold = static_cast<uint32_t>(t + offset);
          best_gain = current_gain;
        }
      }
    }

    if (is_splittable_ && best_gain > output->gain + min_gain_shift) {
      // update split information
      output->threshold = best_threshold;
      output->left_output =
          CalculateSplittedLeafOutput<USE_MC, USE_L1, USE_MAX_OUTPUT>(
              best_sum_left_gradient, best_sum_left_hessian,
              meta_->config->lambda_l1, meta_->config->lambda_l2,
              meta_->config->max_delta_step, constraints);
      output->left_count = best_left_count;
      output->left_sum_gradient = best_sum_left_gradient;
      // subtract the kEpsilon seed so reported hessians match the raw sums
      output->left_sum_hessian = best_sum_left_hessian - kEpsilon;
      output->right_output =
          CalculateSplittedLeafOutput<USE_MC, USE_L1, USE_MAX_OUTPUT>(
              sum_gradient - best_sum_left_gradient,
              sum_hessian - best_sum_left_hessian, meta_->config->lambda_l1,
              meta_->config->lambda_l2, meta_->config->max_delta_step,
              constraints);
      output->right_count = num_data - best_left_count;
      output->right_sum_gradient = sum_gradient - best_sum_left_gradient;
      output->right_sum_hessian =
          sum_hessian - best_sum_left_hessian - kEpsilon;
      output->gain = best_gain - min_gain_shift;
      // REVERSE scan sends default/missing values to the left child
      output->default_left = REVERSE;
    }
  }

  /*! \brief Per-feature meta information shared across histograms (not owned). */
  const FeatureMetainfo* meta_;
  /*! \brief sum of gradient of each bin */
  hist_t* data_;
  /*! \brief Whether this feature produced at least one valid split candidate. */
  bool is_splittable_ = true;

  /*! \brief Bound dispatcher selecting the templated threshold-search variant. */
  std::function<void(double, double, data_size_t, const ConstraintEntry&,
                     SplitInfo*)>
      find_best_threshold_fun_;
};
class HistogramPool {
984
 public:
Guolin Ke's avatar
Guolin Ke committed
985
  /*!
986
987
   * \brief Constructor
   */
Guolin Ke's avatar
Guolin Ke committed
988
  HistogramPool() {
Guolin Ke's avatar
Guolin Ke committed
989
990
    cache_size_ = 0;
    total_size_ = 0;
Guolin Ke's avatar
Guolin Ke committed
991
  }
992

Guolin Ke's avatar
Guolin Ke committed
993
  /*!
994
995
996
   * \brief Destructor
   */
  ~HistogramPool() {}
997

Guolin Ke's avatar
Guolin Ke committed
998
  /*!
999
1000
1001
1002
   * \brief Reset pool size
   * \param cache_size Max cache size
   * \param total_size Total size will be used
   */
Guolin Ke's avatar
Guolin Ke committed
1003
  void Reset(int cache_size, int total_size) {
Guolin Ke's avatar
Guolin Ke committed
1004
1005
    cache_size_ = cache_size;
    // at least need 2 bucket to store smaller leaf and larger leaf
1006
    CHECK_GE(cache_size_, 2);
Guolin Ke's avatar
Guolin Ke committed
1007
1008
1009
1010
1011
1012
    total_size_ = total_size;
    if (cache_size_ > total_size_) {
      cache_size_ = total_size_;
    }
    is_enough_ = (cache_size_ == total_size_);
    if (!is_enough_) {
1013
1014
1015
      mapper_.resize(total_size_);
      inverse_mapper_.resize(cache_size_);
      last_used_time_.resize(cache_size_);
Guolin Ke's avatar
Guolin Ke committed
1016
1017
1018
      ResetMap();
    }
  }
1019

Guolin Ke's avatar
Guolin Ke committed
1020
  /*!
1021
1022
   * \brief Reset mapper
   */
Guolin Ke's avatar
Guolin Ke committed
1023
1024
1025
1026
1027
1028
1029
1030
  void ResetMap() {
    if (!is_enough_) {
      cur_time_ = 0;
      std::fill(mapper_.begin(), mapper_.end(), -1);
      std::fill(inverse_mapper_.begin(), inverse_mapper_.end(), -1);
      std::fill(last_used_time_.begin(), last_used_time_.end(), 0);
    }
  }
1031
1032
1033
  template <bool USE_DATA, bool USE_CONFIG>
  static void SetFeatureInfo(const Dataset* train_data, const Config* config,
                             std::vector<FeatureMetainfo>* feature_meta) {
1034
1035
1036
    auto& ref_feature_meta = *feature_meta;
    const int num_feature = train_data->num_features();
    ref_feature_meta.resize(num_feature);
1037
#pragma omp parallel for schedule(static, 512) if (num_feature >= 1024)
1038
    for (int i = 0; i < num_feature; ++i) {
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
      if (USE_DATA) {
        ref_feature_meta[i].num_bin = train_data->FeatureNumBin(i);
        ref_feature_meta[i].default_bin =
            train_data->FeatureBinMapper(i)->GetDefaultBin();
        ref_feature_meta[i].missing_type =
            train_data->FeatureBinMapper(i)->missing_type();
        if (train_data->FeatureBinMapper(i)->GetMostFreqBin() == 0) {
          ref_feature_meta[i].offset = 1;
        } else {
          ref_feature_meta[i].offset = 0;
        }
        ref_feature_meta[i].bin_type =
            train_data->FeatureBinMapper(i)->bin_type();
1052
      }
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
      if (USE_CONFIG) {
        const int real_fidx = train_data->RealFeatureIndex(i);
        if (!config->monotone_constraints.empty()) {
          ref_feature_meta[i].monotone_type =
              config->monotone_constraints[real_fidx];
        } else {
          ref_feature_meta[i].monotone_type = 0;
        }
        if (!config->feature_contri.empty()) {
          ref_feature_meta[i].penalty = config->feature_contri[real_fidx];
        } else {
          ref_feature_meta[i].penalty = 1.0;
        }
        ref_feature_meta[i].rand = Random(config->extra_seed + i);
1067
1068
1069
1070
1071
      }
      ref_feature_meta[i].config = config;
    }
  }

1072
1073
  void DynamicChangeSize(const Dataset* train_data, bool is_hist_colwise,
                         const Config* config, int cache_size, int total_size) {
Guolin Ke's avatar
Guolin Ke committed
1074
    if (feature_metas_.empty()) {
1075
      SetFeatureInfo<true, true>(train_data, config, &feature_metas_);
1076
      uint64_t bin_cnt_over_features = 0;
1077
      for (int i = 0; i < train_data->num_features(); ++i) {
1078
1079
        bin_cnt_over_features +=
            static_cast<uint64_t>(feature_metas_[i].num_bin);
Guolin Ke's avatar
Guolin Ke committed
1080
      }
1081
      Log::Info("Total Bins %d", bin_cnt_over_features);
Guolin Ke's avatar
Guolin Ke committed
1082
    }
Guolin Ke's avatar
Guolin Ke committed
1083
    int old_cache_size = static_cast<int>(pool_.size());
Guolin Ke's avatar
Guolin Ke committed
1084
    Reset(cache_size, total_size);
Guolin Ke's avatar
Guolin Ke committed
1085
1086
1087
1088
1089

    if (cache_size > old_cache_size) {
      pool_.resize(cache_size);
      data_.resize(cache_size);
    }
1090
    int num_total_bin = static_cast<int>(train_data->NumTotalBin());
Guolin Ke's avatar
Guolin Ke committed
1091

1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
    std::vector<int> offsets;
    if (is_hist_colwise) {
      int offset = 0;
      for (int j = 0; j < train_data->num_features(); ++j) {
        offset += train_data->SubFeatureBinOffset(j);
        offsets.push_back(offset);
        auto num_bin = train_data->FeatureNumBin(j);
        if (train_data->FeatureBinMapper(j)->GetMostFreqBin() == 0) {
          num_bin -= 1;
        }
        offset += num_bin;
      }
    } else {
      num_total_bin = 1;
      for (int j = 0; j < train_data->num_features(); ++j) {
        offsets.push_back(num_total_bin);
        num_total_bin += train_data->FeatureBinMapper(j)->num_bin();
        if (train_data->FeatureBinMapper(j)->GetMostFreqBin() == 0) {
          num_total_bin -= 1;
        }
      }
    }
1114
    OMP_INIT_EX();
1115
#pragma omp parallel for schedule(static)
Guolin Ke's avatar
Guolin Ke committed
1116
    for (int i = old_cache_size; i < cache_size; ++i) {
1117
      OMP_LOOP_EX_BEGIN();
Guolin Ke's avatar
Guolin Ke committed
1118
      pool_[i].reset(new FeatureHistogram[train_data->num_features()]);
1119
      data_[i].resize(num_total_bin * 2);
Guolin Ke's avatar
Guolin Ke committed
1120
      for (int j = 0; j < train_data->num_features(); ++j) {
1121
        pool_[i][j].Init(data_[i].data() + offsets[j] * 2, &feature_metas_[j]);
Guolin Ke's avatar
Guolin Ke committed
1122
      }
1123
      OMP_LOOP_EX_END();
Guolin Ke's avatar
Guolin Ke committed
1124
    }
1125
    OMP_THROW_EX();
Guolin Ke's avatar
Guolin Ke committed
1126
1127
  }

1128
  void ResetConfig(const Dataset* train_data, const Config* config) {
1129
1130
    CHECK_GT(train_data->num_features(), 0);
    const Config* old_config = feature_metas_[0].config;
1131
    SetFeatureInfo<false, true>(train_data, config, &feature_metas_);
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
    // if need to reset the function pointers
    if (old_config->lambda_l1 != config->lambda_l1 ||
        old_config->monotone_constraints != config->monotone_constraints ||
        old_config->extra_trees != config->extra_trees ||
        old_config->max_delta_step != config->max_delta_step) {
#pragma omp parallel for schedule(static)
      for (int i = 0; i < cache_size_; ++i) {
        for (int j = 0; j < train_data->num_features(); ++j) {
          pool_[i][j].ResetFunc();
        }
      }
    }
Guolin Ke's avatar
Guolin Ke committed
1144
  }
1145

Guolin Ke's avatar
Guolin Ke committed
1146
  /*!
1147
1148
1149
1150
1151
1152
   * \brief Get data for the specific index
   * \param idx which index want to get
   * \param out output data will store into this
   * \return True if this index is in the pool, False if this index is not in
   * the pool
   */
Guolin Ke's avatar
Guolin Ke committed
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
  bool Get(int idx, FeatureHistogram** out) {
    if (is_enough_) {
      *out = pool_[idx].get();
      return true;
    } else if (mapper_[idx] >= 0) {
      int slot = mapper_[idx];
      *out = pool_[slot].get();
      last_used_time_[slot] = ++cur_time_;
      return true;
    } else {
1163
      // choose the least used slot
Guolin Ke's avatar
Guolin Ke committed
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
      int slot = static_cast<int>(ArrayArgs<int>::ArgMin(last_used_time_));
      *out = pool_[slot].get();
      last_used_time_[slot] = ++cur_time_;

      // reset previous mapper
      if (inverse_mapper_[slot] >= 0) mapper_[inverse_mapper_[slot]] = -1;

      // update current mapper
      mapper_[idx] = slot;
      inverse_mapper_[slot] = idx;
      return false;
    }
  }

  /*!
1179
1180
1181
1182
   * \brief Move data from one index to another index
   * \param src_idx
   * \param dst_idx
   */
Guolin Ke's avatar
Guolin Ke committed
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
  void Move(int src_idx, int dst_idx) {
    if (is_enough_) {
      std::swap(pool_[src_idx], pool_[dst_idx]);
      return;
    }
    if (mapper_[src_idx] < 0) {
      return;
    }
    // get slot of src idx
    int slot = mapper_[src_idx];
    // reset src_idx
    mapper_[src_idx] = -1;

    // move to dst idx
    mapper_[dst_idx] = slot;
    last_used_time_[slot] = ++cur_time_;
    inverse_mapper_[slot] = dst_idx;
  }
1201

1202
 private:
Guolin Ke's avatar
Guolin Ke committed
1203
  std::vector<std::unique_ptr<FeatureHistogram[]>> pool_;
1204
1205
1206
  std::vector<
      std::vector<hist_t, Common::AlignmentAllocator<hist_t, kAlignedSize>>>
      data_;
Guolin Ke's avatar
Guolin Ke committed
1207
  std::vector<FeatureMetainfo> feature_metas_;
Guolin Ke's avatar
Guolin Ke committed
1208
1209
1210
1211
1212
1213
1214
1215
1216
  int cache_size_;
  int total_size_;
  bool is_enough_ = false;
  std::vector<int> mapper_;
  std::vector<int> inverse_mapper_;
  std::vector<int> last_used_time_;
  int cur_time_ = 0;
};

}  // namespace LightGBM

#endif  // LIGHTGBM_TREELEARNER_FEATURE_HISTOGRAM_HPP_