bin.h 18 KB
Newer Older
1
2
3
4
/*!
 * Copyright (c) 2016 Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 */
Guolin Ke's avatar
Guolin Ke committed
5
6
7
#ifndef LIGHTGBM_BIN_H_
#define LIGHTGBM_BIN_H_

Guolin Ke's avatar
Guolin Ke committed
8
#include <LightGBM/meta.h>
#include <LightGBM/utils/common.h>
#include <LightGBM/utils/file_io.h>

#include <cmath>
#include <cstdint>
#include <functional>
#include <iomanip>
#include <limits>
#include <sstream>
#include <string>
#include <unordered_map>
#include <vector>
Guolin Ke's avatar
Guolin Ke committed
18
19
20

namespace LightGBM {

21
22
23
24
25
26
27
28
29
30
31
/*! \brief Type of a feature's binning: ordered numerical bins or unordered categorical bins */
enum BinType {
  NumericalBin,
  CategoricalBin
};

/*! \brief How missing values are represented for a feature:
*          no missing values, zero treated as missing, or NaN treated as missing */
enum MissingType {
  None,
  Zero,
  NaN
};

32
33
/*! \brief Entry type of histograms (sums of gradients / hessians) */
typedef double hist_t;

/*! \brief Bytes occupied by one histogram bin: one gradient entry plus one hessian entry */
const size_t kHistEntrySize = 2 * sizeof(hist_t);
/*! \brief Number of hist_t slots per bin (gradient slot + hessian slot) */
const int kHistOffset = 2;
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
/*! \brief Sparse-rate threshold above which a feature uses the sparse bin representation */
const double kSparseThreshold = 0.7;

/*! \brief Access the gradient sum of bin i in an interleaved [grad, hess] histogram */
#define GET_GRAD(hist, i) hist[(i) << 1]
/*! \brief Access the hessian sum of bin i in an interleaved [grad, hess] histogram */
#define GET_HESS(hist, i) hist[((i) << 1) + 1]

inline static void HistogramSumReducer(const char* src, char* dst, int type_size, comm_size_t len) {
  comm_size_t used_size = 0;
  const hist_t* p1;
  hist_t* p2;
  while (used_size < len) {
    // convert
    p1 = reinterpret_cast<const hist_t*>(src);
    p2 = reinterpret_cast<hist_t*>(dst);
    *p2 += *p1;
    src += type_size;
    dst += type_size;
    used_size += type_size;
53
  }
54
}
55
56
57
58

/*! \brief This class is used to convert feature values into bins,
*          and stores some meta information for the binning */
class BinMapper {
 public:
  BinMapper();
  BinMapper(const BinMapper& other);
  explicit BinMapper(const void* memory);
  ~BinMapper();

  /*!
  * \brief Check whether another BinMapper has exactly the same binning:
  *        same bin count, same missing type, and identical bin upper bounds
  *        (numerical) or identical category lists (categorical)
  * \param other The BinMapper to compare against
  * \return true if the two mappers are aligned
  */
  bool CheckAlign(const BinMapper& other) const {
    if (num_bin_ != other.num_bin_) {
      return false;
    }
    if (missing_type_ != other.missing_type_) {
      return false;
    }
    if (bin_type_ == BinType::NumericalBin) {
      for (int i = 0; i < num_bin_; ++i) {
        if (bin_upper_bound_[i] != other.bin_upper_bound_[i]) {
          return false;
        }
      }
    } else {
      for (int i = 0; i < num_bin_; i++) {
        if (bin_2_categorical_[i] != other.bin_2_categorical_[i]) {
          return false;
        }
      }
    }
    return true;
  }

  /*! \brief Get number of bins */
  inline int num_bin() const { return num_bin_; }
  /*! \brief Missing Type */
  inline MissingType missing_type() const { return missing_type_; }
  /*! \brief True if bin is trivial (contains only one bin) */
  inline bool is_trivial() const { return is_trivial_; }
  /*! \brief Sparsity of this bin ( num_zero_bins / num_data ) */
  inline double sparse_rate() const { return sparse_rate_; }
  /*!
  * \brief Save binary data to file
  * \param writer Writer used to output the binary data
  */
  void SaveBinaryToFile(const VirtualFileWriter* writer) const;
  /*!
  * \brief Mapping bin into feature value
  * \param bin Bin index
  * \return Feature value of this bin (upper bound for numerical, category value for categorical)
  */
  inline double BinToValue(uint32_t bin) const {
    if (bin_type_ == BinType::NumericalBin) {
      return bin_upper_bound_[bin];
    } else {
      return bin_2_categorical_[bin];
    }
  }

  /*!
  * \brief Get sizes in byte of this object
  */
  size_t SizesInByte() const;
  /*!
  * \brief Mapping feature value into bin
  * \param value Raw feature value
  * \return bin for this feature value
  */
  inline uint32_t ValueToBin(double value) const;

  /*!
  * \brief Get the default bin when value is 0
  * \return default bin
  */
  inline uint32_t GetDefaultBin() const {
    return default_bin_;
  }

  /*! \brief Get the most frequent bin */
  inline uint32_t GetMostFreqBin() const {
    return most_freq_bin_;
  }

  /*!
  * \brief Construct feature value to bin mapper according feature values
  * \param values (Sampled) values of this feature, Note: not include zero.
  * \param num_values number of values.
  * \param total_sample_cnt number of total sample count, equal with values.size() + num_zeros
  * \param max_bin The maximal number of bin
  * \param min_data_in_bin min number of data in one bin
  * \param min_split_data minimal data count for a split (NOTE(review): exact semantics defined in the implementation — confirm there)
  * \param bin_type Type of this bin
  * \param use_missing True to enable missing value handle
  * \param zero_as_missing True to use zero as missing value
  * \param forced_upper_bounds Vector of split points that must be used (if this has size less than max_bin, remaining splits are found by the algorithm)
  */
  void FindBin(double* values, int num_values, size_t total_sample_cnt, int max_bin, int min_data_in_bin, int min_split_data, BinType bin_type,
               bool use_missing, bool zero_as_missing, const std::vector<double>& forced_upper_bounds);

  /*!
  * \brief Use specific number of bin to calculate the size of this class
  * \param bin The number of bin
  * \return Size
  */
  static int SizeForSpecificBin(int bin);

  /*!
  * \brief Serializing this object to buffer
  * \param buffer The destination
  */
  void CopyTo(char* buffer) const;

  /*!
  * \brief Deserializing this object from buffer
  * \param buffer The source
  */
  void CopyFrom(const char* buffer);

  /*!
  * \brief Get bin types
  */
  inline BinType bin_type() const { return bin_type_; }

  /*!
  * \brief Get bin info as a string: "cat1:cat2:..." for categorical features,
  *        "[min:max]" for numerical features
  */
  inline std::string bin_info() const {
    if (bin_type_ == BinType::CategoricalBin) {
      return Common::Join(bin_2_categorical_, ":");
    } else {
      std::stringstream str_buf;
      // enough digits to round-trip a double exactly
      str_buf << std::setprecision(std::numeric_limits<double>::digits10 + 2);
      str_buf << '[' << min_val_ << ':' << max_val_ << ']';
      return str_buf.str();
    }
  }

 private:
  /*! \brief Number of bins */
  int num_bin_;
  /*! \brief How missing values are represented for this feature */
  MissingType missing_type_;
  /*! \brief Store upper bound for each bin */
  std::vector<double> bin_upper_bound_;
  /*! \brief True if this feature is trivial */
  bool is_trivial_;
  /*! \brief Sparse rate of this bins( num_bin0/num_data ) */
  double sparse_rate_;
  /*! \brief Type of this bin */
  BinType bin_type_;
  /*! \brief Mapper from categorical to bin */
  std::unordered_map<int, unsigned int> categorical_2_bin_;
  /*! \brief Mapper from bin to categorical */
  std::vector<int> bin_2_categorical_;
  /*! \brief minimal feature value */
  double min_val_;
  /*! \brief maximum feature value */
  double max_val_;
  /*! \brief bin value of feature value 0 */
  uint32_t default_bin_;
  /*! \brief Bin that occurs most frequently */
  uint32_t most_freq_bin_;
};

/*!
* \brief Interface for ordered bin data, efficient for histogram construction, especially for sparse bins.
*        There are 2 advantages of using ordered bins:
*        1. group the data by leaves to improve the cache hit rate.
*        2. only store the non-zero bins, which can speed up the histogram construction for sparse features.
*        However it brings additional cost: it needs to re-order the bins after every split, which costs much for dense features.
*        So we only use ordered bins for sparse situations.
*/
class OrderedBin {
 public:
  /*! \brief virtual destructor */
  virtual ~OrderedBin() {}

  /*!
  * \brief Initialization logic.
  * \param used_indices If used_indices.size() == 0 means using all data, otherwise, used_indices[i] == true means i-th data is used
           (this logic was built for the bagging logic)
  * \param num_leaves Number of leaves on this iteration
  */
  virtual void Init(const char* used_indices, data_size_t num_leaves) = 0;

  /*!
  * \brief Construct histogram by using this bin.
  *        Note: Unlike Bin, OrderedBin doesn't use ordered gradients and ordered hessians,
  *        because it is hard to know the relative index in one leaf for a sparse bin, since zero bins are skipped.
  * \param leaf Using which leaf's data to construct
  * \param gradients Gradients, Note: non-ordered by leaf
  * \param hessians Hessians, Note: non-ordered by leaf
  * \param out Output Result
  */
  virtual void ConstructHistogram(int leaf, const score_t* gradients,
    const score_t* hessians, hist_t* out) const = 0;

  /*!
  * \brief Construct histogram by using this bin (gradient-only overload).
  *        Note: Unlike Bin, OrderedBin doesn't use ordered gradients and ordered hessians,
  *        because it is hard to know the relative index in one leaf for a sparse bin, since zero bins are skipped.
  * \param leaf Using which leaf's data to construct
  * \param gradients Gradients, Note: non-ordered by leaf
  * \param out Output Result
  */
  virtual void ConstructHistogram(int leaf, const score_t* gradients, hist_t* out) const = 0;

  /*!
  * \brief Split current bin, and perform re-order by leaf
  * \param leaf Using which leaf's to split
  * \param right_leaf The new leaf index after perform this split
  * \param is_in_leaf is_in_leaf[i] == mark means the i-th data will be on left leaf after split
  * \param mark is_in_leaf[i] == mark means the i-th data will be on left leaf after split
  */
  virtual void Split(int leaf, int right_leaf, const char* is_in_leaf, char mark) = 0;

  /*! \brief Number of stored (non-zero) entries for the given leaf */
  virtual data_size_t NonZeroCount(int leaf) const = 0;
};

/*! \brief Iterator for one bin column */
class BinIterator {
 public:
  /*!
  * \brief Get bin data on specific row index
  * \param idx Index of this data
  * \return Bin data
  */
  virtual uint32_t Get(data_size_t idx) = 0;
  /*! \brief Get the stored bin value at idx without the remapping applied by Get
  *          (NOTE(review): inferred from the name — confirm against implementations) */
  virtual uint32_t RawGet(data_size_t idx) = 0;
  /*! \brief Reset the iterator to start from row idx */
  virtual void Reset(data_size_t idx) = 0;
  virtual ~BinIterator() = default;
};

/*!
* \brief Interface for bin data. This class will store bin data for one feature.
*        Unlike OrderedBin, this class will store data in the original order.
*        Note that it may cause cache misses when constructing histograms,
*        but it doesn't need the re-order operation, so it will be faster than OrderedBin for dense features.
*/
class Bin {
 public:
  /*! \brief virtual destructor */
  virtual ~Bin() {}
  /*!
  * \brief Push one record
  * \param tid Thread id
  * \param idx Index of record
  * \param value bin value of record
  */
  virtual void Push(int tid, data_size_t idx, uint32_t value) = 0;

  /*!
  * \brief Copy the rows selected by used_indices from full_bin into this bin
  * \param full_bin Source bin holding all data
  * \param used_indices Row indices to copy
  * \param num_used_indices Number of entries in used_indices
  */
  virtual void CopySubset(const Bin* full_bin, const data_size_t* used_indices, data_size_t num_used_indices) = 0;
  /*!
  * \brief Get bin iterator of this bin for specific feature
  * \param min_bin min_bin of current used feature
  * \param max_bin max_bin of current used feature
  * \param most_freq_bin the most frequent bin
  * \return Iterator of this bin
  */
  virtual BinIterator* GetIterator(uint32_t min_bin, uint32_t max_bin, uint32_t most_freq_bin) const = 0;

  /*!
  * \brief Save binary data to file
  * \param writer Writer used to output the binary data
  */
  virtual void SaveBinaryToFile(const VirtualFileWriter* writer) const = 0;

  /*!
  * \brief Load from memory
  * \param memory Source buffer
  * \param local_used_indices Indices of locally used data (for subsetting while loading)
  */
  virtual void LoadFromMemory(const void* memory,
    const std::vector<data_size_t>& local_used_indices) = 0;

  /*!
  * \brief Get sizes in byte of this object
  */
  virtual size_t SizesInByte() const = 0;

  /*! \brief Number of all data */
  virtual data_size_t num_data() const = 0;

  /*! \brief Resize this bin to hold num_data records */
  virtual void ReSize(data_size_t num_data) = 0;

  /*!
  * \brief Construct histogram of this feature,
  *        Note: We use ordered_gradients and ordered_hessians to improve cache hit chance
  *        The naive solution is using gradients[data_indices[i]] for data_indices[i] to get gradients,
           which is not cache friendly, since the access of memory is not continuous.
  *        ordered_gradients and ordered_hessians are preprocessed, and they are re-ordered by data_indices.
  *        Ordered_gradients[i] is aligned with data_indices[i]'s gradients (same for ordered_hessians).
  * \param data_indices Used data indices in current leaf
  * \param start start index in data_indices
  * \param end end index in data_indices
  * \param ordered_gradients Pointer to gradients, the data_indices[i]-th data's gradient is ordered_gradients[i]
  * \param ordered_hessians Pointer to hessians, the data_indices[i]-th data's hessian is ordered_hessians[i]
  * \param out Output Result
  */
  virtual void ConstructHistogram(
    const data_size_t* data_indices, data_size_t start, data_size_t end,
    const score_t* ordered_gradients, const score_t* ordered_hessians,
    hist_t* out) const = 0;

  /*! \brief Same as above, but over the contiguous data range [start, end) instead of an index list */
  virtual void ConstructHistogram(data_size_t start, data_size_t end,
    const score_t* ordered_gradients, const score_t* ordered_hessians,
    hist_t* out) const = 0;

  /*!
  * \brief Construct histogram of this feature (gradient-only overload),
  *        Note: We use ordered_gradients and ordered_hessians to improve cache hit chance
  *        The naive solution is using gradients[data_indices[i]] for data_indices[i] to get gradients,
  which is not cache friendly, since the access of memory is not continuous.
  *        ordered_gradients and ordered_hessians are preprocessed, and they are re-ordered by data_indices.
  *        Ordered_gradients[i] is aligned with data_indices[i]'s gradients (same for ordered_hessians).
  * \param data_indices Used data indices in current leaf
  * \param start start index in data_indices
  * \param end end index in data_indices
  * \param ordered_gradients Pointer to gradients, the data_indices[i]-th data's gradient is ordered_gradients[i]
  * \param out Output Result
  */
  virtual void ConstructHistogram(const data_size_t* data_indices, data_size_t start, data_size_t end,
                                  const score_t* ordered_gradients, hist_t* out) const = 0;

  /*! \brief Gradient-only histogram construction over the contiguous data range [start, end) */
  virtual void ConstructHistogram(data_size_t start, data_size_t end,
                                  const score_t* ordered_gradients, hist_t* out) const = 0;

  /*!
  * \brief Split data according to threshold, if bin <= threshold, will put into left(lte_indices), else put into right(gt_indices)
  * \param min_bin min_bin of current used feature
  * \param max_bin max_bin of current used feature
  * \param default_bin default bin for feature value 0
  * \param most_freq_bin the most frequent bin
  * \param missing_type missing type
  * \param default_left missing bin will go to left child
  * \param threshold The split threshold.
  * \param data_indices Used data indices. After called this function. The less than or equal data indices will store on this object.
  * \param num_data Number of used data
  * \param lte_indices After called this function. The less or equal data indices will store on this object.
  * \param gt_indices After called this function. The greater data indices will store on this object.
  * \return The number of less than or equal data.
  */
  virtual data_size_t Split(uint32_t min_bin, uint32_t max_bin,
    uint32_t default_bin, uint32_t most_freq_bin, MissingType missing_type, bool default_left, uint32_t threshold,
    data_size_t* data_indices, data_size_t num_data,
    data_size_t* lte_indices, data_size_t* gt_indices) const = 0;

  /*!
  * \brief Split data according to threshold, if bin <= threshold, will put into left(lte_indices), else put into right(gt_indices)
  * \param min_bin min_bin of current used feature
  * \param max_bin max_bin of current used feature
  * \param most_freq_bin the most frequent bin
  * \param threshold The split threshold.
  * \param num_threshold Number of threshold
  * \param data_indices Used data indices. After called this function. The less than or equal data indices will store on this object.
  * \param num_data Number of used data
  * \param lte_indices After called this function. The less or equal data indices will store on this object.
  * \param gt_indices After called this function. The greater data indices will store on this object.
  * \return The number of less than or equal data.
  */
  virtual data_size_t SplitCategorical(uint32_t min_bin, uint32_t max_bin,
                            uint32_t most_freq_bin, const uint32_t* threshold, int num_threshold,
                            data_size_t* data_indices, data_size_t num_data,
                            data_size_t* lte_indices, data_size_t* gt_indices) const = 0;

  /*!
  * \brief After pushed all feature data, call this could have better refactor for bin data
  */
  virtual void FinishLoad() = 0;

  /*!
  * \brief Create object for bin data of one feature, used for dense feature
  * \param num_data Total number of data
  * \param num_bin Number of bin
  * \return The bin data object
  */
  static Bin* CreateDenseBin(data_size_t num_data, int num_bin);

  /*!
  * \brief Create object for bin data of one feature, used for sparse feature
  * \param num_data Total number of data
  * \param num_bin Number of bin
  * \return The bin data object
  */
  static Bin* CreateSparseBin(data_size_t num_data, int num_bin);

  /*!
  * \brief Deep copy the bin
  */
  virtual Bin* Clone() = 0;
};

446
447

/*! \brief Interface for bin data that stores multiple bin values per row.
*          NOTE(review): presumably used to hold the bins of several features together —
*          confirm against the implementations of CreateMultiValBin. */
class MultiValBin {
 public:
  /*! \brief virtual destructor */
  virtual ~MultiValBin() {}

  /*! \brief Number of all data */
  virtual data_size_t num_data() const = 0;

  /*! \brief Total number of bins */
  virtual int32_t num_bin() const = 0;

  /*! \brief Resize to hold num_data rows */
  virtual void ReSize(data_size_t num_data) = 0;

  /*!
  * \brief Push the bin values of one row
  * \param tid Thread id
  * \param idx Row index
  * \param values Bin values of this row
  */
  virtual void PushOneRow(int tid, data_size_t idx, const std::vector<uint32_t>& values) = 0;

  /*! \brief Copy the rows selected by used_indices from full_bin into this object */
  virtual void CopySubset(const Bin* full_bin, const data_size_t* used_indices, data_size_t num_used_indices) = 0;

  /*!
  * \brief Construct histogram over the rows data_indices[start..end), using
  *        gradients and hessians; result accumulated into out
  */
  virtual void ConstructHistogram(
    const data_size_t* data_indices, data_size_t start, data_size_t end,
    const score_t* gradients, const score_t* hessians,
    hist_t* out) const = 0;

  /*! \brief Construct histogram over the contiguous row range [start, end) */
  virtual void ConstructHistogram(data_size_t start, data_size_t end,
    const score_t* gradients, const score_t* hessians,
    hist_t* out) const = 0;

  /*! \brief Gradient-only histogram construction over data_indices[start..end) */
  virtual void ConstructHistogram(const data_size_t* data_indices, data_size_t start, data_size_t end,
    const score_t* ordered_gradients, hist_t* out) const = 0;

  /*! \brief Gradient-only histogram construction over the contiguous row range [start, end) */
  virtual void ConstructHistogram(data_size_t start, data_size_t end,
    const score_t* ordered_gradients, hist_t* out) const = 0;

  /*! \brief Called after all data has been pushed */
  virtual void FinishLoad() = 0;

  /*! \brief True if this object uses a sparse representation */
  virtual bool IsSparse() = 0;

  /*!
  * \brief Factory: create a multi-value bin object
  * \param num_data Total number of data
  * \param num_bin Total number of bins
  * \param num_feature Number of features stored per row
  * \param sparse_rate Sparsity used to choose the representation
  */
  static MultiValBin* CreateMultiValBin(data_size_t num_data, int num_bin, int num_feature, double sparse_rate);

  /*! \brief Deep copy this object */
  virtual MultiValBin* Clone() = 0;
};

485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
/*!
* \brief Map a raw feature value to its bin index.
*        NaN maps to the last bin when missing_type_ is NaN, otherwise it is
*        treated as 0. Numerical features binary-search bin_upper_bound_ for
*        the first upper bound >= value; categorical features look the value
*        up in categorical_2_bin_, sending negative or unseen categories to
*        the last bin.
* \param value Raw feature value
* \return Bin index of the value
*/
inline uint32_t BinMapper::ValueToBin(double value) const {
  if (std::isnan(value)) {
    if (missing_type_ == MissingType::NaN) {
      return num_bin_ - 1;
    }
    // NaN without NaN-missing handling is folded into the zero value
    value = 0.0f;
  }
  if (bin_type_ != BinType::NumericalBin) {
    const int category = static_cast<int>(value);
    // negative categories map to the NaN (last) bin
    if (category < 0) {
      return num_bin_ - 1;
    }
    auto found = categorical_2_bin_.find(category);
    if (found == categorical_2_bin_.end()) {
      // unseen category also maps to the last bin
      return num_bin_ - 1;
    }
    return found->second;
  }
  // numerical: binary search for the first upper bound that covers value
  int lo = 0;
  int hi = num_bin_ - 1;
  if (missing_type_ == MissingType::NaN) {
    // the last bin is reserved for NaN, exclude it from the search
    hi -= 1;
  }
  while (lo < hi) {
    const int mid = (hi + lo - 1) / 2;
    if (value <= bin_upper_bound_[mid]) {
      hi = mid;
    } else {
      lo = mid + 1;
    }
  }
  return lo;
}
Guolin Ke's avatar
Guolin Ke committed
522
523
524

}  // namespace LightGBM

Guolin Ke's avatar
Guolin Ke committed
525
#endif   // LIGHTGBM_BIN_H_