goss.hpp 7.54 KB
Newer Older
1
2
3
4
/*!
 * Copyright (c) 2017 Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 */
Guolin Ke's avatar
Guolin Ke committed
5
6
7
#ifndef LIGHTGBM_BOOSTING_GOSS_H_
#define LIGHTGBM_BOOSTING_GOSS_H_

8
9
10
11
#include <LightGBM/boosting.h>
#include <LightGBM/utils/array_args.h>
#include <LightGBM/utils/log.h>

Guolin Ke's avatar
Guolin Ke committed
12
#include <string>
13
#include <algorithm>
14
15
#include <chrono>
#include <cstdio>
Guolin Ke's avatar
Guolin Ke committed
16
#include <cstdint>
17
18
19
20
21
#include <fstream>
#include <vector>

#include "gbdt.h"
#include "score_updater.hpp"
Guolin Ke's avatar
Guolin Ke committed
22
23
24
25

namespace LightGBM {

class GOSS: public GBDT {
Nikita Titov's avatar
Nikita Titov committed
26
 public:
Guolin Ke's avatar
Guolin Ke committed
27
28
29
  /*!
  * \brief Constructor. GOSS holds no state of its own beyond GBDT;
  *        all sampling state is built lazily in Init() via ResetGoss().
  */
  GOSS() : GBDT() {
  }

  /*! \brief Destructor — nothing to release beyond what GBDT owns. */
  ~GOSS() {
  }

Guolin Ke's avatar
Guolin Ke committed
36
  void Init(const Config* config, const Dataset* train_data, const ObjectiveFunction* objective_function,
37
38
            const std::vector<const Metric*>& training_metrics) override {
    GBDT::Init(config, train_data, objective_function, training_metrics);
39
    ResetGoss();
40
41
42
43
44
45
    if (objective_function_ == nullptr) {
      // use customized objective function
      size_t total_size = static_cast<size_t>(num_data_) * num_tree_per_iteration_;
      gradients_.resize(total_size, 0.0f);
      hessians_.resize(total_size, 0.0f);
    }
46
47
48
49
50
51
52
53
  }

  /*!
  * \brief Swap in a new training dataset, then rebuild the GOSS sampling
  *        state (index buffers, RNGs, optional subset) for its size.
  */
  void ResetTrainingData(const Dataset* train_data, const ObjectiveFunction* objective_function,
                         const std::vector<const Metric*>& training_metrics) override {
    GBDT::ResetTrainingData(train_data, objective_function, training_metrics);
    ResetGoss();
  }

Guolin Ke's avatar
Guolin Ke committed
54
  /*!
  * \brief Apply a new configuration, then re-validate and rebuild the GOSS
  *        sampling state (rates, seeds, subset decision may all change).
  */
  void ResetConfig(const Config* config) override {
    GBDT::ResetConfig(config);
    ResetGoss();
  }

59
60
61
62
  bool TrainOneIter(const score_t* gradients, const score_t* hessians) override {
    if (gradients != nullptr) {
      // use customized objective function
      CHECK(hessians != nullptr && objective_function_ == nullptr);
Guolin Ke's avatar
Guolin Ke committed
63
      int64_t total_size = static_cast<int64_t>(num_data_) * num_tree_per_iteration_;
64
      #pragma omp parallel for schedule(static)
Guolin Ke's avatar
Guolin Ke committed
65
      for (int64_t i = 0; i < total_size; ++i) {
66
67
68
69
70
71
72
73
74
75
        gradients_[i] = gradients[i];
        hessians_[i] = hessians[i];
      }
      return GBDT::TrainOneIter(gradients_.data(), hessians_.data());
    } else {
      CHECK(hessians == nullptr);
      return GBDT::TrainOneIter(nullptr, nullptr);
    }
  }

76
  void ResetGoss() {
Nikita Titov's avatar
Nikita Titov committed
77
    CHECK_LE(config_->top_rate + config_->other_rate, 1.0f);
Guolin Ke's avatar
Guolin Ke committed
78
79
    CHECK(config_->top_rate > 0.0f && config_->other_rate > 0.0f);
    if (config_->bagging_freq > 0 && config_->bagging_fraction != 1.0f) {
80
      Log::Fatal("Cannot use bagging in GOSS");
Guolin Ke's avatar
Guolin Ke committed
81
    }
82
    Log::Info("Using GOSS");
83
    balanced_bagging_ = false;
Guolin Ke's avatar
Guolin Ke committed
84
    bag_data_indices_.resize(num_data_);
85
    bagging_runner_.ReSize(num_data_);
Guolin Ke's avatar
Guolin Ke committed
86
87
88
89
90
    bagging_rands_.clear();
    for (int i = 0;
         i < (num_data_ + bagging_rand_block_ - 1) / bagging_rand_block_; ++i) {
      bagging_rands_.emplace_back(config_->bagging_seed + i);
    }
Guolin Ke's avatar
Guolin Ke committed
91
    is_use_subset_ = false;
Guolin Ke's avatar
Guolin Ke committed
92
93
    if (config_->top_rate + config_->other_rate <= 0.5) {
      auto bag_data_cnt = static_cast<data_size_t>((config_->top_rate + config_->other_rate) * num_data_);
94
      bag_data_cnt = std::max(1, bag_data_cnt);
Guolin Ke's avatar
Guolin Ke committed
95
96
97
98
99
100
101
102
      tmp_subset_.reset(new Dataset(bag_data_cnt));
      tmp_subset_->CopyFeatureMapperFrom(train_data_);
      is_use_subset_ = true;
    }
    // flag to not bagging first
    bag_data_cnt_ = num_data_;
  }

103
  /*!
  * \brief Run GOSS sampling over the row range [start, start + cnt).
  *        Keeps the top_rate fraction with largest |gradient * hessian|,
  *        randomly keeps an other_rate fraction of the rest (up-weighted to
  *        stay unbiased), and partitions `buffer` into kept / dropped rows.
  * \param start First row index of this chunk
  * \param cnt Number of rows in this chunk
  * \param buffer Output: kept indices packed at the front, dropped at the back
  * \return Number of kept (left-side) rows
  */
  data_size_t BaggingHelper(data_size_t start, data_size_t cnt, data_size_t* buffer) override {
    if (cnt <= 0) {
      return 0;
    }
    // Importance of each row: sum over trees of |gradient * hessian|.
    std::vector<score_t> tmp_gradients(cnt, 0.0f);
    for (data_size_t i = 0; i < cnt; ++i) {
      for (int cur_tree_id = 0; cur_tree_id < num_tree_per_iteration_; ++cur_tree_id) {
        size_t idx = static_cast<size_t>(cur_tree_id) * num_data_ + start + i;
        tmp_gradients[i] += std::fabs(gradients_[idx] * hessians_[idx]);
      }
    }
    data_size_t top_k = static_cast<data_size_t>(cnt * config_->top_rate);
    data_size_t other_k = static_cast<data_size_t>(cnt * config_->other_rate);
    top_k = std::max(1, top_k);
    // Partial selection: after this call, element top_k - 1 is the k-th
    // largest importance, used as the keep-unconditionally threshold.
    ArrayArgs<score_t>::ArgMaxAtK(&tmp_gradients, 0, static_cast<int>(tmp_gradients.size()), top_k - 1);
    score_t threshold = tmp_gradients[top_k - 1];
    // Up-weighting factor for randomly kept small-gradient rows, so the
    // sampled sum stays an unbiased estimate of the full sum.
    // Guard: for tiny chunks other_k can truncate to 0 (only top_k is
    // clamped above); the unguarded expression then divides by zero,
    // raising FE_DIVBYZERO and producing +inf. When other_k == 0,
    // rest_need below is never positive, so no row is ever up-weighted and
    // the multiplier's value is irrelevant — 0 is a safe placeholder.
    score_t multiply = other_k > 0
        ? static_cast<score_t>(cnt - top_k) / other_k
        : 0.0f;
    data_size_t cur_left_cnt = 0;       // next free slot at the front (kept)
    data_size_t cur_right_pos = cnt;    // next free slot at the back (dropped)
    data_size_t big_weight_cnt = 0;     // kept rows that passed the threshold
    for (data_size_t i = 0; i < cnt; ++i) {
      auto cur_idx = start + i;
      score_t grad = 0.0f;
      for (int cur_tree_id = 0; cur_tree_id < num_tree_per_iteration_; ++cur_tree_id) {
        size_t idx = static_cast<size_t>(cur_tree_id) * num_data_ + cur_idx;
        grad += std::fabs(gradients_[idx] * hessians_[idx]);
      }
      if (grad >= threshold) {
        // Large-gradient row: always kept, never re-weighted.
        buffer[cur_left_cnt++] = cur_idx;
        ++big_weight_cnt;
      } else {
        // Small-gradient row: keep with probability proportional to how many
        // "other" slots remain versus how many candidate rows remain.
        data_size_t sampled = cur_left_cnt - big_weight_cnt;
        data_size_t rest_need = other_k - sampled;
        data_size_t rest_all = (cnt - i) - (top_k - big_weight_cnt);
        double prob = (rest_need) / static_cast<double>(rest_all);
        if (bagging_rands_[cur_idx / bagging_rand_block_].NextFloat() < prob) {
          buffer[cur_left_cnt++] = cur_idx;
          // Rescale in place so training on the sample remains unbiased.
          for (int cur_tree_id = 0; cur_tree_id < num_tree_per_iteration_; ++cur_tree_id) {
            size_t idx = static_cast<size_t>(cur_tree_id) * num_data_ + cur_idx;
            gradients_[idx] *= multiply;
            hessians_[idx] *= multiply;
          }
        } else {
          buffer[--cur_right_pos] = cur_idx;
        }
      }
    }
    return cur_left_cnt;
  }

  /*!
  * \brief Perform GOSS sampling for one iteration and hand the sampled rows
  *        to the tree learner.
  * \param iter Zero-based iteration index
  */
  void Bagging(int iter) override {
    bag_data_cnt_ = num_data_;
    // not subsample for first iterations
    // (warm-up: roughly 1 / learning_rate iterations use the full data)
    if (iter < static_cast<int>(1.0f / config_->learning_rate)) { return; }
    // Run BaggingHelper over chunks in parallel; kept indices are packed at
    // the front of bag_data_indices_, dropped ones at the back.
    auto left_cnt = bagging_runner_.Run<true>(
        num_data_,
        [=](int, data_size_t cur_start, data_size_t cur_cnt, data_size_t* left,
            data_size_t*) {
          data_size_t cur_left_count = 0;
          cur_left_count = BaggingHelper(cur_start, cur_cnt, left);
          return cur_left_count;
        },
        bag_data_indices_.data());
    bag_data_cnt_ = left_cnt;
    // set bagging data to tree learner
    if (!is_use_subset_) {
      // Index-mask mode: learner trains on the original dataset restricted
      // to the kept indices (copied to device first under cuda_exp).
      #ifdef USE_CUDA_EXP
      if (config_->device_type == std::string("cuda_exp")) {
        CopyFromHostToCUDADevice<data_size_t>(cuda_bag_data_indices_.RawData(), bag_data_indices_.data(), static_cast<size_t>(num_data_), __FILE__, __LINE__);
        tree_learner_->SetBaggingData(nullptr, cuda_bag_data_indices_.RawData(), bag_data_cnt_);
      } else {
      #endif  // USE_CUDA_EXP
        tree_learner_->SetBaggingData(nullptr, bag_data_indices_.data(), bag_data_cnt_);
      #ifdef USE_CUDA_EXP
      }
      #endif  // USE_CUDA_EXP
    } else {
      // get subset
      // Subset mode (kept fraction <= 0.5, see ResetGoss): materialize the
      // kept rows into tmp_subset_ and train on that dense copy instead.
      tmp_subset_->ReSize(bag_data_cnt_);
      tmp_subset_->CopySubrow(train_data_, bag_data_indices_.data(),
                              bag_data_cnt_, false);
      #ifdef USE_CUDA_EXP
      if (config_->device_type == std::string("cuda_exp")) {
        CopyFromHostToCUDADevice<data_size_t>(cuda_bag_data_indices_.RawData(), bag_data_indices_.data(), static_cast<size_t>(num_data_), __FILE__, __LINE__);
        tree_learner_->SetBaggingData(tmp_subset_.get(), cuda_bag_data_indices_.RawData(),
                                      bag_data_cnt_);
      } else {
      #endif  // USE_CUDA_EXP
        tree_learner_->SetBaggingData(tmp_subset_.get(), bag_data_indices_.data(),
                                      bag_data_cnt_);
      #ifdef USE_CUDA_EXP
      }
      #endif  // USE_CUDA_EXP
    }
  }
199
200
201
202
203

 protected:
  /*!
  * \brief GOSS rescales hessians of sampled rows in place (BaggingHelper),
  *        so the hessian can never be treated as constant, whatever the
  *        objective reports.
  */
  bool GetIsConstHessian(const ObjectiveFunction*) override {
    return false;
  }
Guolin Ke's avatar
Guolin Ke committed
204
205
206
207
};

}  // namespace LightGBM
#endif   // LIGHTGBM_BOOSTING_GOSS_H_