/*!
 * Copyright (c) 2016 Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 */
#ifndef LIGHTGBM_OBJECTIVE_MULTICLASS_OBJECTIVE_HPP_
#define LIGHTGBM_OBJECTIVE_MULTICLASS_OBJECTIVE_HPP_

#include <LightGBM/objective_function.h>

#include <algorithm>
#include <cmath>
#include <cstring>
#include <memory>
#include <sstream>
#include <string>
#include <vector>

#include "binary_objective.hpp"

namespace LightGBM {
/*!
Guolin Ke's avatar
Guolin Ke committed
21
* \brief Objective function for multiclass classification, use softmax as objective functions
22
*/
Guolin Ke's avatar
Guolin Ke committed
23
class MulticlassSoftmax: public ObjectiveFunction {
Nikita Titov's avatar
Nikita Titov committed
24
 public:
Guolin Ke's avatar
Guolin Ke committed
25
  explicit MulticlassSoftmax(const Config& config) {
26
27
    num_class_ = config.num_class;
  }
28

29
30
31
  explicit MulticlassSoftmax(const std::vector<std::string>& strs) {
    num_class_ = -1;
    for (auto str : strs) {
Guolin Ke's avatar
Guolin Ke committed
32
      auto tokens = Common::Split(str.c_str(), ':');
33
34
35
36
37
38
39
      if (tokens.size() == 2) {
        if (tokens[0] == std::string("num_class")) {
          Common::Atoi(tokens[1].c_str(), &num_class_);
        }
      }
    }
    if (num_class_ < 0) {
40
      Log::Fatal("Objective should contain num_class field");
41
42
43
    }
  }

Guolin Ke's avatar
Guolin Ke committed
44
  ~MulticlassSoftmax() {
45
  }
46

47
48
49
50
  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();
Guolin Ke's avatar
Guolin Ke committed
51
    label_int_.resize(num_data_);
52
53
    class_init_probs_.resize(num_class_, 0.0);
    double sum_weight = 0.0;
54
55
56
57
58
    for (int i = 0; i < num_data_; ++i) {
      label_int_[i] = static_cast<int>(label_[i]);
      if (label_int_[i] < 0 || label_int_[i] >= num_class_) {
        Log::Fatal("Label must be in [0, %d), but found %d in label", num_class_, label_int_[i]);
      }
59
60
61
62
63
64
65
66
67
68
69
70
      if (weights_ == nullptr) {
        class_init_probs_[label_int_[i]] += 1.0;
      } else {
        class_init_probs_[label_int_[i]] += weights_[i];
        sum_weight += weights_[i];
      }
    }
    if (weights_ == nullptr) {
      sum_weight = num_data_;
    }
    for (int i = 0; i < num_class_; ++i) {
      class_init_probs_[i] /= sum_weight;
71
72
73
    }
  }

74
  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
75
    if (weights_ == nullptr) {
zhangyafeikimi's avatar
zhangyafeikimi committed
76
77
      std::vector<double> rec;
      #pragma omp parallel for schedule(static) private(rec)
78
      for (data_size_t i = 0; i < num_data_; ++i) {
zhangyafeikimi's avatar
zhangyafeikimi committed
79
        rec.resize(num_class_);
Guolin Ke's avatar
Guolin Ke committed
80
        for (int k = 0; k < num_class_; ++k) {
81
82
          size_t idx = static_cast<size_t>(num_data_) * k + i;
          rec[k] = static_cast<double>(score[idx]);
83
        }
84
        Common::Softmax(&rec);
85
        for (int k = 0; k < num_class_; ++k) {
86
          auto p = rec[k];
87
          size_t idx = static_cast<size_t>(num_data_) * k + i;
88
          if (label_int_[i] == k) {
Guolin Ke's avatar
Guolin Ke committed
89
            gradients[idx] = static_cast<score_t>(p - 1.0f);
90
          } else {
Guolin Ke's avatar
Guolin Ke committed
91
            gradients[idx] = static_cast<score_t>(p);
92
          }
Guolin Ke's avatar
Guolin Ke committed
93
          hessians[idx] = static_cast<score_t>(2.0f * p * (1.0f - p));
94
        }
95
96
      }
    } else {
zhangyafeikimi's avatar
zhangyafeikimi committed
97
98
      std::vector<double> rec;
      #pragma omp parallel for schedule(static) private(rec)
99
      for (data_size_t i = 0; i < num_data_; ++i) {
zhangyafeikimi's avatar
zhangyafeikimi committed
100
        rec.resize(num_class_);
Guolin Ke's avatar
Guolin Ke committed
101
        for (int k = 0; k < num_class_; ++k) {
102
103
          size_t idx = static_cast<size_t>(num_data_) * k + i;
          rec[k] = static_cast<double>(score[idx]);
104
        }
105
106
        Common::Softmax(&rec);
        for (int k = 0; k < num_class_; ++k) {
107
          auto p = rec[k];
108
          size_t idx = static_cast<size_t>(num_data_) * k + i;
109
          if (label_int_[i] == k) {
Guolin Ke's avatar
Guolin Ke committed
110
            gradients[idx] = static_cast<score_t>((p - 1.0f) * weights_[i]);
111
          } else {
Guolin Ke's avatar
Guolin Ke committed
112
            gradients[idx] = static_cast<score_t>((p) * weights_[i]);
113
          }
Guolin Ke's avatar
Guolin Ke committed
114
          hessians[idx] = static_cast<score_t>((2.0f * p * (1.0f - p))* weights_[i]);
115
116
117
118
119
        }
      }
    }
  }

Guolin Ke's avatar
Guolin Ke committed
120
121
  void ConvertOutput(const double* input, double* output) const override {
    Common::Softmax(input, output, num_class_);
122
123
  }

Guolin Ke's avatar
Guolin Ke committed
124
125
  const char* GetName() const override {
    return "multiclass";
126
127
  }

128
129
130
131
132
133
134
135
136
  std::string ToString() const override {
    std::stringstream str_buf;
    str_buf << GetName() << " ";
    str_buf << "num_class:" << num_class_;
    return str_buf.str();
  }

  bool SkipEmptyClass() const override { return true; }

Guolin Ke's avatar
Guolin Ke committed
137
  int NumModelPerIteration() const override { return num_class_; }
Guolin Ke's avatar
Guolin Ke committed
138
139

  int NumPredictOneRow() const override { return num_class_; }
140

141
142
  bool NeedAccuratePrediction() const override { return false; }

143
144
145
146
  double BoostFromScore(int class_id) const override {
    return std::log(std::max<double>(kEpsilon, class_init_probs_[class_id]));
  }

147
148
  bool ClassNeedTrain(int class_id) const override {
    if (std::fabs(class_init_probs_[class_id]) <= kEpsilon
149
150
151
152
153
154
155
        || std::fabs(class_init_probs_[class_id]) >= 1.0 - kEpsilon) {
      return false;
    } else {
      return true;
    }
  }

Nikita Titov's avatar
Nikita Titov committed
156
 private:
157
158
159
160
161
  /*! \brief Number of data */
  data_size_t num_data_;
  /*! \brief Number of classes */
  int num_class_;
  /*! \brief Pointer of label */
162
  const label_t* label_;
163
  /*! \brief Corresponding integers of label_ */
Guolin Ke's avatar
Guolin Ke committed
164
  std::vector<int> label_int_;
165
  /*! \brief Weights for data */
166
  const label_t* weights_;
167
  std::vector<double> class_init_probs_;
Guolin Ke's avatar
Guolin Ke committed
168
169
170
171
172
173
};

/*!
* \brief Objective function for multiclass classification, use one-vs-all binary objective function
*/
class MulticlassOVA: public ObjectiveFunction {
Nikita Titov's avatar
Nikita Titov committed
174
 public:
Guolin Ke's avatar
Guolin Ke committed
175
  explicit MulticlassOVA(const Config& config) {
Guolin Ke's avatar
Guolin Ke committed
176
177
178
    num_class_ = config.num_class;
    for (int i = 0; i < num_class_; ++i) {
      binary_loss_.emplace_back(
179
        new BinaryLogloss(config, [i](label_t label) { return static_cast<int>(label) == i; }));
Guolin Ke's avatar
Guolin Ke committed
180
    }
181
182
183
184
185
186
187
    sigmoid_ = config.sigmoid;
  }

  explicit MulticlassOVA(const std::vector<std::string>& strs) {
    num_class_ = -1;
    sigmoid_ = -1;
    for (auto str : strs) {
Guolin Ke's avatar
Guolin Ke committed
188
      auto tokens = Common::Split(str.c_str(), ':');
189
190
191
192
193
194
195
196
197
      if (tokens.size() == 2) {
        if (tokens[0] == std::string("num_class")) {
          Common::Atoi(tokens[1].c_str(), &num_class_);
        } else if (tokens[0] == std::string("sigmoid")) {
          Common::Atof(tokens[1].c_str(), &sigmoid_);
        }
      }
    }
    if (num_class_ < 0) {
198
      Log::Fatal("Objective should contain num_class field");
199
200
201
202
    }
    if (sigmoid_ <= 0.0) {
      Log::Fatal("Sigmoid parameter %f should be greater than zero", sigmoid_);
    }
Guolin Ke's avatar
Guolin Ke committed
203
204
205
206
207
208
209
210
211
212
213
214
  }

  ~MulticlassOVA() {
  }

  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    for (int i = 0; i < num_class_; ++i) {
      binary_loss_[i]->Init(metadata, num_data);
    }
  }

215
  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
Guolin Ke's avatar
Guolin Ke committed
216
    for (int i = 0; i < num_class_; ++i) {
217
      int64_t bias = static_cast<int64_t>(num_data_) * i;
Guolin Ke's avatar
Guolin Ke committed
218
219
220
221
222
223
224
225
      binary_loss_[i]->GetGradients(score + bias, gradients + bias, hessians + bias);
    }
  }

  const char* GetName() const override {
    return "multiclassova";
  }

Guolin Ke's avatar
Guolin Ke committed
226
  void ConvertOutput(const double* input, double* output) const override {
227
    for (int i = 0; i < num_class_; ++i) {
Guolin Ke's avatar
Guolin Ke committed
228
      output[i] = 1.0f / (1.0f + std::exp(-sigmoid_ * input[i]));
229
230
231
232
233
234
235
236
237
238
239
240
241
    }
  }

  std::string ToString() const override {
    std::stringstream str_buf;
    str_buf << GetName() << " ";
    str_buf << "num_class:" << num_class_ << " ";
    str_buf << "sigmoid:" << sigmoid_;
    return str_buf.str();
  }

  bool SkipEmptyClass() const override { return true; }

Guolin Ke's avatar
Guolin Ke committed
242
  int NumModelPerIteration() const override { return num_class_; }
Guolin Ke's avatar
Guolin Ke committed
243
244

  int NumPredictOneRow() const override { return num_class_; }
245

246
247
  bool NeedAccuratePrediction() const override { return false; }

248
249
250
251
252
253
254
255
  double BoostFromScore(int class_id) const override {
    return binary_loss_[class_id]->BoostFromScore(0);
  }

  bool ClassNeedTrain(int class_id) const override {
    return binary_loss_[class_id]->ClassNeedTrain(0);
  }

Nikita Titov's avatar
Nikita Titov committed
256
 private:
Guolin Ke's avatar
Guolin Ke committed
257
258
259
260
261
  /*! \brief Number of data */
  data_size_t num_data_;
  /*! \brief Number of classes */
  int num_class_;
  std::vector<std::unique_ptr<BinaryLogloss>> binary_loss_;
262
  double sigmoid_;
263
264
265
266
};

}  // namespace LightGBM
#endif   // LIGHTGBM_OBJECTIVE_MULTICLASS_OBJECTIVE_HPP_