#ifndef LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_
#define LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_

#include <LightGBM/meta.h>

#include <LightGBM/utils/common.h>
#include <LightGBM/objective_function.h>

#include <cstring>
#include <cmath>

/*
 * Implements gradients and hessians for the following point losses.
 * Target y is anything in interval [0, 1].
 *
 * (1) CrossEntropy; "xentropy";
 *
 * loss(y, p, w) = { -(1-y)*log(1-p)-y*log(p) }*w,
 * with probability p = 1/(1+exp(-f)), where f is being boosted
 *
 * ConvertToOutput: f -> p
 *
 * (2) CrossEntropyLambda; "xentlambda"
 *
 * loss(y, p, w) = -(1-y)*log(1-p)-y*log(p),
 * with p = 1-exp(-lambda*w), lambda = log(1+exp(f)), f being boosted, and w > 0
 *
 * ConvertToOutput: f -> lambda
 *
 * (1) and (2) coincide when w = 1, but their ConvertToOutput values still differ (p vs. lambda).
 *
 */

namespace LightGBM {
/*!
* \brief Objective function for cross-entropy (with optional linear weights)
*/
class CrossEntropy: public ObjectiveFunction {
public:
  explicit CrossEntropy(const ObjectiveConfig&) {
  }

  explicit CrossEntropy(const std::vector<std::string>&) {
  }

  ~CrossEntropy() {}

  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();

    CHECK_NOTNULL(label_);
    Common::CheckElementsIntervalClosed(label_, 0.0f, 1.0f, num_data_, GetName());
    Log::Info("[%s:%s]: (objective) labels passed interval [0, 1] check",  GetName(), __func__);

    if (weights_ != nullptr) {
      float minw;
      double sumw;
      Common::ObtainMinMaxSum(weights_, num_data_, &minw, (float*)nullptr, &sumw);
      if (minw < 0.0f) {
        Log::Fatal("[%s]: at least one weight is negative.", GetName());
      }
      if (sumw == 0.0f) {
        Log::Fatal("[%s]: sum of weights is zero.", GetName());
      }
    }
  }

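  /*
   * For reference, a short derivation sketch of what GetGradients() computes below
   * (this only restates the loss defined at the top of this file):
   * with p = 1/(1+exp(-f)) and loss(y, p, w) = { -(1-y)*log(1-p) - y*log(p) }*w,
   * the pointwise derivatives w.r.t. the boosted score f are
   *   d(loss)/df     = (p - y) * w
   *   d^2(loss)/df^2 = p * (1 - p) * w
   * with w = 1 implied when no weights are given.
   */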
  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
    if (weights_ == nullptr) {
      // compute pointwise gradients and hessians with implied unit weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>(z - label_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z));
      }
    } else {
      // compute pointwise gradients and hessians with given weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>((z - label_[i]) * weights_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z) * weights_[i]);
      }
    }
  }

  const char* GetName() const override {
    return "xentropy";
  }

  // convert score to a probability
  void ConvertOutput(const double* input, double* output) const override {
    output[0] = 1.0f / (1.0f + std::exp(-input[0]));
  }

  std::string ToString() const override {
    std::stringstream str_buf;
    str_buf << GetName();
    return str_buf.str();
  }

  // allow boost from average option
  bool BoostFromAverage() const override { return true; }

  // implement custom average to boost from (if enabled among options)
  bool GetCustomAverage(double *initscore) const override {
    if (initscore == nullptr) return false;
    double suml = 0.0f;
    double sumw = 0.0f;
    if (weights_ != nullptr) {
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i] * weights_[i];
        sumw += weights_[i];
      }
    } else {
      sumw = static_cast<double>(num_data_);
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i];
      }
    }
    double pavg = suml / sumw;
    *initscore = std::log(pavg / (1.0f - pavg));
    Log::Info("[%s:%s]: pavg=%f -> initscore=%f",  GetName(), __func__, pavg, *initscore);
    return true;
  }
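
  // Note on the formula above: initscore = log(pavg / (1 - pavg)) is the logit of the
  // (weighted) mean label, i.e. the inverse of ConvertOutput, so converting the init
  // score back recovers pavg.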

private:
  /*! \brief Number of data points */
  data_size_t num_data_;
  /*! \brief Pointer for label */
  const float* label_;
  /*! \brief Weights for data */
  const float* weights_;
};

/*!
* \brief Objective function for alternative parameterization of cross-entropy (see top of file for explanation)
*/
class CrossEntropyLambda: public ObjectiveFunction {
public:
  explicit CrossEntropyLambda(const ObjectiveConfig&) {
    min_weight_ = max_weight_ = 0.0f;
  }

  explicit CrossEntropyLambda(const std::vector<std::string>&) {
  }

  ~CrossEntropyLambda() {}

  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();

    CHECK_NOTNULL(label_);
    Common::CheckElementsIntervalClosed(label_, 0.0f, 1.0f, num_data_, GetName());
    Log::Info("[%s:%s]: (objective) labels passed interval [0, 1] check",  GetName(), __func__);

    if (weights_ != nullptr) {
      Common::ObtainMinMaxSum(weights_, num_data_, &min_weight_, &max_weight_, (float*)nullptr);
      if (min_weight_ <= 0.0f) {
        Log::Fatal("[%s]: at least one weight is non-positive.", GetName());
      }

      // Issue an info statement about this ratio
      double weight_ratio = max_weight_ / min_weight_;
      Log::Info("[%s:%s]: min, max weights = %f, %f; ratio = %f",
                GetName(), __func__,
                min_weight_, max_weight_,
                weight_ratio);
    } else {
      // all weights are implied to be unity; no need to do anything
    }
  }

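  /*
   * Derivation sketch for the weighted branch of GetGradients() below (the unweighted
   * branch reduces to plain cross-entropy). With lambda = log(1 + exp(f)) and
   * p = 1 - exp(-w*lambda), applying the chain rule to
   * loss(y, p) = -(1-y)*log(1-p) - y*log(p) gives
   *   d(loss)/df = w * sigmoid(f) * (1 - y/p)
   * which is the gradient expression in the code (z plays the role of p and 1/(1+enf)
   * of sigmoid(f)); the hessian follows by differentiating once more, with a, b, c, d
   * serving as intermediate quantities of that step.
   */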
  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
    if (weights_ == nullptr) {
      // compute pointwise gradients and hessians with implied unit weights; exactly equivalent to CrossEntropy with unit weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>(z - label_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z));
      }
    } else {
      // compute pointwise gradients and hessians with given weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double w = weights_[i];
        const double y = label_[i];
        const double epf = std::exp(score[i]);
        const double hhat = std::log(1.0f + epf);
        const double z = 1.0f - std::exp(-w*hhat);
        const double enf = 1.0f / epf; // = std::exp(-score[i]);
        gradients[i] = static_cast<score_t>((1.0f - y / z) * w / (1.0f + enf));
        const double c = 1.0f / (1.0f - z);
        double d = 1.0f + epf;
        const double a = w * epf / (d * d);
        d = c - 1.0f;
        const double b = (c / (d * d) ) * (1.0f + w * epf - c);
        hessians[i] = static_cast<score_t>(a * (1.0f + y * b));
      }
    }
  }

  const char* GetName() const override {
    return "xentlambda";
  }

  //
  // ATTENTION: the function output is the "normalized exponential parameter" lambda > 0, not the probability
  //
  // If this code instead read: output[0] = 1.0f / (1.0f + std::exp(-input[0]));
  // the output would still not be the probability unless the weights are unity.
  //
  // Let z = 1 / (1 + exp(-f)), then prob(z) = 1-(1-z)^w, where w is the weight for the specific point.
  //

  void ConvertOutput(const double* input, double* output) const override {
    output[0] = std::log(1.0f + std::exp(input[0]));
  }
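
  // Illustrative only (not used by the class): given the converted output lambda and a
  // point's weight w, the probability under the parameterization above is
  //   p = 1.0 - std::exp(-lambda * w);
  // and with w = 1 this reduces to the usual sigmoid of the raw score.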

  std::string ToString() const override {
    std::stringstream str_buf;
    str_buf << GetName();
    return str_buf.str();
  }

  bool BoostFromAverage() const override { return true; }

  bool GetCustomAverage(double *initscore) const override {
    if (initscore == nullptr) return false;
    double sumy = 0.0f;
    for (data_size_t i = 0; i < num_data_; ++i) sumy += label_[i];
    double sumw = 0.0f;
    if (weights_ != nullptr) {
      for (data_size_t i = 0; i < num_data_; ++i) sumw += weights_[i];
    } else {
      sumw = static_cast<double>(num_data_);
    }
    double havg = sumy / sumw;
    *initscore = std::log(std::exp(havg) - 1.0f);
    Log::Info("[%s:%s]: havg=%f -> initscore=%f",  GetName(), __func__, havg, *initscore);
    return true;
  }
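
  // Note on the formula above: log(exp(havg) - 1) inverts the softplus used in
  // ConvertOutput, so converting the init score back recovers havg.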

private:
  /*! \brief Number of data points */
  data_size_t num_data_;
  /*! \brief Pointer for label */
  const float* label_;
  /*! \brief Weights for data */
  const float* weights_;
  /*! \brief Minimum weight found during init */
  float min_weight_;
  /*! \brief Maximum weight found during init */
  float max_weight_;
};

}  // end namespace LightGBM

#endif   // end #ifndef LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_