#ifndef LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_
#define LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_

#include <LightGBM/objective_function.h>
#include <LightGBM/meta.h>

#include <LightGBM/utils/common.h>

#include <algorithm>
#include <cmath>
#include <cstring>
#include <sstream>
#include <string>
#include <vector>

/*
 * Implements gradients and hessians for the following pointwise losses.
 * Target y is anything in interval [0, 1].
 *
 * (1) CrossEntropy; "xentropy";
 *
 * loss(y, p, w) = { -(1-y)*log(1-p)-y*log(p) }*w,
 * with probability p = 1/(1+exp(-f)), where f is being boosted
 *
 * ConvertToOutput: f -> p
 *
 * (2) CrossEntropyLambda; "xentlambda"
 *
 * loss(y, p, w) = -(1-y)*log(1-p)-y*log(p),
 * with p = 1-exp(-lambda*w), lambda = log(1+exp(f)), f being boosted, and w > 0
 *
 * ConvertToOutput: f -> lambda
 *
 * (1) and (2) coincide when w = 1, but their ConvertToOutput values still differ (probability vs. lambda).
 *
 */
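
/*
 * Derivative sketch (a summary of the algebra implemented in the GetGradients
 * methods below), writing z = 1/(1+exp(-f)):
 *
 * (1) dloss/df = (z - y)*w and d2loss/df2 = z*(1-z)*w
 *
 * (2) with lambda = log(1+exp(f)) and p = 1-exp(-lambda*w),
 *     dloss/df = (1 - y/p)*w*z; the hessian follows by differentiating once
 *     more in f (see the comments in CrossEntropyLambda::GetGradients)
 */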

namespace LightGBM {
/*!
* \brief Objective function for cross-entropy (with optional linear weights)
*/
class CrossEntropy: public ObjectiveFunction {
public:
  explicit CrossEntropy(const Config&) {
  }

  explicit CrossEntropy(const std::vector<std::string>&) {
  }

  ~CrossEntropy() {}

  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();

    CHECK_NOTNULL(label_);
    Common::CheckElementsIntervalClosed<label_t>(label_, 0.0f, 1.0f, num_data_, GetName());
    Log::Info("[%s:%s]: (objective) labels passed interval [0, 1] check",  GetName(), __func__);

    if (weights_ != nullptr) {
      label_t minw;
      double sumw;
      Common::ObtainMinMaxSum(weights_, num_data_, &minw, static_cast<label_t*>(nullptr), &sumw);
      if (minw < 0.0f) {
        Log::Fatal("[%s]: at least one weight is negative", GetName());
      }
      if (sumw == 0.0f) {
        Log::Fatal("[%s]: sum of weights is zero", GetName());
      }
    }

  }

  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
    if (weights_ == nullptr) {
      // compute pointwise gradients and hessians with implied unit weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>(z - label_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z));
      }
    } else {
      // compute pointwise gradients and hessians with given weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>((z - label_[i]) * weights_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z) * weights_[i]);
      }
    }
  }

  const char* GetName() const override {
    return "xentropy";
  }

  // convert score to a probability
  void ConvertOutput(const double* input, double* output) const override {
    output[0] = 1.0f / (1.0f + std::exp(-input[0]));
  }

  std::string ToString() const override {
    std::stringstream str_buf;
    str_buf << GetName();
    return str_buf.str();
  }

  // implement custom average to boost from (if enabled among options)
  double BoostFromScore(int) const override {
    double suml = 0.0f;
    double sumw = 0.0f;
    if (weights_ != nullptr) {
      #pragma omp parallel for schedule(static) reduction(+:suml,sumw)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i] * weights_[i];
        sumw += weights_[i];
      }
    } else {
      sumw = static_cast<double>(num_data_);
      #pragma omp parallel for schedule(static) reduction(+:suml)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i];
      }
    }
    double pavg = suml / sumw;
    pavg = std::min(pavg, 1.0 - kEpsilon);
    pavg = std::max<double>(pavg, kEpsilon);
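    // invert the sigmoid link: initscore is the logit of the clipped pavg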
    double initscore = std::log(pavg / (1.0f - pavg));
    Log::Info("[%s:%s]: pavg = %f -> initscore = %f",  GetName(), __func__, pavg, initscore);
    return initscore;
  }

private:
  /*! \brief Number of data points */
  data_size_t num_data_;
  /*! \brief Pointer for label */
  const label_t* label_;
  /*! \brief Weights for data */
  const label_t* weights_;
};
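
/*
 * A minimal, self-contained sanity check of the loss (1) formulas (not part of
 * LightGBM; the values below are illustrative only). It compares the analytic
 * gradient against a central finite difference:
 *
 *   #include <cmath>
 *   #include <cstdio>
 *
 *   double xent_loss(double y, double f, double w) {
 *     const double p = 1.0 / (1.0 + std::exp(-f));
 *     return (-(1.0 - y) * std::log(1.0 - p) - y * std::log(p)) * w;
 *   }
 *
 *   int main() {
 *     const double y = 0.3, f = 0.7, w = 2.0, eps = 1e-6;
 *     const double z = 1.0 / (1.0 + std::exp(-f));
 *     const double grad = (z - y) * w;  // as in CrossEntropy::GetGradients
 *     const double fd = (xent_loss(y, f + eps, w) - xent_loss(y, f - eps, w)) / (2.0 * eps);
 *     std::printf("analytic = %.8f, finite difference = %.8f\n", grad, fd);
 *     return 0;
 *   }
 */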

/*!
* \brief Objective function for alternative parameterization of cross-entropy (see top of file for explanation)
*/
class CrossEntropyLambda: public ObjectiveFunction {
public:
  explicit CrossEntropyLambda(const Config&) {
    min_weight_ = max_weight_ = 0.0f;
  }

  explicit CrossEntropyLambda(const std::vector<std::string>&) {
  }

  ~CrossEntropyLambda() {}

  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();

    CHECK_NOTNULL(label_);
    Common::CheckElementsIntervalClosed<label_t>(label_, 0.0f, 1.0f, num_data_, GetName());
    Log::Info("[%s:%s]: (objective) labels passed interval [0, 1] check",  GetName(), __func__);

    if (weights_ != nullptr) {

      Common::ObtainMinMaxSum(weights_, num_data_, &min_weight_, &max_weight_, static_cast<label_t*>(nullptr));
      if (min_weight_ <= 0.0f) {
        Log::Fatal("[%s]: at least one weight is non-positive", GetName());
      }

      // Issue an info statement about this ratio
      double weight_ratio = max_weight_ / min_weight_;
      Log::Info("[%s:%s]: min, max weights = %f, %f; ratio = %f",
                GetName(), __func__,
                min_weight_, max_weight_,
                weight_ratio);
    } else {
      // all weights are implied to be unity; no need to do anything
    }
  }

  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
    if (weights_ == nullptr) {
      // compute pointwise gradients and hessians with implied unit weights; exactly equivalent to CrossEntropy with unit weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>(z - label_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z));
      }
    } else {
      // compute pointwise gradients and hessians with given weights
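      // Derivative sketch: here z plays the role of p = 1-exp(-w*hhat), so
      //   dloss/df = (1 - y/z) * w * sigmoid(f)
      //   d2loss/df2 = a * (1 + y*b), with a and b as computed below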
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double w = weights_[i];
        const double y = label_[i];
        const double epf = std::exp(score[i]);
        const double hhat = std::log(1.0f + epf);
        const double z = 1.0f - std::exp(-w*hhat);
        const double enf = 1.0f / epf; // = std::exp(-score[i]);
        gradients[i] = static_cast<score_t>((1.0f - y / z) * w / (1.0f + enf));
        const double c = 1.0f / (1.0f - z);   // c = 1/(1-p)
        double d = 1.0f + epf;                // d is reused: here 1 + exp(f)
        const double a = w * epf / (d * d);   // a = w * sigmoid'(f)
        d = c - 1.0f;                         // now d = p/(1-p)
        const double b = (c / (d * d)) * (1.0f + w * epf - c);
        hessians[i] = static_cast<score_t>(a * (1.0f + y * b));
      }
    }
  }

  const char* GetName() const override {
    return "xentlambda";
  }

  //
  // ATTENTION: the function output is the "normalized exponential parameter" lambda > 0, not the probability
  //
  // Even if this code read: output[0] = 1.0f / (1.0f + std::exp(-input[0]));
  // the output would still not be the probability unless all weights are unity.
  //
  // Let z = 1 / (1 + exp(-f)), then prob(z) = 1-(1-z)^w, where w is the weight for the specific point.
  //
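  // Derivation sketch: exp(-lambda) = 1/(1+exp(f)) = 1-z, so with weight w,
  // prob = 1 - exp(-lambda*w) = 1 - (1-z)^w, which equals z only when w = 1.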

  void ConvertOutput(const double* input, double* output) const override {
    output[0] = std::log(1.0f + std::exp(input[0]));
  }

  std::string ToString() const override {
    std::stringstream str_buf;
    str_buf << GetName();
    return str_buf.str();
  }

  double BoostFromScore(int) const override {
    double suml = 0.0f;
    double sumw = 0.0f;
    if (weights_ != nullptr) {
      #pragma omp parallel for schedule(static) reduction(+:suml,sumw)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i] * weights_[i];
        sumw += weights_[i];
      }
    } else {
      sumw = static_cast<double>(num_data_);
      #pragma omp parallel for schedule(static) reduction(+:suml)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i];
      }
    }
    double havg = suml / sumw;
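    // invert the softplus link: initscore f satisfies log(1 + exp(f)) = havg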
    double initscore = std::log(std::exp(havg) - 1.0f);
    Log::Info("[%s:%s]: havg = %f -> initscore = %f",  GetName(), __func__, havg, initscore);
    return initscore;
  }

private:
  /*! \brief Number of data points */
  data_size_t num_data_;
  /*! \brief Pointer for label */
  const label_t* label_;
  /*! \brief Weights for data */
  const label_t* weights_;
  /*! \brief Minimum weight found during init */
  label_t min_weight_;
  /*! \brief Maximum weight found during init */
  label_t max_weight_;
};

}  // end namespace LightGBM

#endif   // end #ifndef LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_