#ifndef LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_
#define LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_

#include <LightGBM/objective_function.h>
#include <LightGBM/meta.h>

#include <LightGBM/utils/common.h>

#include <cstring>
#include <cmath>
#include <sstream>
#include <string>
#include <vector>

/*
 * Implements gradients and hessians for the following point losses.
 * Target y may be any value in the closed interval [0, 1].
 *
 * (1) CrossEntropy; "xentropy";
 *
 * loss(y, p, w) = { -(1-y)*log(1-p)-y*log(p) }*w,
 * with probability p = 1/(1+exp(-f)), where f is being boosted
 *
 * ConvertOutput: f -> p
 *
 * (2) CrossEntropyLambda; "xentlambda"
 *
 * loss(y, p, w) = -(1-y)*log(1-p)-y*log(p),
 * with p = 1-exp(-lambda*w), lambda = log(1+exp(f)), f being boosted, and w > 0
 *
 * ConvertOutput: f -> lambda
 *
 * (1) and (2) yield the same loss when w = 1, but their converted outputs still differ (probability vs. lambda).
 *
 */
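
/*
 * Gradient/hessian sketch for loss (1), derived from the formulas above and
 * matching what CrossEntropy::GetGradients computes below:
 *
 *   p        = 1/(1+exp(-f))
 *   dL/df    = (p - y) * w
 *   d2L/df2  = p * (1 - p) * w
 */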

namespace LightGBM {
/*!
* \brief Objective function for cross-entropy (with optional linear weights)
*/
class CrossEntropy: public ObjectiveFunction {
public:
  explicit CrossEntropy(const Config&) {
  }

  explicit CrossEntropy(const std::vector<std::string>&) {
  }

  ~CrossEntropy() {}

  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();

    CHECK_NOTNULL(label_);
    Common::CheckElementsIntervalClosed<label_t>(label_, 0.0f, 1.0f, num_data_, GetName());
    Log::Info("[%s:%s]: (objective) labels passed interval [0, 1] check",  GetName(), __func__);

    if (weights_ != nullptr) {
      label_t minw;
      double sumw;
      Common::ObtainMinMaxSum(weights_, num_data_, &minw, static_cast<label_t*>(nullptr), &sumw);
      if (minw < 0.0f) {
        Log::Fatal("[%s]: at least one weight is negative", GetName());
      }
      if (sumw == 0.0f) {
        Log::Fatal("[%s]: sum of weights is zero", GetName());
      }
    }
  }

  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
    if (weights_ == nullptr) {
      // compute pointwise gradients and hessians with implied unit weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
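        // z is the predicted probability p = 1/(1+exp(-f)) for this row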
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>(z - label_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z));
      }
    } else {
      // compute pointwise gradients and hessians with given weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>((z - label_[i]) * weights_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z) * weights_[i]);
      }
    }
  }

  const char* GetName() const override {
    return "xentropy";
  }

  // convert score to a probability
  void ConvertOutput(const double* input, double* output) const override {
    output[0] = 1.0f / (1.0f + std::exp(-input[0]));
  }

  std::string ToString() const override {
    std::stringstream str_buf;
    str_buf << GetName();
    return str_buf.str();
  }

  // implement custom average to boost from (if enabled among options)
  double BoostFromScore() const override {
    double suml = 0.0f;
    double sumw = 0.0f;
    if (weights_ != nullptr) {
      #pragma omp parallel for schedule(static) reduction(+:suml,sumw)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i] * weights_[i];
        sumw += weights_[i];
      }
    } else {
      sumw = static_cast<double>(num_data_);
      #pragma omp parallel for schedule(static) reduction(+:suml)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i];
      }
    }
    double pavg = suml / sumw;
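    // invert p = 1/(1+exp(-f)) at the (weighted) mean label: f0 = log(pavg / (1 - pavg))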
    double initscore = std::log(pavg / (1.0f - pavg));
    Log::Info("[%s:%s]: pavg = %f -> initscore = %f",  GetName(), __func__, pavg, initscore);
    return initscore;
  }

private:
  /*! \brief Number of data points */
  data_size_t num_data_;
  /*! \brief Pointer for label */
  const label_t* label_;
  /*! \brief Weights for data */
  const label_t* weights_;
};

/*!
* \brief Objective function for alternative parameterization of cross-entropy (see top of file for explanation)
*/
class CrossEntropyLambda: public ObjectiveFunction {
public:
  explicit CrossEntropyLambda(const Config&) {
    min_weight_ = max_weight_ = 0.0f;
  }

  explicit CrossEntropyLambda(const std::vector<std::string>&) {
  }

  ~CrossEntropyLambda() {}

  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();

    CHECK_NOTNULL(label_);
    Common::CheckElementsIntervalClosed<label_t>(label_, 0.0f, 1.0f, num_data_, GetName());
    Log::Info("[%s:%s]: (objective) labels passed interval [0, 1] check",  GetName(), __func__);

    if (weights_ != nullptr) {

      Common::ObtainMinMaxSum(weights_, num_data_, &min_weight_, &max_weight_, static_cast<label_t*>(nullptr));
      if (min_weight_ <= 0.0f) {
        Log::Fatal("[%s]: at least one weight is non-positive", GetName());
      }

      // Issue an info statement about this ratio
      double weight_ratio = max_weight_ / min_weight_;
      Log::Info("[%s:%s]: min, max weights = %f, %f; ratio = %f",
                GetName(), __func__,
                min_weight_, max_weight_,
                weight_ratio);
    } else {
      // all weights are implied to be unity; no need to do anything
    }
  }

  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
    if (weights_ == nullptr) {
      // compute pointwise gradients and hessians with implied unit weights; exactly equivalent to CrossEntropy with unit weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>(z - label_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z));
      }
    } else {
      // compute pointwise gradients and hessians with given weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
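        // per the file-top notes: hhat = lambda = log(1 + exp(f)) and the point
        // probability is z = 1 - exp(-w * lambda); the gradient below evaluates
        // (1 - y/z) * w * 1/(1 + exp(-f))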
        const double w = weights_[i];
        const double y = label_[i];
        const double epf = std::exp(score[i]);
        const double hhat = std::log(1.0f + epf);
        const double z = 1.0f - std::exp(-w*hhat);
        const double enf = 1.0f / epf; // = std::exp(-score[i]);
        gradients[i] = static_cast<score_t>((1.0f - y / z) * w / (1.0f + enf));
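        // hessian = a * (1 + y*b), with a = w * exp(f)/(1+exp(f))^2,
        // c = 1/(1-z) = (1+exp(f))^w, and b = c * (1 + w*exp(f) - c) / (c-1)^2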
        const double c = 1.0f / (1.0f - z);
        double d = 1.0f + epf;
        const double a = w * epf / (d * d);
        d = c - 1.0f;
        const double b = (c / (d * d) ) * (1.0f + w * epf - c);
        hessians[i] = static_cast<score_t>(a * (1.0f + y * b));
      }
    }
  }

  const char* GetName() const override {
    return "xentlambda";
  }

  //
  // ATTENTION: the function output is the "normalized exponential parameter" lambda > 0, not the probability
  //
  // If this code instead read: output[0] = 1.0f / (1.0f + std::exp(-input[0]));
  // the output would still not be the probability unless the weights are unity.
  //
  // Let z = 1 / (1 + exp(-f)); then prob(z) = 1 - (1-z)^w, where w is the weight for the specific point.
  // (See the illustrative helper sketched below.)
  //

  void ConvertOutput(const double* input, double* output) const override {
    output[0] = std::log(1.0f + std::exp(input[0]));
  }
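
  // Illustrative sketch only, not part of the upstream LightGBM interface: given the
  // converted lambda above and a point weight w > 0, the per-point probability from
  // the file-top relation p = 1 - exp(-lambda * w) can be recovered as follows.
  static double LambdaToProbability(double lambda, double w) {
    return 1.0 - std::exp(-lambda * w);
  }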

  std::string ToString() const override {
    std::stringstream str_buf;
    str_buf << GetName();
    return str_buf.str();
  }

  double BoostFromScore() const override {
    double suml = 0.0f;
    double sumw = 0.0f;
    if (weights_ != nullptr) {
      #pragma omp parallel for schedule(static) reduction(+:suml,sumw)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i] * weights_[i];
        sumw += weights_[i];
      }
    } else {
      sumw = static_cast<double>(num_data_);
      #pragma omp parallel for schedule(static) reduction(+:suml)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i];
      }
    }
    double havg = suml / sumw;
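    // invert lambda = log(1 + exp(f)) at the (weighted) mean label: f0 = log(exp(havg) - 1)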
    double initscore = std::log(std::exp(havg) - 1.0f);
    Log::Info("[%s:%s]: havg = %f -> initscore = %f",  GetName(), __func__, havg, initscore);
    return initscore;
  }

private:
  /*! \brief Number of data points */
  data_size_t num_data_;
  /*! \brief Pointer for label */
  const label_t* label_;
  /*! \brief Weights for data */
  const label_t* weights_;
  /*! \brief Minimum weight found during init */
  label_t min_weight_;
  /*! \brief Maximum weight found during init */
  label_t max_weight_;
};

}  // end namespace LightGBM

#endif   // end #ifndef LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_