"vscode:/vscode.git/clone" did not exist on "b6d4ad8361379a902091e3a584491c39fa89ef2d"
xentropy_objective.hpp 9.58 KB
Newer Older
1
2
3
4
/*!
 * Copyright (c) 2017 Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See LICENSE file in the project root for license information.
 */
#ifndef LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_
#define LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_

#include <LightGBM/meta.h>
#include <LightGBM/objective_function.h>
#include <LightGBM/utils/common.h>

#include <string>
#include <algorithm>
#include <cmath>
#include <cstring>
#include <sstream>
#include <vector>

/*
 * Implements gradients and Hessians for the following point losses.
 * Target y is anything in interval [0, 1].
 *
 * (1) CrossEntropy; "xentropy";
 *
 * loss(y, p, w) = { -(1-y)*log(1-p)-y*log(p) }*w,
 * with probability p = 1/(1+exp(-f)), where f is being boosted
 *
 * ConvertToOutput: f -> p
 *
 * (2) CrossEntropyLambda; "xentlambda"
 *
 * loss(y, p, w) = -(1-y)*log(1-p)-y*log(p),
 * with p = 1-exp(-lambda*w), lambda = log(1+exp(f)), f being boosted, and w > 0
 *
 * ConvertToOutput: f -> lambda
 *
 * (1) and (2) are the same if w=1; but outputs still differ.
 *
 */
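
/*
 * Gradient/Hessian sketch for (1), for reference: with p = 1/(1+exp(-f)) we
 * have dp/df = p*(1-p), so
 *   d/df   loss = { (1-y)/(1-p) - y/p } * p*(1-p) * w = (p - y)*w
 *   d2/df2 loss = p*(1-p)*w
 * which is exactly what the GetGradients implementations below compute
 * (with z playing the role of p).
 */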

namespace LightGBM {
/*!
* \brief Objective function for cross-entropy (with optional linear weights)
*/
class CrossEntropy: public ObjectiveFunction {
 public:
  explicit CrossEntropy(const Config& config)
      : deterministic_(config.deterministic) {}

  explicit CrossEntropy(const std::vector<std::string>&)
      : deterministic_(false) {
  }

  ~CrossEntropy() {}

  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();

    CHECK_NOTNULL(label_);
    Common::CheckElementsIntervalClosed<label_t>(label_, 0.0f, 1.0f, num_data_, GetName());
    Log::Info("[%s:%s]: (objective) labels passed interval [0, 1] check", GetName(), __func__);

    if (weights_ != nullptr) {
      label_t minw;
      double sumw;
      Common::ObtainMinMaxSum(weights_, num_data_, &minw, static_cast<label_t*>(nullptr), &sumw);
      if (minw < 0.0f) {
        Log::Fatal("[%s]: at least one weight is negative", GetName());
      }
      if (sumw == 0.0f) {
        Log::Fatal("[%s]: sum of weights is zero", GetName());
      }
    }
  }

  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
    if (weights_ == nullptr) {
      // compute pointwise gradients and Hessians with implied unit weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>(z - label_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z));
      }
    } else {
      // compute pointwise gradients and Hessians with given weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>((z - label_[i]) * weights_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z) * weights_[i]);
      }
    }
  }

  const char* GetName() const override {
    return "cross_entropy";
  }

  // convert score to a probability
  void ConvertOutput(const double* input, double* output) const override {
    output[0] = 1.0f / (1.0f + std::exp(-input[0]));
  }

  std::string ToString() const override {
    std::stringstream str_buf;
    str_buf << GetName();
    return str_buf.str();
  }

  // implement custom average to boost from (if enabled among options)
  double BoostFromScore(int) const override {
    double suml = 0.0f;
    double sumw = 0.0f;
    if (weights_ != nullptr) {
      #pragma omp parallel for schedule(static) reduction(+:suml, sumw) if (!deterministic_)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += static_cast<double>(label_[i]) * weights_[i];
        sumw += weights_[i];
      }
    } else {
      sumw = static_cast<double>(num_data_);
      #pragma omp parallel for schedule(static) reduction(+:suml) if (!deterministic_)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i];
      }
    }
    double pavg = suml / sumw;
    pavg = std::min(pavg, 1.0 - kEpsilon);
    pavg = std::max<double>(pavg, kEpsilon);
    double initscore = std::log(pavg / (1.0f - pavg));
    Log::Info("[%s:%s]: pavg = %f -> initscore = %f", GetName(), __func__, pavg, initscore);
    return initscore;
  }
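
  // The above solves p = 1/(1+exp(-f0)) for the constant initial score f0 at
  // p = pavg (the weighted mean label), i.e. f0 = log(pavg/(1-pavg)); the
  // clamps keep the log finite. As a quick check, pavg = 0.25 gives
  // initscore = log(1/3) ~= -1.0986.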

 private:
  /*! \brief Number of data points */
  data_size_t num_data_;
  /*! \brief Pointer for label */
  const label_t* label_;
  /*! \brief Weights for data */
  const label_t* weights_;
  const bool deterministic_;
};

/*!
* \brief Objective function for alternative parameterization of cross-entropy (see top of file for explanation)
*/
class CrossEntropyLambda: public ObjectiveFunction {
 public:
  explicit CrossEntropyLambda(const Config& config)
      : deterministic_(config.deterministic) {
    min_weight_ = max_weight_ = 0.0f;
  }

  explicit CrossEntropyLambda(const std::vector<std::string>&)
      : deterministic_(false) {}

  ~CrossEntropyLambda() {}

  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();

    CHECK_NOTNULL(label_);
    Common::CheckElementsIntervalClosed<label_t>(label_, 0.0f, 1.0f, num_data_, GetName());
    Log::Info("[%s:%s]: (objective) labels passed interval [0, 1] check", GetName(), __func__);

    if (weights_ != nullptr) {
      Common::ObtainMinMaxSum(weights_, num_data_, &min_weight_, &max_weight_, static_cast<label_t*>(nullptr));
      if (min_weight_ <= 0.0f) {
        Log::Fatal("[%s]: at least one weight is non-positive", GetName());
      }

      // Issue an info statement about this ratio
      double weight_ratio = max_weight_ / min_weight_;
      Log::Info("[%s:%s]: min, max weights = %f, %f; ratio = %f",
                GetName(), __func__,
                min_weight_, max_weight_,
                weight_ratio);
    } else {
      // all weights are implied to be unity; no need to do anything
    }
  }

  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
    if (weights_ == nullptr) {
      // compute pointwise gradients and Hessians with implied unit weights; exactly equivalent to CrossEntropy with unit weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double z = 1.0f / (1.0f + std::exp(-score[i]));
        gradients[i] = static_cast<score_t>(z - label_[i]);
        hessians[i] = static_cast<score_t>(z * (1.0f - z));
      }
    } else {
      // compute pointwise gradients and Hessians with given weights
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        const double w = weights_[i];
        const double y = label_[i];
        const double epf = std::exp(score[i]);
        const double hhat = std::log1p(epf);
        const double z = 1.0f - std::exp(-w*hhat);
        const double enf = 1.0f / epf;  // = std::exp(-score[i]);
        gradients[i] = static_cast<score_t>((1.0f - y / z) * w / (1.0f + enf));
        const double c = 1.0f / (1.0f - z);
        double d = 1.0f + epf;
        const double a = w * epf / (d * d);
        d = c - 1.0f;
        const double b = (c / (d * d) ) * (1.0f + w * epf - c);
        hessians[i] = static_cast<score_t>(a * (1.0f + y * b));
      }
    }
  }
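
  // Gradient sketch for the weighted branch above: with hhat = log(1+exp(f)),
  // z = 1-exp(-w*hhat) and dz/df = w*(1-z)/(1+exp(-f)), the chain rule gives
  //   d/df loss = { (1-y)/(1-z) - y/z } * dz/df = (1 - y/z) * w/(1+exp(-f)),
  // matching the expression used for gradients[i]; the Hessian follows by
  // differentiating once more.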

  const char* GetName() const override {
    return "cross_entropy_lambda";
  }

  //
  // ATTENTION: the function output is the "normalized exponential parameter" lambda > 0, not the probability
  //
  // If this code instead read: output[0] = 1.0f / (1.0f + std::exp(-input[0]));
  // the output would still not be the probability unless the weights are unity.
  //
  // Let z = 1 / (1 + exp(-f)), then prob(z) = 1-(1-z)^w, where w is the weight for the specific point.
  //

  void ConvertOutput(const double* input, double* output) const override {
    output[0] = std::log1p(std::exp(input[0]));
  }
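
  // To recover a probability from this output (a sketch, using the relation
  // stated at the top of the file): given lambda and a point's weight w,
  //   p = 1 - std::exp(-lambda * w);
  // for w = 1 this reduces to the usual sigmoid 1/(1+exp(-f)).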

  std::string ToString() const override {
    std::stringstream str_buf;
    str_buf << GetName();
    return str_buf.str();
  }

  double BoostFromScore(int) const override {
    double suml = 0.0f;
    double sumw = 0.0f;
    if (weights_ != nullptr) {
      #pragma omp parallel for schedule(static) reduction(+:suml, sumw) if (!deterministic_)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += static_cast<double>(label_[i]) * weights_[i];
        sumw += weights_[i];
      }
    } else {
      sumw = static_cast<double>(num_data_);
      #pragma omp parallel for schedule(static) reduction(+:suml) if (!deterministic_)
      for (data_size_t i = 0; i < num_data_; ++i) {
        suml += label_[i];
      }
    }
    double havg = suml / sumw;
    double initscore = std::log(std::expm1(havg));
    Log::Info("[%s:%s]: havg = %f -> initscore = %f", GetName(), __func__, havg, initscore);
    return initscore;
  }
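
  // The above inverts the output mapping at the mean label: ConvertOutput maps
  // f -> lambda = log(1+exp(f)), so setting lambda = havg and solving for f
  // gives f = log(exp(havg) - 1) = std::log(std::expm1(havg)).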

 private:
  /*! \brief Number of data points */
  data_size_t num_data_;
  /*! \brief Pointer for label */
  const label_t* label_;
  /*! \brief Weights for data */
  const label_t* weights_;
  /*! \brief Minimum weight found during init */
  label_t min_weight_;
  /*! \brief Maximum weight found during init */
  label_t max_weight_;
  const bool deterministic_;
};

}  // end namespace LightGBM

#endif   // end #ifndef LIGHTGBM_OBJECTIVE_XENTROPY_OBJECTIVE_HPP_