#ifndef LIGHTGBM_OBJECTIVE_BINARY_OBJECTIVE_HPP_
#define LIGHTGBM_OBJECTIVE_BINARY_OBJECTIVE_HPP_

#include <LightGBM/objective_function.h>

#include <cmath>
#include <cstring>
#include <functional>

namespace LightGBM {
/*!
* \brief Objective function for binary classification
*/
class BinaryLogloss: public ObjectiveFunction {
public:
Guolin Ke's avatar
Guolin Ke committed
15
  explicit BinaryLogloss(const ObjectiveConfig& config, std::function<bool(float)> is_pos = nullptr) {
Guolin Ke's avatar
Guolin Ke committed
16
    is_unbalance_ = config.is_unbalance;
17
    sigmoid_ = static_cast<double>(config.sigmoid);
Guolin Ke's avatar
Guolin Ke committed
18
    if (sigmoid_ <= 0.0) {
19
      Log::Fatal("Sigmoid parameter %f should be greater than zero", sigmoid_);
Guolin Ke's avatar
Guolin Ke committed
20
    }
21
    scale_pos_weight_ = static_cast<double>(config.scale_pos_weight);
Guolin Ke's avatar
Guolin Ke committed
22
23
24
25
    is_pos_ = is_pos;
    if (is_pos_ == nullptr) {
      is_pos_ = [](float label) {return label > 0; };
    }
Guolin Ke's avatar
Guolin Ke committed
26
  }
Guolin Ke's avatar
Guolin Ke committed
27

Guolin Ke's avatar
Guolin Ke committed
28
  ~BinaryLogloss() {}
Guolin Ke's avatar
Guolin Ke committed
29

Guolin Ke's avatar
Guolin Ke committed
30
31
32
33
34
35
36
  void Init(const Metadata& metadata, data_size_t num_data) override {
    num_data_ = num_data;
    label_ = metadata.label();
    weights_ = metadata.weights();
    data_size_t cnt_positive = 0;
    data_size_t cnt_negative = 0;
    // count for positive and negative samples
37
    #pragma omp parallel for schedule(static) reduction(+:cnt_positive, cnt_negative)
Guolin Ke's avatar
Guolin Ke committed
38
    for (data_size_t i = 0; i < num_data_; ++i) {
Guolin Ke's avatar
Guolin Ke committed
39
      if (is_pos_(label_[i])) {
Guolin Ke's avatar
Guolin Ke committed
40
41
42
43
44
        ++cnt_positive;
      } else {
        ++cnt_negative;
      }
    }
ProtD's avatar
ProtD committed
45
    Log::Info("Number of positive: %d, number of negative: %d", cnt_positive, cnt_negative);
Guolin Ke's avatar
Guolin Ke committed
46
47
48
49
50
51
52
    // use -1 for negative class, and 1 for positive class
    label_val_[0] = -1;
    label_val_[1] = 1;
    // weight for label
    label_weights_[0] = 1.0f;
    label_weights_[1] = 1.0f;
    // if using unbalance, change the labels weight
53
    if (is_unbalance_ && cnt_positive > 0 && cnt_negative > 0) {
54
55
      if (cnt_positive > cnt_negative) {
        label_weights_[1] = 1.0f;
56
        label_weights_[0] = static_cast<double>(cnt_positive) / cnt_negative;
57
      } else {
58
        label_weights_[1] = static_cast<double>(cnt_negative) / cnt_positive;
59
60
        label_weights_[0] = 1.0f;
      }
Guolin Ke's avatar
Guolin Ke committed
61
    }
Guolin Ke's avatar
Guolin Ke committed
62
    label_weights_[1] *= scale_pos_weight_;
Guolin Ke's avatar
Guolin Ke committed
63
64
  }

65
  void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override {
Guolin Ke's avatar
Guolin Ke committed
66
67
68
69
    if (weights_ == nullptr) {
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        // get label and label weights
Guolin Ke's avatar
Guolin Ke committed
70
        const int is_pos = is_pos_(label_[i]);
Guolin Ke's avatar
Guolin Ke committed
71
72
        const int label = label_val_[is_pos];
        const double label_weight = label_weights_[is_pos];
Guolin Ke's avatar
Guolin Ke committed
73
        // calculate gradients and hessians
74
        const double response = -label * sigmoid_ / (1.0f + std::exp(label * sigmoid_ * score[i]));
75
76
        const double abs_response = fabs(response);
        gradients[i] = static_cast<score_t>(response * label_weight);
77
        hessians[i] = static_cast<score_t>(abs_response * (sigmoid_ - abs_response) * label_weight);
Guolin Ke's avatar
Guolin Ke committed
78
79
80
81
82
      }
    } else {
      #pragma omp parallel for schedule(static)
      for (data_size_t i = 0; i < num_data_; ++i) {
        // get label and label weights
Guolin Ke's avatar
Guolin Ke committed
83
        const int is_pos = is_pos_(label_[i]);
Guolin Ke's avatar
Guolin Ke committed
84
85
        const int label = label_val_[is_pos];
        const double label_weight = label_weights_[is_pos];
Guolin Ke's avatar
Guolin Ke committed
86
        // calculate gradients and hessians
87
        const double response = -label * sigmoid_ / (1.0f + std::exp(label * sigmoid_ * score[i]));
88
89
        const double abs_response = fabs(response);
        gradients[i] = static_cast<score_t>(response * label_weight  * weights_[i]);
90
        hessians[i] = static_cast<score_t>(abs_response * (sigmoid_ - abs_response) * label_weight * weights_[i]);
Guolin Ke's avatar
Guolin Ke committed
91
92
93
94
      }
    }
  }

Guolin Ke's avatar
Guolin Ke committed
95
96
  const char* GetName() const override {
    return "binary";
Guolin Ke's avatar
Guolin Ke committed
97
98
99
100
101
102
103
104
105
106
  }

private:
  /*! \brief Number of data */
  data_size_t num_data_;
  /*! \brief Pointer of label */
  const float* label_;
  /*! \brief True if using unbalance training */
  bool is_unbalance_;
  /*! \brief Sigmoid parameter */
107
  double sigmoid_;
Guolin Ke's avatar
Guolin Ke committed
108
109
110
  /*! \brief Values for positive and negative labels */
  int label_val_[2];
  /*! \brief Weights for positive and negative labels */
111
  double label_weights_[2];
Guolin Ke's avatar
Guolin Ke committed
112
113
  /*! \brief Weights for data */
  const float* weights_;
114
  double scale_pos_weight_;
Guolin Ke's avatar
Guolin Ke committed
115
  std::function<bool(float)> is_pos_;
Guolin Ke's avatar
Guolin Ke committed
116
117
118
};

}  // namespace LightGBM
#endif   // LIGHTGBM_OBJECTIVE_BINARY_OBJECTIVE_HPP_