#ifndef LIGHTGBM_METRIC_MULTICLASS_METRIC_HPP_
#define LIGHTGBM_METRIC_MULTICLASS_METRIC_HPP_

#include <LightGBM/utils/log.h>

#include <LightGBM/metric.h>

#include <cmath>

namespace LightGBM {
/*!
* \brief Metric for the multiclass task.
* Uses the static class "PointWiseLossCalculator" to calculate the loss point-wise.
*/
template<typename PointWiseLossCalculator>
class MulticlassMetric: public Metric {
public:
  explicit MulticlassMetric(const MetricConfig&) {

  }

  virtual ~MulticlassMetric() {

  }

  void Init(const Metadata& metadata, data_size_t num_data) override {
    name_.emplace_back(PointWiseLossCalculator::Name());
    num_data_ = num_data;
    // get label
    label_ = metadata.label();
    // get weights
    weights_ = metadata.weights();
    if (weights_ == nullptr) {
      sum_weights_ = static_cast<double>(num_data_);
    } else {
      sum_weights_ = 0.0;
      for (data_size_t i = 0; i < num_data_; ++i) {
        sum_weights_ += weights_[i];
      }
    }
  }

  const std::vector<std::string>& GetName() const override {
    return name_;
  }

  double factor_to_bigger_better() const override {
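    // loss metrics are minimized, so return -1 to flip them into a
    // bigger-is-better value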
    return -1.0;
  }

  std::vector<double> Eval(const double* score, const ObjectiveFunction* objective,
                           int num_tree_per_iteration) const override {
    double sum_loss = 0.0;
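    // scores are laid out class-major: score[k * num_data_ + i] is the
    // class-k score of data point i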
    if (objective != nullptr) {
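      // an objective function is given: convert the raw scores with the
      // objective's ConvertOutput before computing the point-wise loss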
      if (weights_ == nullptr) {
        #pragma omp parallel for schedule(static) reduction(+:sum_loss)
        for (data_size_t i = 0; i < num_data_; ++i) {
          std::vector<double> rec(num_tree_per_iteration);
          for (int k = 0; k < num_tree_per_iteration; ++k) {
            size_t idx = static_cast<size_t>(num_data_) * k + i;
            rec[k] = static_cast<double>(score[idx]);
          }
          rec = objective->ConvertOutput(rec);
          // add loss
          sum_loss += PointWiseLossCalculator::LossOnPoint(label_[i], rec);
        }
      } else {
        #pragma omp parallel for schedule(static) reduction(+:sum_loss)
        for (data_size_t i = 0; i < num_data_; ++i) {
          std::vector<double> rec(num_tree_per_iteration);
          for (int k = 0; k < num_tree_per_iteration; ++k) {
            size_t idx = static_cast<size_t>(num_data_) * k + i;
            rec[k] = static_cast<double>(score[idx]);
          }
          rec = objective->ConvertOutput(rec);
          // add loss
          sum_loss += PointWiseLossCalculator::LossOnPoint(label_[i], rec) * weights_[i];
        }
      }
    } else {
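      // no objective function: the scores are used as-is, so they are
      // assumed to be already-converted outputs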
      if (weights_ == nullptr) {
        #pragma omp parallel for schedule(static) reduction(+:sum_loss)
        for (data_size_t i = 0; i < num_data_; ++i) {
          std::vector<double> rec(num_tree_per_iteration);
          for (int k = 0; k < num_tree_per_iteration; ++k) {
            size_t idx = static_cast<size_t>(num_data_) * k + i;
            rec[k] = static_cast<double>(score[idx]);
          }
          // add loss
          sum_loss += PointWiseLossCalculator::LossOnPoint(label_[i], rec);
        }
      } else {
        #pragma omp parallel for schedule(static) reduction(+:sum_loss)
        for (data_size_t i = 0; i < num_data_; ++i) {
          std::vector<double> rec(num_tree_per_iteration);
          for (int k = 0; k < num_tree_per_iteration; ++k) {
            size_t idx = static_cast<size_t>(num_data_) * k + i;
            rec[k] = static_cast<double>(score[idx]);
          }
          // add loss
          sum_loss += PointWiseLossCalculator::LossOnPoint(label_[i], rec) * weights_[i];
        }
      }
    }
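    // normalize by the total weight (the data count when unweighted)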
    double loss = sum_loss / sum_weights_;
    return std::vector<double>(1, loss);
  }

private:
  /*! \brief Number of data */
  data_size_t num_data_;
  /*! \brief Pointer to labels */
  const float* label_;
  /*! \brief Pointer to weights */
  const float* weights_;
  /*! \brief Sum of weights */
  double sum_weights_;
  /*! \brief Name of this test set */
  std::vector<std::string> name_;
};

/*! \brief Error rate (0-1 loss) for multiclass task */
class MultiErrorMetric: public MulticlassMetric<MultiErrorMetric> {
public:
  explicit MultiErrorMetric(const MetricConfig& config) :MulticlassMetric<MultiErrorMetric>(config) {}

  inline static double LossOnPoint(float label, const std::vector<double>& score) {
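    // a point counts as an error unless its true class has the strictly
    // highest score; a tie with any other class counts as an error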
    size_t k = static_cast<size_t>(label);
    for (size_t i = 0; i < score.size(); ++i) {
      if (i != k && score[i] >= score[k]) {
        return 1.0;
      }
    }
    return 0.0;
  }

  inline static const char* Name() {
    return "multi_error";
  }
};

/*! \brief Logloss for multiclass task */
class MultiSoftmaxLoglossMetric: public MulticlassMetric<MultiSoftmaxLoglossMetric> {
public:
  explicit MultiSoftmaxLoglossMetric(const MetricConfig& config) :MulticlassMetric<MultiSoftmaxLoglossMetric>(config) {}

  inline static double LossOnPoint(float label, const std::vector<double>& score) {
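    // point-wise logloss is -log(p) for the true class's probability p,
    // clamped at kEpsilon so a zero probability cannot produce infinity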
    size_t k = static_cast<size_t>(label);
    if (score[k] > kEpsilon) {
      return static_cast<double>(-std::log(score[k]));
    } else {
      return -std::log(kEpsilon);
    }
  }

  inline static const char* Name() {
    return "multi_logloss";
  }
};
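
// A minimal sketch (not part of this file's upstream source) showing how a
// further metric could plug into the MulticlassMetric template above: supply
// static LossOnPoint and Name functions, and inherit with the derived class
// as the template argument. "MultiBrierScoreMetric" and the "multi_brier"
// name are hypothetical illustrations, and the sketch assumes the scores it
// receives are class probabilities.
/*! \brief Brier score (squared error against the one-hot target) for multiclass task */
class MultiBrierScoreMetric: public MulticlassMetric<MultiBrierScoreMetric> {
public:
  explicit MultiBrierScoreMetric(const MetricConfig& config) :MulticlassMetric<MultiBrierScoreMetric>(config) {}

  inline static double LossOnPoint(float label, const std::vector<double>& score) {
    size_t k = static_cast<size_t>(label);
    double loss = 0.0;
    for (size_t i = 0; i < score.size(); ++i) {
      // one-hot target: 1.0 for the true class, 0.0 for every other class
      double diff = score[i] - (i == k ? 1.0 : 0.0);
      loss += diff * diff;
    }
    return loss;
  }

  inline static const char* Name() {
    return "multi_brier";
  }
};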

}  // namespace LightGBM
#endif   // LIGHTGBM_METRIC_MULTICLASS_METRIC_HPP_