Unverified Commit 1c558a54 authored by James Lamb's avatar James Lamb Committed by GitHub
Browse files

fix possible precision loss in xentropy and fair loss objectives (#4651)

parent 29857c8a
......@@ -212,7 +212,7 @@ class FairLossMetric: public RegressionMetric<FairLossMetric> {
// Fair loss evaluated on a single data point:
//   c * x - c^2 * log(1 + x / c),  where x = |score - label| and c = fair_c.
// The diff artifact left both the old and new return statements; keep only the
// fixed one: std::log1p(x / c) instead of std::log(1.0f + x / c), which avoids
// precision loss when x / c is tiny (1.0f + eps rounds to 1) — see PR #4651.
inline static double LossOnPoint(label_t label, double score, const Config& config) {
  const double x = std::fabs(score - label);
  const double c = config.fair_c;  // Fair-loss scale parameter from the config.
  return c * x - c * c * std::log1p(x / c);
}
inline static const char* Name() {
......
......@@ -194,13 +194,13 @@ class CrossEntropyLambdaMetric : public Metric {
if (weights_ == nullptr) {
#pragma omp parallel for schedule(static) reduction(+:sum_loss)
for (data_size_t i = 0; i < num_data_; ++i) {
double hhat = std::log(1.0f + std::exp(score[i])); // auto-convert
double hhat = std::log1p(std::exp(score[i])); // auto-convert
sum_loss += XentLambdaLoss(label_[i], 1.0f, hhat);
}
} else {
#pragma omp parallel for schedule(static) reduction(+:sum_loss)
for (data_size_t i = 0; i < num_data_; ++i) {
double hhat = std::log(1.0f + std::exp(score[i])); // auto-convert
double hhat = std::log1p(std::exp(score[i])); // auto-convert
sum_loss += XentLambdaLoss(label_[i], weights_[i], hhat);
}
}
......
......@@ -203,7 +203,7 @@ class CrossEntropyLambda: public ObjectiveFunction {
const double w = weights_[i];
const double y = label_[i];
const double epf = std::exp(score[i]);
const double hhat = std::log(1.0f + epf);
const double hhat = std::log1p(epf);
const double z = 1.0f - std::exp(-w*hhat);
const double enf = 1.0f / epf; // = std::exp(-score[i]);
gradients[i] = static_cast<score_t>((1.0f - y / z) * w / (1.0f + enf));
......@@ -231,7 +231,7 @@ class CrossEntropyLambda: public ObjectiveFunction {
//
// Convert a raw model score into the output space: softplus(score) = log(1 + exp(score)).
// The diff artifact left both the old and new assignments; keep only the fixed one:
// std::log1p(std::exp(x)) instead of std::log(1.0f + std::exp(x)), which preserves
// precision when exp(score) is very small — see PR #4651.
void ConvertOutput(const double* input, double* output) const override {
  output[0] = std::log1p(std::exp(input[0]));
}
std::string ToString() const override {
......@@ -259,7 +259,7 @@ class CrossEntropyLambda: public ObjectiveFunction {
}
}
double havg = suml / sumw;
double initscore = std::log(std::exp(havg) - 1.0f);
double initscore = std::log(std::expm1(havg));
Log::Info("[%s:%s]: havg = %f -> initscore = %f", GetName(), __func__, havg, initscore);
return initscore;
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment