From 1c558a54e99c0081df1402b5384e1921ff74ebdf Mon Sep 17 00:00:00 2001
From: James Lamb
Date: Thu, 7 Oct 2021 22:04:20 -0500
Subject: [PATCH] fix possible precision loss in xentropy and fair loss
 objectives (#4651)

---
 src/metric/regression_metric.hpp     | 2 +-
 src/metric/xentropy_metric.hpp       | 4 ++--
 src/objective/xentropy_objective.hpp | 6 +++---
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/metric/regression_metric.hpp b/src/metric/regression_metric.hpp
index d9631811d780..379c36c46aca 100644
--- a/src/metric/regression_metric.hpp
+++ b/src/metric/regression_metric.hpp
@@ -212,7 +212,7 @@ class FairLossMetric: public RegressionMetric<FairLossMetric> {
   inline static double LossOnPoint(label_t label, double score, const Config& config) {
     const double x = std::fabs(score - label);
     const double c = config.fair_c;
-    return c * x - c * c * std::log(1.0f + x / c);
+    return c * x - c * c * std::log1p(x / c);
   }
 
   inline static const char* Name() {
diff --git a/src/metric/xentropy_metric.hpp b/src/metric/xentropy_metric.hpp
index 241b0a856efe..29d4984c64b3 100644
--- a/src/metric/xentropy_metric.hpp
+++ b/src/metric/xentropy_metric.hpp
@@ -194,13 +194,13 @@ class CrossEntropyLambdaMetric : public Metric {
     if (weights_ == nullptr) {
       #pragma omp parallel for schedule(static) reduction(+:sum_loss)
       for (data_size_t i = 0; i < num_data_; ++i) {
-        double hhat = std::log(1.0f + std::exp(score[i]));  // auto-convert
+        double hhat = std::log1p(std::exp(score[i]));  // auto-convert
         sum_loss += XentLambdaLoss(label_[i], 1.0f, hhat);
       }
     } else {
       #pragma omp parallel for schedule(static) reduction(+:sum_loss)
       for (data_size_t i = 0; i < num_data_; ++i) {
-        double hhat = std::log(1.0f + std::exp(score[i]));  // auto-convert
+        double hhat = std::log1p(std::exp(score[i]));  // auto-convert
         sum_loss += XentLambdaLoss(label_[i], weights_[i], hhat);
       }
     }
diff --git a/src/objective/xentropy_objective.hpp b/src/objective/xentropy_objective.hpp
index 22f9b4d33cbb..baee5bf991e4 100644
--- a/src/objective/xentropy_objective.hpp
+++ b/src/objective/xentropy_objective.hpp
@@ -203,7 +203,7 @@ class CrossEntropyLambda: public ObjectiveFunction {
         const double w = weights_[i];
         const double y = label_[i];
         const double epf = std::exp(score[i]);
-        const double hhat = std::log(1.0f + epf);
+        const double hhat = std::log1p(epf);
         const double z = 1.0f - std::exp(-w*hhat);
         const double enf = 1.0f / epf;  // = std::exp(-score[i]);
         gradients[i] = static_cast<score_t>((1.0f - y / z) * w / (1.0f + enf));
@@ -231,7 +231,7 @@ class CrossEntropyLambda: public ObjectiveFunction {
   //
 
   void ConvertOutput(const double* input, double* output) const override {
-    output[0] = std::log(1.0f + std::exp(input[0]));
+    output[0] = std::log1p(std::exp(input[0]));
   }
 
   std::string ToString() const override {
@@ -259,7 +259,7 @@ class CrossEntropyLambda: public ObjectiveFunction {
       }
     }
     double havg = suml / sumw;
-    double initscore = std::log(std::exp(havg) - 1.0f);
+    double initscore = std::log(std::expm1(havg));
     Log::Info("[%s:%s]: havg = %f -> initscore = %f", GetName(), __func__, havg, initscore);
     return initscore;
   }
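
Note (not part of the patch itself): std::log(1.0 + x) first rounds the
sum 1.0 + x to the nearest double, discarding the low-order bits of x
whenever x is small; std::log1p(x) evaluates log(1 + x) without forming
that intermediate sum, and std::expm1(x) is the matching fix for
exp(x) - 1 near x = 0. These objectives plausibly hit exactly that
regime: std::exp(score[i]) becomes tiny for strongly negative scores,
and havg sits near zero when most labels do. A minimal standalone
sketch of the failure mode (illustration only, not LightGBM code):

    #include <cmath>
    #include <cstdio>

    int main() {
      const double x = 1e-17;
      // 1.0 + 1e-17 rounds to exactly 1.0 (x is below half an ulp of 1.0),
      // so the naive form returns 0.0 -- every significant digit is lost.
      std::printf("log(1+x) = %g\n", std::log(1.0 + x));  // prints 0
      std::printf("log1p(x) = %g\n", std::log1p(x));      // prints ~1e-17
      // The same rounding hits exp(x) - 1: std::exp(1e-17) rounds to 1.0.
      std::printf("exp(x)-1 = %g\n", std::exp(x) - 1.0);  // prints 0
      std::printf("expm1(x) = %g\n", std::expm1(x));      // prints ~1e-17
      return 0;
    }

With log1p/expm1 the loss, converted output, and init score stay
accurate at extreme scores instead of silently collapsing to zero.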