fix possible precision loss in xentropy and fair loss objectives (#4651)
jameslamb authored Oct 8, 2021
1 parent 29857c8 commit 1c558a5
Showing 3 changed files with 6 additions and 6 deletions.
2 changes: 1 addition & 1 deletion src/metric/regression_metric.hpp
@@ -212,7 +212,7 @@ class FairLossMetric: public RegressionMetric<FairLossMetric> {
inline static double LossOnPoint(label_t label, double score, const Config& config) {
const double x = std::fabs(score - label);
const double c = config.fair_c;
-    return c * x - c * c * std::log(1.0f + x / c);
+    return c * x - c * c * std::log1p(x / c);
}

inline static const char* Name() {
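Side note on the change above (an illustration, not part of the commit): std::log1p(x) evaluates log(1 + x) without forming the intermediate sum 1 + x, so it avoids the rounding that occurs when x / c is far below machine epsilon. A minimal standalone sketch, with purely illustrative values:

#include <cmath>
#include <cstdio>

int main() {
  // Fair loss term: c*x - c*c*log(1 + x/c). When x/c is tiny,
  // 1.0 + x/c rounds to exactly 1.0 in double precision and the
  // naive log term collapses to 0; log1p keeps the small value.
  const double c = 1.0;
  const double x = 1e-17;                        // |score - label| very small
  const double naive  = std::log(1.0 + x / c);   // evaluates to 0.0
  const double stable = std::log1p(x / c);       // ~1e-17, as expected
  std::printf("log(1+x/c) = %.17g\nlog1p(x/c) = %.17g\n", naive, stable);
  return 0;
}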
4 changes: 2 additions & 2 deletions src/metric/xentropy_metric.hpp
@@ -194,13 +194,13 @@ class CrossEntropyLambdaMetric : public Metric {
if (weights_ == nullptr) {
#pragma omp parallel for schedule(static) reduction(+:sum_loss)
for (data_size_t i = 0; i < num_data_; ++i) {
-        double hhat = std::log(1.0f + std::exp(score[i]));  // auto-convert
+        double hhat = std::log1p(std::exp(score[i]));  // auto-convert
sum_loss += XentLambdaLoss(label_[i], 1.0f, hhat);
}
} else {
#pragma omp parallel for schedule(static) reduction(+:sum_loss)
for (data_size_t i = 0; i < num_data_; ++i) {
-        double hhat = std::log(1.0f + std::exp(score[i]));  // auto-convert
+        double hhat = std::log1p(std::exp(score[i]));  // auto-convert
sum_loss += XentLambdaLoss(label_[i], weights_[i], hhat);
}
}
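For context (an illustration, not part of the diff): hhat = log(1 + exp(score)) is the softplus transform. For strongly negative scores, exp(score) is so small that adding it to 1.0f is lost to rounding and the naive form returns exactly 0; log1p preserves the small term. A hedged sketch with an arbitrary score value:

#include <cmath>
#include <cstdio>

int main() {
  // Softplus hhat = log(1 + exp(s)). For strongly negative s, exp(s)
  // underflows relative to 1.0, so the naive form returns exactly 0,
  // whereas log1p(exp(s)) still returns a value close to exp(s).
  const double s = -40.0;                              // illustrative score
  const double naive  = std::log(1.0 + std::exp(s));   // 0.0
  const double stable = std::log1p(std::exp(s));       // ~4.25e-18
  std::printf("naive  = %.17g\nstable = %.17g\n", naive, stable);
  return 0;
}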
6 changes: 3 additions & 3 deletions src/objective/xentropy_objective.hpp
@@ -203,7 +203,7 @@ class CrossEntropyLambda: public ObjectiveFunction {
const double w = weights_[i];
const double y = label_[i];
const double epf = std::exp(score[i]);
-        const double hhat = std::log(1.0f + epf);
+        const double hhat = std::log1p(epf);
const double z = 1.0f - std::exp(-w*hhat);
const double enf = 1.0f / epf; // = std::exp(-score[i]);
gradients[i] = static_cast<score_t>((1.0f - y / z) * w / (1.0f + enf));
@@ -231,7 +231,7 @@ class CrossEntropyLambda: public ObjectiveFunction {
//

void ConvertOutput(const double* input, double* output) const override {
-    output[0] = std::log(1.0f + std::exp(input[0]));
+    output[0] = std::log1p(std::exp(input[0]));
}

std::string ToString() const override {
@@ -259,7 +259,7 @@ class CrossEntropyLambda: public ObjectiveFunction {
}
}
double havg = suml / sumw;
-    double initscore = std::log(std::exp(havg) - 1.0f);
+    double initscore = std::log(std::expm1(havg));
Log::Info("[%s:%s]: havg = %f -> initscore = %f", GetName(), __func__, havg, initscore);
return initscore;
}
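For the init score, log(exp(havg) - 1) is the inverse of the softplus transform above. When havg is small, exp(havg) - 1 cancels most of its significant digits; std::expm1 computes the difference directly and keeps them. A small standalone check, with an illustrative value of havg:

#include <cmath>
#include <cstdio>

int main() {
  // Inverse softplus: initscore = log(exp(h) - 1). For small h, the
  // subtraction exp(h) - 1 suffers catastrophic cancellation, while
  // expm1(h) evaluates exp(h) - 1 accurately.
  const double h = 1e-12;                               // illustrative average
  const double naive  = std::log(std::exp(h) - 1.0);    // noisy low-order digits
  const double stable = std::log(std::expm1(h));        // accurate
  std::printf("naive  = %.17g\nstable = %.17g\n", naive, stable);
  return 0;
}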
