From ea025f9e8085f5624db8bffbee801dbcd60f3ff5 Mon Sep 17 00:00:00 2001
From: jiamingy
Date: Thu, 19 Jan 2023 14:06:37 +0800
Subject: [PATCH] remove.

---
 src/objective/rank_obj.cc |  1 +
 src/objective/rank_obj.h  | 19 -------------------
 2 files changed, 1 insertion(+), 19 deletions(-)

diff --git a/src/objective/rank_obj.cc b/src/objective/rank_obj.cc
index a823a68fb19c..37a22ddda328 100644
--- a/src/objective/rank_obj.cc
+++ b/src/objective/rank_obj.cc
@@ -183,6 +183,7 @@ class LambdaMARTNDCG : public FitIntercept {
       auto gpairs = h_gpair.subspan(info.group_ptr_[g], cnt);
       auto labels = h_label.Slice(make_range(g), 0);
       if (ndcg_param_.lambdamart_unbiased) {
+        CHECK(false);
         this->CalcLambdaForGroup(predts, labels, gpairs, info, g);
       } else {
         this->CalcLambdaForGroup(predts, labels, gpairs, info, g);
diff --git a/src/objective/rank_obj.h b/src/objective/rank_obj.h
index 75a4752fbdc3..8d110cfe9acd 100644
--- a/src/objective/rank_obj.h
+++ b/src/objective/rank_obj.h
@@ -75,29 +75,10 @@ XGBOOST_DEVICE inline void LambdaNDCG(linalg::TensorView labels,
   // fixme: cache discounts
   double delta_NDCG = std::abs(DeltaNDCG(y_high, y_low, rank_high, rank_low, inv_IDCG));
-  if (unbiased) {
-    // fixme: is this necessary?
-    delta_NDCG /= std::max(score_distance, 0.01);
-
-    double cost = std::log(1.0 / (1.0 - sigmoid)) * delta_NDCG;
-
-    int debias_high_rank = static_cast<int>(std::min(rank_high, p.lambdamart_truncation - 1));
-    int debias_low_rank = static_cast<int>(std::min(rank_low, p.lambdamart_truncation - 1));
-
-    li(debias_high_rank) += cost / tj(debias_low_rank);
-    lj(debias_low_rank) += cost / ti(debias_high_rank);
-  }
-
   constexpr double kEps = 1e-16f;
   auto lambda_ij = (sigmoid - 1.0) * delta_NDCG;
   auto hessian_ij = std::max(sigmoid * (1.0 - sigmoid), kEps) * delta_NDCG;
-  if (unbiased) {
-    lambda_ij /= (tj(rank_high) * ti(rank_low));
-    hessian_ij /= (tj(rank_high) * ti(rank_low));
-  }
-
-  std::cout << "i:" << i << "j:" << j << ", h:" << idx_high << " l:" << idx_low << ", |Z|:" << delta_NDCG << ", lij:" << lambda_ij << ", hij:" << hessian_ij << std::endl;
   auto pg = GradientPair{static_cast<float>(lambda_ij), static_cast<float>(hessian_ij)};
   auto ng = GradientPair{static_cast<float>(-lambda_ij), static_cast<float>(hessian_ij)};