
Commit ea025f9

remove.
trivialfis committed Jan 19, 2023
1 parent 3fa18b5 commit ea025f9
Showing 2 changed files with 1 addition and 19 deletions.
1 change: 1 addition & 0 deletions src/objective/rank_obj.cc
@@ -183,6 +183,7 @@ class LambdaMARTNDCG : public FitIntercept {
     auto gpairs = h_gpair.subspan(info.group_ptr_[g], cnt);
     auto labels = h_label.Slice(make_range(g), 0);
     if (ndcg_param_.lambdamart_unbiased) {
+      CHECK(false);
       this->CalcLambdaForGroup<true>(predts, labels, gpairs, info, g);
     } else {
       this->CalcLambdaForGroup<false>(predts, labels, gpairs, info, g);
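The single added line is a hard guard: CHECK is the dmlc-core style assertion used throughout XGBoost, so CHECK(false) makes the lambdamart_unbiased branch fail fatally instead of running the debiasing code that the second file removes. A minimal plain-C++ stand-in for that pattern (the SketchCheck helper, the main function, and the flag value below are illustrative assumptions, not XGBoost code):

#include <cstdlib>
#include <iostream>

// Plain-C++ stand-in for a dmlc-style CHECK(): report and abort when the
// condition does not hold.
inline void SketchCheck(bool condition, char const* expr) {
  if (!condition) {
    std::cerr << "Check failed: " << expr << std::endl;
    std::abort();
  }
}

int main() {
  bool lambdamart_unbiased = true;  // illustrative: pretend the option is enabled
  if (lambdamart_unbiased) {
    // Mirrors the added CHECK(false): reaching this branch is now a fatal error.
    SketchCheck(false, "false");
  }
  return 0;
}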
19 changes: 0 additions & 19 deletions src/objective/rank_obj.h
@@ -75,29 +75,10 @@ XGBOOST_DEVICE inline void LambdaNDCG(linalg::TensorView<float const, 1> labels,
   // fixme: cache discounts
   double delta_NDCG = std::abs(DeltaNDCG(y_high, y_low, rank_high, rank_low, inv_IDCG));

-  if (unbiased) {
-    // fixme: is this necessary?
-    delta_NDCG /= std::max(score_distance, 0.01);
-
-    double cost = std::log(1.0 / (1.0 - sigmoid)) * delta_NDCG;
-
-    int debias_high_rank = static_cast<int>(std::min(rank_high, p.lambdamart_truncation - 1));
-    int debias_low_rank = static_cast<int>(std::min(rank_low, p.lambdamart_truncation - 1));
-
-    li(debias_high_rank) += cost / tj(debias_low_rank);
-    lj(debias_low_rank) += cost / ti(debias_high_rank);
-  }
-
   constexpr double kEps = 1e-16f;
   auto lambda_ij = (sigmoid - 1.0) * delta_NDCG;
   auto hessian_ij = std::max(sigmoid * (1.0 - sigmoid), kEps) * delta_NDCG;

-  if (unbiased) {
-    lambda_ij /= (tj(rank_high) * ti(rank_low));
-    hessian_ij /= (tj(rank_high) * ti(rank_low));
-  }
-
-  std::cout << "i:" << i << "j:" << j << ", h:" << idx_high << " l:" << idx_low << ", |Z|:" << delta_NDCG << ", lij:" << lambda_ij << ", hij:" << hessian_ij << std::endl;
   auto pg = GradientPair{static_cast<float>(lambda_ij), static_cast<float>(hessian_ij)};
   auto ng = GradientPair{static_cast<float>(-lambda_ij), static_cast<float>(hessian_ij)};
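The deleted block was the position-bias correction of the unbiased LambdaMART path: the pairwise cost is accumulated into the per-rank numerators li/lj (clipped at lambdamart_truncation), and the pair's gradient and hessian are divided by the current examination-propensity estimates ti/tj. The sketch below restates that arithmetic outside of XGBoost; the function name DebiasedPairGradient, the std::vector containers, and the std::pair return are illustrative assumptions, while the computation mirrors the deleted lines (in the real objective ti/tj are running estimates maintained across boosting rounds, which the sketch simply takes as given).

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <utility>
#include <vector>

// Standalone restatement of the debiased pairwise update that the deleted lines
// implemented.  li/lj collect the pairwise cost per (truncated) rank, ti/tj hold
// the current examination-propensity estimates, and the returned lambda/hessian
// are down-weighted by those propensities.
std::pair<double, double> DebiasedPairGradient(
    double sigmoid, double score_distance, double delta_ndcg,
    std::size_t rank_high, std::size_t rank_low, std::size_t truncation,
    std::vector<double> const& ti, std::vector<double> const& tj,
    std::vector<double>* li, std::vector<double>* lj) {
  // Clamp tiny score gaps, as the removed code did (its own fixme questioned this).
  delta_ndcg /= std::max(score_distance, 0.01);
  double cost = std::log(1.0 / (1.0 - sigmoid)) * delta_ndcg;

  // Ranks past the truncation level share the last debiasing bucket.
  std::size_t debias_high_rank = std::min(rank_high, truncation - 1);
  std::size_t debias_low_rank = std::min(rank_low, truncation - 1);

  // Accumulate the cost ratios used to re-estimate the position-bias terms.
  (*li)[debias_high_rank] += cost / tj[debias_low_rank];
  (*lj)[debias_low_rank] += cost / ti[debias_high_rank];

  // Pairwise gradient/hessian, scaled by the examination propensities.
  constexpr double kEps = 1e-16;
  double lambda_ij = (sigmoid - 1.0) * delta_ndcg;
  double hessian_ij = std::max(sigmoid * (1.0 - sigmoid), kEps) * delta_ndcg;
  lambda_ij /= tj[rank_high] * ti[rank_low];
  hessian_ij /= tj[rank_high] * ti[rank_low];
  return {lambda_ij, hessian_ij};
}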

