diff --git a/src/learner.cc b/src/learner.cc
index 81d1b795b0bc..51f86aa67013 100644
--- a/src/learner.cc
+++ b/src/learner.cc
@@ -1317,7 +1317,9 @@ class LearnerImpl : public LearnerIO {
     if (metrics_.empty() && tparam_.disable_default_eval_metric <= 0) {
       metrics_.emplace_back(Metric::Create(obj_->DefaultEvalMetric(), &ctx_));
       auto config = obj_->DefaultMetricConfig();
-      metrics_.back()->LoadConfig(config);
+      if (!IsA<Null>(config)) {
+        metrics_.back()->LoadConfig(config);
+      }
       metrics_.back()->Configure({cfg_.begin(), cfg_.end()});
     }

diff --git a/src/objective/regression_obj.cu b/src/objective/regression_obj.cu
index 4c5ed9ec81a8..a1a773f5340a 100644
--- a/src/objective/regression_obj.cu
+++ b/src/objective/regression_obj.cu
@@ -268,6 +268,13 @@ class PseudoHuberRegression : public FitIntercept {
     }
     FromJson(in["pseudo_huber_param"], &param_);
   }
+  [[nodiscard]] Json DefaultMetricConfig() const override {
+    CHECK(param_.GetInitialised());
+    Json config{Object{}};
+    config["name"] = String{this->DefaultEvalMetric()};
+    config["pseudo_huber_param"] = ToJson(param_);
+    return config;
+  }
 };

 XGBOOST_REGISTER_OBJECTIVE(PseudoHuberRegression, "reg:pseudohubererror")
diff --git a/tests/cpp/objective/test_objective.cc b/tests/cpp/objective/test_objective.cc
index 718f8f659495..21ffc7cafa37 100644
--- a/tests/cpp/objective/test_objective.cc
+++ b/tests/cpp/objective/test_objective.cc
@@ -6,6 +6,7 @@
 #include <xgboost/objective.h>

 #include "../helpers.h"
+#include "../objective_helpers.h"

 TEST(Objective, UnknownFunction) {
   xgboost::ObjFunction* obj = nullptr;
@@ -43,4 +44,61 @@ TEST(Objective, PredTransform) {
     ASSERT_TRUE(predts.HostCanWrite());
   }
 }
+
+class TestDefaultObjConfig : public ::testing::TestWithParam<std::string> {
+  Context ctx_;
+
+ public:
+  void Run(std::string objective) {
+    auto Xy = MakeFmatForObjTest(objective);
+    std::unique_ptr<Learner> learner{Learner::Create({Xy})};
+    std::unique_ptr<ObjFunction> objfn{ObjFunction::Create(objective, &ctx_)};
+
+    learner->SetParam("objective", objective);
+    if (objective.find("multi") != std::string::npos) {
+      learner->SetParam("num_class", "3");
+      objfn->Configure(Args{{"num_class", "3"}});
+    } else if (objective.find("quantile") != std::string::npos) {
+      learner->SetParam("quantile_alpha", "0.5");
+      objfn->Configure(Args{{"quantile_alpha", "0.5"}});
+    } else {
+      objfn->Configure(Args{});
+    }
+    learner->Configure();
+    learner->UpdateOneIter(0, Xy);
+    learner->EvalOneIter(0, {Xy}, {"train"});
+    Json config{Object{}};
+    learner->SaveConfig(&config);
+    auto jobj = get<Object const>(config["learner"]["objective"]);
+
+    ASSERT_TRUE(jobj.find("name") != jobj.cend());
+    // FIXME(jiamingy): We should have the following check, but some legacy parameters like
+    // "pos_weight", "delta_step" in objectives are not in metrics.
+
+    // if (jobj.size() > 1) {
+    //   ASSERT_FALSE(IsA<Null>(objfn->DefaultMetricConfig()));
+    // }
+    auto mconfig = objfn->DefaultMetricConfig();
+    if (!IsA<Null>(mconfig)) {
+      // make sure metric can handle it
+      std::unique_ptr<Metric> metricfn{Metric::Create(get<String const>(mconfig["name"]), &ctx_)};
+      metricfn->LoadConfig(mconfig);
+      Json loaded(Object{});
+      metricfn->SaveConfig(&loaded);
+      metricfn->Configure(Args{});
+      ASSERT_EQ(mconfig, loaded);
+    }
+  }
+};
+
+TEST_P(TestDefaultObjConfig, Objective) {
+  std::string objective = GetParam();
+  this->Run(objective);
+}
+
+INSTANTIATE_TEST_SUITE_P(Objective, TestDefaultObjConfig,
+                         ::testing::ValuesIn(MakeObjNamesForTest()),
+                         [](const ::testing::TestParamInfo<TestDefaultObjConfig::ParamType>& info) {
+                           return ObjTestNameGenerator(info);
+                         });
 }  // namespace xgboost
diff --git a/tests/cpp/objective_helpers.cc b/tests/cpp/objective_helpers.cc
new file mode 100644
index 000000000000..ed80f71d512f
--- /dev/null
+++ b/tests/cpp/objective_helpers.cc
@@ -0,0 +1,31 @@
+/**
+ * Copyright (c) 2023, XGBoost contributors
+ */
+#include "objective_helpers.h"
+
+#include "../../src/common/linalg_op.h"  // for begin, end
+#include "helpers.h"                     // for RandomDataGenerator
+
+namespace xgboost {
+std::shared_ptr<DMatrix> MakeFmatForObjTest(std::string const& obj) {
+  auto constexpr kRows = 10, kCols = 10;
+  auto p_fmat = RandomDataGenerator{kRows, kCols, 0}.GenerateDMatrix(true);
+  auto& h_upper = p_fmat->Info().labels_upper_bound_.HostVector();
+  auto& h_lower = p_fmat->Info().labels_lower_bound_.HostVector();
+  h_lower.resize(kRows);
+  h_upper.resize(kRows);
+  for (size_t i = 0; i < kRows; ++i) {
+    h_lower[i] = 1;
+    h_upper[i] = 10;
+  }
+  if (obj.find("rank:") != std::string::npos) {
+    auto h_label = p_fmat->Info().labels.HostView();
+    std::size_t k = 0;
+    for (auto& v : h_label) {
+      v = k % 2 == 0;
+      ++k;
+    }
+  }
+  return p_fmat;
+};
+}  // namespace xgboost
diff --git a/tests/cpp/objective_helpers.h b/tests/cpp/objective_helpers.h
index b26470746a05..7f394ef8d523 100644
--- a/tests/cpp/objective_helpers.h
+++ b/tests/cpp/objective_helpers.h
@@ -1,6 +1,8 @@
 /**
  * Copyright (c) 2023, XGBoost contributors
  */
+#pragma once
+
 #include <dmlc/registry.h>  // for Registry
 #include <gtest/gtest.h>
 #include <xgboost/objective.h>  // for ObjFunctionReg
@@ -29,4 +31,6 @@ inline std::string ObjTestNameGenerator(const ::testing::TestParamInfo<std::string>& info) {
+
+std::shared_ptr<DMatrix> MakeFmatForObjTest(std::string const& obj);
 }  // namespace xgboost
diff --git a/tests/cpp/test_learner.cc b/tests/cpp/test_learner.cc
index 48fd2d8e96c0..d4fe82dc5cab 100644
--- a/tests/cpp/test_learner.cc
+++ b/tests/cpp/test_learner.cc
@@ -655,33 +655,11 @@ TEST_F(InitBaseScore, InitWithPredict) { this->TestInitWithPredt(); }
 TEST_F(InitBaseScore, UpdateProcess) { this->TestUpdateProcess(); }

 class TestColumnSplit : public ::testing::TestWithParam<std::string> {
-  static auto MakeFmat(std::string const& obj) {
-    auto constexpr kRows = 10, kCols = 10;
-    auto p_fmat = RandomDataGenerator{kRows, kCols, 0}.GenerateDMatrix(true);
-    auto& h_upper = p_fmat->Info().labels_upper_bound_.HostVector();
-    auto& h_lower = p_fmat->Info().labels_lower_bound_.HostVector();
-    h_lower.resize(kRows);
-    h_upper.resize(kRows);
-    for (size_t i = 0; i < kRows; ++i) {
-      h_lower[i] = 1;
-      h_upper[i] = 10;
-    }
-    if (obj.find("rank:") != std::string::npos) {
-      auto h_label = p_fmat->Info().labels.HostView();
-      std::size_t k = 0;
-      for (auto& v : h_label) {
-        v = k % 2 == 0;
-        ++k;
-      }
-    }
-    return p_fmat;
-  };
-
   void TestBaseScore(std::string objective, float expected_base_score, Json expected_model) {
     auto const world_size = collective::GetWorldSize();
     auto const rank = collective::GetRank();
-    auto p_fmat = MakeFmat(objective);
+    auto p_fmat = MakeFmatForObjTest(objective);
     std::shared_ptr<DMatrix> sliced{p_fmat->SliceCol(world_size, rank)};
     std::unique_ptr<Learner> learner{Learner::Create({sliced})};
     learner->SetParam("tree_method", "approx");
@@ -705,7 +683,7 @@ class TestColumnSplit : public ::testing::TestWithParam<std::string> {

  public:
   void Run(std::string objective) {
-    auto p_fmat = MakeFmat(objective);
+    auto p_fmat = MakeFmatForObjTest(objective);
     std::unique_ptr<Learner> learner{Learner::Create({p_fmat})};
     learner->SetParam("tree_method", "approx");
     learner->SetParam("objective", objective);
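
Note (not part of the patch): a minimal sketch of the round-trip the new test exercises, using only the APIs visible in the diff above (ObjFunction::Create, DefaultMetricConfig, Metric::Create, LoadConfig/SaveConfig). The helper name DefaultMetricConfigRoundTrips and the surrounding driver code are illustrative assumptions, not XGBoost code.

#include <memory>                 // for unique_ptr
#include <string>                 // for string
#include <xgboost/context.h>      // for Context
#include <xgboost/json.h>         // for Json, Object, String, Null, IsA, get
#include <xgboost/metric.h>       // for Metric
#include <xgboost/objective.h>    // for ObjFunction

namespace xgboost {
// Illustrative helper (hypothetical, not in the patch): an objective such as
// reg:pseudohubererror now exposes its parameters through DefaultMetricConfig(),
// and the default metric is expected to accept that config and reproduce it on
// SaveConfig().
inline bool DefaultMetricConfigRoundTrips(std::string const& objective, Context const* ctx) {
  std::unique_ptr<ObjFunction> obj{ObjFunction::Create(objective, ctx)};
  obj->Configure({});  // initialize parameters with defaults
  Json config = obj->DefaultMetricConfig();
  if (IsA<Null>(config)) {
    return true;  // objective supplies no extra metric configuration
  }
  std::unique_ptr<Metric> metric{Metric::Create(get<String const>(config["name"]), ctx)};
  metric->LoadConfig(config);  // metric picks up e.g. pseudo_huber_param from the objective
  Json saved{Object{}};
  metric->SaveConfig(&saved);
  return config == saved;  // lossless round-trip, mirroring ASSERT_EQ(mconfig, loaded) in the test
}
}  // namespace xgboost

With the learner.cc change above, the same config is only forwarded to the default metric when it is non-null, so objectives that do not override DefaultMetricConfig are unaffected.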