From 782b0579941b6c4b5683ca28afa5a7ba7a4e347d Mon Sep 17 00:00:00 2001
From: xadupre
Date: Thu, 3 Oct 2024 10:44:27 +0200
Subject: [PATCH] fix api

Signed-off-by: xadupre
---
 tests/xgboost/test_xgboost_converters.py | 25 ++++++++++++++++-------
 1 file changed, 18 insertions(+), 7 deletions(-)

diff --git a/tests/xgboost/test_xgboost_converters.py b/tests/xgboost/test_xgboost_converters.py
index c364257a..a7ba33c5 100644
--- a/tests/xgboost/test_xgboost_converters.py
+++ b/tests/xgboost/test_xgboost_converters.py
@@ -360,8 +360,13 @@ def test_xgboost_classifier_i5450_softmax(self):
         iris = load_iris()
         X, y = iris.data, iris.target
         X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=10)
-        clr = XGBClassifier(objective="multi:softmax", max_depth=1, n_estimators=2)
-        clr.fit(X_train, y_train, eval_set=[(X_test, y_test)], early_stopping_rounds=40)
+        clr = XGBClassifier(
+            objective="multi:softmax",
+            max_depth=1,
+            n_estimators=2,
+            early_stopping_rounds=40,
+        )
+        clr.fit(X_train, y_train, eval_set=[(X_test, y_test)])
         initial_type = [("float_input", FloatTensorType([None, 4]))]
         onx = convert_xgboost(
             clr, initial_types=initial_type, target_opset=TARGET_OPSET
@@ -386,8 +391,13 @@ def test_xgboost_classifier_i5450(self):
         iris = load_iris()
         X, y = iris.data, iris.target
         X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=10)
-        clr = XGBClassifier(objective="multi:softprob", max_depth=1, n_estimators=2)
-        clr.fit(X_train, y_train, eval_set=[(X_test, y_test)], early_stopping_rounds=40)
+        clr = XGBClassifier(
+            objective="multi:softprob",
+            max_depth=1,
+            n_estimators=2,
+            early_stopping_rounds=40,
+        )
+        clr.fit(X_train, y_train, eval_set=[(X_test, y_test)])
         initial_type = [("float_input", FloatTensorType([None, 4]))]
         onx = convert_xgboost(
             clr, initial_types=initial_type, target_opset=TARGET_OPSET
@@ -725,9 +735,10 @@ def test_xgb_classifier_13(self):
             colsample_bytree=0.75,
             random_state=42,
             verbosity=0,
+            early_stopping_rounds=40,
         )
 
-        clr.fit(X_train, y_train, eval_set=[(X_test, y_test)], early_stopping_rounds=40)
+        clr.fit(X_train, y_train, eval_set=[(X_test, y_test)])
 
         initial_type = [("float_input", FloatTensorType([None, 797]))]
         onx = convert_xgboost(
@@ -750,20 +761,20 @@ def test_xgb_classifier_13_2(self):
             X.values.astype(np.float32), y.values.astype(np.float32), random_state=2022
         )
 
+        eval_metric = ["logloss", "auc", "error"]
         model_param = {
             "objective": "binary:logistic",
             "n_estimators": 1000,
             "early_stopping_rounds": 113,
             "random_state": 42,
             "max_depth": 3,
+            "eval_metric": eval_metric,
         }
-        eval_metric = ["logloss", "auc", "error"]
         model = XGBClassifier(**model_param)
         model.fit(
             X=x_train,
             y=y_train,
             eval_set=[(x_test, y_test)],
-            eval_metric=eval_metric,
             verbose=False,
         )
 