diff --git a/.azure-pipelines/linux-conda-CI.yml b/.azure-pipelines/linux-conda-CI.yml
index eff402ed..b45760ba 100644
--- a/.azure-pipelines/linux-conda-CI.yml
+++ b/.azure-pipelines/linux-conda-CI.yml
@@ -25,13 +25,13 @@ jobs:
         numpy.version: ''
         scipy.version: ''

-      Python311-1150-RT1160-xgb175-lgbm40:
+      Python311-1150-RT1163-xgb175-lgbm40:
         python.version: '3.11'
         ONNX_PATH: 'onnx==1.15.0'
-        ONNXRT_PATH: 'onnxruntime==1.16.2'
+        ONNXRT_PATH: 'onnxruntime==1.16.3'
         COREML_PATH: NONE
         lightgbm.version: '>=4.0'
-        xgboost.version: '==1.7.5'
+        xgboost.version: '>=1.7.5,<2'
         numpy.version: ''
         scipy.version: ''

@@ -41,7 +41,7 @@ jobs:
         ONNXRT_PATH: 'onnxruntime==1.16.2'
         COREML_PATH: NONE
         lightgbm.version: '>=4.0'
-        xgboost.version: '==1.7.5'
+        xgboost.version: '>=1.7.5,<2'
         numpy.version: ''
         scipy.version: ''

@@ -51,7 +51,7 @@ jobs:
         ONNXRT_PATH: 'onnxruntime==1.15.1'
         COREML_PATH: NONE
         lightgbm.version: '<4.0'
-        xgboost.version: '==1.7.5'
+        xgboost.version: '>=1.7.5,<2'
         numpy.version: ''
         scipy.version: ''

@@ -61,7 +61,7 @@ jobs:
         ONNXRT_PATH: 'onnxruntime==1.14.0'
         COREML_PATH: NONE
         lightgbm.version: '<4.0'
-        xgboost.version: '==1.7.5'
+        xgboost.version: '>=1.7.5,<2'
         numpy.version: ''
         scipy.version: ''

@@ -71,7 +71,7 @@ jobs:
         ONNXRT_PATH: 'onnxruntime==1.15.1'
         COREML_PATH: NONE
         lightgbm.version: '>=4.0'
-        xgboost.version: '==1.7.5'
+        xgboost.version: '>=1.7.5,<2'
         numpy.version: ''
         scipy.version: '==1.8.0'

diff --git a/.azure-pipelines/win32-conda-CI.yml b/.azure-pipelines/win32-conda-CI.yml
index 7ccc5f03..f3a42ba5 100644
--- a/.azure-pipelines/win32-conda-CI.yml
+++ b/.azure-pipelines/win32-conda-CI.yml
@@ -15,6 +15,14 @@ jobs:
   strategy:
     matrix:
+      Python311-1150-RT1163:
+        python.version: '3.11'
+        ONNX_PATH: 'onnx==1.15.0'
+        ONNXRT_PATH: 'onnxruntime==1.16.3'
+        COREML_PATH: NONE
+        numpy.version: ''
+        xgboost.version: '2.0.2'
+
       Python311-1150-RT1162:
         python.version: '3.11'
         ONNX_PATH: 'onnx==1.15.0'
diff --git a/CHANGELOGS.md b/CHANGELOGS.md
index 514e7508..1fe94578 100644
--- a/CHANGELOGS.md
+++ b/CHANGELOGS.md
@@ -2,6 +2,8 @@

 ## 1.12.0

+* Fix early stopping for XGBClassifier and xgboost > 2
+  [#597](https://github.com/onnx/onnxmltools/pull/597)
 * Fix discrepancies with XGBRegressor and xgboost > 2
   [#670](https://github.com/onnx/onnxmltools/pull/670)
 * Support count:poisson for XGBRegressor
diff --git a/onnxmltools/convert/xgboost/common.py b/onnxmltools/convert/xgboost/common.py
index 8f267f58..3a79aaad 100644
--- a/onnxmltools/convert/xgboost/common.py
+++ b/onnxmltools/convert/xgboost/common.py
@@ -32,6 +32,14 @@ def get_xgb_params(xgb_node):
         bs = float(config["learner"]["learner_model_param"]["base_score"])
         # xgboost >= 2.0
         params["base_score"] = bs
+
+    bst = xgb_node.get_booster()
+    if hasattr(bst, "best_ntree_limit"):
+        params["best_ntree_limit"] = bst.best_ntree_limit
+    if "gradient_booster" in config["learner"]:
+        gbp = config["learner"]["gradient_booster"]["gbtree_model_param"]
+        if "num_trees" in gbp:
+            params["best_ntree_limit"] = int(gbp["num_trees"])
     return params
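Note (editor, not part of the patch): the `get_xgb_params` change above pulls a tree-count limit from two places: the legacy `best_ntree_limit` attribute that xgboost < 2 attached to the booster, and the `num_trees` entry of the saved booster config, which is what xgboost >= 2 still records after early stopping halts training. A minimal, hedged sketch of where those values live on a fitted model; the dataset and parameters are illustrative only, and the constructor-level `early_stopping_rounds` assumes xgboost >= 1.6:

```python
import json

from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from xgboost import XGBClassifier

X, y = load_breast_cancer(return_X_y=True)
X_train, X_valid, y_train, y_valid = train_test_split(X, y, random_state=0)

# Train with early stopping so fewer than n_estimators trees are built.
clf = XGBClassifier(n_estimators=300, max_depth=3, early_stopping_rounds=10)
clf.fit(X_train, y_train, eval_set=[(X_valid, y_valid)], verbose=False)

booster = clf.get_booster()
config = json.loads(booster.save_config())

# xgboost < 2 exposed the limit directly on the booster; xgboost >= 2 removed it.
legacy_limit = getattr(booster, "best_ntree_limit", None)
# The saved config still records how many trees the booster actually contains.
num_trees = int(
    config["learner"]["gradient_booster"]["gbtree_model_param"]["num_trees"]
)
print(legacy_limit, num_trees, clf.best_iteration)
```

The converter below prefers the explicit attribute when it exists and otherwise falls back to the config value, so models trained with either major version are converted consistently.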
diff --git a/onnxmltools/convert/xgboost/operator_converters/XGBoost.py b/onnxmltools/convert/xgboost/operator_converters/XGBoost.py
index a9f31211..904630b1 100644
--- a/onnxmltools/convert/xgboost/operator_converters/XGBoost.py
+++ b/onnxmltools/convert/xgboost/operator_converters/XGBoost.py
@@ -40,6 +40,12 @@ def common_members(xgb_node, inputs):
         params = XGBConverter.get_xgb_params(xgb_node)
         objective = params["objective"]
         base_score = params["base_score"]
+        if hasattr(xgb_node, "best_ntree_limit"):
+            best_ntree_limit = xgb_node.best_ntree_limit
+        elif hasattr(xgb_node, "best_iteration"):
+            best_ntree_limit = xgb_node.best_iteration + 1
+        else:
+            best_ntree_limit = params.get("best_ntree_limit", None)
         if base_score is None:
             base_score = 0.5
         booster = xgb_node.get_booster()
@@ -47,7 +53,7 @@
         # XGBoost 0.7 was the first version released with it.
         js_tree_list = booster.get_dump(with_stats=True, dump_format="json")
         js_trees = [json.loads(s) for s in js_tree_list]
-        return objective, base_score, js_trees
+        return objective, base_score, js_trees, best_ntree_limit

     @staticmethod
     def _get_default_tree_attribute_pairs(is_classifier):
@@ -231,7 +237,9 @@ def _get_default_tree_attribute_pairs():
     def convert(scope, operator, container):
         xgb_node = operator.raw_operator
         inputs = operator.inputs
-        objective, base_score, js_trees = XGBConverter.common_members(xgb_node, inputs)
+        objective, base_score, js_trees, best_ntree_limit = XGBConverter.common_members(
+            xgb_node, inputs
+        )

         if objective in ["reg:gamma", "reg:tweedie"]:
             raise RuntimeError("Objective '{}' not supported.".format(objective))
@@ -239,9 +247,7 @@
         attr_pairs = XGBRegressorConverter._get_default_tree_attribute_pairs()
         attr_pairs["base_values"] = [base_score]

-        bst = xgb_node.get_booster()
-        best_ntree_limit = getattr(bst, "best_ntree_limit", len(js_trees))
-        if best_ntree_limit < len(js_trees):
+        if best_ntree_limit and best_ntree_limit < len(js_trees):
             js_trees = js_trees[:best_ntree_limit]

         XGBConverter.fill_tree_attributes(
@@ -289,7 +295,9 @@ def convert(scope, operator, container):
         xgb_node = operator.raw_operator
         inputs = operator.inputs

-        objective, base_score, js_trees = XGBConverter.common_members(xgb_node, inputs)
+        objective, base_score, js_trees, best_ntree_limit = XGBConverter.common_members(
+            xgb_node, inputs
+        )

         params = XGBConverter.get_xgb_params(xgb_node)
         n_estimators = get_n_estimators_classifier(xgb_node, params, js_trees)
@@ -305,8 +313,9 @@
         else:
             ncl = (max(attr_pairs["class_treeids"]) + 1) // n_estimators

-        bst = xgb_node.get_booster()
-        best_ntree_limit = getattr(bst, "best_ntree_limit", len(js_trees)) * ncl
+        best_ntree_limit = best_ntree_limit or len(js_trees)
+        if ncl > 0:
+            best_ntree_limit *= ncl
         if 0 < best_ntree_limit < len(js_trees):
             js_trees = js_trees[:best_ntree_limit]
             attr_pairs = XGBClassifierConverter._get_default_tree_attribute_pairs()
diff --git a/tests/xgboost/data_bug.csv b/tests/xgboost/data_bug.csv
new file mode 100644
index 00000000..d34f9b4f
--- /dev/null
+++ b/tests/xgboost/data_bug.csv
@@ -0,0 +1,401 @@
+x0,x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,y
+39.0,7.0,13.0,4.0,1.0,0.0,4.0,1.0,2174.0,0.0,40.0,39.0,0
+50.0,6.0,13.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,13.0,39.0,0
+38.0,4.0,9.0,0.0,6.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0
+53.0,4.0,7.0,2.0,6.0,4.0,2.0,1.0,0.0,0.0,40.0,39.0,0
+28.0,4.0,13.0,2.0,10.0,5.0,2.0,0.0,0.0,0.0,40.0,5.0,0
+37.0,4.0,14.0,2.0,4.0,5.0,4.0,0.0,0.0,0.0,40.0,39.0,0
+49.0,4.0,5.0,3.0,8.0,0.0,2.0,0.0,0.0,0.0,16.0,23.0,0
+52.0,6.0,9.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,45.0,39.0,1
+31.0,4.0,14.0,4.0,10.0,0.0,4.0,0.0,14084.0,0.0,50.0,39.0,1
+42.0,4.0,13.0,2.0,4.0,4.0,4.0,1.0,5178.0,0.0,40.0,39.0,1
+37.0,4.0,10.0,2.0,4.0,4.0,2.0,1.0,0.0,0.0,80.0,39.0,1
+30.0,7.0,13.0,2.0,10.0,4.0,1.0,1.0,0.0,0.0,40.0,19.0,1
+23.0,4.0,13.0,4.0,1.0,3.0,4.0,0.0,0.0,0.0,30.0,39.0,0
+32.0,4.0,12.0,4.0,12.0,0.0,2.0,1.0,0.0,0.0,50.0,39.0,0
+40.0,4.0,11.0,2.0,3.0,4.0,1.0,1.0,0.0,0.0,40.0,0.0,1
+34.0,4.0,4.0,2.0,14.0,4.0,0.0,1.0,0.0,0.0,45.0,26.0,0 +25.0,6.0,9.0,4.0,5.0,3.0,4.0,1.0,0.0,0.0,35.0,39.0,0 +32.0,4.0,9.0,4.0,7.0,1.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +38.0,4.0,7.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +43.0,6.0,14.0,0.0,4.0,1.0,4.0,0.0,0.0,0.0,45.0,39.0,1 +40.0,4.0,16.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,60.0,39.0,1 +54.0,4.0,9.0,5.0,8.0,1.0,2.0,0.0,0.0,0.0,20.0,39.0,0 +35.0,1.0,5.0,2.0,5.0,4.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +43.0,4.0,7.0,2.0,14.0,4.0,4.0,1.0,0.0,2042.0,40.0,39.0,0 +59.0,4.0,9.0,0.0,13.0,1.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +56.0,2.0,13.0,2.0,13.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +19.0,4.0,9.0,4.0,3.0,3.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +54.0,0.0,10.0,2.0,0.0,4.0,1.0,1.0,0.0,0.0,60.0,35.0,1 +39.0,4.0,9.0,0.0,4.0,0.0,4.0,1.0,0.0,0.0,80.0,39.0,0 +49.0,4.0,9.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +23.0,2.0,12.0,4.0,11.0,0.0,4.0,1.0,0.0,0.0,52.0,39.0,0 +20.0,4.0,10.0,4.0,12.0,3.0,2.0,1.0,0.0,0.0,44.0,39.0,0 +45.0,4.0,13.0,0.0,4.0,3.0,4.0,1.0,0.0,1408.0,40.0,39.0,0 +30.0,1.0,10.0,2.0,1.0,3.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +22.0,7.0,10.0,2.0,8.0,4.0,2.0,1.0,0.0,0.0,15.0,39.0,0 +48.0,4.0,7.0,4.0,7.0,1.0,4.0,1.0,0.0,0.0,40.0,33.0,0 +21.0,4.0,10.0,4.0,7.0,3.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +19.0,4.0,9.0,1.0,1.0,5.0,4.0,0.0,0.0,0.0,25.0,39.0,0 +31.0,4.0,10.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,38.0,0.0,1 +48.0,6.0,12.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +31.0,4.0,5.0,2.0,7.0,4.0,4.0,1.0,0.0,0.0,43.0,39.0,0 +53.0,6.0,13.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +24.0,4.0,13.0,2.0,13.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +49.0,4.0,9.0,5.0,1.0,1.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +25.0,4.0,9.0,4.0,6.0,0.0,4.0,1.0,0.0,0.0,35.0,39.0,0 +57.0,1.0,13.0,2.0,10.0,4.0,2.0,1.0,0.0,0.0,40.0,39.0,1 +53.0,4.0,9.0,2.0,7.0,4.0,4.0,1.0,0.0,0.0,38.0,39.0,0 +44.0,4.0,14.0,0.0,4.0,1.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +41.0,7.0,11.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +29.0,4.0,11.0,4.0,10.0,0.0,4.0,1.0,0.0,0.0,43.0,39.0,0 +25.0,4.0,10.0,2.0,4.0,5.0,3.0,0.0,0.0,0.0,40.0,39.0,0 +18.0,4.0,9.0,4.0,8.0,3.0,4.0,0.0,0.0,0.0,30.0,0.0,0 +47.0,4.0,15.0,2.0,10.0,5.0,4.0,0.0,0.0,1902.0,60.0,16.0,1 +50.0,1.0,13.0,0.0,4.0,0.0,4.0,1.0,0.0,0.0,55.0,39.0,1 +47.0,5.0,9.0,0.0,4.0,0.0,4.0,1.0,0.0,0.0,60.0,39.0,0 +43.0,4.0,10.0,2.0,13.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +46.0,4.0,3.0,2.0,7.0,4.0,4.0,1.0,0.0,0.0,40.0,26.0,0 +35.0,4.0,11.0,2.0,8.0,4.0,4.0,1.0,0.0,0.0,40.0,33.0,0 +41.0,4.0,9.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,48.0,39.0,0 +30.0,4.0,9.0,2.0,7.0,4.0,4.0,1.0,5013.0,0.0,40.0,39.0,0 +30.0,4.0,13.0,2.0,12.0,4.0,4.0,1.0,2407.0,0.0,40.0,39.0,0 +32.0,0.0,4.0,3.0,0.0,0.0,4.0,1.0,0.0,0.0,40.0,0.0,0 +48.0,4.0,9.0,2.0,14.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +42.0,4.0,16.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,45.0,39.0,1 +29.0,4.0,10.0,0.0,13.0,0.0,4.0,1.0,0.0,0.0,58.0,39.0,0 +36.0,4.0,9.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +28.0,4.0,10.0,0.0,1.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +53.0,4.0,9.0,2.0,1.0,5.0,4.0,0.0,0.0,0.0,40.0,39.0,1 +49.0,5.0,10.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,1 +25.0,0.0,10.0,4.0,0.0,3.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +19.0,4.0,10.0,4.0,10.0,3.0,4.0,1.0,0.0,0.0,32.0,39.0,0 +31.0,4.0,13.0,5.0,12.0,3.0,2.0,0.0,0.0,0.0,40.0,39.0,0 +29.0,6.0,13.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,70.0,39.0,1 +23.0,4.0,10.0,4.0,7.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +79.0,4.0,10.0,2.0,10.0,2.0,4.0,1.0,0.0,0.0,20.0,39.0,0 +27.0,4.0,9.0,4.0,8.0,3.0,4.0,1.0,0.0,0.0,40.0,26.0,0 +40.0,4.0,12.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +67.0,0.0,6.0,2.0,0.0,4.0,4.0,1.0,0.0,0.0,2.0,39.0,0 +18.0,4.0,7.0,4.0,8.0,3.0,4.0,0.0,0.0,0.0,22.0,39.0,0 
+31.0,2.0,4.0,2.0,5.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +18.0,4.0,9.0,4.0,12.0,0.0,4.0,1.0,0.0,0.0,30.0,39.0,0 +52.0,4.0,13.0,2.0,8.0,4.0,4.0,1.0,0.0,0.0,40.0,5.0,0 +46.0,4.0,9.0,2.0,8.0,5.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +59.0,4.0,9.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,48.0,39.0,0 +44.0,4.0,9.0,0.0,3.0,0.0,4.0,0.0,14344.0,0.0,40.0,39.0,1 +53.0,4.0,9.0,0.0,12.0,3.0,4.0,0.0,0.0,0.0,35.0,39.0,0 +49.0,2.0,9.0,2.0,11.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +33.0,4.0,14.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +30.0,4.0,5.0,4.0,12.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +43.0,1.0,16.0,4.0,10.0,0.0,4.0,0.0,0.0,0.0,50.0,39.0,1 +57.0,4.0,11.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +37.0,4.0,10.0,0.0,3.0,1.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +28.0,4.0,10.0,0.0,7.0,1.0,2.0,0.0,0.0,0.0,25.0,39.0,0 +30.0,4.0,9.0,2.0,12.0,5.0,1.0,0.0,0.0,1573.0,35.0,0.0,0 +34.0,2.0,13.0,2.0,11.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +29.0,2.0,10.0,4.0,6.0,0.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +48.0,6.0,16.0,2.0,10.0,4.0,4.0,1.0,0.0,1902.0,60.0,39.0,1 +37.0,4.0,10.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,48.0,39.0,1 +48.0,4.0,12.0,0.0,4.0,1.0,4.0,0.0,0.0,0.0,40.0,9.0,0 +32.0,1.0,9.0,4.0,8.0,3.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +76.0,4.0,14.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +44.0,4.0,13.0,2.0,4.0,4.0,4.0,1.0,15024.0,0.0,60.0,39.0,1 +47.0,6.0,14.0,4.0,10.0,0.0,4.0,0.0,0.0,0.0,50.0,39.0,0 +20.0,4.0,10.0,4.0,13.0,3.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +29.0,4.0,9.0,4.0,3.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +32.0,5.0,9.0,2.0,3.0,4.0,4.0,1.0,7688.0,0.0,40.0,39.0,1 +17.0,0.0,6.0,4.0,0.0,3.0,4.0,0.0,34095.0,0.0,32.0,39.0,0 +30.0,4.0,7.0,4.0,6.0,3.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +31.0,2.0,9.0,4.0,1.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +42.0,4.0,9.0,2.0,6.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +24.0,4.0,13.0,4.0,12.0,2.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +38.0,4.0,15.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +56.0,6.0,9.0,2.0,8.0,4.0,4.0,1.0,0.0,1887.0,50.0,2.0,1 +28.0,4.0,10.0,2.0,12.0,5.0,4.0,0.0,4064.0,0.0,25.0,39.0,0 +36.0,4.0,9.0,4.0,7.0,3.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +53.0,4.0,5.0,2.0,6.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +56.0,5.0,10.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +49.0,2.0,11.0,2.0,3.0,4.0,2.0,1.0,0.0,0.0,40.0,39.0,1 +55.0,4.0,10.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,56.0,39.0,0 +22.0,4.0,9.0,4.0,3.0,0.0,4.0,1.0,0.0,0.0,41.0,39.0,0 +21.0,4.0,10.0,4.0,8.0,3.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +40.0,4.0,13.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,60.0,39.0,0 +30.0,4.0,13.0,4.0,4.0,3.0,2.0,1.0,0.0,0.0,40.0,11.0,0 +29.0,7.0,13.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,1 +19.0,4.0,10.0,4.0,8.0,3.0,2.0,1.0,0.0,0.0,35.0,39.0,0 +47.0,4.0,13.0,2.0,4.0,5.0,4.0,0.0,0.0,0.0,40.0,39.0,1 +20.0,4.0,10.0,4.0,1.0,3.0,4.0,0.0,0.0,1719.0,28.0,39.0,0 +31.0,4.0,12.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +35.0,0.0,9.0,2.0,0.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +39.0,4.0,10.0,0.0,3.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +28.0,4.0,12.0,4.0,12.0,0.0,4.0,0.0,0.0,0.0,60.0,39.0,0 +24.0,4.0,9.0,4.0,8.0,0.0,4.0,0.0,0.0,1762.0,40.0,39.0,0 +38.0,6.0,9.0,2.0,3.0,4.0,4.0,1.0,4386.0,0.0,35.0,39.0,0 +37.0,4.0,13.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +46.0,4.0,12.0,0.0,13.0,0.0,2.0,0.0,0.0,0.0,36.0,39.0,0 +38.0,1.0,14.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,40.0,20.0,1 +43.0,6.0,9.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,60.0,39.0,0 +27.0,4.0,11.0,2.0,1.0,5.0,4.0,0.0,0.0,0.0,35.0,39.0,0 +20.0,4.0,10.0,4.0,12.0,3.0,4.0,1.0,0.0,0.0,20.0,39.0,0 +49.0,4.0,10.0,2.0,3.0,4.0,1.0,1.0,0.0,0.0,40.0,39.0,1 +61.0,5.0,9.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +27.0,4.0,10.0,4.0,12.0,3.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +19.0,4.0,10.0,4.0,8.0,3.0,4.0,1.0,0.0,0.0,30.0,26.0,0 
+45.0,4.0,11.0,4.0,10.0,0.0,4.0,0.0,0.0,1564.0,40.0,39.0,1 +70.0,4.0,10.0,4.0,13.0,2.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +31.0,4.0,9.0,4.0,14.0,1.0,2.0,0.0,0.0,0.0,30.0,39.0,0 +22.0,4.0,10.0,2.0,8.0,4.0,4.0,1.0,0.0,0.0,24.0,39.0,0 +36.0,4.0,9.0,6.0,8.0,1.0,4.0,0.0,0.0,0.0,24.0,39.0,0 +64.0,4.0,7.0,2.0,3.0,4.0,4.0,1.0,0.0,2179.0,40.0,39.0,0 +43.0,0.0,10.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +47.0,2.0,10.0,0.0,1.0,1.0,4.0,0.0,0.0,0.0,38.0,26.0,0 +34.0,4.0,9.0,4.0,1.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +33.0,4.0,13.0,4.0,4.0,2.0,1.0,0.0,0.0,0.0,40.0,30.0,0 +21.0,4.0,9.0,4.0,3.0,3.0,4.0,1.0,0.0,0.0,35.0,39.0,0 +52.0,0.0,9.0,0.0,0.0,0.0,4.0,1.0,0.0,0.0,45.0,39.0,1 +48.0,4.0,9.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,46.0,39.0,0 +23.0,4.0,13.0,4.0,6.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +71.0,6.0,10.0,5.0,12.0,1.0,2.0,1.0,0.0,1816.0,2.0,39.0,0 +29.0,4.0,9.0,0.0,3.0,0.0,4.0,1.0,0.0,0.0,60.0,39.0,0 +42.0,4.0,13.0,5.0,8.0,2.0,2.0,1.0,0.0,0.0,50.0,39.0,0 +68.0,0.0,2.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,20.0,39.0,0 +25.0,4.0,9.0,4.0,8.0,1.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +44.0,5.0,14.0,0.0,4.0,1.0,1.0,0.0,0.0,0.0,40.0,39.0,0 +28.0,4.0,9.0,4.0,4.0,0.0,1.0,0.0,0.0,0.0,40.0,9.0,0 +45.0,6.0,14.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +36.0,4.0,10.0,0.0,8.0,1.0,4.0,0.0,0.0,0.0,40.0,26.0,0 +39.0,1.0,12.0,4.0,4.0,0.0,4.0,1.0,0.0,0.0,42.0,39.0,0 +46.0,7.0,14.0,6.0,11.0,1.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +18.0,4.0,7.0,4.0,6.0,3.0,4.0,1.0,0.0,0.0,16.0,39.0,0 +66.0,2.0,11.0,6.0,10.0,0.0,4.0,0.0,0.0,0.0,20.0,39.0,0 +27.0,4.0,9.0,4.0,8.0,0.0,4.0,1.0,0.0,1980.0,40.0,39.0,0 +28.0,7.0,9.0,2.0,11.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +51.0,4.0,10.0,2.0,12.0,4.0,4.0,1.0,0.0,1977.0,40.0,39.0,1 +27.0,4.0,13.0,4.0,3.0,0.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +28.0,7.0,13.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +27.0,4.0,9.0,3.0,12.0,1.0,4.0,0.0,0.0,0.0,25.0,39.0,0 +21.0,4.0,9.0,4.0,3.0,3.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +34.0,4.0,9.0,4.0,6.0,0.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +18.0,4.0,9.0,4.0,8.0,3.0,4.0,1.0,0.0,0.0,12.0,39.0,0 +33.0,4.0,13.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,65.0,39.0,0 +44.0,2.0,10.0,2.0,4.0,4.0,2.0,1.0,0.0,0.0,38.0,39.0,1 +43.0,4.0,10.0,0.0,1.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +30.0,4.0,10.0,4.0,3.0,0.0,2.0,1.0,0.0,0.0,45.0,39.0,0 +40.0,4.0,4.0,2.0,7.0,4.0,4.0,1.0,0.0,0.0,40.0,26.0,1 +37.0,1.0,10.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,42.0,39.0,1 +34.0,4.0,9.0,2.0,1.0,4.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +41.0,6.0,13.0,0.0,4.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +53.0,0.0,13.0,0.0,0.0,0.0,4.0,0.0,0.0,0.0,50.0,39.0,0 +31.0,4.0,14.0,2.0,10.0,5.0,4.0,0.0,0.0,0.0,50.0,39.0,0 +58.0,7.0,16.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,1.0,39.0,1 +38.0,4.0,10.0,0.0,7.0,0.0,2.0,0.0,0.0,0.0,28.0,39.0,0 +24.0,4.0,10.0,2.0,7.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +41.0,2.0,13.0,2.0,3.0,4.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +47.0,4.0,10.0,2.0,13.0,4.0,4.0,1.0,0.0,0.0,45.0,39.0,0 +41.0,1.0,13.0,2.0,13.0,4.0,4.0,1.0,0.0,0.0,24.0,39.0,0 +23.0,4.0,3.0,2.0,14.0,2.0,4.0,1.0,0.0,0.0,40.0,26.0,0 +36.0,4.0,10.0,0.0,3.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +40.0,1.0,14.0,4.0,4.0,0.0,4.0,0.0,14084.0,0.0,55.0,39.0,1 +35.0,4.0,14.0,2.0,10.0,2.0,4.0,1.0,7298.0,0.0,40.0,39.0,1 +24.0,6.0,9.0,4.0,12.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +26.0,4.0,14.0,4.0,10.0,0.0,4.0,0.0,0.0,1876.0,40.0,39.0,0 +19.0,0.0,9.0,4.0,0.0,3.0,4.0,1.0,0.0,0.0,40.0,22.0,0 +51.0,4.0,13.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,1 +42.0,2.0,10.0,4.0,10.0,0.0,4.0,0.0,0.0,1340.0,40.0,39.0,0 +37.0,7.0,9.0,0.0,1.0,1.0,4.0,0.0,0.0,0.0,35.0,39.0,0 +18.0,4.0,7.0,4.0,12.0,3.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +36.0,4.0,13.0,2.0,8.0,4.0,2.0,1.0,7298.0,0.0,36.0,39.0,1 
+35.0,4.0,9.0,0.0,3.0,0.0,4.0,1.0,0.0,0.0,60.0,39.0,0 +58.0,5.0,9.0,2.0,12.0,5.0,4.0,0.0,15024.0,0.0,35.0,39.0,1 +17.0,4.0,7.0,4.0,12.0,3.0,4.0,0.0,0.0,0.0,12.0,39.0,0 +44.0,2.0,9.0,2.0,14.0,4.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +37.0,4.0,9.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +35.0,4.0,10.0,4.0,12.0,0.0,0.0,0.0,0.0,0.0,40.0,39.0,0 +60.0,4.0,9.0,2.0,3.0,4.0,1.0,1.0,0.0,0.0,40.0,39.0,0 +54.0,5.0,4.0,2.0,7.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +37.0,4.0,13.0,4.0,4.0,0.0,2.0,1.0,0.0,0.0,60.0,39.0,1 +50.0,4.0,12.0,0.0,12.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +38.0,4.0,13.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,50.0,31.0,0 +45.0,4.0,13.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,55.0,39.0,0 +25.0,4.0,6.0,4.0,3.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +31.0,4.0,10.0,2.0,6.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +64.0,0.0,2.0,0.0,0.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +90.0,4.0,9.0,4.0,8.0,0.0,2.0,1.0,0.0,2206.0,40.0,39.0,0 +54.0,4.0,9.0,2.0,1.0,4.0,2.0,1.0,0.0,0.0,20.0,39.0,0 +53.0,2.0,1.0,4.0,7.0,0.0,4.0,0.0,0.0,0.0,35.0,39.0,0 +18.0,4.0,9.0,4.0,12.0,3.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +60.0,0.0,6.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,10.0,39.0,0 +66.0,6.0,9.0,2.0,5.0,4.0,4.0,1.0,1409.0,0.0,50.0,39.0,0 +75.0,4.0,11.0,6.0,1.0,0.0,4.0,0.0,0.0,0.0,20.0,4.0,0 +65.0,4.0,9.0,0.0,1.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +35.0,4.0,7.0,5.0,14.0,0.0,2.0,1.0,3674.0,0.0,40.0,39.0,0 +41.0,4.0,9.0,0.0,12.0,1.0,2.0,0.0,0.0,0.0,38.0,39.0,0 +25.0,4.0,10.0,4.0,1.0,0.0,4.0,1.0,0.0,0.0,42.0,39.0,0 +33.0,4.0,10.0,0.0,3.0,2.0,3.0,0.0,0.0,0.0,40.0,39.0,0 +28.0,4.0,15.0,4.0,10.0,0.0,4.0,1.0,0.0,0.0,55.0,39.0,1 +59.0,7.0,9.0,2.0,8.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +40.0,7.0,10.0,4.0,1.0,0.0,4.0,0.0,0.0,0.0,38.0,39.0,0 +41.0,4.0,13.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,50.0,20.0,1 +38.0,2.0,14.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,70.0,39.0,1 +23.0,4.0,9.0,4.0,14.0,3.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +40.0,4.0,13.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +41.0,6.0,9.0,2.0,8.0,5.0,4.0,0.0,0.0,0.0,20.0,39.0,0 +24.0,7.0,12.0,2.0,11.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +20.0,0.0,10.0,4.0,0.0,3.0,4.0,1.0,0.0,0.0,20.0,39.0,0 +38.0,4.0,10.0,0.0,3.0,0.0,4.0,1.0,0.0,1741.0,40.0,39.0,0 +56.0,4.0,9.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,35.0,0.0,0 +58.0,4.0,9.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,45.0,39.0,1 +32.0,4.0,9.0,4.0,8.0,0.0,4.0,0.0,0.0,0.0,34.0,39.0,0 +40.0,4.0,12.0,2.0,13.0,4.0,4.0,1.0,0.0,1977.0,60.0,39.0,1 +45.0,4.0,9.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,40.0,0.0,0 +41.0,4.0,15.0,2.0,10.0,5.0,4.0,0.0,0.0,0.0,40.0,39.0,1 +42.0,4.0,13.0,2.0,1.0,5.0,2.0,0.0,0.0,0.0,40.0,39.0,0 +59.0,2.0,6.0,6.0,8.0,1.0,2.0,0.0,0.0,0.0,30.0,39.0,0 +19.0,2.0,10.0,4.0,1.0,3.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +58.0,4.0,10.0,4.0,14.0,0.0,4.0,1.0,0.0,0.0,20.0,39.0,0 +42.0,6.0,9.0,2.0,5.0,4.0,1.0,1.0,0.0,0.0,40.0,1.0,1 +20.0,4.0,9.0,4.0,8.0,3.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +32.0,4.0,9.0,5.0,8.0,0.0,4.0,0.0,0.0,0.0,30.0,39.0,0 +45.0,4.0,11.0,6.0,4.0,0.0,4.0,0.0,0.0,0.0,45.0,39.0,0 +50.0,4.0,4.0,0.0,3.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +36.0,4.0,13.0,0.0,10.0,3.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +45.0,4.0,9.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,60.0,39.0,0 +17.0,4.0,7.0,4.0,8.0,3.0,4.0,1.0,0.0,0.0,12.0,39.0,0 +59.0,4.0,10.0,2.0,12.0,4.0,4.0,1.0,4064.0,0.0,40.0,39.0,0 +26.0,4.0,7.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +37.0,5.0,10.0,2.0,4.0,4.0,1.0,1.0,0.0,0.0,75.0,37.0,1 +19.0,0.0,10.0,4.0,0.0,3.0,4.0,1.0,0.0,0.0,24.0,2.0,0 +64.0,4.0,9.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +33.0,4.0,13.0,4.0,10.0,0.0,4.0,0.0,0.0,0.0,45.0,39.0,0 +33.0,4.0,9.0,2.0,3.0,5.0,4.0,0.0,0.0,0.0,40.0,39.0,1 +61.0,4.0,9.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 
+17.0,4.0,5.0,4.0,8.0,3.0,4.0,1.0,1055.0,0.0,24.0,39.0,0 +50.0,6.0,14.0,2.0,5.0,4.0,4.0,1.0,2407.0,0.0,98.0,39.0,0 +27.0,2.0,14.0,4.0,10.0,3.0,4.0,1.0,0.0,0.0,35.0,39.0,0 +30.0,4.0,9.0,0.0,10.0,3.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +43.0,4.0,9.0,2.0,6.0,4.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +44.0,4.0,10.0,2.0,3.0,4.0,4.0,1.0,7298.0,0.0,40.0,39.0,1 +35.0,4.0,10.0,4.0,4.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +25.0,4.0,10.0,4.0,13.0,3.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +24.0,4.0,10.0,2.0,7.0,5.0,4.0,0.0,7298.0,0.0,48.0,39.0,1 +22.0,4.0,13.0,4.0,10.0,0.0,4.0,0.0,0.0,0.0,15.0,11.0,0 +42.0,2.0,10.0,2.0,3.0,4.0,4.0,1.0,5178.0,0.0,40.0,39.0,1 +34.0,4.0,12.0,0.0,12.0,1.0,2.0,0.0,0.0,0.0,45.0,39.0,0 +60.0,4.0,13.0,0.0,10.0,0.0,4.0,0.0,0.0,0.0,42.0,39.0,0 +21.0,4.0,9.0,4.0,6.0,3.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +57.0,1.0,14.0,2.0,12.0,4.0,4.0,1.0,15024.0,0.0,40.0,39.0,1 +41.0,4.0,15.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,60.0,39.0,1 +50.0,4.0,10.0,0.0,8.0,0.0,4.0,1.0,0.0,0.0,45.0,39.0,0 +25.0,4.0,13.0,4.0,4.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +50.0,4.0,4.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,40.0,7.0,0 +36.0,4.0,13.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,45.0,39.0,0 +31.0,4.0,9.0,0.0,10.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +29.0,2.0,13.0,4.0,11.0,0.0,4.0,1.0,0.0,0.0,56.0,39.0,0 +21.0,4.0,10.0,4.0,12.0,3.0,4.0,1.0,0.0,0.0,45.0,39.0,0 +27.0,4.0,13.0,4.0,13.0,1.0,2.0,0.0,0.0,0.0,40.0,39.0,0 +65.0,4.0,9.0,2.0,14.0,4.0,4.0,1.0,0.0,0.0,16.0,39.0,0 +37.0,5.0,13.0,0.0,12.0,0.0,4.0,0.0,0.0,0.0,60.0,39.0,0 +39.0,0.0,14.0,2.0,0.0,5.0,1.0,0.0,3464.0,0.0,40.0,0.0,0 +24.0,4.0,9.0,4.0,3.0,3.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +38.0,4.0,9.0,0.0,12.0,0.0,4.0,1.0,0.0,0.0,80.0,39.0,0 +48.0,4.0,13.0,2.0,1.0,4.0,4.0,1.0,7688.0,0.0,40.0,39.0,1 +21.0,4.0,10.0,4.0,8.0,0.0,1.0,0.0,0.0,0.0,25.0,39.0,0 +31.0,4.0,9.0,2.0,14.0,4.0,4.0,1.0,0.0,0.0,45.0,39.0,1 +55.0,4.0,9.0,2.0,12.0,4.0,4.0,1.0,4386.0,0.0,40.0,39.0,1 +24.0,4.0,10.0,2.0,1.0,5.0,1.0,0.0,0.0,0.0,40.0,25.0,0 +43.0,4.0,9.0,0.0,1.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +26.0,4.0,12.0,4.0,13.0,0.0,4.0,1.0,0.0,0.0,45.0,39.0,0 +46.0,4.0,12.0,4.0,10.0,0.0,4.0,0.0,0.0,0.0,33.0,39.0,0 +35.0,4.0,12.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +41.0,5.0,10.0,2.0,5.0,4.0,4.0,1.0,0.0,0.0,54.0,39.0,1 +26.0,4.0,13.0,4.0,12.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +34.0,4.0,14.0,2.0,4.0,4.0,1.0,1.0,7298.0,0.0,35.0,36.0,1 +19.0,0.0,10.0,4.0,0.0,3.0,2.0,0.0,0.0,0.0,25.0,39.0,0 +36.0,6.0,13.0,0.0,10.0,0.0,2.0,0.0,0.0,0.0,40.0,39.0,1 +22.0,4.0,10.0,4.0,12.0,3.0,4.0,1.0,0.0,0.0,15.0,39.0,0 +24.0,4.0,10.0,4.0,7.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +77.0,6.0,10.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +22.0,4.0,3.0,2.0,7.0,2.0,4.0,0.0,0.0,0.0,40.0,26.0,0 +29.0,4.0,10.0,4.0,13.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +62.0,4.0,9.0,6.0,8.0,0.0,4.0,0.0,0.0,0.0,24.0,39.0,0 +39.0,6.0,9.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +43.0,4.0,10.0,2.0,3.0,4.0,4.0,1.0,0.0,1485.0,50.0,39.0,0 +35.0,4.0,9.0,0.0,14.0,0.0,4.0,1.0,0.0,0.0,45.0,39.0,0 +29.0,4.0,7.0,4.0,4.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +76.0,6.0,14.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,10.0,39.0,0 +63.0,6.0,9.0,2.0,5.0,4.0,4.0,1.0,0.0,0.0,45.0,39.0,0 +23.0,0.0,11.0,4.0,0.0,3.0,2.0,0.0,0.0,0.0,15.0,39.0,0 +43.0,4.0,10.0,2.0,10.0,5.0,4.0,0.0,0.0,1887.0,50.0,39.0,1 +58.0,6.0,9.0,2.0,6.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +66.0,4.0,9.0,4.0,14.0,0.0,4.0,1.0,2050.0,0.0,55.0,39.0,0 +41.0,4.0,10.0,4.0,3.0,0.0,2.0,1.0,0.0,0.0,45.0,39.0,0 +26.0,4.0,13.0,4.0,1.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +47.0,4.0,11.0,4.0,8.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +55.0,2.0,6.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 
+53.0,4.0,9.0,2.0,14.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +17.0,4.0,3.0,4.0,8.0,2.0,4.0,1.0,0.0,0.0,48.0,26.0,0 +30.0,4.0,9.0,4.0,3.0,2.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +49.0,4.0,14.0,4.0,10.0,0.0,4.0,1.0,0.0,0.0,60.0,39.0,0 +19.0,4.0,9.0,4.0,12.0,1.0,2.0,1.0,0.0,0.0,30.0,14.0,0 +45.0,4.0,9.0,2.0,8.0,4.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +26.0,4.0,12.0,4.0,12.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +38.0,4.0,11.0,2.0,3.0,4.0,4.0,1.0,7298.0,0.0,40.0,39.0,1 +36.0,7.0,9.0,2.0,11.0,4.0,4.0,1.0,7298.0,0.0,40.0,39.0,1 +33.0,4.0,9.0,4.0,5.0,0.0,4.0,1.0,0.0,0.0,20.0,39.0,0 +22.0,7.0,10.0,4.0,11.0,3.0,2.0,0.0,0.0,0.0,40.0,39.0,0 +43.0,6.0,13.0,4.0,12.0,0.0,4.0,1.0,0.0,0.0,42.0,39.0,0 +67.0,0.0,7.0,2.0,0.0,4.0,4.0,1.0,0.0,0.0,8.0,39.0,0 +30.0,0.0,11.0,0.0,0.0,1.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +56.0,4.0,12.0,3.0,8.0,0.0,4.0,1.0,0.0,0.0,25.0,20.0,0 +31.0,4.0,13.0,4.0,10.0,3.0,4.0,1.0,0.0,0.0,25.0,39.0,0 +33.0,4.0,13.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +26.0,4.0,9.0,2.0,8.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +33.0,4.0,13.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +46.0,2.0,7.0,2.0,14.0,4.0,4.0,1.0,0.0,0.0,30.0,39.0,0 +59.0,0.0,13.0,2.0,0.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +38.0,1.0,9.0,2.0,1.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +65.0,4.0,7.0,6.0,8.0,1.0,3.0,1.0,0.0,0.0,40.0,39.0,0 +40.0,4.0,11.0,2.0,13.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +42.0,4.0,9.0,2.0,1.0,5.0,4.0,0.0,0.0,0.0,35.0,39.0,0 +26.0,6.0,5.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,40.0,32.0,0 +36.0,4.0,6.0,2.0,8.0,5.0,4.0,0.0,0.0,0.0,24.0,39.0,0 +62.0,4.0,14.0,2.0,10.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,1 +43.0,4.0,13.0,2.0,14.0,4.0,4.0,1.0,0.0,0.0,45.0,39.0,0 +43.0,4.0,13.0,0.0,4.0,0.0,4.0,1.0,0.0,1564.0,45.0,39.0,1 +22.0,4.0,9.0,3.0,12.0,0.0,4.0,1.0,0.0,0.0,55.0,39.0,0 +28.0,4.0,9.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +56.0,6.0,10.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,45.0,39.0,0 +22.0,4.0,12.0,4.0,12.0,0.0,4.0,0.0,0.0,0.0,15.0,39.0,0 +57.0,4.0,13.0,0.0,4.0,0.0,4.0,0.0,0.0,0.0,45.0,39.0,1 +39.0,4.0,9.0,0.0,12.0,1.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +26.0,1.0,10.0,4.0,1.0,1.0,4.0,0.0,0.0,0.0,15.0,39.0,0 +17.0,4.0,7.0,4.0,8.0,3.0,4.0,1.0,0.0,0.0,10.0,39.0,0 +40.0,7.0,11.0,2.0,7.0,4.0,4.0,1.0,0.0,0.0,38.0,39.0,1 +45.0,4.0,9.0,2.0,7.0,4.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +44.0,4.0,9.0,4.0,3.0,0.0,2.0,1.0,0.0,0.0,40.0,39.0,0 +20.0,2.0,10.0,4.0,1.0,3.0,4.0,0.0,0.0,0.0,10.0,39.0,0 +33.0,4.0,9.0,2.0,3.0,5.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +23.0,4.0,10.0,4.0,12.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +46.0,6.0,14.0,0.0,4.0,0.0,4.0,1.0,0.0,0.0,30.0,39.0,0 +38.0,4.0,9.0,2.0,12.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +54.0,4.0,9.0,0.0,14.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +46.0,4.0,3.0,0.0,3.0,0.0,4.0,0.0,0.0,2339.0,45.0,39.0,0 +25.0,4.0,10.0,2.0,3.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +46.0,4.0,10.0,0.0,12.0,0.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +36.0,2.0,10.0,2.0,14.0,5.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +23.0,4.0,9.0,4.0,8.0,0.0,4.0,0.0,0.0,0.0,25.0,39.0,0 +29.0,4.0,9.0,5.0,7.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +44.0,4.0,10.0,2.0,1.0,5.0,4.0,0.0,0.0,2415.0,6.0,39.0,1 +19.0,4.0,10.0,4.0,1.0,3.0,4.0,0.0,0.0,0.0,16.0,39.0,0 +19.0,4.0,9.0,4.0,8.0,2.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +35.0,4.0,9.0,2.0,8.0,5.0,4.0,0.0,0.0,0.0,40.0,39.0,1 +27.0,4.0,13.0,4.0,3.0,0.0,4.0,1.0,0.0,0.0,50.0,39.0,0 +46.0,6.0,12.0,5.0,3.0,0.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +34.0,7.0,13.0,2.0,4.0,4.0,4.0,1.0,7688.0,0.0,45.0,0.0,1 +34.0,4.0,9.0,4.0,8.0,0.0,4.0,0.0,0.0,0.0,35.0,39.0,0 +44.0,4.0,10.0,2.0,13.0,4.0,4.0,1.0,0.0,0.0,40.0,39.0,0 +45.0,4.0,10.0,0.0,6.0,1.0,4.0,0.0,0.0,0.0,40.0,39.0,0 +20.0,0.0,9.0,4.0,0.0,2.0,4.0,0.0,0.0,0.0,35.0,39.0,0 
+25.0,4.0,13.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,55.0,39.0,1
+52.0,5.0,13.0,2.0,4.0,4.0,4.0,1.0,0.0,0.0,50.0,39.0,1
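Note (editor, not part of the patch): the `data_bug.csv` file above feeds the new tests below, which verify that an early-stopped `XGBClassifier` is converted with only the trees kept by early stopping rather than all requested estimators. A condensed, hedged sketch of that check on synthetic data; the import paths mirror the ones used in the tests, and the early-stopping API assumes xgboost >= 1.6:

```python
import numpy as np
from numpy.testing import assert_almost_equal
from onnxruntime import InferenceSession
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from xgboost import XGBClassifier

from onnxmltools.convert import convert_xgboost
from onnxmltools.convert.common.data_types import FloatTensorType

X, y = make_classification(n_samples=500, n_features=12, random_state=0)
X = X.astype(np.float32)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# 1000 estimators requested, but early stopping ends training much sooner.
model = XGBClassifier(n_estimators=1000, max_depth=3, early_stopping_rounds=20)
model.fit(X_train, y_train, eval_set=[(X_test, y_test)], verbose=False)

onx = convert_xgboost(
    model, initial_types=[("float_input", FloatTensorType([None, X.shape[1]]))]
)

# The tree ensemble node should keep best_iteration + 1 trees, far fewer than 1000.
tree_ids = [a for a in onx.graph.node[0].attribute if a.name == "nodes_treeids"][0]
print(len(set(tree_ids.ints)), "trees kept; best_iteration =", model.best_iteration)

# Probabilities from onnxruntime should track xgboost's own predictions.
sess = InferenceSession(onx.SerializeToString())
got = sess.run(None, {"float_input": X_test})
assert_almost_equal(model.predict_proba(X_test), got[1], decimal=4)
```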
diff --git a/tests/xgboost/test_xgboost_converters.py b/tests/xgboost/test_xgboost_converters.py
index 7a37dd3a..ab33e845 100644
--- a/tests/xgboost/test_xgboost_converters.py
+++ b/tests/xgboost/test_xgboost_converters.py
@@ -288,7 +288,6 @@ def test_xgboost_booster_reg(self):
             n_classes=2, n_features=5, n_samples=100, random_state=42, n_informative=3
         )
         y = y.astype(np.float32) + 0.567
-        print(y)
         x_train, x_test, y_train, _ = train_test_split(
             x, y, test_size=0.5, random_state=42
         )
@@ -676,6 +675,35 @@ def test_xgb_classifier_hinge(self):
             x_test, xgb, conv_model, basename="SklearnXGBClassifierHinge"
         )

+    def test_doc_example(self):
+        iris = load_iris()
+        X, y = iris.data, iris.target
+        X = X.astype(np.float32)
+        X_train, X_test, y_train, y_test = train_test_split(X, y)
+        clr = XGBClassifier()
+        clr.fit(X_train, y_train)
+        expected_prob = clr.predict_proba(X_test)
+
+        initial_type = [("float_input", FloatTensorType([None, 4]))]
+        onx = convert_xgboost(clr, initial_types=initial_type)
+
+        sess = InferenceSession(onx.SerializeToString())
+        input_name = sess.get_inputs()[0].name
+        pred_onx = sess.run(None, {input_name: X_test.astype(np.float32)})
+        assert_almost_equal(expected_prob, pred_onx[1], decimal=5)
+
+        dtrain = DMatrix(X_train, label=y_train)
+        dtest = DMatrix(X_test)
+        param = {"objective": "multi:softmax", "num_class": 3}
+        bst = train_xgb(param, dtrain, 10)
+        expected_prob = bst.predict(dtest, output_margin=True)
+        initial_type = [("float_input", FloatTensorType([None, 4]))]
+        onx = convert_xgboost(bst, initial_types=initial_type)
+        sess = InferenceSession(onx.SerializeToString())
+        input_name = sess.get_inputs()[0].name
+        pred_onx = sess.run(None, {input_name: X_test.astype(np.float32)})
+        assert_almost_equal(expected_prob, pred_onx[1], decimal=5)
+
     def test_xgb_classifier_13(self):
         this = os.path.dirname(__file__)
         df = pandas.read_csv(os.path.join(this, "data_fail_empty.csv"))
@@ -714,6 +742,50 @@ def test_xgb_classifier_13(self):
         assert_almost_equal(expected[1], got[1])
         assert_almost_equal(expected[0], got[0])

+    def test_xgb_classifier_13_2(self):
+        this = os.path.dirname(__file__)
+        df = pandas.read_csv(os.path.join(this, "data_bug.csv"))
+        X, y = df.drop("y", axis=1), df["y"]
+        x_train, x_test, y_train, y_test = train_test_split(
+            X.values.astype(np.float32), y.values.astype(np.float32), random_state=2022
+        )
+
+        model_param = {
+            "objective": "binary:logistic",
+            "n_estimators": 1000,
+            "early_stopping_rounds": 113,
+            "random_state": 42,
+            "max_depth": 3,
+        }
+        eval_metric = ["logloss", "auc", "error"]
+        model = XGBClassifier(**model_param)
+        model.fit(
+            X=x_train,
+            y=y_train,
+            eval_set=[(x_test, y_test)],
+            eval_metric=eval_metric,
+            verbose=False,
+        )
+
+        initial_types = [("float_input", FloatTensorType([None, x_train.shape[1]]))]
+        onnx_model = convert_xgboost(model, initial_types=initial_types)
+        for att in onnx_model.graph.node[0].attribute:
+            if att.name == "nodes_treeids":
+                self.assertLess(max(att.ints), 1000)
+            if att.name == "class_ids":
+                self.assertEqual(set(att.ints), {0})
+            if att.name == "base_values":
+                self.assertEqual(len(att.floats), 1)
+            if att.name == "post_transform":
+                self.assertEqual(att.s, b"LOGISTIC")
+
+        expected = model.predict(x_test), model.predict_proba(x_test)
+        sess = InferenceSession(onnx_model.SerializeToString())
+        got = sess.run(None, {"float_input": x_test})
+        assert_almost_equal(expected[1], got[1])
+        assert_almost_equal(expected[0], got[0])
+

 if __name__ == "__main__":
+    TestXGBoostModels().test_xgb_classifier_13_2()
     unittest.main(verbosity=2)
diff --git a/tests/xgboost/test_xgboost_converters_rf.py b/tests/xgboost/test_xgboost_converters_rf.py
index a0f24200..ca3ed55b 100644
--- a/tests/xgboost/test_xgboost_converters_rf.py
+++ b/tests/xgboost/test_xgboost_converters_rf.py
@@ -46,7 +46,7 @@ def _fit_classification_model(model, n_classes, is_str=False, dtype=None):


 class TestXGBoostRFModels(unittest.TestCase):
-    def test_xgbrf_regressor(self):
+    def test_xgbrf_aregressor(self):
         iris = load_diabetes()
         x = iris.data
         y = iris.target
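Note (editor, not part of the patch): the regressor branch of the converter changed earlier in this diff now also tolerates `best_ntree_limit` being absent (xgboost >= 2). A hedged sketch of the same round-trip on `XGBRegressor`, with illustrative data and parameters; it only prints the deviation rather than asserting a tolerance:

```python
import numpy as np
from onnxruntime import InferenceSession
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from xgboost import XGBRegressor

from onnxmltools.convert import convert_xgboost
from onnxmltools.convert.common.data_types import FloatTensorType

X, y = make_regression(n_samples=500, n_features=10, random_state=0)
X = X.astype(np.float32)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Early stopping leaves fewer trees in the booster than n_estimators.
reg = XGBRegressor(n_estimators=500, max_depth=3, early_stopping_rounds=15)
reg.fit(X_train, y_train, eval_set=[(X_test, y_test)], verbose=False)

onx = convert_xgboost(
    reg, initial_types=[("float_input", FloatTensorType([None, X.shape[1]]))]
)
sess = InferenceSession(onx.SerializeToString())
got = sess.run(None, {"float_input": X_test})[0].ravel()

# The truncated ONNX ensemble should track xgboost's own predictions closely.
print("max absolute difference:", float(np.max(np.abs(reg.predict(X_test) - got))))
```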