Merge branch 'ci18' of https://github.com/xadupre/onnxmltools into xgbempty
sdpython committed Apr 15, 2021
2 parents e5e92cd + 57f0568 commit 75c538e
Showing 9 changed files with 138 additions and 157 deletions.
5 changes: 0 additions & 5 deletions .azure-pipelines/linux-CI-nightly.yml
@@ -51,14 +51,9 @@ jobs:
python -m pip install -r requirements-dev.txt
python -m pip install $(ORT_PATH)
python -m pip install pytest
git clone --recursive https://github.com/cjlin1/libsvm libsvm
cd libsvm
make lib
displayName: 'Install dependencies'
- script: |
export PYTHONPATH=$PYTHONPATH:libsvm/python
python -c "import svmutil"
python -c "import onnxconverter_common"
python -c "import onnxruntime"
pytest tests --ignore=tests/sparkml --doctest-modules --junitxml=junit/test-results.xml
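The deleted lines built libsvm from source and put libsvm/python on PYTHONPATH before smoke-testing the imports by hand. A minimal sketch of the equivalent check against the pip-installed libsvm package the rest of this commit switches to (import paths taken from the other files in this commit):

# Smoke test, assuming `pip install libsvm` instead of a source build on PYTHONPATH.
from libsvm import svm, svmutil  # packaged modules replace the top-level svm/svmutil
import onnxconverter_common
import onnxruntime

print("libsvm, onnxconverter_common and onnxruntime import cleanly")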
9 changes: 4 additions & 5 deletions .azure-pipelines/linux-conda-CI.yml
@@ -90,18 +90,17 @@ jobs:
pip install xgboost$(xgboost.version)
pip install $(ONNXRT_PATH)
pip install pytest
git clone --recursive https://github.com/cjlin1/libsvm libsvm
cd libsvm
make lib
displayName: 'Install dependencies'
- script: |
python -m flake8 ./onnxmltools
displayName: 'run flake8 check'
- script: |
export PYTHONPATH=$PYTHONPATH:libsvm/python
python -c "import svmutil"
pip install -e .
displayName: 'local installation'
- script: |
python -c "import onnxconverter_common"
python -c "import onnxruntime"
pytest tests --ignore=tests/sparkml --doctest-modules --junitxml=junit/test-results.xml
139 changes: 66 additions & 73 deletions .azure-pipelines/win32-CI-nightly.yml
@@ -1,73 +1,66 @@
# Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python

trigger:
- master

jobs:

- job: 'Test'
pool:
vmImage: 'vs2017-win2016'
strategy:
matrix:
Python36-nightly:
python.version: '3.6'
ONNX_PATH: onnx==1.7.0
ONNXRT_PATH: -i https://test.pypi.org/simple/ ort-nightly
COREML_PATH: git+https://github.com/apple/coremltools@3.1
Python37-nightly:
python.version: '3.7'
ONNX_PATH: onnx==1.8.0
ONNXRT_PATH: -i https://test.pypi.org/simple/ ort-nightly
COREML_PATH: git+https://github.com/apple/coremltools@3.1
maxParallel: 3

steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '$(python.version)'
architecture: 'x64'

- powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
displayName: Add conda to PATH

- script: conda create --yes --quiet --name py$(python.version) -c conda-forge python=$(python.version) numpy protobuf
displayName: Create Anaconda environment

- script: |
call activate py$(python.version)
python -m pip install --upgrade pip numpy
echo Test numpy installation... && python -c "import numpy"
pip install %COREML_PATH% %ONNX_PATH%
python -m pip install tensorflow-cpu==1.15.0
python -m pip install tf2onnx==1.5.6
python -m pip install git+https://github.com/microsoft/onnxconverter-common
python -m pip install git+https://github.com/onnx/keras-onnx
echo Test onnxconverter-common installation... && python -c "import onnxconverter_common"
pip install -r requirements.txt
pip install -r requirements-dev.txt
pip install %ONNXRT_PATH%
echo Test onnxruntime installation... && python -c "import onnxruntime"
REM install libsvm from github
git clone --recursive https://github.com/cjlin1/libsvm libsvm
copy libsvm\windows\*.dll libsvm\python
set PYTHONPATH=libsvm\python;%PYTHONPATH%
dir libsvm\python
echo Test libsvm installation... && python -c "import svmutil"
displayName: 'Install dependencies'
- script: |
call activate py$(python.version)
set PYTHONPATH=libsvm\python;%PYTHONPATH%
pip install -e .
python -m pytest tests --ignore=tests/sparkml --doctest-modules --junitxml=junit/test-results.xml
displayName: 'pytest - onnxmltools'
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/test-results.xml'
testRunTitle: 'Python $(python.version)'
condition: succeededOrFailed()
# Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python

trigger:
- master

jobs:

- job: 'Test'
pool:
vmImage: 'vs2017-win2016'
strategy:
matrix:
Python36-nightly:
python.version: '3.6'
ONNX_PATH: onnx==1.7.0
ONNXRT_PATH: -i https://test.pypi.org/simple/ ort-nightly
COREML_PATH: git+https://github.com/apple/coremltools@3.1
Python37-nightly:
python.version: '3.7'
ONNX_PATH: onnx==1.8.0
ONNXRT_PATH: -i https://test.pypi.org/simple/ ort-nightly
COREML_PATH: git+https://github.com/apple/coremltools@3.1
maxParallel: 3

steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '$(python.version)'
architecture: 'x64'

- powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
displayName: Add conda to PATH

- script: conda create --yes --quiet --name py$(python.version) -c conda-forge python=$(python.version) numpy protobuf
displayName: Create Anaconda environment

- script: |
call activate py$(python.version)
python -m pip install --upgrade pip numpy
echo Test numpy installation... && python -c "import numpy"
pip install %COREML_PATH% %ONNX_PATH%
python -m pip install tensorflow-cpu==1.15.0
python -m pip install tf2onnx==1.5.6
python -m pip install git+https://github.com/microsoft/onnxconverter-common
python -m pip install git+https://github.com/onnx/keras-onnx
echo Test onnxconverter-common installation... && python -c "import onnxconverter_common"
pip install -r requirements.txt
pip install -r requirements-dev.txt
pip install %ONNXRT_PATH%
echo Test onnxruntime installation... && python -c "import onnxruntime"
displayName: 'Install dependencies'
- script: |
call activate py$(python.version)
pip install -e .
python -m pytest tests --ignore=tests/sparkml --doctest-modules --junitxml=junit/test-results.xml
displayName: 'pytest - onnxmltools'
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/test-results.xml'
testRunTitle: 'Python $(python.version)'
condition: succeededOrFailed()
14 changes: 0 additions & 14 deletions .azure-pipelines/win32-conda-CI.yml
@@ -13,13 +13,6 @@ jobs:
vmImage: 'vs2017-win2016'
strategy:
matrix:
Python35-141-RT030:
python.version: '3.5'
ONNX_PATH: onnx==1.4.1
ONNXRT_PATH: onnxruntime==0.3.0
COREML_PATH: https://github.com/apple/coremltools/archive/v2.0.zip
sklearn.version: '==0.19.1'

Python36-141-RT030:
python.version: '3.6'
ONNX_PATH: onnx==1.4.1
@@ -91,12 +84,6 @@ jobs:
python -m pip install %ONNXRT_PATH%
python -m pip install scikit-learn$(sklearn.version)
echo Test onnxruntime installation... && python -c "import onnxruntime"
REM install libsvm from github
git clone --recursive https://github.com/cjlin1/libsvm libsvm
copy libsvm\windows\*.dll libsvm\python
set PYTHONPATH=libsvm\python;%PYTHONPATH%
dir libsvm\python
echo Test libsvm installation... && python -c "import svmutil"
echo "debug environment" && path
python -m pip show pytest
displayName: 'Install dependencies'
@@ -108,7 +95,6 @@ jobs:
- script: |
call activate py$(python.version)
set PYTHONPATH=libsvm\python;%PYTHONPATH%
python -m pip install -e .
python -m pytest tests --ignore=tests/sparkml --doctest-modules --junitxml=junit/test-results.xml
displayName: 'pytest - onnxmltools'
@@ -5,11 +5,10 @@
from ...common.utils import cast_list
import numpy
try:
from libsvm import svm, svmutil
from libsvm import svm
except ImportError:
# Older version of libsvm.
import svm
import svmutil


class SVMConverter:
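The hunk above keeps a fallback for environments where libsvm is still a source build on PYTHONPATH. A general sketch of that compatibility pattern (not the converter's exact imports):

# Prefer the modules packaged by the "libsvm" PyPI distribution; fall back to the
# top-level modules exposed when libsvm/python is added to PYTHONPATH by hand.
try:
    from libsvm import svm, svmutil
except ImportError:
    # Older, source-built libsvm.
    import svm
    import svmutil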
4 changes: 2 additions & 2 deletions requirements-dev.txt
@@ -3,7 +3,8 @@ codecov
coremltools
cython
flatbuffers
lightgbm
libsvm
lightgbm!=3.2.1
h2o==3.28.0.3
mleap
numpy
@@ -14,7 +15,6 @@ pytest
pytest-cov
scikit-learn
scipy
svm
wheel
xgboost
catboost
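requirements-dev.txt now pulls libsvm from PyPI (the bare svm entry is dropped) and excludes only lightgbm 3.2.1. A small sketch of what the != specifier admits, using the packaging library purely for illustration:

from packaging.specifiers import SpecifierSet

spec = SpecifierSet("!=3.2.1")
print("3.2.1" in spec)  # False: the single excluded release
print("3.2.0" in spec)  # True
print("3.3.0" in spec)  # True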
101 changes: 51 additions & 50 deletions tests/coreml/test_cml_TreeEnsembleRegressorConverterXGBoost.py
@@ -1,50 +1,51 @@
# SPDX-License-Identifier: Apache-2.0

"""
Tests CoreML TreeEnsembleRegressor converter.
"""
import os
import sys
import unittest
import numpy
import pandas
try:
from sklearn.impute import SimpleImputer as Imputer
import sklearn.preprocessing
if not hasattr(sklearn.preprocessing, 'Imputer'):
# coremltools 3.1 does not work with scikit-learn 0.22
setattr(sklearn.preprocessing, 'Imputer', Imputer)
except ImportError:
from sklearn.preprocessing import Imputer
from coremltools.converters.xgboost import convert as convert_xgb_to_coreml
from onnxmltools.convert.coreml import convert as convert_cml
from xgboost import XGBRegressor
from onnxmltools.utils import dump_data_and_model


class TestCoreMLTreeEnsembleRegressorConverterXGBoost(unittest.TestCase):

def test_tree_ensemble_regressor_xgboost(self):

this = os.path.dirname(__file__)
data_train = pandas.read_csv(os.path.join(this, "xgboost.model.xgb.n4.d3.train.txt"), header=None)

X = data_train.iloc[:, 1:].values
y = data_train.iloc[:, 0].values

params = dict(n_estimator=4, max_depth=3)
model = XGBRegressor(**params).fit(X, y)
# See https://github.com/apple/coremltools/issues/51.
model.booster = model.get_booster
model_coreml = convert_xgb_to_coreml(model)
model_onnx = convert_cml(model_coreml)
assert model_onnx is not None
if sys.version_info[0] >= 3:
# python 2.7 returns TypeError: can't pickle instancemethod objects
dump_data_and_model(X.astype(numpy.float32), model, model_onnx,
basename="CmlXGBoostRegressor-OneOff-Reshape",
allow_failure=True)


if __name__ == "__main__":
unittest.main()
# SPDX-License-Identifier: Apache-2.0

"""
Tests CoreML TreeEnsembleRegressor converter.
"""
import os
import sys
import unittest
import numpy
import pandas
try:
from sklearn.impute import SimpleImputer as Imputer
import sklearn.preprocessing
if not hasattr(sklearn.preprocessing, 'Imputer'):
# coremltools 3.1 does not work with scikit-learn 0.22
setattr(sklearn.preprocessing, 'Imputer', Imputer)
except ImportError:
from sklearn.preprocessing import Imputer
from coremltools.converters.xgboost import convert as convert_xgb_to_coreml
from onnxmltools.convert.coreml import convert as convert_cml
from xgboost import XGBRegressor
from onnxmltools.utils import dump_data_and_model


class TestCoreMLTreeEnsembleRegressorConverterXGBoost(unittest.TestCase):

@unittest.skipIf(True, reason="broken")
def test_tree_ensemble_regressor_xgboost(self):

this = os.path.dirname(__file__)
data_train = pandas.read_csv(os.path.join(this, "xgboost.model.xgb.n4.d3.train.txt"), header=None)

X = data_train.iloc[:, 1:].values
y = data_train.iloc[:, 0].values

params = dict(n_estimator=4, max_depth=3)
model = XGBRegressor(**params).fit(X, y)
# See https://github.com/apple/coremltools/issues/51.
model.booster = model.get_booster
model_coreml = convert_xgb_to_coreml(model)
model_onnx = convert_cml(model_coreml)
assert model_onnx is not None
if sys.version_info[0] >= 3:
# python 2.7 returns TypeError: can't pickle instancemethod objects
dump_data_and_model(X.astype(numpy.float32), model, model_onnx,
basename="CmlXGBoostRegressor-OneOff-Reshape",
allow_failure=True)


if __name__ == "__main__":
unittest.main()
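The visible change in this file is the new decorator: unittest.skipIf(True, reason="broken") skips the test unconditionally, which has the same effect as @unittest.skip("broken"). A tiny self-contained example of both forms:

import unittest

class SkipDemo(unittest.TestCase):
    @unittest.skipIf(True, reason="broken")   # condition is hard-coded, so always skipped
    def test_always_skipped(self):
        self.fail("never runs")

    @unittest.skip("broken")                  # same effect, stated directly
    def test_also_skipped(self):
        self.fail("never runs")

if __name__ == "__main__":
    unittest.main()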
13 changes: 8 additions & 5 deletions tests/svmlib/test_SVMConverters.py
@@ -6,20 +6,23 @@
import tempfile
import numpy
try:
import svm
from libsvm.svm import C_SVC as SVC, EPSILON_SVR as SVR, NU_SVC as NuSVC, NU_SVR as NuSVR
import libsvm.svm as svm
import libsvm.svmutil as svmutil
except ImportError:
import libsvm.svm as svm
import svm
from svm import C_SVC as SVC, EPSILON_SVR as SVR, NU_SVC as NuSVC, NU_SVR as NuSVR
import svmutil

import numpy as np
import unittest
from sklearn.datasets import load_iris
from onnxmltools.convert.libsvm import convert
from svm import C_SVC as SVC, EPSILON_SVR as SVR, NU_SVC as NuSVC, NU_SVR as NuSVR
import svmutil
from onnxmltools.convert.common.data_types import FloatTensorType
from onnxmltools.utils import dump_data_and_model

try:
from svm import PRINT_STRING_FUN, print_null
from libsvm.svm import PRINT_STRING_FUN, print_null
noprint = PRINT_STRING_FUN(print_null)
except ImportError:
# This was recently added.
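The tests above drive libsvm through its classic Python bindings, now imported from the packaged libsvm.svm and libsvm.svmutil modules. A minimal, self-contained sketch of that API with made-up toy data:

from libsvm.svm import svm_problem, svm_parameter, C_SVC, RBF
from libsvm.svmutil import svm_train, svm_predict

# Two toy samples with three features each, labelled +1 and -1.
X = [[1.0, 0.0, 1.0], [-1.0, 0.0, -1.0]]
y = [1, -1]

prob = svm_problem(y, X)
param = svm_parameter("-s %d -t %d -q" % (C_SVC, RBF))  # C-SVC, RBF kernel, quiet mode
model = svm_train(prob, param)
labels, accuracy, values = svm_predict(y, X, model, "-q")
print(labels)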
7 changes: 6 additions & 1 deletion tests/utils/test_utils.py
@@ -5,6 +5,7 @@
"""
import os
import unittest
import warnings
import onnxmltools
from onnxmltools.utils import load_model, save_model
from onnxmltools.utils import set_model_version, set_model_domain, set_model_doc_string
@@ -59,7 +60,11 @@ def test_set_docstring_blank(self):
class TestWrapper(unittest.TestCase):

def test_keras_with_tf2onnx(self):
import keras2onnx
try:
import keras2onnx
except ImportError:
warnings.warn("keras2onnx or one of its dependencies is missing.")
return
from keras2onnx.proto import keras
from keras2onnx.proto.tfcompat import is_tf2
if not is_tf2: # tf2onnx is not available for tensorflow 2.0 yet.
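test_utils.py now degrades gracefully when keras2onnx is absent instead of failing at import time. The same guard pattern in isolation (the module name below is a hypothetical placeholder, not a real package):

import unittest
import warnings

class TestOptionalDependency(unittest.TestCase):
    def test_with_optional_dependency(self):
        try:
            import hypothetical_optional_dep  # noqa: F401  placeholder name
        except ImportError:
            warnings.warn("hypothetical_optional_dep or one of its dependencies is missing.")
            return
        # real assertions would follow once the import succeeds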
