diff --git a/CMakeLists.txt b/CMakeLists.txt index 3b8bbd2e0272..161705643194 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -227,6 +227,7 @@ if(USE_MKLDNN) include(cmake/DownloadMKLML.cmake) # CPU architecture (e.g., C5) can't run on another architecture (e.g., g3). if(NOT MSVC) + set(MKLDNN_LIBRARY_TYPE "STATIC" CACHE INTERNAL "" FORCE) set(ARCH_OPT_FLAGS "-mtune=generic") else() set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /EHsc") diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md index d1dd9b90708a..b9f84d592a70 100644 --- a/CONTRIBUTORS.md +++ b/CONTRIBUTORS.md @@ -192,6 +192,7 @@ List of Contributors * [Rahul Padmanabhan](https://github.com/rahul3) * [Yuxi Hu](https://github.com/yuxihu) * [Harsh Patel](https://github.com/harshp8l) +* [Xiao Wang](https://github.com/BeyonderXX) Label Bot --------- diff --git a/Makefile b/Makefile index 16ea59f3d585..31722e86c085 100644 --- a/Makefile +++ b/Makefile @@ -132,7 +132,12 @@ ifeq ($(USE_MKLDNN), 1) LDFLAGS += -L$(MKLROOT)/lib endif CFLAGS += -I$(MKLDNNROOT)/include - LDFLAGS += -L$(MKLDNNROOT)/lib -lmkldnn -Wl,-rpath,'$${ORIGIN}' + # MKLDNN needs to be dynamically linked for Windows, as not all VS compilers support static linking + ifneq ($(UNAME_S), Windows) + LIB_DEP += $(MKLDNNROOT)/lib/libmkldnn.a + else + LDFLAGS += -L$(MKLDNNROOT)/lib -lmkldnn -Wl,-rpath,'$${ORIGIN}' + endif endif # setup opencv @@ -486,6 +491,11 @@ build/plugin/%.o: plugin/%.cc @mkdir -p $(@D) $(CXX) -std=c++11 -c $(CFLAGS) -MMD -Isrc/operator -c $< -o $@ +# Set install path for libmxnet.so on Mac OS +ifeq ($(UNAME_S), Darwin) + LDFLAGS += -Wl,-install_name,@rpath/libmxnet.so +endif + # NOTE: to statically link libmxnet.a we need the option # --Wl,--whole-archive -lmxnet --Wl,--no-whole-archive lib/libmxnet.a: $(ALLX_DEP) @@ -600,11 +610,19 @@ rpkgtest: Rscript -e 'res<-covr:::package_coverage("R-package");fileConn<-file(paste("r-package_coverage_",toString(runif(1)),".json"));writeLines(covr:::to_codecov(res), fileConn);close(fileConn)' scalaclean: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn clean -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE)) +scalatestcompile: + (cd $(ROOTDIR)/scala-package && \ + mvn test-compile -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -Dcxx="$(CXX)" \ + -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ + -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ + -Dcurrent_libdir="$(ROOTDIR)/lib" \ + -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a") + scalapkg: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn package -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -Dcxx="$(CXX)" \ -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ @@ -612,49 +630,58 @@ scalapkg: -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a") scalaunittest: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn integration-test -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE),unittest -Dcxx="$(CXX)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a" $(SCALA_TEST_ARGS)) scalaintegrationtest: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn integration-test -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE),integrationtest -Dcxx="$(CXX)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a" $(SCALA_TEST_ARGS)) scalainstall: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn install -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE)
-DskipTests=true -Dcxx="$(CXX)" \ -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a") scalarelease-dryrun: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn release:clean release:prepare -DdryRun=true -DautoVersionSubmodules=true \ -Papache-release,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \ -Darguments=""-Dbuild\.platform=\""$(SCALA_PKG_PROFILE)\""\ -DskipTests=true\ -Dcflags=\""$(CFLAGS)\""\ -Dcxx=\""$(CXX)\""\ -Dldflags=\""$(LDFLAGS)\""\ -Dlddeps=\""$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a\"""") scalarelease-prepare: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn release:clean release:prepare -DautoVersionSubmodules=true \ -Papache-release,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \ -Darguments=""-Dbuild\.platform=\""$(SCALA_PKG_PROFILE)\""\ -DskipTests=true\ -Dcflags=\""$(CFLAGS)\""\ -Dcxx=\""$(CXX)\""\ -Dldflags=\""$(LDFLAGS)\""\ -Dlddeps=\""$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a\"""") scalarelease-perform: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn release:perform -DautoVersionSubmodules=true \ -Papache-release,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \ -Darguments=""-Dbuild\.platform=\""$(SCALA_PKG_PROFILE)\""\ -DskipTests=true\ -Dcflags=\""$(CFLAGS)\""\ -Dcxx=\""$(CXX)\""\ -Dldflags=\""$(LDFLAGS)\""\ -Dlddeps=\""$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a\"""") scaladeploy: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn deploy -Papache-release,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \-DskipTests=true -Dcxx="$(CXX)" \ -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a") +scaladeploylocal: + (cd $(ROOTDIR)/scala-package && \ + mvn deploy -Papache-release,deployLocal,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \-DskipTests=true -Dcxx="$(CXX)" \ + -DaltDeploymentRepository=snapshot-repo::default::file:local-snapshot \ + -Dgpg.skip \ + -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ + -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ + -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a") + jnilint: 3rdparty/dmlc-core/scripts/lint.py mxnet-jnicpp cpp scala-package/native/src diff --git a/ci/docker/Dockerfile.build.ubuntu_cpu b/ci/docker/Dockerfile.build.ubuntu_cpu index 7c7e2240ee61..2df9f5887f54 100644 --- a/ci/docker/Dockerfile.build.ubuntu_cpu +++ b/ci/docker/Dockerfile.build.ubuntu_cpu @@ -54,6 +54,9 @@ RUN /work/ubuntu_clang.sh COPY install/ubuntu_gcc8.sh /work/ RUN /work/ubuntu_gcc8.sh +COPY install/ubuntu_mkl.sh /work/ +RUN /work/ubuntu_mkl.sh + COPY install/ubuntu_mklml.sh /work/ RUN /work/ubuntu_mklml.sh diff --git a/ci/docker/install/ubuntu_mkl.sh b/ci/docker/install/ubuntu_mkl.sh new file mode 100755 index 000000000000..36fc7b07ffdc --- /dev/null +++ b/ci/docker/install/ubuntu_mkl.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# build and install are separated so changes to build don't invalidate +# the whole docker cache for the image + +set -ex + +apt-get update || true +# Install Intel Math Kernel Library (latest major release) +# https://software.intel.com/en-us/articles/installing-intel-free-libs-and-python-apt-repo +wget -O - https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB | apt-key add - && \ + sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && \ + apt-get update && \ + apt-get install -y intel-mkl-2019.1-053 diff --git a/ci/docker/runtime_functions.sh b/ci/docker/runtime_functions.sh index 1fc10bf0e085..82e6feb2a728 100755 --- a/ci/docker/runtime_functions.sh +++ b/ci/docker/runtime_functions.sh @@ -326,6 +326,20 @@ build_ubuntu_cpu_openblas() { -j$(nproc) } +build_ubuntu_cpu_mkl() { + set -ex + export CC="ccache gcc" + export CXX="ccache g++" + make \ + DEV=1 \ + ENABLE_TESTCOVERAGE=1 \ + USE_CPP_PACKAGE=1 \ + USE_BLAS=mkl \ + USE_INTEL_PATH=/opt/intel \ + USE_DIST_KVSTORE=1 \ + -j$(nproc) +} + build_ubuntu_cpu_cmake_debug() { set -ex pushd . @@ -480,6 +494,20 @@ build_ubuntu_cpu_mkldnn() { -j$(nproc) } +build_ubuntu_cpu_mkldnn_mkl() { + set -ex + + build_ccache_wrappers + + make \ + DEV=1 \ + ENABLE_TESTCOVERAGE=1 \ + USE_CPP_PACKAGE=1 \ + USE_BLAS=mkl \ + USE_MKLDNN=1 \ + -j$(nproc) +} + build_ubuntu_gpu() { build_ubuntu_gpu_cuda91_cudnn7 } @@ -629,9 +657,6 @@ build_ubuntu_gpu_cmake_mkldnn() { /work/mxnet ninja -v - # libmkldnn.so.0 is a link file. We need an actual binary file named libmkldnn.so.0. - cp 3rdparty/mkldnn/src/libmkldnn.so.0 3rdparty/mkldnn/src/libmkldnn.so.0.tmp - mv 3rdparty/mkldnn/src/libmkldnn.so.0.tmp 3rdparty/mkldnn/src/libmkldnn.so.0 } build_ubuntu_gpu_cmake() { diff --git a/ci/jenkins/Jenkins_steps.groovy b/ci/jenkins/Jenkins_steps.groovy index f48a26737308..d5cbd97683ed 100644 --- a/ci/jenkins/Jenkins_steps.groovy +++ b/ci/jenkins/Jenkins_steps.groovy @@ -23,19 +23,19 @@ utils = load('ci/Jenkinsfile_utils.groovy') // mxnet libraries -mx_lib = 'lib/libmxnet.so, lib/libmxnet.a, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a' +mx_lib = 'lib/libmxnet.so, lib/libmxnet.a, lib/libiomp5.so, lib/libmklml_intel.so, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a' // Python wheels mx_pip = 'build/*.whl' // for scala build, need to pass extra libs when run with dist_kvstore -mx_dist_lib = 'lib/libmxnet.so, lib/libmxnet.a, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a, 3rdparty/ps-lite/build/libps.a, deps/lib/libprotobuf-lite.a, deps/lib/libzmq.a' +mx_dist_lib = 'lib/libmxnet.so, lib/libmxnet.a, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a, 3rdparty/ps-lite/build/libps.a, deps/lib/libprotobuf-lite.a, deps/lib/libzmq.a, lib/libmkldnn.a' // mxnet cmake libraries, in cmake builds we do not produce a libnvvm static library by default.
mx_cmake_lib = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so' // mxnet cmake libraries, in cmake builds we do not produce a libnvvm static library by default. mx_cmake_lib_debug = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests' -mx_cmake_mkldnn_lib = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so, build/3rdparty/mkldnn/src/libmkldnn.so.0' -mx_mkldnn_lib = 'lib/libmxnet.so, lib/libmxnet.a, lib/libiomp5.so, lib/libmkldnn.so.0, lib/libmklml_intel.so, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a' +mx_cmake_mkldnn_lib = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so' +mx_mkldnn_lib = 'lib/libmxnet.so, lib/libmxnet.a, lib/libiomp5.so, lib/libmklml_intel.so, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a' mx_tensorrt_lib = 'lib/libmxnet.so, lib/libnvonnxparser_runtime.so.0, lib/libnvonnxparser.so.0, lib/libonnx_proto.so, lib/libonnx.so' mx_lib_cpp_examples = 'lib/libmxnet.so, lib/libmxnet.a, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a, 3rdparty/ps-lite/build/libps.a, deps/lib/libprotobuf-lite.a, deps/lib/libzmq.a, build/cpp-package/example/*' mx_lib_cpp_examples_cpu = 'build/libmxnet.so, build/cpp-package/example/*' @@ -121,6 +121,20 @@ def compile_unix_openblas_debug_cpu() { }] } +def compile_unix_mkl_cpu() { + return ['CPU: MKL': { + node(NODE_LINUX_CPU) { + ws('workspace/build-cpu-mkl') { + timeout(time: max_time, unit: 'MINUTES') { + utils.init_git() + utils.docker_run('ubuntu_cpu', 'build_ubuntu_cpu_mkl', false) + utils.pack_lib('cpu_mkl', mx_dist_lib, true) + } + } + } + }] +} + def compile_unix_mkldnn_cpu() { return ['CPU: MKLDNN': { node(NODE_LINUX_CPU) { @@ -135,6 +149,20 @@ def compile_unix_mkldnn_cpu() { }] } +def compile_unix_mkldnn_mkl_cpu() { + return ['CPU: MKLDNN_MKL': { + node(NODE_LINUX_CPU) { + ws('workspace/build-mkldnn-cpu') { + timeout(time: max_time, unit: 'MINUTES') { + utils.init_git() + utils.docker_run('ubuntu_cpu', 'build_ubuntu_cpu_mkldnn_mkl', false) + utils.pack_lib('mkldnn_mkl_cpu', mx_mkldnn_lib, true) + } + } + } + }] +} + def compile_unix_mkldnn_gpu() { return ['GPU: MKLDNN': { node(NODE_LINUX_CPU) { @@ -580,6 +608,23 @@ def test_unix_python3_cpu() { }] } +def test_unix_python3_mkl_cpu() { + return ['Python3: MKL-CPU': { + node(NODE_LINUX_CPU) { + ws('workspace/ut-python3-cpu') { + try { + utils.unpack_and_init('cpu_mkl', mx_lib, true) + python3_ut('ubuntu_cpu') + utils.publish_test_coverage() + } finally { + utils.collect_test_results_unix('nosetests_unittest.xml', 'nosetests_python3_cpu_unittest.xml') + utils.collect_test_results_unix('nosetests_quantization.xml', 'nosetests_python3_cpu_quantization.xml') + } + } + } + }] +} + def test_unix_python3_gpu() { return ['Python3: GPU': { node(NODE_LINUX_GPU) { @@ -665,6 +710,23 @@ def test_unix_python3_mkldnn_cpu() { }] } +def test_unix_python3_mkldnn_mkl_cpu() { + return ['Python3: MKLDNN-MKL-CPU': { + node(NODE_LINUX_CPU) { + ws('workspace/ut-python3-mkldnn-mkl-cpu') { + try { + utils.unpack_and_init('mkldnn_mkl_cpu', mx_mkldnn_lib, true) + python3_ut_mkldnn('ubuntu_cpu') + utils.publish_test_coverage() + } finally { + utils.collect_test_results_unix('nosetests_unittest.xml', 
'nosetests_python3_mkldnn_cpu_unittest.xml') + utils.collect_test_results_unix('nosetests_mkl.xml', 'nosetests_python3_mkldnn_cpu_mkl.xml') + } + } + } + }] +} + def test_unix_python3_mkldnn_gpu() { return ['Python3: MKLDNN-GPU': { node(NODE_LINUX_GPU) { diff --git a/ci/jenkins/Jenkinsfile_unix_cpu b/ci/jenkins/Jenkinsfile_unix_cpu index 9c9a41503772..e581bcf65dc5 100644 --- a/ci/jenkins/Jenkinsfile_unix_cpu +++ b/ci/jenkins/Jenkinsfile_unix_cpu @@ -36,15 +36,19 @@ core_logic: { utils.parallel_stage('Build', [ custom_steps.compile_unix_cpu_openblas(), custom_steps.compile_unix_openblas_debug_cpu(), - custom_steps.compile_unix_mkldnn_cpu() + custom_steps.compile_unix_mkl_cpu(), + custom_steps.compile_unix_mkldnn_cpu(), + custom_steps.compile_unix_mkldnn_mkl_cpu() ]) utils.parallel_stage('Tests', [ custom_steps.test_unix_python2_cpu(), custom_steps.test_unix_python3_cpu(), custom_steps.test_unix_python3_debug_cpu(), + custom_steps.test_unix_python3_mkl_cpu(), custom_steps.test_unix_python2_mkldnn_cpu(), custom_steps.test_unix_python3_mkldnn_cpu(), + custom_steps.test_unix_python3_mkldnn_mkl_cpu(), custom_steps.test_unix_scala_cpu(), custom_steps.test_unix_clojure_cpu(), custom_steps.test_unix_r_cpu(), diff --git a/contrib/clojure-package/examples/cnn-text-classification/src/cnn_text_classification/classifier.clj b/contrib/clojure-package/examples/cnn-text-classification/src/cnn_text_classification/classifier.clj index 29ff36fe1ec0..94fd4f518c60 100644 --- a/contrib/clojure-package/examples/cnn-text-classification/src/cnn_text_classification/classifier.clj +++ b/contrib/clojure-package/examples/cnn-text-classification/src/cnn_text_classification/classifier.clj @@ -16,7 +16,9 @@ ;; (ns cnn-text-classification.classifier - (:require [cnn-text-classification.data-helper :as data-helper] + (:require [clojure.java.io :as io] + [clojure.java.shell :refer [sh]] + [cnn-text-classification.data-helper :as data-helper] [org.apache.clojure-mxnet.eval-metric :as eval-metric] [org.apache.clojure-mxnet.io :as mx-io] [org.apache.clojure-mxnet.module :as m] @@ -26,12 +28,18 @@ [org.apache.clojure-mxnet.context :as context]) (:gen-class)) +(def data-dir "data/") (def mr-dataset-path "data/mr-data") ;; the MR polarity dataset path (def glove-file-path "data/glove/glove.6B.50d.txt") (def num-filter 100) (def num-label 2) (def dropout 0.5) + + +(when-not (.exists (io/file (str data-dir))) + (do (println "Retrieving data for cnn text classification...") (sh "./get_data.sh"))) + (defn shuffle-data [test-num {:keys [data label sentence-count sentence-size embedding-size]}] (println "Shuffling the data and splitting into training and test sets") (println {:sentence-count sentence-count @@ -103,10 +111,10 @@ ;;; omit max-examples if you want to run all the examples in the movie review dataset ;; to limit mem consumption set to something like 1000 and adjust test size to 100 (println "Running with context devices of" devs) - (train-convnet {:devs [(context/cpu)] :embedding-size 50 :batch-size 10 :test-size 100 :num-epoch 10 :max-examples 1000}) + (train-convnet {:devs devs :embedding-size 50 :batch-size 10 :test-size 100 :num-epoch 10 :max-examples 1000}) ;; runs all the examples #_(train-convnet {:embedding-size 50 :batch-size 100 :test-size 1000 :num-epoch 10}))) (comment - (train-convnet {:devs [(context/cpu)] :embedding-size 50 :batch-size 10 :test-size 100 :num-epoch 10 :max-examples 1000})) + (train-convnet {:devs devs :embedding-size 50 :batch-size 10 :test-size 100 :num-epoch 10 :max-examples 1000})) diff 
--git a/contrib/clojure-package/examples/cnn-text-classification/test/cnn_text_classification/classifier_test.clj b/contrib/clojure-package/examples/cnn-text-classification/test/cnn_text_classification/classifier_test.clj new file mode 100644 index 000000000000..918a46f474d8 --- /dev/null +++ b/contrib/clojure-package/examples/cnn-text-classification/test/cnn_text_classification/classifier_test.clj @@ -0,0 +1,44 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns cnn-text-classification.classifier-test + (:require + [clojure.test :refer :all] + [org.apache.clojure-mxnet.module :as module] + [org.apache.clojure-mxnet.ndarray :as ndarray] + [org.apache.clojure-mxnet.util :as util] + [org.apache.clojure-mxnet.context :as context] + [cnn-text-classification.classifier :as classifier])) + +; +; The one and unique classifier test +; +(deftest classifier-test + (let [train + (classifier/train-convnet + {:devs [(context/default-context)] + :embedding-size 50 + :batch-size 10 + :test-size 100 + :num-epoch 1 + :max-examples 1000})] + (is (= ["data"] (util/scala-vector->vec (module/data-names train)))) + (is (= 20 (count (ndarray/->vec (-> train module/outputs first first))))))) + ;(prn (util/scala-vector->vec (data-shapes train))) + ;(prn (util/scala-vector->vec (label-shapes train))) + ;(prn (output-names train)) + ;(prn (output-shapes train)) \ No newline at end of file diff --git a/contrib/clojure-package/examples/gan/project.clj b/contrib/clojure-package/examples/gan/project.clj index b8f6903cabba..a326f7a5605f 100644 --- a/contrib/clojure-package/examples/gan/project.clj +++ b/contrib/clojure-package/examples/gan/project.clj @@ -20,5 +20,6 @@ :plugins [[lein-cljfmt "0.5.7"]] :dependencies [[org.clojure/clojure "1.9.0"] [org.apache.mxnet.contrib.clojure/clojure-mxnet "1.5.0-SNAPSHOT"] - [nu.pattern/opencv "2.4.9-7"]] + [org.openpnp/opencv "3.4.2-1"] + ] :main gan.gan-mnist) diff --git a/contrib/clojure-package/examples/gan/src/gan/gan_mnist.clj b/contrib/clojure-package/examples/gan/src/gan/gan_mnist.clj index e2e3364535ec..944791bce604 100644 --- a/contrib/clojure-package/examples/gan/src/gan/gan_mnist.clj +++ b/contrib/clojure-package/examples/gan/src/gan/gan_mnist.clj @@ -157,7 +157,9 @@ (save-img-diff i n calc-diff)))) -(defn train [devs] +(defn train + ([devs] (train devs num-epoch)) + ([devs num-epoch] (let [mod-d (-> (m/module (discriminator) {:contexts devs :data-names ["data"] :label-names ["label"]}) (m/bind {:data-shapes (mx-io/provide-data-desc mnist-iter) :label-shapes (mx-io/provide-label-desc mnist-iter) @@ -203,7 +205,7 @@ (save-img-gout i n (ndarray/copy (ffirst out-g))) (save-img-data i n batch) (calc-diff i n (ffirst diff-d))) - (inc n))))))) + (inc n)))))))) (defn -main [& args] (let [[dev dev-num] args diff --git 
a/contrib/clojure-package/examples/gan/src/gan/viz.clj b/contrib/clojure-package/examples/gan/src/gan/viz.clj index 8b57b9432a7e..67f78806de66 100644 --- a/contrib/clojure-package/examples/gan/src/gan/viz.clj +++ b/contrib/clojure-package/examples/gan/src/gan/viz.clj @@ -22,7 +22,7 @@ (:import (nu.pattern OpenCV) (org.opencv.core Core CvType Mat Size) (org.opencv.imgproc Imgproc) - (org.opencv.highgui Highgui))) + (org.opencv.imgcodecs Imgcodecs))) ;;; Viz stuff (OpenCV/loadShared) @@ -83,5 +83,5 @@ _ (Core/vconcat (java.util.ArrayList. line-mats) result)] (do (Imgproc/resize result resized-img (new Size (* (.width result) 1.5) (* (.height result) 1.5))) - (Highgui/imwrite (str output-path title ".jpg") resized-img) + (Imgcodecs/imwrite (str output-path title ".jpg") resized-img) (Thread/sleep 1000)))) diff --git a/contrib/clojure-package/examples/gan/test/gan/gan_test.clj b/contrib/clojure-package/examples/gan/test/gan/gan_test.clj new file mode 100644 index 000000000000..71b9126cae25 --- /dev/null +++ b/contrib/clojure-package/examples/gan/test/gan/gan_test.clj @@ -0,0 +1,25 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. 
+;; + +(ns gan.gan_test + (:require + [gan.gan-mnist :refer :all] + [org.apache.clojure-mxnet.context :as context] + [clojure.test :refer :all])) + +(deftest check-pdf + (train [(context/cpu)] 1)) \ No newline at end of file diff --git a/contrib/clojure-package/examples/imclassification/src/imclassification/train_mnist.clj b/contrib/clojure-package/examples/imclassification/src/imclassification/train_mnist.clj index a43dc3b69bd9..e61e9ebf6fbb 100644 --- a/contrib/clojure-package/examples/imclassification/src/imclassification/train_mnist.clj +++ b/contrib/clojure-package/examples/imclassification/src/imclassification/train_mnist.clj @@ -32,7 +32,7 @@ (def batch-size 10) ;; the batch size (def optimizer (optimizer/sgd {:learning-rate 0.01 :momentum 0.0})) (def eval-metric (eval-metric/accuracy)) -(def num-epoch 5) ;; the number of training epochs +(def num-epoch 1) ;; the number of training epochs (def kvstore "local") ;; the kvstore type ;;; Note to run distributed you might need to complile the engine with an option set (def role "worker") ;; scheduler/server/worker @@ -82,7 +82,9 @@ (sym/fully-connected "fc3" {:data data :num-hidden 10}) (sym/softmax-output "softmax" {:data data}))) -(defn start [devs] +(defn start + ([devs] (start devs num-epoch)) + ([devs _num-epoch] (when scheduler-host (println "Initing PS enviornments with " envs) (kvstore-server/init envs)) @@ -94,14 +96,18 @@ (do (println "Starting Training of MNIST ....") (println "Running with context devices of" devs) - (let [mod (m/module (get-symbol) {:contexts devs})] - (m/fit mod {:train-data train-data + (let [_mod (m/module (get-symbol) {:contexts devs})] + (m/fit _mod {:train-data train-data :eval-data test-data - :num-epoch num-epoch + :num-epoch _num-epoch :fit-params (m/fit-params {:kvstore kvstore :optimizer optimizer - :eval-metric eval-metric})})) - (println "Finish fit")))) + :eval-metric eval-metric})}) + (println "Finish fit") + _mod + ) + + )))) (defn -main [& args] (let [[dev dev-num] args diff --git a/contrib/clojure-package/examples/imclassification/test/imclassification/train_mnist_test.clj b/contrib/clojure-package/examples/imclassification/test/imclassification/train_mnist_test.clj new file mode 100644 index 000000000000..2ebefc2fc664 --- /dev/null +++ b/contrib/clojure-package/examples/imclassification/test/imclassification/train_mnist_test.clj @@ -0,0 +1,39 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. 
+;; + +(ns imclassification.train-mnist-test + (:require + [clojure.test :refer :all] + [clojure.java.io :as io] + [clojure.string :as s] + [org.apache.clojure-mxnet.context :as context] + [org.apache.clojure-mxnet.module :as module] + [imclassification.train-mnist :as mnist])) + +(defn- file-to-filtered-seq [file] + (->> + file + (io/file) + (io/reader) + (line-seq) + (filter #(not (s/includes? % "mxnet_version"))))) + +(deftest mnist-two-epochs-test + (module/save-checkpoint (mnist/start [(context/cpu)] 2) {:prefix "target/test" :epoch 2}) + (is (= + (file-to-filtered-seq "test/test-symbol.json.ref") + (file-to-filtered-seq "target/test-symbol.json")))) \ No newline at end of file diff --git a/contrib/clojure-package/examples/imclassification/test/test-symbol.json.ref b/contrib/clojure-package/examples/imclassification/test/test-symbol.json.ref new file mode 100644 index 000000000000..ba1d2fad3a8a --- /dev/null +++ b/contrib/clojure-package/examples/imclassification/test/test-symbol.json.ref @@ -0,0 +1,105 @@ +{ + "nodes": [ + { + "op": "null", + "name": "data", + "inputs": [] + }, + { + "op": "null", + "name": "fc1_weight", + "attrs": {"num_hidden": "128"}, + "inputs": [] + }, + { + "op": "null", + "name": "fc1_bias", + "attrs": {"num_hidden": "128"}, + "inputs": [] + }, + { + "op": "FullyConnected", + "name": "fc1", + "attrs": {"num_hidden": "128"}, + "inputs": [[0, 0, 0], [1, 0, 0], [2, 0, 0]] + }, + { + "op": "Activation", + "name": "relu1", + "attrs": {"act_type": "relu"}, + "inputs": [[3, 0, 0]] + }, + { + "op": "null", + "name": "fc2_weight", + "attrs": {"num_hidden": "64"}, + "inputs": [] + }, + { + "op": "null", + "name": "fc2_bias", + "attrs": {"num_hidden": "64"}, + "inputs": [] + }, + { + "op": "FullyConnected", + "name": "fc2", + "attrs": {"num_hidden": "64"}, + "inputs": [[4, 0, 0], [5, 0, 0], [6, 0, 0]] + }, + { + "op": "Activation", + "name": "relu2", + "attrs": {"act_type": "relu"}, + "inputs": [[7, 0, 0]] + }, + { + "op": "null", + "name": "fc3_weight", + "attrs": {"num_hidden": "10"}, + "inputs": [] + }, + { + "op": "null", + "name": "fc3_bias", + "attrs": {"num_hidden": "10"}, + "inputs": [] + }, + { + "op": "FullyConnected", + "name": "fc3", + "attrs": {"num_hidden": "10"}, + "inputs": [[8, 0, 0], [9, 0, 0], [10, 0, 0]] + }, + { + "op": "null", + "name": "softmax_label", + "inputs": [] + }, + { + "op": "SoftmaxOutput", + "name": "softmax", + "inputs": [[11, 0, 0], [12, 0, 0]] + } + ], + "arg_nodes": [0, 1, 2, 5, 6, 9, 10, 12], + "node_row_ptr": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14 + ], + "heads": [[13, 0, 0]], + "attrs": {"mxnet_version": ["int", 10400]} +} \ No newline at end of file diff --git a/contrib/clojure-package/examples/module/test/mnist_mlp_test.clj b/contrib/clojure-package/examples/module/test/mnist_mlp_test.clj new file mode 100644 index 000000000000..5fbcdd3c0b39 --- /dev/null +++ b/contrib/clojure-package/examples/module/test/mnist_mlp_test.clj @@ -0,0 +1,29 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. 
You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; +(ns mnist-mlp-test + (:require + [mnist-mlp :refer :all] + [org.apache.clojure-mxnet.context :as context] + [clojure.test :refer :all])) + +(deftest run-those-tests + (let [devs [(context/cpu)]] + (run-intermediate-level-api :devs devs) + (run-intermediate-level-api :devs devs :load-model-epoch (dec num-epoch)) + (run-high-level-api devs) + (run-prediction-iterator-api devs) + (run-predication-and-calc-accuracy-manually devs))) \ No newline at end of file diff --git a/contrib/clojure-package/examples/multi-label/test/multi_label_test.clj b/contrib/clojure-package/examples/multi-label/test/multi_label_test.clj new file mode 100644 index 000000000000..446a84626e72 --- /dev/null +++ b/contrib/clojure-package/examples/multi-label/test/multi_label_test.clj @@ -0,0 +1,26 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns multi_label_test + (:require + [multi-label.core :as label] + [clojure.java.io :as io] + [org.apache.clojure-mxnet.context :as context] + [clojure.test :refer :all])) + +(deftest run-multi-label + (label/train [(context/cpu)])) \ No newline at end of file diff --git a/contrib/clojure-package/examples/neural-style/src/neural_style/core.clj b/contrib/clojure-package/examples/neural-style/src/neural_style/core.clj index fcf402f3466d..ac1f537f1c26 100644 --- a/contrib/clojure-package/examples/neural-style/src/neural_style/core.clj +++ b/contrib/clojure-package/examples/neural-style/src/neural_style/core.clj @@ -24,6 +24,8 @@ [org.apache.clojure-mxnet.random :as random] [org.apache.clojure-mxnet.shape :as mx-shape] [org.apache.clojure-mxnet.symbol :as sym] + [clojure.java.io :as io] + [clojure.java.shell :refer [sh]] [mikera.image.core :as img] [mikera.image.filters :as img-filter] [think.image.pixel :as pixel] @@ -31,6 +33,9 @@ (:gen-class));; An Implementation of the paper A Neural Algorithm of Artistic Style ;;by Leon A. Gatys, Alexander S. 
Ecker, and Matthias Bethge +(when-not (.exists (io/file "input")) + (do (println "Retrieving data...") (sh "./download.sh"))) + (def content-image "input/IMG_4343.jpg") (def style-image "input/starry_night.jpg") (def model-path "model/vgg19.params") @@ -39,7 +44,7 @@ (def content-weight 5) ;; the weight for the content image (def blur-radius 1) ;; the blur filter radius (def output-dir "output") -(def lr 10) ;; the learning rate +(def lr 10.0) ;; the learning rate (def tv-weight 0.01) ;; the magnitude on the tv loss (def num-epochs 1000) (def num-channels 3) @@ -157,9 +162,10 @@ out (ndarray/* out tv-weight)] (sym/bind out ctx {"img" img "kernel" kernel})))) -(defn train [devs] - - (let [dev (first devs) +(defn train + ([devs] (train devs 20)) + ([devs n-epochs] + (let [dev (first devs) content-np (preprocess-content-image content-image max-long-edge) content-np-shape (mx-shape/->vec (ndarray/shape content-np)) style-np (preprocess-style-image style-image content-np-shape) @@ -212,7 +218,7 @@ tv-grad-executor (get-tv-grad-executor img dev tv-weight) eps 0.0 e 0] - (doseq [i (range 20)] + (doseq [i (range n-epochs)] (ndarray/set (:data model-executor) img) (-> (:executor model-executor) (executor/forward) @@ -237,8 +243,10 @@ (println "Epoch " i "relative change " eps) (when (zero? (mod i 2)) (save-image (ndarray/copy img) (str output-dir "/out_" i ".png") blur-radius true))) - - (ndarray/set old-img img)))) + (ndarray/set old-img img)) + ; (save-image (ndarray/copy img) (str output-dir "/final.png") 0 false) + ; (postprocess-image img) + ))) (defn -main [& args] ;;; Note this only works on cpu right now diff --git a/contrib/clojure-package/examples/neural-style/test/neural_style/vgg_19_test.clj b/contrib/clojure-package/examples/neural-style/test/neural_style/vgg_19_test.clj new file mode 100644 index 000000000000..a7c978607e4f --- /dev/null +++ b/contrib/clojure-package/examples/neural-style/test/neural_style/vgg_19_test.clj @@ -0,0 +1,53 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns neural-style.vgg-19-test + (:require + [clojure.test :refer :all] + [mikera.image.core :as img] + [clojure.java.io :as io] + [org.apache.clojure-mxnet.ndarray :as ndarray] + [org.apache.clojure-mxnet.context :as context] + [neural-style.core :as neural])) + +(defn pic-to-ndarray-vec[path] + (-> path + img/load-image + neural/image->ndarray + ndarray/->vec)) + +(defn last-modified-check[x] + (let [t (- (System/currentTimeMillis) (.lastModified x)) ] + (if (> 10000 t) ; 10 seconds + x + (throw (Exception. 
(str "Generated File Too Old: (" t " ms) [" x "]")))))) + +(defn latest-pic-to-ndarray-vec[folder] + (->> folder + io/as-file + (.listFiles) + (sort-by #(.lastModified %)) + last + (last-modified-check) + (.getPath) + pic-to-ndarray-vec)) + +(deftest vgg-19-test + (neural/train [(context/cpu)] 3) + (is (not (nil? (latest-pic-to-ndarray-vec "output"))))) +; generated file different depending on the platform :/ +; (pic-to-ndarray-vec "test/ref_out_2.png")))) \ No newline at end of file diff --git a/contrib/clojure-package/examples/profiler/src/profiler/core.clj b/contrib/clojure-package/examples/profiler/src/profiler/core.clj index e366c578c551..67ba0feb8a9b 100644 --- a/contrib/clojure-package/examples/profiler/src/profiler/core.clj +++ b/contrib/clojure-package/examples/profiler/src/profiler/core.clj @@ -27,9 +27,9 @@ (def profiler-mode "symbolic") ;; can be symbolic, imperative, api, mem (def output-path ".") ;; the profile file output directory (def profiler-name "profile-matmul-20iter.json") -(def iter-num 100) -(def begin-profiling-iter 50) -(def end-profiling-iter 70) +(def iter-num 5) +(def begin-profiling-iter 0) +(def end-profiling-iter 1) (def gpu? false) (defn run [] diff --git a/contrib/clojure-package/examples/profiler/test/core_test.clj b/contrib/clojure-package/examples/profiler/test/core_test.clj new file mode 100644 index 000000000000..1173f0755bbd --- /dev/null +++ b/contrib/clojure-package/examples/profiler/test/core_test.clj @@ -0,0 +1,31 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. 
+;; + +(ns core_test + (:require + [profiler.core :as profiler] + [clojure.java.io :as io] + [clojure.test :refer :all])) + +(defn count-lines[file] + (count (line-seq (io/reader (io/as-file file))))) + +(deftest run-profiler + (profiler/run) + (let [new-file (clojure.java.io/as-file profiler/profiler-name)] + (is (.exists new-file)) + (is (> 10000 (- (System/currentTimeMillis) (.lastModified new-file)))))) \ No newline at end of file diff --git a/contrib/clojure-package/examples/profiler/test/profile-matmul-20iter.json.ref b/contrib/clojure-package/examples/profiler/test/profile-matmul-20iter.json.ref new file mode 100644 index 000000000000..d6baa42114cd --- /dev/null +++ b/contrib/clojure-package/examples/profiler/test/profile-matmul-20iter.json.ref @@ -0,0 +1,271 @@ +{ + "traceEvents": [ + { + "ph": "M", + "args": { + "name": "cpu/0" + }, + "pid": 0, + "name": "process_name" + }, + { + "ph": "M", + "args": { + "name": "cpu/1" + }, + "pid": 1, + "name": "process_name" + }, + { + "ph": "M", + "args": { + "name": "cpu/2" + }, + "pid": 2, + "name": "process_name" + }, + { + "ph": "M", + "args": { + "name": "cpu/3" + }, + "pid": 3, + "name": "process_name" + }, + { + "ph": "M", + "args": { + "name": "cpu pinned/" + }, + "pid": 4, + "name": "process_name" + }, + { + "ph": "M", + "args": { + "name": "cpu shared/" + }, + "pid": 5, + "name": "process_name" + }, { + "ph": "M", + "args": { + "name": "MXNET_C_API" + }, + "pid": 13841910479334118176, + "name": "process_name" + }, + + { + "name": "MXNet C API Calls", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51195258331, + "args": { "MXNet C API Calls": 1 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "MXNet C API Concurrency", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51195258338, + "args": { "MXNet C API Concurrency": 1 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "MXExecutorForward", + "cat": "MXNET_C_API", + "ph": "b", + "ts": 51195258348, + "id": 6902988396839073221, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "MXExecutorForward", + "cat": "MXNET_C_API", + "ph": "e", + "ts": 51195258357, + "id": 6902988396839073221, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "MXNet C API Concurrency", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51195258358, + "args": { "MXNet C API Concurrency": 0 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, { + "ph": "M", + "args": { + "name": "Device Storage" + }, + "pid": 13545698322897290393, + "name": "process_name" + }, + + { + "name": "Memory: cpu/0", + "cat": "Device Storage", + "ph": "C", + "ts": 51195543378, + "args": { "Memory: cpu/0": 8 }, + "pid": 13545698322897290393, + "tid": 5603937861270119161 + } +, + { + "name": "MXNet C API Calls", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51195258559, + "args": { "MXNet C API Calls": 2 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "Memory: cpu/0", + "cat": "Device Storage", + "ph": "C", + "ts": 51195857697, + "args": { "Memory: cpu/0": 67108872 }, + "pid": 13545698322897290393, + "tid": 5603937861270119161 + } +, + { + "name": "MXNet C API Concurrency", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51195258560, + "args": { "MXNet C API Concurrency": 1 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + + { + "name": "[dot]", + "cat": "operator", + "ph": "B", + "ts": 51195857671, + "pid": 0, + "tid": 5603937861270119161 + } +, + { + 
"name": "[dot]", + "cat": "operator", + "ph": "E", + "ts": 51196931353, + "pid": 0, + "tid": 5603937861270119161 + } +, + + { + "name": "WaitForVar", + "cat": "operator", + "ph": "B", + "ts": 51196931369, + "pid": 0, + "tid": 5603937861270119161 + } +, + { + "name": "WaitForVar", + "cat": "operator", + "ph": "E", + "ts": 51196931376, + "pid": 0, + "tid": 5603937861270119161 + } +, { + "ph": "M", + "args": { + "name": "operator" + }, + "pid": 10847949044720084585, + "name": "process_name" + }, + + { + "name": "[dot]", + "cat": "operator", + "ph": "b", + "ts": 51195857671, + "id": 5603937861270119161, + "pid": 10847949044720084585, + "tid": 5603937861270119161 + } +, + { + "name": "[dot]", + "cat": "operator", + "ph": "e", + "ts": 51196931350, + "id": 5603937861270119161, + "pid": 10847949044720084585, + "tid": 5603937861270119161 + } +, + { + "name": "MXNDArrayWaitToRead", + "cat": "MXNET_C_API", + "ph": "b", + "ts": 51195258561, + "id": 6902988396839073221, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "MXNDArrayWaitToRead", + "cat": "MXNET_C_API", + "ph": "e", + "ts": 51196931386, + "id": 6902988396839073221, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "WaitForVar", + "cat": "operator", + "ph": "b", + "ts": 51196931369, + "id": 5603937861270119161, + "pid": 10847949044720084585, + "tid": 5603937861270119161 + } +, + { + "name": "WaitForVar", + "cat": "operator", + "ph": "e", + "ts": 51196931376, + "id": 5603937861270119161, + "pid": 10847949044720084585, + "tid": 5603937861270119161 + } +, + { + "name": "MXNet C API Concurrency", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51196931391, + "args": { "MXNet C API Concurrency": 0 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } diff --git a/contrib/clojure-package/examples/rnn/src/rnn/test_char_rnn.clj b/contrib/clojure-package/examples/rnn/src/rnn/test_char_rnn.clj index d03b1a6b36e4..22a2982f222b 100644 --- a/contrib/clojure-package/examples/rnn/src/rnn/test_char_rnn.clj +++ b/contrib/clojure-package/examples/rnn/src/rnn/test_char_rnn.clj @@ -17,6 +17,7 @@ (ns rnn.test-char-rnn (:require [clojure.string :as string] + [clojure.java.shell :refer [sh]] [rnn.util :as util] [rnn.lstm :as lstm] [org.apache.clojure-mxnet.context :as context] @@ -24,6 +25,9 @@ [org.apache.clojure-mxnet.module :as m] [org.apache.clojure-mxnet.ndarray :as ndarray])) +(when-not (.exists (clojure.java.io/file "data")) + (do (println "Retrieving data...") (sh "./get_data.sh"))) + (def data-path "data/obama.txt") (def model-prefix) (def start-sentence "The joke ") diff --git a/contrib/clojure-package/examples/rnn/src/rnn/train_char_rnn.clj b/contrib/clojure-package/examples/rnn/src/rnn/train_char_rnn.clj index 150cd94e673c..41a764f7af95 100644 --- a/contrib/clojure-package/examples/rnn/src/rnn/train_char_rnn.clj +++ b/contrib/clojure-package/examples/rnn/src/rnn/train_char_rnn.clj @@ -17,6 +17,7 @@ (ns rnn.train-char-rnn (:require [clojure.string :as string] + [clojure.java.shell :refer [sh]] [rnn.util :as util] [rnn.lstm :as lstm] [rnn.test-char-rnn :as test-rnn] @@ -34,6 +35,9 @@ ;;https://github.com/apache/incubator-mxnet/blob/master/example/rnn/old/char-rnn.ipynb +(when-not (.exists (clojure.java.io/file "data")) + (do (println "Retrieving data...") (sh "./get_data.sh"))) + ;; batch size for training (def batch-size 32) ;; we can support various length input diff --git a/contrib/clojure-package/examples/rnn/test/rnn/core_test.clj 
b/contrib/clojure-package/examples/rnn/test/rnn/core_test.clj new file mode 100644 index 000000000000..b198577241c3 --- /dev/null +++ b/contrib/clojure-package/examples/rnn/test/rnn/core_test.clj @@ -0,0 +1,26 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns rnn.core_test + (:require + [rnn.test-char-rnn :as rnn] + [clojure.test :refer :all])) + +(deftest check-trained-network + (is (= + "The joke that we can start by the challenges of the American people. The American people have been talking about how to compete with the streets of San Antonio who the courage to come together as one " + (rnn/rnn-test "data/obama" 75 200 false)))) \ No newline at end of file diff --git a/contrib/clojure-package/examples/tutorial/.gitignore b/contrib/clojure-package/examples/tutorial/.gitignore index c53038ec0e3d..338927e78384 100644 --- a/contrib/clojure-package/examples/tutorial/.gitignore +++ b/contrib/clojure-package/examples/tutorial/.gitignore @@ -9,3 +9,4 @@ pom.xml.asc /.nrepl-port .hgignore .hg/ +filename \ No newline at end of file diff --git a/contrib/clojure-package/examples/tutorial/project.clj b/contrib/clojure-package/examples/tutorial/project.clj index 8a78ec6a6abf..58a10f04f28b 100644 --- a/contrib/clojure-package/examples/tutorial/project.clj +++ b/contrib/clojure-package/examples/tutorial/project.clj @@ -19,6 +19,8 @@ :description "MXNET tutorials" :plugins [[lein-cljfmt "0.5.7"]] :dependencies [[org.clojure/clojure "1.9.0"] + [org.apache.mxnet.contrib.clojure/clojure-mxnet "1.5.0-SNAPSHOT"] + ;; Uncomment the one appropriate for your machine & configuration: #_[org.apache.mxnet.contrib.clojure/clojure-mxnet-linux-cpu "1.4.0"] #_[org.apache.mxnet.contrib.clojure/clojure-mxnet-linux-gpu "1.4.0"] diff --git a/contrib/clojure-package/examples/tutorial/src/tutorial/module.clj b/contrib/clojure-package/examples/tutorial/src/tutorial/module.clj index 4ca50ff5cd44..e19498111022 100644 --- a/contrib/clojure-package/examples/tutorial/src/tutorial/module.clj +++ b/contrib/clojure-package/examples/tutorial/src/tutorial/module.clj @@ -184,7 +184,7 @@ ])) (m/save-checkpoint mod {:prefix save-prefix :epoch epoch-num - :save-opt-states true}))) + :save-opt-states true}))) ;; INFO org.apache.mxnet.module.Module: Saved checkpoint to my-model-0000.params ;; INFO org.apache.mxnet.module.Module: Saved optimizer state to my-model-0000.states @@ -247,7 +247,40 @@ new-mod ;=> #object[org.apache.mxnet.module.Module 0x5304d0f4 "org.apache.mxnet. ;; Create `fit-params` and then use it to set `begin-epoch` so that ;; `fit` knows to resume from a saved epoch. 
+ + +(comment +;; FIXME +; Caused by: java.io.EOFException +; at java.io.DataInputStream.readInt(DataInputStream.java:392) +; at java.io.ObjectInputStream$BlockDataInputStream.readInt(ObjectInputStream.java:3182) +; at java.io.ObjectInputStream.readInt(ObjectInputStream.java:1032) +; at org.apache.mxnet.Optimizer$$anon$1$$anonfun$deserializeState$1.apply$mcVI$sp(Optimizer.scala:84) +; at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160) +; at org.apache.mxnet.Optimizer$$anon$1.deserializeState(Optimizer.scala:83) +; at org.apache.mxnet.module.Module$$anonfun$loadOptimizerStates$3.apply(Module.scala:594) +; at org.apache.mxnet.module.Module$$anonfun$loadOptimizerStates$3.apply(Module.scala:589) +; at scala.Option.foreach(Option.scala:257) +; at org.apache.mxnet.module.Module.loadOptimizerStates(Module.scala:589) +; at org.apache.mxnet.module.Module$$anonfun$initOptimizer$4.apply(Module.scala:407) +; at org.apache.mxnet.module.Module$$anonfun$initOptimizer$4.apply(Module.scala:406) +; at scala.Option.foreach(Option.scala:257) +; at org.apache.mxnet.module.Module.initOptimizer(Module.scala:406) +; at org.apache.mxnet.module.BaseModule.fit(BaseModule.scala:407) +; at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) +; at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) +; at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) +; at java.lang.reflect.Method.invoke(Method.java:498) +; at clojure.lang.Reflector.invokeMatchingMethod(Reflector.java:93) +; at clojure.lang.Reflector.invokeInstanceMethod(Reflector.java:28) +; at org.apache.clojure_mxnet.module$fit.invokeStatic(module.clj:551) +; at org.apache.clojure_mxnet.module$fit.invoke(module.clj:538) +; at tutorial.module$eval1787.invokeStatic(module.clj:250) +; at tutorial.module$eval1787.invoke(module.clj:250) + (m/fit new-mod {:train-data train-data :eval-data test-data :num-epoch 2 :fit-params (m/fit-params {:begin-epoch 1})}) + +) \ No newline at end of file diff --git a/contrib/clojure-package/examples/tutorial/src/tutorial/ndarray.clj b/contrib/clojure-package/examples/tutorial/src/tutorial/ndarray.clj index 8e51de215157..d18bb53daaf1 100644 --- a/contrib/clojure-package/examples/tutorial/src/tutorial/ndarray.clj +++ b/contrib/clojure-package/examples/tutorial/src/tutorial/ndarray.clj @@ -91,8 +91,8 @@ (ndarray/save "filename" {"arr1" arr1 "arr2" arr2}) ;; (you can also do "s3://path" or "hdfs") -(ndarray/save "/Users/daveliepmann/src/coursework/mxnet-clj-tutorials/abc" - {"arr1" arr1 "arr2" arr2}) +;; (ndarray/save "/Users/daveliepmann/src/coursework/mxnet-clj-tutorials/abc" +;; {"arr1" arr1 "arr2" arr2}) ;; To load: (def from-file (ndarray/load "filename")) @@ -114,7 +114,9 @@ from-file ;=>{"arr1" #object[org.apache.mxnet.NDArray 0x6115ba61 "org.apache.mxn (def cpu-a (ndarray/zeros [100 200])) (ndarray/context cpu-a) ;=> #object[org.apache.mxnet.Context 0x3f376123 "cpu(0)"] -(def gpu-b (ndarray/zeros [100 200] {:ctx (context/gpu 0)})) ;; to use with gpu +(comment + (def gpu-b (ndarray/zeros [100 200] {:ctx (context/gpu 0)})) ;; to use with gpu +) ;; Currently, we do not allow operations among arrays from different ;; contexts. 
To manually enable this, use the `copy-to` function to diff --git a/contrib/clojure-package/examples/tutorial/src/tutorial/symbol.clj b/contrib/clojure-package/examples/tutorial/src/tutorial/symbol.clj index ebf4f7e96797..e88260069015 100644 --- a/contrib/clojure-package/examples/tutorial/src/tutorial/symbol.clj +++ b/contrib/clojure-package/examples/tutorial/src/tutorial/symbol.clj @@ -125,7 +125,9 @@ net ;=> #object[org.apache.mxnet.Symbol 0x5c78c8c2 "org.apache.mxnet.Symbol@5c78 (first) (ndarray/->vec));=> [2.0 2.0 2.0 2.0] -;; We can evaluate the same symbol on GPU with different data. -;; (To do this you must have the correct native library jar defined as a dependency.) -(def ex (sym/bind c (context/gpu 0) {"a" (ndarray/ones [2 2]) - "b" (ndarray/ones [2 2])})) +(comment + ;; We can evaluate the same symbol on GPU with different data. + ;; (To do this you must have the correct native library jar defined as a dependency.) + (def ex (sym/bind c (context/gpu 0) {"a" (ndarray/ones [2 2]) + "b" (ndarray/ones [2 2])})) +) diff --git a/contrib/clojure-package/examples/tutorial/test/tutorial/core_test.clj b/contrib/clojure-package/examples/tutorial/test/tutorial/core_test.clj new file mode 100644 index 000000000000..0e5169c5cfaa --- /dev/null +++ b/contrib/clojure-package/examples/tutorial/test/tutorial/core_test.clj @@ -0,0 +1,27 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns tutorial.core_test + (:require [clojure.test :refer :all]) + (:require + [tutorial.introduction] + [tutorial.kvstore] + [tutorial.module] + [tutorial.ndarray] + [tutorial.symbol])) + +(deftest if-this-goes-here-then-tutorials-have-loaded-properly (is true)) \ No newline at end of file diff --git a/contrib/clojure-package/examples/visualization/test/visualization/core_test.clj b/contrib/clojure-package/examples/visualization/test/visualization/core_test.clj new file mode 100644 index 000000000000..1b10695cb34c --- /dev/null +++ b/contrib/clojure-package/examples/visualization/test/visualization/core_test.clj @@ -0,0 +1,28 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns visualization.core_test + (:require + [visualization.core :as visualization] + [clojure.test :refer :all])) + +(deftest check-pdf + (visualization/test-viz) + (let [new-pdf (clojure.java.io/as-file "testviz.pdf")] + (is (.exists new-pdf)) + (is (> 10000 (- (System/currentTimeMillis) (.lastModified new-pdf)))))) + \ No newline at end of file diff --git a/contrib/clojure-package/integration-tests.sh b/contrib/clojure-package/integration-tests.sh new file mode 100755 index 000000000000..3297fdc2c329 --- /dev/null +++ b/contrib/clojure-package/integration-tests.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +set -evx + +MXNET_HOME=${PWD} +EXAMPLES_HOME=${MXNET_HOME}/contrib/clojure-package/examples +#cd ${MXNET_HOME}/contrib/clojure-package +#lein test +#lein cloverage --codecov +for i in `find ${EXAMPLES_HOME} -name test` ; do +cd ${i} && lein test +done diff --git a/docs/install/build_from_source.md b/docs/install/build_from_source.md index e41b1d0f1804..e807fb44b599 100644 --- a/docs/install/build_from_source.md +++ b/docs/install/build_from_source.md @@ -2,6 +2,7 @@ This document explains how to build MXNet from source code. +**For Java/Scala/Clojure, please follow [this guide instead](./scala_setup.md)** ## Overview @@ -27,7 +28,6 @@ MXNet's newest and most popular API is Gluon. Gluon is built into the Python bin - [Python (includes Gluon)](../api/python/index.html) - [C++](../api/c++/index.html) - [Clojure](../api/clojure/index.html) - - Java (coming soon) - [Julia](../api/julia/index.html) - [Perl](../api/perl/index.html) - [R](../api/r/index.html) @@ -35,6 +35,7 @@ MXNet's newest and most popular API is Gluon. Gluon is built into the Python bin - [Java](../api/java/index.html)
+ ## Build Instructions by Operating System Detailed instructions are provided per operating system. Each of these guides also covers how to install the specific [Language Bindings](#installing-mxnet-language-bindings) you require. @@ -160,7 +161,7 @@ More information on turning these features on or off are found in the following ## Build Configurations There is a configuration file for make, -[`make/config.mk`](https://github.com/apache/incubator-mxnet/blob/master/make/config.mk), that contains all the compilation options. You can edit it and then run `make` or `cmake`. `cmake` is recommended for building MXNet (and is required to build with MKLDNN), however you may use `make` instead. +[`make/config.mk`](https://github.com/apache/incubator-mxnet/blob/master/make/config.mk), that contains all the compilation options. You can edit it and then run `make` or `cmake`. `cmake` is recommended for building MXNet (and is required to build with MKLDNN), however you may use `make` instead. For building with Java/Scala/Clojure, only `make` is supported.
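As a rough sketch (not part of the official steps), a `make`-based build for the Java/Scala/Clojure bindings could look like the following; the flag values are illustrative, and `scalapkg`/`scalainstall` are the Scala packaging targets provided by the top-level Makefile:

```bash
# Illustrative only: build the shared library with make, then package the Scala bindings.
# Flag values are examples; see make/config.mk for the full set of options.
make -j $(nproc) USE_OPENCV=1 USE_BLAS=openblas
make scalapkg        # build the Scala/Java package
make scalainstall    # install it into the local Maven repository
```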
@@ -203,18 +204,18 @@ It is recommended to set environment variable NCCL_LAUNCH_MODE to PARALLEL when ### Build MXNet with C++ -* To enable C++ package, just add `USE_CPP_PACKAGE=1` when you run `make` or `cmake`. +* To enable C++ package, just add `USE_CPP_PACKAGE=1` when you run `make` or `cmake` (see examples).
### Usage Examples -* `-j` runs multiple jobs against multi-core CPUs. - For example, you can specify using all cores on Linux as follows: ```bash -cmake -j$(nproc) +mkdir build && cd build +cmake -GNinja .. +ninja -v ``` @@ -222,28 +223,36 @@ cmake -j$(nproc) * Build MXNet with `cmake` and install with MKL DNN, GPU, and OpenCV support: ```bash -cmake -j USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 USE_MKLDNN=1 +mkdir build && cd build +cmake -DUSE_CUDA=1 -DUSE_CUDA_PATH=/usr/local/cuda -DUSE_CUDNN=1 -DUSE_MKLDNN=1 -GNinja .. +ninja -v ``` #### Recommended for Systems with NVIDIA GPUs * Build with both OpenBLAS, GPU, and OpenCV support: ```bash -cmake -j BLAS=open USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 +mkdir build && cd build +cmake -DBLAS=open -DUSE_CUDA=1 -DUSE_CUDA_PATH=/usr/local/cuda -DUSE_CUDNN=1 -GNinja .. +ninja -v ``` #### Recommended for Systems with Intel CPUs * Build MXNet with `cmake` and install with MKL DNN, and OpenCV support: ```bash -cmake -j USE_CUDA=0 USE_MKLDNN=1 +mkdir build && cd build +cmake -DUSE_CUDA=0 -DUSE_MKLDNN=1 -GNinja .. +ninja -v ``` #### Recommended for Systems with non-Intel CPUs * Build MXNet with `cmake` and install with OpenBLAS and OpenCV support: ```bash -cmake -j USE_CUDA=0 BLAS=open +mkdir build && cd build +cmake -DUSE_CUDA=0 -DBLAS=open -GNinja .. +ninja -v ``` #### Other Examples @@ -251,20 +260,26 @@ cmake -j USE_CUDA=0 BLAS=open * Build without using OpenCV: ```bash -cmake USE_OPENCV=0 +mkdir build && cd build +cmake -DUSE_OPENCV=0 -GNinja .. +ninja -v ``` * Build on **macOS** with the default BLAS library (Apple Accelerate) and Clang installed with `xcode` (OPENMP is disabled because it is not supported by the Apple version of Clang): ```bash -cmake -j BLAS=apple USE_OPENCV=0 USE_OPENMP=0 +mkdir build && cd build +cmake -DBLAS=apple -DUSE_OPENCV=0 -DUSE_OPENMP=0 -GNinja .. +ninja -v ``` * To use OpenMP on **macOS** you need to install the Clang compiler, `llvm` (the one provided by Apple does not support OpenMP): ```bash brew install llvm +mkdir build && cd build -cmake -j BLAS=apple USE_OPENMP=1 +cmake -DBLAS=apple -DUSE_OPENMP=1 -GNinja .. +ninja -v ```
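One way to make CMake pick up the Homebrew LLVM toolchain for the OpenMP build above is to export `CC`/`CXX` before configuring. This is a sketch that assumes a default Homebrew prefix; it is not part of the official instructions:

```bash
# Sketch: use Homebrew's clang/clang++ (which support OpenMP) for the CMake build
export CC=$(brew --prefix llvm)/bin/clang
export CXX=$(brew --prefix llvm)/bin/clang++
mkdir build && cd build
cmake -DBLAS=apple -DUSE_OPENMP=1 -GNinja ..
ninja -v
```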
diff --git a/docs/install/c_plus_plus.md b/docs/install/c_plus_plus.md index 6078877c27c8..6ad67e2803db 100644 --- a/docs/install/c_plus_plus.md +++ b/docs/install/c_plus_plus.md @@ -6,7 +6,8 @@ To enable C++ package, just add `USE_CPP_PACKAGE=1` in the [build from source](b For example to build MXNet with GPU support and the C++ package, OpenCV, and OpenBLAS, from the project root you would run: ```bash -make -j USE_CPP_PACKAGE=1 USE_OPENCV=1 USE_BLAS=openblas USE_CUDA=1 +cmake -DUSE_CUDA=1 -DUSE_CUDA_PATH=/usr/local/cuda -DUSE_CUDNN=1 -DUSE_MKLDNN=1 -DUSE_CPP_PACKAGE=1 -GNinja . +ninja -v ``` You may also want to add the MXNet shared library to your `LD_LIBRARY_PATH`: diff --git a/docs/install/java_setup.md b/docs/install/java_setup.md index 34b0967c421e..0075e9205880 100644 --- a/docs/install/java_setup.md +++ b/docs/install/java_setup.md @@ -89,11 +89,13 @@ The official Java Packages will be released with the release of MXNet 1.4 and wi The previously mentioned setup with Maven is recommended. Otherwise, the following instructions for macOS and Ubuntu are provided for reference only: +**If you have already built mxnet from source using `cmake`, run `make clean` and then follow the appropriate guide below*** + | OS | Step 1 | Step 2 | |---|---|---| |macOS | [Shared Library for macOS](../install/osx_setup.html#build-the-shared-library) | [Scala Package for macOS](http://mxnet.incubator.apache.org/install/osx_setup.html#install-the-mxnet-package-for-scala) | | Ubuntu | [Shared Library for Ubuntu](../install/ubuntu_setup.html#installing-mxnet-on-ubuntu) | [Scala Package for Ubuntu](http://mxnet.incubator.apache.org/install/ubuntu_setup.html#install-the-mxnet-package-for-scala) | -| Windows | [Shared Library for Windows](../install/windows_setup.html#build-the-shared-library) | Call for Contribution | +| Windows | | Call for Contribution | #### Build Java from an Existing MXNet Installation diff --git a/docs/install/osx_setup.md b/docs/install/osx_setup.md index 4e9293efce93..a2b59fe03618 100644 --- a/docs/install/osx_setup.md +++ b/docs/install/osx_setup.md @@ -96,7 +96,14 @@ The file called ```osx.mk``` has the configuration required for building MXNet o To build with MKLDNN ```bash -LIBRARY_PATH=$(brew --prefix llvm)/lib/ make -j $(sysctl -n hw.ncpu) CC=$(brew --prefix llvm)/bin/clang++ CXX=$(brew --prefix llvm)/bin/clang++ USE_OPENCV=1 USE_OPENMP=1 USE_MKLDNN=1 USE_BLAS=apple USE_PROFILER=1 +echo "CC=$(brew --prefix llvm)/bin/clang++" >> ./config.mk +echo "CXX=$(brew --prefix llvm)/bin/clang++" >> ./config.mk +echo "USE_OPENCV=1" >> ./config.mk +echo "USE_OPENMP=1" >> ./config.mk +echo "USE_MKLDNN=1" >> ./config.mk +echo "USE_BLAS=apple" >> ./config.mk +echo "USE_PROFILER=1" >> ./config.mk +LIBRARY_PATH=$(brew --prefix llvm)/lib/ make -j $(sysctl -n hw.ncpu) ``` If building with ```GPU``` support, add the following configuration to config.mk and build: diff --git a/docs/install/scala_setup.md b/docs/install/scala_setup.md index 0dadd8bca400..98e752b21dd5 100644 --- a/docs/install/scala_setup.md +++ b/docs/install/scala_setup.md @@ -79,11 +79,13 @@ https://mvnrepository.com/artifact/org.apache.mxnet The previously mentioned setup with Maven is recommended. 
Otherwise, the following instructions for macOS, Ubuntu, and Windows are provided for reference only: +**If you have already built mxnet from source using `cmake`, run `make clean` and then follow the appropriate guide below*** + | OS | Step 1 | Step 2 | |---|---|---| |macOS | [Shared Library for macOS](http://mxnet.incubator.apache.org/install/osx_setup.html#build-the-shared-library) | [Scala Package for macOS](http://mxnet.incubator.apache.org/install/osx_setup.html#install-the-mxnet-package-for-scala) | | Ubuntu | [Shared Library for Ubuntu](http://mxnet.incubator.apache.org/install/ubuntu_setup.html#installing-mxnet-on-ubuntu) | [Scala Package for Ubuntu](http://mxnet.incubator.apache.org/install/ubuntu_setup.html#install-the-mxnet-package-for-scala) | -| Windows | [Shared Library for Windows](http://mxnet.incubator.apache.org/install/windows_setup.html#build-the-shared-library) | Call for Contribution | +| Windows | | Call for Contribution | #### Build Scala from an Existing MXNet Installation diff --git a/docs/install/ubuntu_setup.md b/docs/install/ubuntu_setup.md index bd1b441d5556..bf964182b50a 100644 --- a/docs/install/ubuntu_setup.md +++ b/docs/install/ubuntu_setup.md @@ -153,7 +153,9 @@ If building on CPU and using OpenBLAS: ```bash git clone --recursive https://github.com/apache/incubator-mxnet.git cd incubator-mxnet - make -j $(nproc) USE_OPENCV=1 USE_BLAS=openblas + echo "USE_OPENCV = 1" >> ./config.mk + echo "USE_BLAS = openblas" >> ./config.mk + make -j $(nproc) ``` If building on CPU and using MKL and MKL-DNN (make sure MKL is installed according to [Math Library Selection](build_from_source.html#math-library-selection) and [MKL-DNN README](https://github.com/apache/incubator-mxnet/blob/master/MKLDNN_README.md)): @@ -161,7 +163,11 @@ If building on CPU and using MKL and MKL-DNN (make sure MKL is installed accordi ```bash git clone --recursive https://github.com/apache/incubator-mxnet.git cd incubator-mxnet - make -j $(nproc) USE_OPENCV=1 USE_BLAS=mkl USE_MKLDNN=1 + echo "USE_OPENCV = 1" >> ./config.mk + echo "USE_BLAS = openblas" >> ./config.mk + echo "USE_CUDA = 0" >> ./config.mk + echo "USE_MKLDNN = 1" >> ./config.mk + make -j $(nproc) ``` If building on GPU and you want OpenCV and OpenBLAS (make sure you have installed the [CUDA dependencies first](#cuda-dependencies)): @@ -169,7 +175,12 @@ If building on GPU and you want OpenCV and OpenBLAS (make sure you have installe ```bash git clone --recursive https://github.com/apache/incubator-mxnet.git cd incubator-mxnet - make -j $(nproc) USE_OPENCV=1 USE_BLAS=openblas USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 + echo "USE_OPENCV = 1" >> ./config.mk + echo "USE_BLAS = openblas" >> ./config.mk + echo "USE_CUDA = 1" >> ./config.mk + echo "USE_CUDA_PATH = /usr/local/cuda" >> ./config.mk + echo "USE_CUDNN = 1" >> ./config.mk + make -j $(nproc) ``` *Note* - USE_OPENCV and USE_BLAS are make file flags to set compilation options to use OpenCV and BLAS library. You can explore and use more compilation options in `make/config.mk` and also review common [usage examples](build_from_source.html#usage-examples). 
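After the build completes, a quick sanity check (illustrative only, assuming the Python bindings are installed or the repository's `python/` directory is on your `PYTHONPATH`) is to load the library and run a trivial operation:

```bash
# Illustrative check: confirm libmxnet.so loads and a basic NDArray op runs
python -c "import mxnet as mx; print(mx.nd.ones((2, 2)).asnumpy())"
```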
@@ -339,7 +350,9 @@ $ sudo apt-get install -y libopencv-dev ```bash $ git clone --recursive https://github.com/apache/incubator-mxnet $ cd incubator-mxnet -$ make -j $(nproc) USE_OPENCV=1 USE_BLAS=openblas +$ echo "USE_OPENCV = 1" >> ./config.mk +$ echo "USE_BLAS = openblas" >> ./config.mk +$ make -j $(nproc) ``` *Note* - USE_OPENCV and USE_BLAS are make file flags to set compilation options to use OpenCV and BLAS library. You can explore and use more compilation options in `make/config.mk`. diff --git a/mkldnn.mk b/mkldnn.mk index d79bbe7d2a0e..5af3e9b1d741 100644 --- a/mkldnn.mk +++ b/mkldnn.mk @@ -19,14 +19,20 @@ ifeq ($(USE_MKLDNN), 1) MKLDNN_SUBMODDIR = $(ROOTDIR)/3rdparty/mkldnn MKLDNN_BUILDDIR = $(MKLDNN_SUBMODDIR)/build MXNET_LIBDIR = $(ROOTDIR)/lib + MKLDNN_LIBRARY_TYPE=STATIC ifeq ($(UNAME_S), Darwin) OMP_LIBFILE = $(MKLDNNROOT)/lib/libiomp5.dylib MKLML_LIBFILE = $(MKLDNNROOT)/lib/libmklml.dylib - MKLDNN_LIBFILE = $(MKLDNNROOT)/lib/libmkldnn.0.dylib + MKLDNN_LIBFILE = $(MKLDNNROOT)/lib/libmkldnn.a +else ifeq ($(UNAME_S), Windows) + OMP_LIBFILE = $(MKLDNNROOT)/lib/libiomp5.so + MKLML_LIBFILE = $(MKLDNNROOT)/lib/libmklml_intel.so + MKLDNN_LIBFILE = $(MKLDNNROOT)/lib/libmkldnn.so + MKLDNN_LIBRARY_TYPE=SHARED else OMP_LIBFILE = $(MKLDNNROOT)/lib/libiomp5.so MKLML_LIBFILE = $(MKLDNNROOT)/lib/libmklml_intel.so - MKLDNN_LIBFILE = $(MKLDNNROOT)/lib/libmkldnn.so.0 + MKLDNN_LIBFILE = $(MKLDNNROOT)/lib/libmkldnn.a endif endif @@ -37,7 +43,7 @@ mkldnn_build: $(MKLDNN_LIBFILE) $(MKLDNN_LIBFILE): mkdir -p $(MKLDNNROOT) cd $(MKLDNN_SUBMODDIR) && rm -rf external && cd scripts && ./prepare_mkl.sh && cd .. && cp -a external/*/* $(MKLDNNROOT)/. - cmake $(MKLDNN_SUBMODDIR) -DCMAKE_INSTALL_PREFIX=$(MKLDNNROOT) -B$(MKLDNN_BUILDDIR) -DARCH_OPT_FLAGS="-mtune=generic" -DWITH_TEST=OFF -DWITH_EXAMPLE=OFF + cmake $(MKLDNN_SUBMODDIR) -DCMAKE_INSTALL_PREFIX=$(MKLDNNROOT) -B$(MKLDNN_BUILDDIR) -DARCH_OPT_FLAGS="-mtune=generic" -DWITH_TEST=OFF -DWITH_EXAMPLE=OFF -DMKLDNN_LIBRARY_TYPE=$(MKLDNN_LIBRARY_TYPE) $(MAKE) -C $(MKLDNN_BUILDDIR) VERBOSE=1 $(MAKE) -C $(MKLDNN_BUILDDIR) install mkdir -p $(MXNET_LIBDIR) diff --git a/python/mxnet/gluon/rnn/rnn_cell.py b/python/mxnet/gluon/rnn/rnn_cell.py index 98e96fc6da17..6ef3604eb973 100644 --- a/python/mxnet/gluon/rnn/rnn_cell.py +++ b/python/mxnet/gluon/rnn/rnn_cell.py @@ -102,6 +102,23 @@ def _mask_sequence_variable_length(F, data, length, valid_length, time_axis, mer squeeze_axis=True)) return outputs +def _reverse_sequences(sequences, unroll_step, valid_length=None): + if isinstance(sequences[0], symbol.Symbol): + F = symbol + else: + F = ndarray + + if valid_length is None: + reversed_sequences = list(reversed(sequences)) + else: + reversed_sequences = F.SequenceReverse(F.stack(*sequences, axis=0), + sequence_length=valid_length, + use_sequence_length=True) + reversed_sequences = F.split(reversed_sequences, axis=0, num_outputs=unroll_step, squeeze_axis=True) + + return reversed_sequences + + class RecurrentCell(Block): """Abstract base class for RNN cells @@ -1035,14 +1052,7 @@ def unroll(self, length, inputs, begin_state=None, layout='NTC', merge_outputs=N self.reset() inputs, axis, F, batch_size = _format_sequence(length, inputs, layout, False) - if valid_length is None: - reversed_inputs = list(reversed(inputs)) - else: - reversed_inputs = F.SequenceReverse(F.stack(*inputs, axis=0), - sequence_length=valid_length, - use_sequence_length=True) - reversed_inputs = _as_list(F.split(reversed_inputs, axis=0, num_outputs=length, - squeeze_axis=True)) + reversed_inputs = 
list(_reverse_sequences(inputs, length, valid_length)) begin_state = _get_begin_state(self, F, begin_state, inputs, batch_size) states = begin_state @@ -1056,15 +1066,8 @@ def unroll(self, length, inputs, begin_state=None, layout='NTC', merge_outputs=N begin_state=states[len(l_cell.state_info(batch_size)):], layout=layout, merge_outputs=False, valid_length=valid_length) - if valid_length is None: - reversed_r_outputs = list(reversed(r_outputs)) - else: - reversed_r_outputs = F.SequenceReverse(F.stack(*r_outputs, axis=0), - sequence_length=valid_length, - use_sequence_length=True, - axis=0) - reversed_r_outputs = _as_list(F.split(reversed_r_outputs, axis=0, num_outputs=length, - squeeze_axis=True)) + reversed_r_outputs = _reverse_sequences(r_outputs, length, valid_length) + if merge_outputs is None: merge_outputs = isinstance(l_outputs, tensor_types) l_outputs, _, _, _ = _format_sequence(None, l_outputs, layout, merge_outputs) diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index 4e6d0cdc929f..9a62620da85c 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -157,11 +157,13 @@ def waitall(): """Wait for all async operations to finish in MXNet. This function is used for benchmarking only. + .. warning:: - If your code has exceptions, `waitall` can cause silent failures. - For this reason you should avoid `waitall` in your code. - Use it only if you are confident that your code is error free. - Then make sure you call `wait_to_read` on all outputs after `waitall`. + + If your code has exceptions, `waitall` can cause silent failures. + For this reason you should avoid `waitall` in your code. + Use it only if you are confident that your code is error free. + Then make sure you call `wait_to_read` on all outputs after `waitall`. """ check_call(_LIB.MXNDArrayWaitAll()) diff --git a/scala-package/.gitignore b/scala-package/.gitignore index 6aa4da6b1cfc..8bc87f53e802 100644 --- a/scala-package/.gitignore +++ b/scala-package/.gitignore @@ -6,3 +6,4 @@ core/src/main/scala/org/apache/mxnet/SymbolAPIBase.scala core/src/main/scala/org/apache/mxnet/SymbolBase.scala examples/scripts/infer/images/ examples/scripts/infer/models/ +local-snapshot \ No newline at end of file diff --git a/scala-package/assembly/linux-x86_64-cpu/pom.xml b/scala-package/assembly/linux-x86_64-cpu/pom.xml index abefead175c7..1658f36e6bbd 100644 --- a/scala-package/assembly/linux-x86_64-cpu/pom.xml +++ b/scala-package/assembly/linux-x86_64-cpu/pom.xml @@ -14,6 +14,10 @@ MXNet Scala Package - Full Linux-x86_64 CPU-only jar + + ${project.parent.parent.basedir}/.. + + org.apache.mxnet diff --git a/scala-package/assembly/linux-x86_64-cpu/src/main/assembly/assembly.xml b/scala-package/assembly/linux-x86_64-cpu/src/main/assembly/assembly.xml index a574f8af25d9..f4c2017c8241 100644 --- a/scala-package/assembly/linux-x86_64-cpu/src/main/assembly/assembly.xml +++ b/scala-package/assembly/linux-x86_64-cpu/src/main/assembly/assembly.xml @@ -25,4 +25,10 @@ + + + ${MXNET_DIR}/lib/libmxnet.so + lib/native + + diff --git a/scala-package/assembly/linux-x86_64-gpu/pom.xml b/scala-package/assembly/linux-x86_64-gpu/pom.xml index 96ffa38c6af2..c80515e7b107 100644 --- a/scala-package/assembly/linux-x86_64-gpu/pom.xml +++ b/scala-package/assembly/linux-x86_64-gpu/pom.xml @@ -14,6 +14,10 @@ MXNet Scala Package - Full Linux-x86_64 GPU jar + + ${project.parent.parent.basedir}/.. 
+ + org.apache.mxnet diff --git a/scala-package/assembly/linux-x86_64-gpu/src/main/assembly/assembly.xml b/scala-package/assembly/linux-x86_64-gpu/src/main/assembly/assembly.xml index 3a064bf9f2ce..2aca64bdf1a9 100644 --- a/scala-package/assembly/linux-x86_64-gpu/src/main/assembly/assembly.xml +++ b/scala-package/assembly/linux-x86_64-gpu/src/main/assembly/assembly.xml @@ -25,4 +25,10 @@ + + + ${MXNET_DIR}/lib/libmxnet.so + lib/native + + diff --git a/scala-package/assembly/osx-x86_64-cpu/main/assembly/assembly.xml b/scala-package/assembly/osx-x86_64-cpu/main/assembly/assembly.xml deleted file mode 100644 index fecafecad31e..000000000000 --- a/scala-package/assembly/osx-x86_64-cpu/main/assembly/assembly.xml +++ /dev/null @@ -1,30 +0,0 @@ - - full - - jar - - false - - - - *:*:jar - - / - true - true - runtime - - - lib/native - ${artifact.artifactId}${dashClassifier?}.${artifact.extension} - false - false - false - - *:*:dll:* - *:*:so:* - *:*:jnilib:* - - - - diff --git a/scala-package/assembly/osx-x86_64-cpu/pom.xml b/scala-package/assembly/osx-x86_64-cpu/pom.xml index 5c5733a9a4ce..62979a140fdc 100644 --- a/scala-package/assembly/osx-x86_64-cpu/pom.xml +++ b/scala-package/assembly/osx-x86_64-cpu/pom.xml @@ -14,6 +14,10 @@ MXNet Scala Package - Full OSX-x86_64 CPU-only jar + + ${project.parent.parent.basedir}/.. + + org.apache.mxnet diff --git a/scala-package/assembly/osx-x86_64-cpu/src/main/assembly/assembly.xml b/scala-package/assembly/osx-x86_64-cpu/src/main/assembly/assembly.xml index bdbd09f170c0..e9bc3728fcd0 100644 --- a/scala-package/assembly/osx-x86_64-cpu/src/main/assembly/assembly.xml +++ b/scala-package/assembly/osx-x86_64-cpu/src/main/assembly/assembly.xml @@ -25,4 +25,10 @@ + + + ${MXNET_DIR}/lib/libmxnet.so + lib/native + + diff --git a/scala-package/core/pom.xml b/scala-package/core/pom.xml index 484fbbd96790..976383f2e7d5 100644 --- a/scala-package/core/pom.xml +++ b/scala-package/core/pom.xml @@ -12,6 +12,7 @@ true + ${project.parent.basedir}/.. 
mxnet-core_2.11 @@ -77,6 +78,9 @@ -Djava.library.path=${project.parent.basedir}/native/${platform}/target \ -Dlog4j.configuration=file://${project.basedir}/src/test/resources/log4j.properties + + ${MXNET_DIR}/lib + @@ -88,6 +92,10 @@ -Djava.library.path=${project.parent.basedir}/native/${platform}/target ${skipTests} + always + + ${MXNET_DIR}/lib + diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/util/NativeLibraryLoader.scala b/scala-package/core/src/main/scala/org/apache/mxnet/util/NativeLibraryLoader.scala index e94d320391fa..2ce893b478ed 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/util/NativeLibraryLoader.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/util/NativeLibraryLoader.scala @@ -85,12 +85,10 @@ private[mxnet] object NativeLibraryLoader { } logger.debug(s"Attempting to load $loadLibname") val libFileInJar = libPathInJar + loadLibname - val is: InputStream = getClass.getResourceAsStream(libFileInJar) - if (is == null) { - throw new UnsatisfiedLinkError(s"Couldn't find the resource $loadLibname") - } - logger.info(s"Loading $loadLibname from $libPathInJar copying to $libname") - loadLibraryFromStream(libname, is) + saveLibraryToTemp("libmxnet.so", "/lib/native/libmxnet.so") + val tempfile: File = saveLibraryToTemp(libname, libFileInJar) + + loadLibraryFromFile(libname, tempfile) } /** @@ -109,7 +107,7 @@ private[mxnet] object NativeLibraryLoader { @throws(classOf[IOException]) private def createTempFile(name: String): File = { - new File(_tempDir + File.separator + name) + new File(_tempDir, name) } /** @@ -117,11 +115,34 @@ private[mxnet] object NativeLibraryLoader { * and loads from there. * * @param libname name of the library (just used in constructing the library name) - * @param is InputStream pointing to the library + * @param tempfile File pointing to the library */ - private def loadLibraryFromStream(libname: String, is: InputStream) { + private def loadLibraryFromFile(libname: String, tempfile: File) { + try { + logger.debug("Loading library from {}", tempfile.getPath) + System.load(tempfile.getPath) + } catch { + case ule: UnsatisfiedLinkError => + logger.error("Couldn't load copied link file: {}", ule.toString) + throw ule + } + } + + /** + * Load a system library from a stream. Copies the library to a temp file + * and loads from there. 
+ * + * @param libname name of the library (just used in constructing the library name) + * @param resource String resource path in the jar file + */ + private def saveLibraryToTemp(libname: String, resource: String): File = { try { - val tempfile: File = createTempFile(libname) + val is: InputStream = getClass.getResourceAsStream(resource) + if (is == null) { + throw new UnsatisfiedLinkError(s"Couldn't find the resource $resource") + } + + val tempfile: File = new File(_tempDir, libname) val os: OutputStream = new FileOutputStream(tempfile) logger.debug("tempfile.getPath() = {}", tempfile.getPath) val savedTime: Long = System.currentTimeMillis @@ -131,20 +152,14 @@ private[mxnet] object NativeLibraryLoader { os.write(buf, 0, len) len = is.read(buf) } - os.flush() - val lock: InputStream = new FileInputStream(tempfile) os.close() + is.close() val seconds: Double = (System.currentTimeMillis - savedTime).toDouble / 1e3 - logger.debug(s"Copying took $seconds seconds.") - logger.debug("Loading library from {}", tempfile.getPath) - System.load(tempfile.getPath) - lock.close() + logger.debug(s"Copying $libname took $seconds seconds.") + tempfile } catch { case io: IOException => - logger.error("Could not create the temp file: {}", io.toString) - case ule: UnsatisfiedLinkError => - logger.error("Couldn't load copied link file: {}", ule.toString) - throw ule + throw new UnsatisfiedLinkError(s"Could not create temp file for $libname") } } } diff --git a/scala-package/examples/pom.xml b/scala-package/examples/pom.xml index 8d3d156a0b18..3ebb39b9a67e 100644 --- a/scala-package/examples/pom.xml +++ b/scala-package/examples/pom.xml @@ -15,6 +15,7 @@ true + ${project.parent.basedir}/.. @@ -42,6 +43,20 @@ linux-x86_64-gpu + + deployLocal + + + + org.apache.maven.plugins + maven-deploy-plugin + + false + + + + + release @@ -137,6 +152,9 @@ -Djava.library.path=${project.parent.basedir}/native/${platform}/target \ -Dlog4j.configuration=file://${project.basedir}/src/test/resources/log4j.properties + + ${MXNET_DIR}/lib + diff --git a/scala-package/infer/pom.xml b/scala-package/infer/pom.xml index ac76cdd19f3b..fb5cf370a009 100644 --- a/scala-package/infer/pom.xml +++ b/scala-package/infer/pom.xml @@ -15,6 +15,7 @@ true + ${project.parent.basedir}/.. @@ -77,6 +78,9 @@ -Djava.library.path=${project.parent.basedir}/native/${platform}/target \ -Dlog4j.configuration=file://${project.basedir}/src/test/resources/log4j.properties + + ${MXNET_DIR}/lib + diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala index 8c48742e6f0d..0466693be9bc 100644 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala @@ -80,10 +80,11 @@ class Predictor private[mxnet] (val predictor: org.apache.mxnet.infer.Predictor) An extra List is needed for when the model has more than one input. 
* @return Indexed sequence array of outputs */ - def predict(input: java.util.List[java.util.List[Float]]): - java.util.List[java.util.List[Float]] = { + def predict(input: java.util.List[java.util.List[java.lang.Float]]): + java.util.List[java.util.List[java.lang.Float]] = { val in = JavaConverters.asScalaIteratorConverter(input.iterator).asScala.toIndexedSeq - (predictor.predict(in map {a => a.asScala.toArray}) map {b => b.toList.asJava}).asJava + (predictor.predict(in map {a => a.asScala.map(Float2float).toArray}) + map {b => b.map(float2Float).toList.asJava}).asJava } diff --git a/scala-package/init-native/linux-x86_64/pom.xml b/scala-package/init-native/linux-x86_64/pom.xml index b71d7cf71528..242f2f3d5626 100644 --- a/scala-package/init-native/linux-x86_64/pom.xml +++ b/scala-package/init-native/linux-x86_64/pom.xml @@ -16,6 +16,10 @@ so + + ${project.parent.parent.basedir}/.. + + org.apache.mxnet @@ -62,22 +66,24 @@ -std=c++0x - -I${project.basedir}/../../../include - ${all_includes} - ${cflags} + -I${MXNET_DIR}/include + -I${MXNET_DIR}/3rdparty/dmlc-core/include + -I${MXNET_DIR}/3rdparty/mshadow + -I${MXNET_DIR}/3rdparty/dlpack/include + -I${MXNET_DIR}/3rdparty/tvm/nnvm/include + -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CUDA=0 + -O3 -DNDEBUG=1 -fPIC -msse3 -mf16c + -Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs -shared - ${all_ldpaths} -Wl,--whole-archive - ${lddeps} - -Wl,--no-whole-archive + -Wl,--no-whole-archive -pthread -lm -fopenmp -lrt - ${ldflags} - -fopenmp + -Wl,-rpath=${dollar}ORIGIN -lmxnet -L${MXNET_DIR}/lib @@ -86,7 +92,6 @@ javah generate-sources - linux default ${project.build.directory}/custom-javah ${basedir} @@ -101,6 +106,25 @@ + + + org.codehaus.mojo + exec-maven-plugin + 1.6.0 + + + link-native-lib + generate-resources + + exec + + + ln + -sf ${MXNET_DIR}/lib/libmxnet.so ${project.build.directory}/libmxnet.so + + + + diff --git a/scala-package/init-native/osx-x86_64/pom.xml b/scala-package/init-native/osx-x86_64/pom.xml index b4a0b1d6584a..12f4d800eba4 100644 --- a/scala-package/init-native/osx-x86_64/pom.xml +++ b/scala-package/init-native/osx-x86_64/pom.xml @@ -16,6 +16,10 @@ jnilib + + ${project.parent.parent.basedir}/.. 
+ + org.apache.mxnet @@ -62,8 +66,14 @@ -std=c++0x - -I${project.basedir}/../../../include - ${cflags} + -I${MXNET_DIR}/include + -I${MXNET_DIR}/3rdparty/dmlc-core/include + -I${MXNET_DIR}/3rdparty/mshadow + -I${MXNET_DIR}/3rdparty/dlpack/include + -I${MXNET_DIR}/3rdparty/tvm/nnvm/include + -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CUDA=0 + -g -O0 -fPIC -msse3 -mf16c + -Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs -shared @@ -72,11 +82,9 @@ -framework JavaVM -Wl,-exported_symbol,_Java_* -Wl,-x - ${lddeps} - -force_load ${project.basedir}/../../../lib/libmxnet.a - ${ldflags} + -lmxnet -L${MXNET_DIR}/lib @@ -85,7 +93,6 @@ javah generate-sources - darwin default ${project.build.directory}/custom-javah ${basedir} @@ -100,6 +107,36 @@ + + + org.codehaus.mojo + exec-maven-plugin + 1.6.0 + + + post-native-build + package + + exec + + + install_name_tool + -change lib/libmxnet.so @loader_path/libmxnet.so ${project.build.directory}/${artifactId}.jnilib + + + + link-native-lib + generate-resources + + exec + + + ln + -sf ${MXNET_DIR}/lib/libmxnet.so ${project.build.directory}/libmxnet.so + + + + diff --git a/scala-package/native/README.md b/scala-package/native/README.md new file mode 100644 index 000000000000..cb6dd3890dd2 --- /dev/null +++ b/scala-package/native/README.md @@ -0,0 +1,63 @@ +# MXNet Scala JNI + +MXNet Scala JNI is a thin wrapper layer over the underlying libmxnet.so. + +## javah +JNI native code requires a header file that matches the java/scala interface; +this file is usually generated with javah. + +In our case, jni_helper_func.h is generated and is used to compile the native code. + + +## Linker options + +Scala JNI (libmxnet-scala.so/libmxnet-scala.jnilib) is dynamically linked to libmxnet.so. +MXNet Scala will first try to load libmxnet.so from the system LD_LIBRARY_PATH. +If that fails, it will try to resolve libmxnet.so in the same location as the libmxnet-scala.so file. + +### Linux +``` +-Wl,-rpath=$ORIGIN -lmxnet +``` +The above option tells the system to look for libmxnet.so in the same location as the Scala JNI library. + + +### Mac OSX +On Mac, we have to execute the install_name_tool command to change the library loading path: +```bash +install_name_tool -change lib/libmxnet.so @loader_path/libmxnet.so libmxnet-scala.jnilib +``` + +Other linker options: +* -shared : link as a shared library +* -Wl,-install_name,libmxnet-scala.jnilib : avoid using the build machine's absolute path +* -framework JavaVM : standard JNI option for macOS +* -Wl,-exported_symbol,_Java_* : standard JNI option for macOS +* -Wl,-x : Do not put non-global symbols in the output file's symbol table. + + +## Compiler flags + +The Scala JNI code technically doesn't depend on any of the MXNet make flags; +however, the c_api.h header pulls in headers from many other dependencies, +which requires us to define MSHADOW_USE_MKL and MSHADOW_USE_CUDA when compiling the JNI code. +These flags are not actually used by the JNI code and won't impact Scala's behavior.
+ + +### Linux + +``` +-DMSHADOW_USE_MKL=0 +-DMSHADOW_USE_CUDA=0 +-O3 -DNDEBUG=1 -fPIC -msse3 -mf16c +-Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs +``` + +### Mac OSX + +``` +-DMSHADOW_USE_MKL=0 +-DMSHADOW_USE_CUDA=0 +-g -O0 -fPIC -msse3 -mf16c +-Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs +``` diff --git a/scala-package/native/linux-x86_64-cpu/pom.xml b/scala-package/native/linux-x86_64-cpu/pom.xml index 2415cf7d26db..7cfd01a4ef79 100644 --- a/scala-package/native/linux-x86_64-cpu/pom.xml +++ b/scala-package/native/linux-x86_64-cpu/pom.xml @@ -16,6 +16,10 @@ so + + ${project.parent.parent.basedir}/.. + + org.apache.mxnet @@ -62,22 +66,20 @@ -std=c++0x - -I${project.basedir}/../../../include - ${all_includes} - ${cflags} + -I${MXNET_DIR}/include + -I${MXNET_DIR}/3rdparty/dmlc-core/include + -I${MXNET_DIR}/3rdparty/mshadow + -I${MXNET_DIR}/3rdparty/dlpack/include + -I${MXNET_DIR}/3rdparty/tvm/nnvm/include + -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CUDA=0 + -O3 -DNDEBUG=1 -fPIC -msse3 -mf16c + -Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs -shared - - ${all_ldpaths} - -Wl,--whole-archive - ${lddeps} - -Wl,--no-whole-archive - - ${ldflags} - -fopenmp + -Wl,-rpath=${dollar}ORIGIN -lmxnet -L${MXNET_DIR}/lib @@ -86,7 +88,6 @@ javah generate-sources - linux default ${project.build.directory}/custom-javah ${basedir} diff --git a/scala-package/native/linux-x86_64-gpu/pom.xml b/scala-package/native/linux-x86_64-gpu/pom.xml index 0186217234bc..668f330b5ff9 100644 --- a/scala-package/native/linux-x86_64-gpu/pom.xml +++ b/scala-package/native/linux-x86_64-gpu/pom.xml @@ -16,6 +16,10 @@ so + + ${project.parent.parent.basedir}/.. + + org.apache.mxnet @@ -62,22 +66,20 @@ -std=c++0x - -I${project.basedir}/../../../include - ${all_includes} - ${cflags} + -I${MXNET_DIR}/include + -I${MXNET_DIR}/3rdparty/dmlc-core/include + -I${MXNET_DIR}/3rdparty/mshadow + -I${MXNET_DIR}/3rdparty/dlpack/include + -I${MXNET_DIR}/3rdparty/tvm/nnvm/include + -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CUDA=0 + -O3 -DNDEBUG=1 -fPIC -msse3 -mf16c + -Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs -shared - - ${all_ldpaths} - -Wl,--whole-archive - ${lddeps} - -Wl,--no-whole-archive - - ${ldflags} - -fopenmp + -Wl,-rpath=${dollar}ORIGIN -lmxnet -L${MXNET_DIR}/lib @@ -86,7 +88,6 @@ javah generate-sources - linux default ${project.build.directory}/custom-javah ${basedir} diff --git a/scala-package/native/osx-x86_64-cpu/pom.xml b/scala-package/native/osx-x86_64-cpu/pom.xml index 0ab7ca1dd0f0..425ca96815de 100644 --- a/scala-package/native/osx-x86_64-cpu/pom.xml +++ b/scala-package/native/osx-x86_64-cpu/pom.xml @@ -16,6 +16,10 @@ jnilib + + ${project.parent.parent.basedir}/.. 
+ + org.apache.mxnet @@ -62,8 +66,14 @@ -std=c++0x - -I../../../include - ${cflags} + -I${MXNET_DIR}/include + -I${MXNET_DIR}/3rdparty/dmlc-core/include + -I${MXNET_DIR}/3rdparty/mshadow + -I${MXNET_DIR}/3rdparty/dlpack/include + -I${MXNET_DIR}/3rdparty/tvm/nnvm/include + -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CUDA=0 + -g -O0 -fPIC -msse3 -mf16c + -Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs -shared @@ -72,12 +82,9 @@ -framework JavaVM -Wl,-exported_symbol,_Java_* -Wl,-x - ${lddeps} - -force_load ${project.basedir}/../../../lib/libmxnet.a - -force_load ${project.basedir}/../../../3rdparty/tvm/nnvm/lib/libnnvm.a - ${ldflags} + -Wl,-install_name,libmxnet-scala.jnilib -lmxnet -L${MXNET_DIR}/lib @@ -86,7 +93,6 @@ javah generate-sources - darwin default ${project.build.directory}/custom-javah ${basedir} @@ -101,6 +107,36 @@ + + + org.codehaus.mojo + exec-maven-plugin + 1.6.0 + + + post-native-build + package + + exec + + + install_name_tool + -change lib/libmxnet.so @loader_path/libmxnet.so ${project.build.directory}/${artifactId}.jnilib + + + + link-native-lib + generate-resources + + exec + + + ln + -sf ${MXNET_DIR}/lib/libmxnet.so ${project.build.directory}/libmxnet.so + + + + diff --git a/scala-package/packageTest/Makefile b/scala-package/packageTest/Makefile new file mode 100644 index 000000000000..6073ff8a722f --- /dev/null +++ b/scala-package/packageTest/Makefile @@ -0,0 +1,87 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +SCALA_VERSION_PROFILE := 2.11 +SCALA_VERSION := 2.11.8 +MXNET_VERSION := "[1.3.0-SNAPSHOT,)" + +MXNET_REPO = https://repository.apache.org/content/repositories/snapshots + +ifeq ($(OS),Windows_NT) + UNAME_S := Windows +else + UNAME_S := $(shell uname -s) +endif + +ifeq ($(UNAME_S), Windows) + # TODO: currently scala package does not support windows + SCALA_PKG_PROFILE := windows +else + ifeq ($(UNAME_S), Darwin) + SCALA_PKG_PROFILE := osx-x86_64-cpu + else + SCALA_PKG_PROFILE := linux-x86_64 + ifeq ($(USE_CUDA), 1) + SCALA_PKG_PROFILE := $(SCALA_PKG_PROFILE)-gpu + else + SCALA_PKG_PROFILE := $(SCALA_PKG_PROFILE)-cpu + endif + endif +endif + +PROFILES := -Ptest +ifeq ($(UNIT), 1) + PROFILES := "$(PROFILES),unittest" +endif +ifeq ($(INTEGRATION), 1) + PROFILES := "$(PROFILES),integrationtest" +endif + +ifneq ($(UNIT), 1) + ifneq ($(INTEGRATION), 1) + PROFILES := "$(PROFILES),unittest,integrationtest" + endif +endif + + +clean: + (mvn clean -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION)) + +testinstall: + (mvn integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + $(PROFILES) \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION)) + +testlocal: + (mvn integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + $(PROFILES),fromLocal \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION)) + +testsnapshot: + (mvn integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + $(PROFILES),fromSnapshots \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.repo=$(MXNET_REPO) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION)) diff --git a/scala-package/packageTest/README.md b/scala-package/packageTest/README.md new file mode 100644 index 000000000000..3f1eeb842f07 --- /dev/null +++ b/scala-package/packageTest/README.md @@ -0,0 +1,72 @@ +# MXNet Scala Package Test + +This is a project created to run the test suite on a fully packaged mxnet jar. The test suite is found locally, but mxnet comes from the target jarfile. + +## General Setup + +To set up the packageTest, you must first build your tests. To build the tests, follow these steps from the mxnet main directory: + +1. Build MXNet and the scala package from source following the directions [here](https://mxnet.incubator.apache.org/install/scala_setup.html#source) +2. Build the tests by running `make scalatestcompile`. +3. Follow the setup instructions below for your testing goal + +## Running + +There are three different modes of operation for testing, based on where the jar under test comes from: + +### Test Installed Jars + +If you have a jar file, you can install it to your maven cache repository (`~/.m2/repository`). This might be useful if you acquire the .jar file from elsewhere. To install, it is easiest to use `mvn install:install-file -Dfile=<path-to-jar> -DpomFile=<path-to-pom>`. If the pom file is not available, you can also run `mvn install:install-file -Dfile=<path-to-jar> -DgroupId=<group-id> -DartifactId=<artifact-id> -Dversion=<version> -Dpackaging=<packaging>`. With the full mxnet jar, this might look like `mvn install:install-file -Dfile=<path-to-jar> -DgroupId=org.apache.mxnet -DartifactId=mxnet-full_2.11-linux-x86_64-cpu -Dversion=1.3.0 -Dpackaging=jar`. + +You can also run `make scalainstall` to install from a local build. + +After installing, run `make testinstall` in the package test directory to run the tests.
Note that unless you also install an additional mxnetexamples jar, you can only run the unit tests. + +### Test Local Deployment + +To test the jars that would be produced by a deployment, you can run `make scaladeploylocal` from the main mxnet directory. This produces a local snapshot located at `scala-package/local-snapshot`. To test this local snapshot, run `make testlocal`. + +### Remote Repository Snapshot + +This mode tests a jar located in a remote repository. The default repository is the apache snapshot repository located at `https://repository.apache.org/content/repositories/snapshots`. Note that the actual jar in a repository should be located at `$repoUrl/org/apache/mxnet/mxnet-full_$scalaVersion-$osMode/$version/*.jar`. + +Test the snapshot repo using `make testsnapshot` or a different repo using `make testsnapshot MXNET_REPO=$NEW_REPO_URL`. + +### Options + +You are able to run unit tests, integration tests, or both using this utility. To run the unit tests, add the flag `UNIT=1` to make (e.g. `make testsnapshot UNIT=1`). Use `INTEGRATION=1` for integration tests. The default behavior is to run both the unit and integration tests. However, the integration tests require that the mxnet examples be installed in addition to the full mxnet package (see the test mode instructions above). + +As an additional option, you can specify the mxnet version with `MXNET_VERSION=1.3.1-SNAPSHOT`. + +## Cleaning Up + +You can clean temporary files and target artifacts by running `make scalaclean`. + +## Troubleshooting + +### Missing Examples + +If the run fails with the following error +``` +[ERROR] Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0:test (test) on project mxnet-scala-packagetest-examples_2.11: There are test failures -> [Help 1] +[ERROR] +[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch. +[ERROR] Re-run Maven using the -X switch to enable full debug logging. +[ERROR] +[ERROR] For more information about the errors and possible solutions, please read the following articles: +[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException +[ERROR] +[ERROR] After correcting the problems, you can resume the build with the command +[ERROR] mvn -rf :mxnet-scala-packagetest-examples_2.11 +Makefile:57: recipe for target 'scalaintegrationtest' failed +make: *** [scalaintegrationtest] Error 1 +``` + +and the stack trace begins with the following, + +``` +*** RUN ABORTED *** + java.lang.NoClassDefFoundError: org/apache/mxnetexamples/Util$ +``` + +you are missing the mxnetexamples package. See the installation section for your test mode for details.
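For reference, after building MXNet and the Scala package (step 1 of the setup above), an end-to-end local test run might look like the following sketch, using the targets described in this document:

```bash
# Sketch: compile the Scala tests, deploy a local snapshot, then test against it
make scalatestcompile
make scaladeploylocal
cd scala-package/packageTest
make testlocal
```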
diff --git a/scala-package/packageTest/core/pom.xml b/scala-package/packageTest/core/pom.xml new file mode 100644 index 000000000000..bdcd7662f082 --- /dev/null +++ b/scala-package/packageTest/core/pom.xml @@ -0,0 +1,39 @@ + + + 4.0.0 + + PackageTest + mxnet-scala-packagetest_2.11 + 1.0-SNAPSHOT + ../pom.xml + + + mxnet-scala-packagetest-core_2.11 + MXNet Scala Package Test + pom + + + + unittest + + false + + + + + + + + org.scalatest + scalatest-maven-plugin + 1.0 + + ${project.build.outputDirectory},${project.build.testOutputDirectory},../../core/target/test-classes + + + + + + diff --git a/scala-package/packageTest/core/scripts b/scala-package/packageTest/core/scripts new file mode 120000 index 000000000000..f806668aa847 --- /dev/null +++ b/scala-package/packageTest/core/scripts @@ -0,0 +1 @@ +../../core/scripts \ No newline at end of file diff --git a/scala-package/packageTest/examples/pom.xml b/scala-package/packageTest/examples/pom.xml new file mode 100644 index 000000000000..e11be657e225 --- /dev/null +++ b/scala-package/packageTest/examples/pom.xml @@ -0,0 +1,48 @@ + + + 4.0.0 + + PackageTest + mxnet-scala-packagetest_2.11 + 1.0-SNAPSHOT + ../pom.xml + + + mxnet-scala-packagetest-examples_2.11 + MXNet Scala Package Test + pom + + + + integrationtest + + false + + + + + + + + org.scalatest + scalatest-maven-plugin + 1.0 + + ${project.build.outputDirectory},${project.build.testOutputDirectory},../../examples/target/test-classes + + + + + + + + org.apache.mxnet + mxnet-examples_${mxnet.scalaprofile} + ${mxnet.version} + test + + + + diff --git a/scala-package/packageTest/examples/scripts b/scala-package/packageTest/examples/scripts new file mode 120000 index 000000000000..2bba4eeece74 --- /dev/null +++ b/scala-package/packageTest/examples/scripts @@ -0,0 +1 @@ +../../examples/scripts \ No newline at end of file diff --git a/scala-package/packageTest/infer/pom.xml b/scala-package/packageTest/infer/pom.xml new file mode 100644 index 000000000000..7c5a096d6e14 --- /dev/null +++ b/scala-package/packageTest/infer/pom.xml @@ -0,0 +1,38 @@ + + + 4.0.0 + + PackageTest + mxnet-scala-packagetest_2.11 + 1.0-SNAPSHOT + ../pom.xml + + + mxnet-scala-packagetest-infer_2.11 + MXNet Scala Package Test + pom + + + + unittest + + false + + + + + + + + org.scalatest + scalatest-maven-plugin + 1.0 + + ${project.build.outputDirectory},${project.build.testOutputDirectory},../../infer/target/test-classes + + + + + diff --git a/scala-package/packageTest/pom.xml b/scala-package/packageTest/pom.xml new file mode 100644 index 000000000000..9c5c11cf2779 --- /dev/null +++ b/scala-package/packageTest/pom.xml @@ -0,0 +1,196 @@ + + + 4.0.0 + PackageTest + mxnet-scala-packagetest_2.11 + 1.0-SNAPSHOT + MXNet Scala Package Test + pom + + + core + infer + + + + + test + + + integrationtest + + examples + + + + fromSnapshots + + + apache-snapshots + ${mxnet.repo} + default + + true + + + + + + fromLocal + + + local-snapshot + file://${basedir}/../local-snapshot + + true + + + + + + + + true + + + + + org.apache.mxnet + mxnet-full_${mxnet.scalaprofile}-${mxnet.profile} + ${mxnet.version} + + + org.scala-lang + scala-library + ${scala.version} + + + commons-io + commons-io + 2.4 + + + org.scalatest + scalatest_${mxnet.scalaprofile} + 3.0.4 + test + + + org.scalacheck + scalacheck_${mxnet.scalaprofile} + 1.13.5 + test + + + org.mockito + mockito-all + 1.10.19 + test + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.3 + + 1.6 + 1.6 + UTF-8 + + + + maven-resources-plugin + 2.7 + + + org.apache.maven.plugins + 
maven-dependency-plugin + 2.9 + + + copy-dependencies + package + + copy-dependencies + + + ${project.build.outputDirectory}/lib + runtime + test,provided + false + false + true + + + + + + org.apache.maven.plugins + maven-jar-plugin + 2.5 + + + package + + jar + + + + **/* + + + + + + + net.alchim31.maven + scala-maven-plugin + 3.2.2 + + + compile + + compile + + compile + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.19 + + true + + + + org.scalatest + scalatest-maven-plugin + 1.0 + + ${skipTests} + ${project.build.directory}/surefire-reports + . + F + WDF TestSuite.txt + + + + test + integration-test + + test + + + + + + + + diff --git a/scala-package/pom.xml b/scala-package/pom.xml index 151462cbcc68..6eb573bf3e23 100644 --- a/scala-package/pom.xml +++ b/scala-package/pom.xml @@ -39,6 +39,8 @@ 2.11.8 2.11 + g++ + $ pom diff --git a/src/operator/quantization/quantize_graph_pass.cc b/src/operator/quantization/quantize_graph_pass.cc index b1e3bb67ad79..fcd0fb4218be 100644 --- a/src/operator/quantization/quantize_graph_pass.cc +++ b/src/operator/quantization/quantize_graph_pass.cc @@ -222,7 +222,7 @@ Graph QuantizeGraph(Graph &&src) { // skip non-quantized input continue; } - if (quantized_op_map.count(e.node->op())) { + if (NeedQuantize(e.node, excluded_nodes)) { // here we calculate the output number (exclude min/max, in order to // calculate min/max index from mirror node) based on assumption that // there is only 1min and 1max output from mirror node (which is @@ -314,7 +314,8 @@ Graph QuantizeGraph(Graph &&src) { std::vector outputs; for (const auto& e : src.outputs) { - if (quantized_op_map.count(e.node->op())) { + if (NeedQuantize(e.node, excluded_nodes)) { + // Only insert dequantize for those Ops supports quantize and not excluded. NodePtr mirror_node = mirror_map.at(e.node.get()); NodeEntry mirror_entry = NodeEntry{mirror_node, e.index, e.version}; size_t num_inputs = e.node->num_inputs(); diff --git a/tests/cpp/operator/mkldnn_operator_test.cc b/tests/cpp/operator/mkldnn_operator_test.cc index a500d4c2df6d..3bf3228a4b44 100644 --- a/tests/cpp/operator/mkldnn_operator_test.cc +++ b/tests/cpp/operator/mkldnn_operator_test.cc @@ -347,6 +347,31 @@ OpAttrs GetDeconvBackwardOp(int kernel, int num_filters, int dim, int stride, in return attrs; } +OpAttrs GetBNOp() { + OpAttrs attrs; + attrs.attrs.op = Op::Get("BatchNorm"); + attrs.num_inputs = 5; + attrs.num_outputs = 3; + attrs.accept_dims.insert(4); + attrs.requests.insert(OpReqType::kWriteTo); + attrs.attrs.op->attr_parser(&attrs.attrs); + attrs.input_types = ArrayTypes::Normal | + ArrayTypes::MKLDNN; + attrs.output_types = ArrayTypes::Normal | + ArrayTypes::MKLDNN; + return attrs; +} + +OpAttrs GetBNBackwardOp() { + OpAttrs attrs; + attrs.attrs.op = Op::Get("_backward_BatchNorm"); + attrs.num_inputs = 8; + attrs.num_outputs = 3; + attrs.attrs.op->attr_parser(&attrs.attrs); + attrs.requests.insert(OpReqType::kWriteTo); + return attrs; +} + void AssertEqual(const std::vector &in_arrs, const std::vector &out_arrs, float rtol = 1e-5, float atol = 1e-8) { @@ -710,7 +735,7 @@ void TestOpEx(const OpAttrs &forward_attrs, const OpAttrs &backwards_attrs) { // If the array is a view, we shouldn't write data to it. 
if (in_arr.arr.IsView()) - continue; + continue; NDArrayAttrs orig(in_arr.arr.Copy(in_arr.arr.ctx()), "InPlace Copy"); for (int i = 0; i < forward_attrs.num_inputs; i++) @@ -735,6 +760,124 @@ void TestOpEx(const OpAttrs &forward_attrs, const OpAttrs &backwards_attrs) { } } + +void TestOpExBNBackward(const OpAttrs &forward_attrs, + const OpAttrs &backwards_attrs, + const OpReqType &req, + const std::vector &inputs, + const std::vector &outputs, + const NDArrayAttrs &in_arr, + NDArrayAttrs* out_arr) { + std::vector backwards_input(backwards_attrs.num_inputs); + + std::vector backwards_buffer(backwards_attrs.num_outputs); + std::vector backwards_buffer2(backwards_attrs.num_outputs); + + std::vector backwards_outputs(backwards_attrs.num_outputs); + std::vector backwards_ex_outputs(backwards_attrs.num_outputs); + std::vector backwards_req(backwards_attrs.num_outputs); + + if (req == kWriteTo) { + backwards_input[0] = &(out_arr->arr); // output grad + backwards_input[1] = outputs[1]; // mean + backwards_input[2] = outputs[2]; // var + backwards_input[3] = inputs[0]; // data + backwards_input[4] = inputs[1]; // gamma + backwards_input[5] = inputs[2]; // beta + backwards_input[6] = inputs[3]; // moving mean + backwards_input[7] = inputs[4]; // moving var + + for (size_t i = 0; i < backwards_attrs.num_outputs; i++) { + auto tmp_output = in_arr.arr; + backwards_buffer.emplace_back(tmp_output.Copy(Context())); + backwards_buffer2.emplace_back(tmp_output.Copy(Context())); + backwards_outputs[i] = &backwards_buffer.back(); + backwards_ex_outputs[i] = &backwards_buffer2.back(); + Engine::Get()->WaitForAll(); + backwards_req[i] = kWriteTo; + } + + std::cout << "Backwards: "; + PrintVerifyMsg(*out_arr, in_arr); + Imperative::Get()->InvokeOp( + Context(), backwards_attrs.attrs, backwards_input, backwards_outputs, + backwards_req, DispatchMode::kFCompute, mxnet::OpStatePtr()); + Imperative::Get()->InvokeOp( + Context(), backwards_attrs.attrs, backwards_input, backwards_ex_outputs, + backwards_req, DispatchMode::kFComputeEx, mxnet::OpStatePtr()); + Engine::Get()->WaitForAll(); + AssertEqual(backwards_outputs, backwards_ex_outputs); + } +} + +// compares output of fcompute with fcomputex +void TestOpExBN(const OpAttrs &forward_attrs, const OpAttrs &backwards_attrs) { + std::vector inputs(forward_attrs.num_inputs); + std::vector inputs2(forward_attrs.num_inputs); + std::vector inputs_buffer(forward_attrs.num_inputs); + std::vector inputs2_buffer(forward_attrs.num_inputs); + std::vector outputs(forward_attrs.num_outputs); + std::vector ex_outputs(forward_attrs.num_outputs); + std::vector req(forward_attrs.num_outputs); + + TestArrayShapes tas = GetTestArrayShapes(); + std::vector pds = tas.pds; + + std::vector in_arrs = GetTestInputArrays(forward_attrs.input_types, false); + std::vector> out_arrs(forward_attrs.num_outputs); + std::vector> ex_out_arrs(forward_attrs.num_outputs); + + if (forward_attrs.requests.find(OpReqType::kWriteTo) != forward_attrs.requests.end()) { + for (int i1 = 0; i1 < in_arrs.size(); i1++) { + auto in_arr = in_arrs[i1]; + + CHECK_NE(forward_attrs.accept_dims.size(), 0); + if (forward_attrs.accept_dims.find(in_arr.arr.shape().ndim()) == + forward_attrs.accept_dims.end()) + continue; + for (int i = 0; i < forward_attrs.num_outputs; i++) { + out_arrs[i] = + GetTestOutputArrays(in_arr.arr.shape(), pds, {1}, true, forward_attrs.output_types); + ex_out_arrs[i] = + GetTestOutputArrays(in_arr.arr.shape(), pds, {1}, true, forward_attrs.output_types); + } + for (size_t output_i = 0; output_i < 
out_arrs[0].size(); output_i++) { + inputs_buffer.clear(); + inputs2_buffer.clear(); + + for (int i = 0; i < forward_attrs.num_inputs; i++) { + inputs_buffer.emplace_back(in_arr.arr.Copy(Context())); + inputs2_buffer.emplace_back(in_arr.arr.Copy(Context())); + Engine::Get()->WaitForAll(); + inputs[i] = &inputs_buffer.back(); + inputs2[i] = &inputs2_buffer.back(); + } + for (int i = 0; i < forward_attrs.num_outputs; i++) { + req[i] = kWriteTo; + outputs[i] = &out_arrs[i][output_i].arr; + ex_outputs[i] = &ex_out_arrs[i][output_i].arr; + } + Imperative::Get()->set_is_training(true); + + PrintVerifyMsg(in_arr, out_arrs[0][output_i]); + Imperative::Get()->InvokeOp( + Context(), forward_attrs.attrs, inputs, outputs, req, + DispatchMode::kFCompute, mxnet::OpStatePtr()); + Imperative::Get()->InvokeOp( + Context(), forward_attrs.attrs, inputs2, ex_outputs, req, + DispatchMode::kFComputeEx, mxnet::OpStatePtr()); + Engine::Get()->WaitForAll(); + AssertEqual(outputs, ex_outputs); + + if (!backwards_attrs.requests.empty()) { + TestOpExBNBackward(forward_attrs, backwards_attrs, OpReqType::kWriteTo, + inputs, outputs, in_arr, &out_arrs[0][output_i]); + } + } + } + } +} + // Computes second dimension of FC weight matrix based on input shape uint32_t GetFCWeightDim2(const nnvm::TShape arr) { uint32_t dim = 1; @@ -1204,4 +1347,10 @@ TEST(IMPERATIVE, DeconvOp) { } } +TEST(IMPERATIVE, BNOp) { + OpAttrs forward_attrs = GetBNOp(); + OpAttrs backwards_attrs = GetBNBackwardOp(); + TestOpExBN(forward_attrs, backwards_attrs); +} + #endif diff --git a/tests/cpp/unittest.mk b/tests/cpp/unittest.mk index 746ee2f096f1..665ce6982874 100644 --- a/tests/cpp/unittest.mk +++ b/tests/cpp/unittest.mk @@ -41,22 +41,22 @@ gtest-all.o : $(GTEST_SRCS_) gtest.a : gtest-all.o $(AR) $(ARFLAGS) $@ $^ -build/tests/cpp/%.o : tests/cpp/%.cc | mkldnn +build/tests/cpp/%.o : tests/cpp/%.cc @mkdir -p $(@D) $(CXX) -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -MM -MT tests/cpp/$* $< > build/tests/cpp/$*.d $(CXX) -c -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -o build/tests/cpp/$*.o $(filter %.cc %.a, $^) -build/tests/cpp/operator/%.o : tests/cpp/operator/%.cc | mkldnn +build/tests/cpp/operator/%.o : tests/cpp/operator/%.cc @mkdir -p $(@D) $(CXX) -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -MM -MT tests/cpp/operator/$* $< > build/tests/cpp/operator/$*.d $(CXX) -c -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -o build/tests/cpp/operator/$*.o $(filter %.cc %.a, $^) -build/tests/cpp/storage/%.o : tests/cpp/storage/%.cc | mkldnn +build/tests/cpp/storage/%.o : tests/cpp/storage/%.cc @mkdir -p $(@D) $(CXX) -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -MM -MT tests/cpp/storage/$* $< > build/tests/cpp/storage/$*.d $(CXX) -c -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -o build/tests/cpp/storage/$*.o $(filter %.cc %.a, $^) -build/tests/cpp/engine/%.o : tests/cpp/engine/%.cc | mkldnn +build/tests/cpp/engine/%.o : tests/cpp/engine/%.cc @mkdir -p $(@D) $(CXX) -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -MM -MT tests/cpp/engine/$* $< > build/tests/cpp/engine/$*.d $(CXX) -c -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -o build/tests/cpp/engine/$*.o $(filter %.cc %.a, $^) diff --git a/tests/nightly/apache_rat_license_check/rat-excludes b/tests/nightly/apache_rat_license_check/rat-excludes index 0d95792efc15..a488eb84d069 100755 --- a/tests/nightly/apache_rat_license_check/rat-excludes +++ b/tests/nightly/apache_rat_license_check/rat-excludes @@ -58,4 +58,6 @@ moderngpu/* deformable_im2col.cuh deformable_im2col.h REQUIRE -include/* \ No newline at end of file +include/* 
+*/test/test-symbol.json.ref +*/profiler/test/profile-matmul-20iter.json.ref \ No newline at end of file diff --git a/tests/python/mkl/test_mkldnn.py b/tests/python/mkl/test_mkldnn.py index c6c0a0832f1f..d9d3abfc3ced 100644 --- a/tests/python/mkl/test_mkldnn.py +++ b/tests/python/mkl/test_mkldnn.py @@ -27,7 +27,6 @@ from mxnet import gluon from mxnet.gluon import nn from mxnet.test_utils import * -import test_mkldnn_install as install curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) sys.path.append(os.path.join(curr_path, '../unittest/')) from common import with_seed @@ -441,7 +440,4 @@ def backward(self, req, out_grad, in_data, out_data, in_grad, aux): custom = mx.symbol.Custom(name='custom', data=conv, op_type='custom') exec1 = custom.bind(mx.cpu(), args={'data': mx.nd.ones([10,3,96,96]), 'conv_weight': mx.nd.ones([8,3,5,5])}) exec1.forward()[0].wait_to_read() - - -if __name__ == '__main__': - install.test_mkldnn_install() + diff --git a/tests/python/mkl/test_mkldnn_install.py b/tests/python/mkl/test_mkldnn_install.py deleted file mode 100644 index c2f26df72f2e..000000000000 --- a/tests/python/mkl/test_mkldnn_install.py +++ /dev/null @@ -1,56 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -""" -MKL-DNN related test cases -""" - -import sys -import os -import logging - - -def test_mkldnn_install(): - """ - This test will verify that MXNet is built/installed correctly when - compiled with Intel MKL-DNN library. The method will try to import - the mxnet module and see if the mkldnn library is mapped to this - process's address space. - """ - logging.basicConfig(level=logging.INFO) - - if not sys.platform.startswith('linux'): - logging.info("Bypass mkldnn install test for non-Linux OS") - return - - try: - #pylint: disable=unused-variable - import mxnet as mx - except (ImportError, OSError) as e: - assert 0, "Import mxnet error: %s. 
Please double check your build/" \ - "install steps or environment variable settings" % str(e) - - pid = os.getpid() - rc = os.system("cat /proc/" + str(pid) + - "/maps | grep libmkldnn > /dev/null") - - if rc == 0: - logging.info("MXNet is built/installed correctly with MKL-DNN") - else: - assert 0, "MXNet is built/installed incorrectly with MKL-DNN, please " \ - "double check your build/install steps or environment " \ - "variable settings" diff --git a/tests/python/quantization/test_quantization.py b/tests/python/quantization/test_quantization.py index ca8070cfc224..518b69626246 100644 --- a/tests/python/quantization/test_quantization.py +++ b/tests/python/quantization/test_quantization.py @@ -406,12 +406,16 @@ def get_fp32_sym(): def get_fp32_residual(): data = mx.sym.Variable('data') - conv = mx.sym.Convolution(data=data, num_filter=4, kernel=(1,1), pad=(0,0), - no_bias=True, name='conv') - bn = mx.sym.BatchNorm(data=conv, fix_gamma=False, eps=2e-5, momentum=0.9, name='bn') - act = mx.sym.Activation(data=bn + data, act_type='relu', name='relu') - pool = mx.sym.Pooling(act, kernel=(4, 4), pool_type='avg', name='pool') - fc = mx.sym.FullyConnected(pool, num_hidden=10, flatten=True, name='fc') + conv0 = mx.sym.Convolution(data=data, num_filter=4, kernel=(1,1), pad=(0,0), + no_bias=True, name='conv0') + bn = mx.sym.BatchNorm(data=conv0, fix_gamma=False, eps=2e-5, momentum=0.9, name='bn') + act0 = mx.sym.Activation(data=bn + data, act_type='relu', name='relu0') + pool0 = mx.sym.Pooling(act0, kernel=(4, 4), pool_type='avg', name='pool0') + conv1 = mx.sym.Convolution(data=pool0, num_filter=4, kernel=(1,1), pad=(0,0), + no_bias=False, name='conv1') + act1 = mx.sym.Activation(data=conv1, act_type='relu', name='relu1') + pool1 = mx.sym.Pooling(act1, kernel=(4, 4), pool_type='avg', name='pool1') + fc = mx.sym.FullyConnected(pool1, num_hidden=10, flatten=True, name='fc') sym = mx.sym.SoftmaxOutput(fc, grad_scale=1, ignore_label=-1, multi_output=False, out_grad=False, preserve_shape=False, use_ignore=False, name='softmax') return sym @@ -574,38 +578,47 @@ def check_qsym_forward(qsym, qarg_params, qaux_params, data_shape, label_shape): mod.init_params() arg_params, aux_params = mod.get_params() - excluded_sym_names = [] + excluded_names = [] if mx.current_context() == mx.cpu(): - excluded_sym_names += ['fc'] - excluded_sym_names += ['concat'] - qsym, qarg_params, qaux_params = mx.contrib.quant.quantize_model(sym=s, - arg_params=arg_params, - aux_params=aux_params, - excluded_sym_names=excluded_sym_names, - ctx=mx.current_context(), - quantized_dtype=qdtype, - calib_mode='none') - check_params(arg_params, qarg_params, qsym) - check_params(aux_params, qaux_params) - check_qsym_forward(qsym, qarg_params, qaux_params, dshape, lshape) - - calib_data = mx.nd.random.uniform(shape=dshape) - calib_data = NDArrayIter(data=calib_data, batch_size=batch_size) - calib_data = DummyIter(calib_data) - qsym, qarg_params, qaux_params = mx.contrib.quant.quantize_model(sym=s, - arg_params=arg_params, - aux_params=aux_params, - excluded_sym_names=excluded_sym_names, - ctx=mx.current_context(), - quantized_dtype=qdtype, - calib_mode='naive', - calib_data=calib_data, - num_calib_examples=20) - check_params(arg_params, qarg_params, qsym) - check_params(aux_params, qaux_params) - check_qsym_calibrated(qsym) - check_qsym_qdtype(qsym, qdtype) - check_qsym_forward(qsym, qarg_params, qaux_params, dshape, lshape) + excluded_names += ['fc'] + excluded_names += ['concat'] + + optional_names = ['pool0'] + for skip_optional_names in 
[False, True]: + exclude_sym_names = [] + if skip_optional_names: + excluded_sym_names = excluded_names + else: + excluded_sym_names = excluded_names + optional_names + + qsym, qarg_params, qaux_params = mx.contrib.quant.quantize_model(sym=s, + arg_params=arg_params, + aux_params=aux_params, + excluded_sym_names=excluded_sym_names, + ctx=mx.current_context(), + quantized_dtype=qdtype, + calib_mode='none') + check_params(arg_params, qarg_params, qsym) + check_params(aux_params, qaux_params) + check_qsym_forward(qsym, qarg_params, qaux_params, dshape, lshape) + + calib_data = mx.nd.random.uniform(shape=dshape) + calib_data = NDArrayIter(data=calib_data, batch_size=batch_size) + calib_data = DummyIter(calib_data) + qsym, qarg_params, qaux_params = mx.contrib.quant.quantize_model(sym=s, + arg_params=arg_params, + aux_params=aux_params, + excluded_sym_names=excluded_sym_names, + ctx=mx.current_context(), + quantized_dtype=qdtype, + calib_mode='naive', + calib_data=calib_data, + num_calib_examples=20) + check_params(arg_params, qarg_params, qsym) + check_params(aux_params, qaux_params) + check_qsym_calibrated(qsym) + check_qsym_qdtype(qsym, qdtype) + check_qsym_forward(qsym, qarg_params, qaux_params, dshape, lshape) for qdtype in ['int8', 'uint8']: check_quantize_model(qdtype) diff --git a/tests/python/unittest/test_gluon_rnn.py b/tests/python/unittest/test_gluon_rnn.py index eee3adda2c65..edc43d21b36b 100644 --- a/tests/python/unittest/test_gluon_rnn.py +++ b/tests/python/unittest/test_gluon_rnn.py @@ -600,6 +600,34 @@ def test_layer_fill_shape(): assert layer.l0_i2h_weight.shape[1] == 7, layer.l0_i2h_weight.shape[1] +def test_bidirectional_unroll_valid_length(): + # Test BidirectionalCell. + # In 1.3.1 version, after hybridize( ), BidirectionalCell would failed when pass valid_length to unroll( ). + class BiLSTM(gluon.nn.HybridBlock): + def __init__(self, rnn_size, time_step, **kwargs): + super(BiLSTM, self).__init__(**kwargs) + self.time_step = time_step + with self.name_scope(): + self.bi_lstm = gluon.rnn.BidirectionalCell( + gluon.rnn.LSTMCell(rnn_size, prefix='rnn_l0_'), + gluon.rnn.LSTMCell(rnn_size, prefix='rnn_r0_'), + output_prefix='lstm_bi_') + + def hybrid_forward(self, F, inputs, valid_len): + outputs, states = self.bi_lstm.unroll(self.time_step, inputs, valid_length=valid_len, + layout='NTC', merge_outputs=True) + return outputs, states + + rnn_size, time_step = 100, 3 + net = BiLSTM(rnn_size, time_step) + net.initialize() + net.hybridize() + inputs_data = mx.nd.random.uniform(shape=(10, 3, 50)) + valid_len = mx.nd.array([1]*10) + outputs, _ = net(inputs_data, valid_len) + assert outputs.shape == (10, 3, 200) + + if __name__ == '__main__': import nose nose.runmodule() diff --git a/tools/dependencies/README.md b/tools/dependencies/README.md new file mode 100644 index 000000000000..cfe3d6c75dc9 --- /dev/null +++ b/tools/dependencies/README.md @@ -0,0 +1,14 @@ +# Overview + +This folder contains scripts for building the dependencies from source. The static libraries from +the build artifacts can be used to create self-contained shared object for mxnet through static +linking. + +# Settings + +The scripts use the following environment variables for setting behavior: + +`DEPS_PATH`: the location in which the libraries are downloaded, built, and installed. +`PLATFORM`: name of the OS in lower case. Supported options are 'linux' and 'darwin'. 
+ +It also expects the following build tools in path: make, cmake, tar, unzip, autoconf, nasm diff --git a/tools/dependencies/cityhash.sh b/tools/dependencies/cityhash.sh new file mode 100755 index 000000000000..81cc9cbaad3a --- /dev/null +++ b/tools/dependencies/cityhash.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of cityhash that can be used as dependency of mxnet. +CITYHASH_VERSION=1.1.1 +if [[ ! -f $DEPS_PATH/lib/libcityhash.a ]]; then + # Download and build cityhash + >&2 echo "Building cityhash..." + git clone https://github.com/google/cityhash $DEPS_PATH/cityhash-$CITYHASH_VERSION + cd $DEPS_PATH/cityhash-$CITYHASH_VERSION + git reset --hard 8af9b8c2b889d80c22d6bc26ba0df1afb79a30db + ./configure -prefix=$DEPS_PATH --enable-sse4.2 + make CXXFLAGS="-g -O3 -msse4.2" + make install + cd - +fi diff --git a/tools/dependencies/curl.sh b/tools/dependencies/curl.sh new file mode 100755 index 000000000000..9633edb78538 --- /dev/null +++ b/tools/dependencies/curl.sh @@ -0,0 +1,64 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of libcurl that can be used as dependency of mxnet. +LIBCURL_VERSION=7.61.0 +if [[ ! -f $DEPS_PATH/lib/libcurl.a ]]; then + # download and build libcurl + >&2 echo "Building libcurl..." 
+ curl -s -L https://curl.haxx.se/download/curl-$LIBCURL_VERSION.zip -o $DEPS_PATH/libcurl.zip + unzip -q $DEPS_PATH/libcurl.zip -d $DEPS_PATH + cd $DEPS_PATH/curl-$LIBCURL_VERSION + if [[ $PLATFORM == 'linux' ]]; then + CONFIG_FLAG="" + elif [[ $PLATFORM == 'darwin' ]]; then + CONFIG_FLAG="--with-darwinssl" + fi + ./configure $CONFIG_FLAG \ + --with-zlib \ + --with-nghttps2 \ + --without-zsh-functions-dir \ + --without-librtmp \ + --without-libssh2 \ + --disable-debug \ + --disable-curldebug \ + --enable-symbol-hiding=yes \ + --enable-optimize=yes \ + --enable-shared=no \ + --enable-http=yes \ + --enable-ipv6=yes \ + --disable-ftp \ + --disable-ldap \ + --disable-ldaps \ + --disable-rtsp \ + --disable-proxy \ + --disable-dict \ + --disable-telnet \ + --disable-tftp \ + --disable-pop3 \ + --disable-imap \ + --disable-smb \ + --disable-smtp \ + --disable-gopher \ + --disable-manual \ + --prefix=$DEPS_PATH + make + make install + cd - +fi diff --git a/tools/dependencies/eigen.sh b/tools/dependencies/eigen.sh new file mode 100755 index 000000000000..ac2f75a03a52 --- /dev/null +++ b/tools/dependencies/eigen.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script imports the headers from eigen3 that can be used to in opencv. +EIGEN_VERSION=3.3.4 +if [[ ! -d $DEPS_PATH/include/eigen3 ]]; then + # download eigen + >&2 echo "Loading eigen..." + curl -s -L https://github.com/eigenteam/eigen-git-mirror/archive/$EIGEN_VERSION.zip -o $DEPS_PATH/eigen.zip + unzip -q $DEPS_PATH/eigen.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/eigen-git-mirror-$EIGEN_VERSION/build + cd $DEPS_PATH/eigen-git-mirror-$EIGEN_VERSION/build + cmake \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH .. + make install + cd - +fi diff --git a/tools/dependencies/libpng.sh b/tools/dependencies/libpng.sh new file mode 100755 index 000000000000..d1523c654478 --- /dev/null +++ b/tools/dependencies/libpng.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +# This script builds the static library of libpng that can be used as dependency of mxnet/opencv. +PNG_VERSION=1.6.34 +if [[ ! -f $DEPS_PATH/lib/libpng.a ]]; then + # download and build libpng + >&2 echo "Building libpng..." + curl -s -L https://github.com/glennrp/libpng/archive/v$PNG_VERSION.zip -o $DEPS_PATH/libpng.zip + unzip -q $DEPS_PATH/libpng.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/libpng-$PNG_VERSION/build + cd $DEPS_PATH/libpng-$PNG_VERSION/build + cmake \ + -D PNG_SHARED=OFF \ + -D PNG_STATIC=ON \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH \ + -D CMAKE_C_FLAGS=-fPIC .. + make + make install + mkdir -p $DEPS_PATH/include/libpng + ln -s $DEPS_PATH/include/png.h $DEPS_PATH/include/libpng/png.h + cd - +fi diff --git a/tools/dependencies/libtiff.sh b/tools/dependencies/libtiff.sh new file mode 100755 index 000000000000..14dcb2d7bde0 --- /dev/null +++ b/tools/dependencies/libtiff.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of libtiff that can be used as dependency of mxnet/opencv. +TIFF_VERSION="4-0-9" +if [[ ! -f $DEPS_PATH/lib/libtiff.a ]]; then + # download and build libtiff + >&2 echo "Building libtiff..." + curl -s -L https://gitlab.com/libtiff/libtiff/-/archive/Release-v$TIFF_VERSION/libtiff-Release-v$TIFF_VERSION.zip -o $DEPS_PATH/libtiff.zip + unzip -q $DEPS_PATH/libtiff.zip -d $DEPS_PATH + cd $DEPS_PATH/libtiff-Release-v$TIFF_VERSION + ./configure --quiet --disable-shared --disable-jpeg --disable-zlib --disable-jbig --disable-lzma --prefix=$DEPS_PATH + make + make install + cd - +fi diff --git a/tools/dependencies/libturbojpeg.sh b/tools/dependencies/libturbojpeg.sh new file mode 100755 index 000000000000..4991906f8878 --- /dev/null +++ b/tools/dependencies/libturbojpeg.sh @@ -0,0 +1,47 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of libturbojpeg that can be used as dependency of +# mxnet/opencv. 
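The libpng and libtiff scripts above, and the libjpeg-turbo script this comment introduces, all share the same idempotent shape: skip the work entirely when the static archive already exists under $DEPS_PATH/lib, otherwise fetch a pinned release, build it with -fPIC (so the objects can later be folded into the shared libmxnet.so), and install into $DEPS_PATH. A minimal sketch of that shared pattern, with "libfoo", FOO_VERSION, and the download URL as placeholders rather than a real dependency:

    # Sketch of the guard/fetch/build/install pattern these scripts follow.
    # "libfoo", FOO_VERSION and the URL are placeholders, not a real dependency.
    FOO_VERSION=1.0.0
    if [[ ! -f $DEPS_PATH/lib/libfoo.a ]]; then
      >&2 echo "Building libfoo..."
      curl -s -L https://example.com/libfoo-$FOO_VERSION.zip -o $DEPS_PATH/libfoo.zip
      unzip -q $DEPS_PATH/libfoo.zip -d $DEPS_PATH
      cd $DEPS_PATH/libfoo-$FOO_VERSION
      ./configure --disable-shared --prefix=$DEPS_PATH CFLAGS=-fPIC
      make
      make install
      cd -
    fi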
+TURBO_JPEG_VERSION=1.5.90 +if [[ $PLATFORM == 'darwin' ]]; then + JPEG_NASM_OPTION="-D CMAKE_ASM_NASM_COMPILER=/usr/local/bin/nasm" +fi + +if [[ ! -f $DEPS_PATH/lib/libjpeg.a ]] || [[ ! -f $DEPS_PATH/lib/libturbojpeg.a ]]; then + # download and build libjpeg + >&2 echo "Building libjpeg-turbo..." + curl -s -L https://github.com/libjpeg-turbo/libjpeg-turbo/archive/$TURBO_JPEG_VERSION.zip -o $DEPS_PATH/libjpeg.zip + unzip -q $DEPS_PATH/libjpeg.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/libjpeg-turbo-$TURBO_JPEG_VERSION/build + cd $DEPS_PATH/libjpeg-turbo-$TURBO_JPEG_VERSION/build + cmake \ + -G"Unix Makefiles" \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH \ + -D CMAKE_C_FLAGS=-fPIC \ + -D WITH_JAVA=FALSE \ + -D WITH_JPEG7=TRUE \ + -D WITH_JPEG8=TRUE \ + $JPEG_NASM_OPTION \ + -D ENABLE_SHARED=FALSE .. + make + make install + cd - +fi diff --git a/tools/dependencies/libz.sh b/tools/dependencies/libz.sh new file mode 100755 index 000000000000..927f1de82e72 --- /dev/null +++ b/tools/dependencies/libz.sh @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of libz that can be used as dependency of mxnet. +ZLIB_VERSION=1.2.6 +if [[ ! -f $DEPS_PATH/lib/libz.a ]]; then + # Download and build zlib + >&2 echo "Building zlib..." + curl -s -L https://github.com/LuaDist/zlib/archive/$ZLIB_VERSION.zip -o $DEPS_PATH/zlib.zip + unzip -q $DEPS_PATH/zlib.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/zlib-$ZLIB_VERSION/build + cd $DEPS_PATH/zlib-$ZLIB_VERSION/build + cmake \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH \ + -D BUILD_SHARED_LIBS=OFF .. + make + make install + cd - +fi diff --git a/tools/dependencies/lz4.sh b/tools/dependencies/lz4.sh new file mode 100755 index 000000000000..a4269bf29bb9 --- /dev/null +++ b/tools/dependencies/lz4.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of lz4 that can be used as dependency of mxnet. 
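Each of these scripts leaves a plain static archive under $DEPS_PATH/lib. When a later link step fails, it can help to confirm that the expected archive was actually produced and exports the expected symbols; a quick, purely illustrative check for the lz4 archive built below:

    # Illustrative sanity check, not part of the build scripts themselves.
    ar -t $DEPS_PATH/lib/liblz4.a | head                            # object files inside the archive
    nm -g $DEPS_PATH/lib/liblz4.a | grep -i lz4_compress | head     # exported symbols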
+LZ4_VERSION=r130 +if [[ ! -f $DEPS_PATH/lib/liblz4.a ]]; then + # Download and build lz4 + >&2 echo "Building lz4..." + curl -s -L https://github.com/lz4/lz4/archive/$LZ4_VERSION.zip -o $DEPS_PATH/lz4.zip + unzip -q $DEPS_PATH/lz4.zip -d $DEPS_PATH + cd $DEPS_PATH/lz4-$LZ4_VERSION + make + make PREFIX=$DEPS_PATH install + cd - +fi diff --git a/tools/dependencies/openblas.sh b/tools/dependencies/openblas.sh new file mode 100755 index 000000000000..9463e3325e0d --- /dev/null +++ b/tools/dependencies/openblas.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of openblas that can be used as dependency of mxnet. +OPENBLAS_VERSION=0.3.3 +if [[ ! -e $DEPS_PATH/lib/libopenblas.a ]]; then + # download and build openblas + >&2 echo "Building openblas..." + + curl -s -L https://github.com/xianyi/OpenBLAS/archive/v$OPENBLAS_VERSION.zip -o $DEPS_PATH/openblas.zip + unzip -q $DEPS_PATH/openblas.zip -d $DEPS_PATH + cd $DEPS_PATH/OpenBLAS-$OPENBLAS_VERSION + + make DYNAMIC_ARCH=1 NO_SHARED=1 USE_OPENMP=1 + make PREFIX=$DEPS_PATH install + cd - + ln -s libopenblas.a $DEPS_PATH/lib/libcblas.a + ln -s libopenblas.a $DEPS_PATH/lib/liblapack.a +fi diff --git a/tools/dependencies/opencv.sh b/tools/dependencies/opencv.sh new file mode 100755 index 000000000000..98ff115f1765 --- /dev/null +++ b/tools/dependencies/opencv.sh @@ -0,0 +1,191 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of opencv that can be used as dependency of mxnet. +# It expects openblas, libjpeg, libpng, libtiff, eigen, etc., to be in $DEPS_PATH. 
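Because the OpenCV configuration below enables WITH_JPEG/WITH_PNG/WITH_TIFF and the LAPACK options, and resolves them through CMAKE_LIBRARY_PATH=$DEPS_PATH/lib, the sibling scripts named in the comment above have to run first. A short preflight along these lines (illustrative only, mirroring the script's own comment rather than adding new requirements) makes the prerequisite explicit:

    # Illustrative preflight: confirm the archives opencv.sh expects are present.
    for lib in libopenblas.a libjpeg.a libpng.a libtiff.a; do
      [[ -f $DEPS_PATH/lib/$lib ]] || { >&2 echo "missing $DEPS_PATH/lib/$lib"; exit 1; }
    done
    [[ -d $DEPS_PATH/include/eigen3 ]] || { >&2 echo "missing eigen3 headers"; exit 1; }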
+OPENCV_VERSION=3.4.2 +if [[ $PLATFORM == 'linux' ]]; then + OPENCV_LAPACK_OPTIONS=" \ + -D OpenBLAS_HOME=$DEPS_PATH \ + -D OpenBLAS_INCLUDE_DIR=$DEPS_PATH/include \ + -D OpenBLAS_LIB=$DEPS_PATH/lib/libopenblas.a \ + -D LAPACK_INCLUDE_DIR=$DEPS_PATH/include \ + -D LAPACK_LINK_LIBRARIES=$DEPS_PATH/lib/ \ + -D LAPACK_LIBRARIES=$DEPS_PATH/lib/libopenblas.a \ + -D LAPACK_CBLAS_H='cblas.h' \ + -D LAPACK_LAPACKE_H='lapacke.h' \ + -D LAPACK_IMPL='OpenBLAS' \ + -D HAVE_LAPACK=1" +fi + +if [[ ! -f $DEPS_PATH/lib/libopencv_core.a ]] || [[ ! -f $DEPS_PATH/lib/libopencv_imgcodecs.a ]] || [[ ! -f $DEPS_PATH/lib/libopencv_imgproc.a ]]; then + # download and build opencv since we need the static library + >&2 echo "Building opencv..." + curl -s -L https://github.com/opencv/opencv/archive/$OPENCV_VERSION.zip -o $DEPS_PATH/opencv.zip + unzip -q $DEPS_PATH/opencv.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/opencv-$OPENCV_VERSION/build + cd $DEPS_PATH/opencv-$OPENCV_VERSION/build + cmake \ + -D OPENCV_ENABLE_NONFREE=OFF \ + -D WITH_1394=OFF \ + -D WITH_ARAVIS=OFF \ + -D WITH_AVFOUNDATION=OFF \ + -D WITH_CAROTENE=OFF \ + -D WITH_CLP=OFF \ + -D WITH_CSTRIPES=OFF \ + -D WITH_CPUFEATURES=OFF \ + -D WITH_CUBLAS=OFF \ + -D WITH_CUDA=OFF \ + -D WITH_CUFFT=OFF \ + -D WITH_DIRECTX=OFF \ + -D WITH_DSHOW=OFF \ + -D WITH_EIGEN=ON \ + -D WITH_FFMPEG=OFF \ + -D WITH_GDAL=OFF \ + -D WITH_GDCM=OFF \ + -D WITH_GIGEAPI=OFF \ + -D WITH_GPHOTO2=OFF \ + -D WITH_GSTREAMER=OFF \ + -D WITH_GSTREAMER_0_10=OFF \ + -D WITH_GTK=OFF \ + -D WITH_GTK_2_X=OFF \ + -D WITH_HALIDE=OFF \ + -D WITH_IMAGEIO=OFF \ + -D WITH_IMGCODEC_HDR=OFF \ + -D WITH_IMGCODEC_PXM=OFF \ + -D WITH_IMGCODEC_SUNRASTER=OFF \ + -D WITH_INF_ENGINE=OFF \ + -D WITH_INTELPERC=OFF \ + -D WITH_IPP=OFF \ + -D WITH_IPP_A=OFF \ + -D WITH_ITT=OFF \ + -D WITH_JASPER=OFF \ + -D WITH_JPEG=ON \ + -D WITH_LAPACK=ON \ + -D WITH_LIBREALSENSE=OFF \ + -D WITH_LIBV4L=OFF \ + -D WITH_MATLAB=OFF \ + -D WITH_MFX=OFF \ + -D WITH_MSMF=OFF \ + -D WITH_NVCUVID=OFF \ + -D WITH_OPENCL=OFF \ + -D WITH_OPENCLAMDBLAS=OFF \ + -D WITH_OPENCLAMDFFT=OFF \ + -D WITH_OPENCL_SVM=OFF \ + -D WITH_OPENEXR=OFF \ + -D WITH_OPENGL=OFF \ + -D WITH_OPENMP=OFF \ + -D WITH_OPENNI=OFF \ + -D WITH_OPENNI2=OFF \ + -D WITH_OPENVX=OFF \ + -D WITH_PNG=ON \ + -D WITH_PROTOBUF=OFF \ + -D WITH_PTHREADS_PF=ON \ + -D WITH_PVAPI=OFF \ + -D WITH_QT=OFF \ + -D WITH_QTKIT=OFF \ + -D WITH_QUICKTIME=OFF \ + -D WITH_TBB=OFF \ + -D WITH_TIFF=ON \ + -D WITH_UNICAP=OFF \ + -D WITH_V4L=OFF \ + -D WITH_VA=OFF \ + -D WITH_VA_INTEL=OFF \ + -D WITH_VFW=OFF \ + -D WITH_VTK=OFF \ + -D WITH_WEBP=OFF \ + -D WITH_WIN32UI=OFF \ + -D WITH_XIMEA=OFF \ + -D WITH_XINE=OFF \ + -D BUILD_ANDROID_EXAMPLES=OFF \ + -D BUILD_ANDROID_PROJECTS=OFF \ + -D BUILD_ANDROID_SERVICE=OFF \ + -D BUILD_CUDA_STUBS=OFF \ + -D BUILD_DOCS=OFF \ + -D BUILD_EXAMPLES=OFF \ + -D BUILD_FAT_JAVA_LIB=OFF \ + -D BUILD_IPP_IW=OFF \ + -D BUILD_ITT_IW=OFF \ + -D BUILD_JAVA=OFF \ + -D BUILD_JASPER=OFF \ + -D BUILD_JPEG=OFF \ + -D BUILD_OPENEXR=OFF \ + -D BUILD_PACKAGE=OFF \ + -D BUILD_PERF_TESTS=OFF \ + -D BUILD_PNG=OFF \ + -D BUILD_SHARED_LIBS=OFF \ + -D BUILD_TBB=OFF \ + -D BUILD_TESTS=OFF \ + -D BUILD_TIFF=OFF \ + -D BUILD_WEBP=OFF \ + -D BUILD_WITH_DEBUG_INFO=OFF \ + -D BUILD_WITH_DYNAMIC_IPP=OFF \ + -D BUILD_WITH_STATIC_CRT=OFF \ + -D BUILD_ZLIB=OFF \ + -D BUILD_opencv_apps=OFF \ + -D BUILD_opencv_aruco=OFF \ + -D BUILD_opencv_calib3d=OFF \ + -D BUILD_opencv_contrib=OFF \ + -D BUILD_opencv_dnn=OFF \ + -D BUILD_opencv_features2d=OFF \ + -D BUILD_opencv_flann=OFF \ + -D 
BUILD_opencv_gpu=OFF \ + -D BUILD_opencv_gpuarithm=OFF \ + -D BUILD_opencv_gpubgsegm=OFF \ + -D BUILD_opencv_gpucodec=OFF \ + -D BUILD_opencv_gpufeatures2d=OFF \ + -D BUILD_opencv_gpufilters=OFF \ + -D BUILD_opencv_gpuimgproc=OFF \ + -D BUILD_opencv_gpulegacy=OFF \ + -D BUILD_opencv_gpuoptflow=OFF \ + -D BUILD_opencv_gpustereo=OFF \ + -D BUILD_opencv_gpuwarping=OFF \ + -D BUILD_opencv_highgui=OFF \ + -D BUILD_opencv_java=OFF \ + -D BUILD_opencv_js=OFF \ + -D BUILD_opencv_ml=OFF \ + -D BUILD_opencv_ml=OFF \ + -D BUILD_opencv_nonfree=OFF \ + -D BUILD_opencv_objdetect=OFF \ + -D BUILD_opencv_photo=OFF \ + -D BUILD_opencv_python=OFF \ + -D BUILD_opencv_python2=OFF \ + -D BUILD_opencv_python3=OFF \ + -D BUILD_opencv_superres=OFF \ + -D BUILD_opencv_video=OFF \ + -D BUILD_opencv_videoio=OFF \ + -D BUILD_opencv_videostab=OFF \ + -D BUILD_opencv_viz=OFF \ + -D BUILD_opencv_world=OFF \ + $OPENCV_LAPACK_OPTIONS \ + -D OPENCV_LIB_INSTALL_PATH=lib \ + -D OPENCV_INCLUDE_INSTALL_PATH=include \ + -D CMAKE_LIBRARY_PATH=$DEPS_PATH/lib \ + -D CMAKE_INCLUDE_PATH=$DEPS_PATH/include \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH .. + if [[ $PLATFORM == 'linux' ]]; then + cp $DEPS_PATH/../patch/opencv_lapack.h ./ + fi + make + make install + cd - + # @szha: compatibility header + cat $DEPS_PATH/include/opencv2/imgcodecs/imgcodecs_c.h >> $DEPS_PATH/include/opencv2/imgcodecs.hpp +fi diff --git a/tools/dependencies/openssl.sh b/tools/dependencies/openssl.sh new file mode 100755 index 000000000000..b7e4317d4a89 --- /dev/null +++ b/tools/dependencies/openssl.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of openssl that can be used as dependency of mxnet. +OPENSSL_VERSION=1.0.2l +if [[ ! -f $DEPS_PATH/lib/libssl.a ]] || [[ ! -f $DEPS_PATH/lib/libcrypto.a ]]; then + # download and build openssl + >&2 echo "Building openssl..." + OPENSSL_VERSION=$(echo $OPENSSL_VERSION | sed 's/\./_/g') + curl -s -L https://github.com/openssl/openssl/archive/OpenSSL_$OPENSSL_VERSION.zip -o $DEPS_PATH/openssl.zip + unzip -q $DEPS_PATH/openssl.zip -d $DEPS_PATH + cd $DEPS_PATH/openssl-OpenSSL_$OPENSSL_VERSION + if [[ $PLATFORM == 'linux' ]]; then + TARGET=linux-x86_64 + elif [[ $PLATFORM == 'darwin' ]]; then + TARGET=darwin64-x86_64-cc + fi + ./Configure no-shared no-zlib --prefix=$DEPS_PATH --openssldir=$DEPS_PATH/ssl $TARGET + make + make install + cd - +fi diff --git a/tools/dependencies/protobuf.sh b/tools/dependencies/protobuf.sh new file mode 100755 index 000000000000..dfa3d71f3750 --- /dev/null +++ b/tools/dependencies/protobuf.sh @@ -0,0 +1,41 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of protobuf along with protoc, that can be used as dependency of mxnet. +PROTOBUF_VERSION=3.5.1 +if [[ $PLATFORM == 'darwin' ]]; then + DY_EXT="dylib" +else + DY_EXT="so" +fi + +LIBPROTOBUF="$DEPS_PATH/lib/libprotobuf.$DY_EXT" +LIBPROTOC="$DEPS_PATH/lib/libprotoc.$DY_EXT" +if [[ ! -e $LIBPROTOBUF ]] || [[ ! -e $LIBPROTOC ]]; then + # Download and build protobuf + >&2 echo "Building protobuf..." + curl -s -L https://github.com/google/protobuf/archive/v$PROTOBUF_VERSION.zip -o $DEPS_PATH/protobuf.zip + unzip -q $DEPS_PATH/protobuf.zip -d $DEPS_PATH + cd $DEPS_PATH/protobuf-$PROTOBUF_VERSION + ./autogen.sh + ./configure -prefix=$DEPS_PATH + make + make install + cd - +fi diff --git a/tools/dependencies/zmq.sh b/tools/dependencies/zmq.sh new file mode 100755 index 000000000000..55e17798c2d3 --- /dev/null +++ b/tools/dependencies/zmq.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of zeroMQ that can be used as dependency of mxnet. +ZEROMQ_VERSION=4.2.2 +if [[ ! -f $DEPS_PATH/lib/libzmq.a ]]; then + # Download and build zmq + >&2 echo "Building zmq..." + curl -s -L https://github.com/zeromq/libzmq/archive/v$ZEROMQ_VERSION.zip -o $DEPS_PATH/zeromq.zip + unzip -q $DEPS_PATH/zeromq.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/libzmq-$ZEROMQ_VERSION/build + cd $DEPS_PATH/libzmq-$ZEROMQ_VERSION/build + cmake \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH \ + -D WITH_LIBSODIUM=OFF \ + -D BUILD_SHARED_LIBS=OFF .. + make + make install + cp $DEPS_PATH/lib/x86_64-linux-gnu/libzmq.a $DEPS_PATH/lib/libzmq.a + cd - +fi
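Taken together, these scripts populate $DEPS_PATH with static archives that a self-contained libmxnet.so build can link against, as described in the tools/dependencies/README.md added above. A minimal sketch of driving them by hand from the repository root, with make, cmake, unzip, autoconf and nasm already on PATH as the README notes; the orchestration itself is not part of this patch, so the DEPS_PATH location, the ordering (beyond opencv needing openblas, eigen and the image codecs first), and the final build hook are assumptions:

    #!/usr/bin/env bash
    # Illustrative driver only; the script names match the files added in this
    # patch, but the ordering and the closing note are assumptions.
    set -e
    export DEPS_PATH=$(pwd)/staticdeps
    export PLATFORM=$(uname | tr '[:upper:]' '[:lower:]')   # 'linux' or 'darwin'
    mkdir -p $DEPS_PATH

    for dep in openblas libz eigen libpng libtiff libturbojpeg opencv \
               openssl curl cityhash lz4 zmq protobuf; do
      bash tools/dependencies/$dep.sh
    done

    # A static-friendly MXNet build would then point its include and library
    # search paths at $DEPS_PATH, e.g. via ADD_CFLAGS/ADD_LDFLAGS in make/config.mk.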