From 64b9ac776bc79344a6bf8f6baf30dc42ea494ca2 Mon Sep 17 00:00:00 2001 From: Lanking Date: Mon, 15 Oct 2018 13:58:57 -0700 Subject: [PATCH 01/38] [MXNET-984] Add Java NDArray and introduce Java Operator Builder class (#12816) * clean history and add commit * add lint header * bypass the java unittest when make the package * clean up redundant test * clean spacing issue * revert the change * clean up * cleanup the JMacros * adding line escape * revert some changes and fix scala style * fixes regarding to Naveen's comment --- Makefile | 2 +- scala-package/core/pom.xml | 5 +- .../org/apache/mxnet/javaapi/Context.scala | 1 - .../org/apache/mxnet/javaapi/NDArray.scala | 202 +++++++++++++++++ .../org/apache/mxnet/javaapi/NDArrayTest.java | 85 ++++++++ .../mxnet/javaapi/JavaNDArrayMacro.scala | 203 ++++++++++++++++++ .../apache/mxnet/utils/CToScalaUtils.scala | 22 +- .../scala/org/apache/mxnet/MacrosSuite.scala | 2 +- 8 files changed, 507 insertions(+), 15 deletions(-) create mode 100644 scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala create mode 100644 scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java create mode 100644 scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala diff --git a/Makefile b/Makefile index a4b41b8d8371..fe2df2c20afa 100644 --- a/Makefile +++ b/Makefile @@ -606,7 +606,7 @@ scalaclean: scalapkg: (cd $(ROOTDIR)/scala-package; \ - mvn package -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -Dcxx="$(CXX)" \ + mvn package -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE),integrationtest -Dcxx="$(CXX)" \ -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ -Dcurrent_libdir="$(ROOTDIR)/lib" \ diff --git a/scala-package/core/pom.xml b/scala-package/core/pom.xml index ea3a2d68c9f4..6e2d8d6e9cc7 100644 --- a/scala-package/core/pom.xml +++ b/scala-package/core/pom.xml @@ -86,7 +86,10 @@ maven-surefire-plugin 2.22.0 - false + + -Djava.library.path=${project.parent.basedir}/native/${platform}/target + + ${skipTests} diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala index 5f0caedcc402..2f4f3e6409ed 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala @@ -42,6 +42,5 @@ object Context { val gpu: Context = org.apache.mxnet.Context.gpu() val devtype2str = org.apache.mxnet.Context.devstr2type.asJava val devstr2type = org.apache.mxnet.Context.devstr2type.asJava - def defaultCtx: Context = org.apache.mxnet.Context.defaultCtx } diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala new file mode 100644 index 000000000000..c77b440d8802 --- /dev/null +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala @@ -0,0 +1,202 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.mxnet.javaapi + +import org.apache.mxnet.javaapi.DType.DType + +import collection.JavaConverters._ + +@AddJNDArrayAPIs(false) +object NDArray { + implicit def fromNDArray(nd: org.apache.mxnet.NDArray): NDArray = new NDArray(nd) + + implicit def toNDArray(jnd: NDArray): org.apache.mxnet.NDArray = jnd.nd + + def waitall(): Unit = org.apache.mxnet.NDArray.waitall() + + def onehotEncode(indices: NDArray, out: NDArray): NDArray + = org.apache.mxnet.NDArray.onehotEncode(indices, out) + + def empty(shape: Shape, ctx: Context, dtype: DType.DType): NDArray + = org.apache.mxnet.NDArray.empty(shape, ctx, dtype) + def empty(ctx: Context, shape: Array[Int]): NDArray + = org.apache.mxnet.NDArray.empty(new Shape(shape), ctx) + def empty(ctx : Context, shape : java.util.List[java.lang.Integer]) : NDArray + = org.apache.mxnet.NDArray.empty(new Shape(shape), ctx) + def zeros(shape: Shape, ctx: Context, dtype: DType.DType): NDArray + = org.apache.mxnet.NDArray.zeros(shape, ctx, dtype) + def zeros(ctx: Context, shape: Array[Int]): NDArray + = org.apache.mxnet.NDArray.zeros(new Shape(shape), ctx) + def zeros(ctx : Context, shape : java.util.List[java.lang.Integer]) : NDArray + = org.apache.mxnet.NDArray.zeros(new Shape(shape), ctx) + def ones(shape: Shape, ctx: Context, dtype: DType.DType): NDArray + = org.apache.mxnet.NDArray.ones(shape, ctx, dtype) + def ones(ctx: Context, shape: Array[Int]): NDArray + = org.apache.mxnet.NDArray.ones(new Shape(shape), ctx) + def ones(ctx : Context, shape : java.util.List[java.lang.Integer]) : NDArray + = org.apache.mxnet.NDArray.ones(new Shape(shape), ctx) + def full(shape: Shape, value: Float, ctx: Context): NDArray + = org.apache.mxnet.NDArray.full(shape, value, ctx) + + def power(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.power(lhs, rhs) + def power(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.power(lhs, rhs) + def power(lhs: Float, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.power(lhs, rhs) + + def maximum(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.maximum(lhs, rhs) + def maximum(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.maximum(lhs, rhs) + def maximum(lhs: Float, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.maximum(lhs, rhs) + + def minimum(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.minimum(lhs, rhs) + def minimum(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.minimum(lhs, rhs) + def minimum(lhs: Float, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.minimum(lhs, rhs) + + def equal(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.equal(lhs, rhs) + def equal(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.equal(lhs, rhs) + + def notEqual(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.notEqual(lhs, rhs) + def notEqual(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.notEqual(lhs, rhs) + + def greater(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.greater(lhs, rhs) + def greater(lhs: NDArray, rhs: Float): NDArray = 
org.apache.mxnet.NDArray.greater(lhs, rhs) + + def greaterEqual(lhs: NDArray, rhs: NDArray): NDArray + = org.apache.mxnet.NDArray.greaterEqual(lhs, rhs) + def greaterEqual(lhs: NDArray, rhs: Float): NDArray + = org.apache.mxnet.NDArray.greaterEqual(lhs, rhs) + + def lesser(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.lesser(lhs, rhs) + def lesser(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.lesser(lhs, rhs) + + def lesserEqual(lhs: NDArray, rhs: NDArray): NDArray + = org.apache.mxnet.NDArray.lesserEqual(lhs, rhs) + def lesserEqual(lhs: NDArray, rhs: Float): NDArray + = org.apache.mxnet.NDArray.lesserEqual(lhs, rhs) + + def array(sourceArr: java.util.List[java.lang.Float], shape: Shape, ctx: Context = null): NDArray + = org.apache.mxnet.NDArray.array( + sourceArr.asScala.map(ele => Float.unbox(ele)).toArray, shape, ctx) + + def arange(start: Float, stop: Float, step: Float, repeat: Int, + ctx: Context, dType: DType.DType): NDArray = + org.apache.mxnet.NDArray.arange(start, Some(stop), step, repeat, ctx, dType) +} + +class NDArray(val nd : org.apache.mxnet.NDArray ) { + + def this(arr : Array[Float], shape : Shape, ctx : Context) = { + this(org.apache.mxnet.NDArray.array(arr, shape, ctx)) + } + + def this(arr : java.util.List[java.lang.Float], shape : Shape, ctx : Context) = { + this(NDArray.array(arr, shape, ctx)) + } + + def serialize() : Array[Byte] = nd.serialize() + + def dispose() : Unit = nd.dispose() + def disposeDeps() : NDArray = nd.disposeDepsExcept() + // def disposeDepsExcept(arr : Array[NDArray]) : NDArray = nd.disposeDepsExcept() + + def slice(start : Int, stop : Int) : NDArray = nd.slice(start, stop) + + def slice (i : Int) : NDArray = nd.slice(i) + + def at(idx : Int) : NDArray = nd.at(idx) + + def T : NDArray = nd.T + + def dtype : DType = nd.dtype + + def asType(dtype : DType) : NDArray = nd.asType(dtype) + + def reshape(dims : Array[Int]) : NDArray = nd.reshape(dims) + + def waitToRead(): Unit = nd.waitToRead() + + def context : Context = nd.context + + def set(value : Float) : NDArray = nd.set(value) + def set(other : NDArray) : NDArray = nd.set(other) + def set(other : Array[Float]) : NDArray = nd.set(other) + + def add(other : NDArray) : NDArray = this.nd + other.nd + def add(other : Float) : NDArray = this.nd + other + def _add(other : NDArray) : NDArray = this.nd += other + def _add(other : Float) : NDArray = this.nd += other + def subtract(other : NDArray) : NDArray = this.nd - other + def subtract(other : Float) : NDArray = this.nd - other + def _subtract(other : NDArray) : NDArray = this.nd -= other + def _subtract(other : Float) : NDArray = this.nd -= other + def multiply(other : NDArray) : NDArray = this.nd * other + def multiply(other : Float) : NDArray = this.nd * other + def _multiply(other : NDArray) : NDArray = this.nd *= other + def _multiply(other : Float) : NDArray = this.nd *= other + def div(other : NDArray) : NDArray = this.nd / other + def div(other : Float) : NDArray = this.nd / other + def _div(other : NDArray) : NDArray = this.nd /= other + def _div(other : Float) : NDArray = this.nd /= other + def pow(other : NDArray) : NDArray = this.nd ** other + def pow(other : Float) : NDArray = this.nd ** other + def _pow(other : NDArray) : NDArray = this.nd **= other + def _pow(other : Float) : NDArray = this.nd **= other + def mod(other : NDArray) : NDArray = this.nd % other + def mod(other : Float) : NDArray = this.nd % other + def _mod(other : NDArray) : NDArray = this.nd %= other + def _mod(other : Float) : NDArray = 
this.nd %= other + def greater(other : NDArray) : NDArray = this.nd > other + def greater(other : Float) : NDArray = this.nd > other + def greaterEqual(other : NDArray) : NDArray = this.nd >= other + def greaterEqual(other : Float) : NDArray = this.nd >= other + def lesser(other : NDArray) : NDArray = this.nd < other + def lesser(other : Float) : NDArray = this.nd < other + def lesserEqual(other : NDArray) : NDArray = this.nd <= other + def lesserEqual(other : Float) : NDArray = this.nd <= other + + def toArray : Array[Float] = nd.toArray + + def toScalar : Float = nd.toScalar + + def copyTo(other : NDArray) : NDArray = nd.copyTo(other) + + def copyTo(ctx : Context) : NDArray = nd.copyTo(ctx) + + def copy() : NDArray = copyTo(this.context) + + def shape : Shape = nd.shape + + def size : Int = shape.product + + def asInContext(context: Context): NDArray = nd.asInContext(context) + + override def equals(obj: Any): Boolean = nd.equals(obj) + override def hashCode(): Int = nd.hashCode +} + +object NDArrayFuncReturn { + implicit def toNDFuncReturn(javaFunReturn : NDArrayFuncReturn) + : org.apache.mxnet.NDArrayFuncReturn = javaFunReturn.ndFuncReturn + implicit def toJavaNDFuncReturn(ndFuncReturn : org.apache.mxnet.NDArrayFuncReturn) + : NDArrayFuncReturn = new NDArrayFuncReturn(ndFuncReturn) +} + +private[mxnet] class NDArrayFuncReturn(val ndFuncReturn : org.apache.mxnet.NDArrayFuncReturn) { + def head : NDArray = ndFuncReturn.head + def get : NDArray = ndFuncReturn.get + def apply(i : Int) : NDArray = ndFuncReturn.apply(i) + // TODO: Add JavaNDArray operational stuff +} diff --git a/scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java b/scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java new file mode 100644 index 000000000000..a9bad83f62d6 --- /dev/null +++ b/scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.mxnet.javaapi; + +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.junit.Assert.assertTrue; + +public class NDArrayTest { + @Test + public void testCreateNDArray() { + NDArray nd = new NDArray(new float[]{1.0f, 2.0f, 3.0f}, + new Shape(new int[]{1, 3}), + new Context("cpu", 0)); + int[] arr = new int[]{1, 3}; + assertTrue(Arrays.equals(nd.shape().toArray(), arr)); + assertTrue(nd.at(0).at(0).toArray()[0] == 1.0f); + List list = Arrays.asList(1.0f, 2.0f, 3.0f); + // Second way creating NDArray + nd = NDArray.array(list, + new Shape(new int[]{1, 3}), + new Context("cpu", 0)); + assertTrue(Arrays.equals(nd.shape().toArray(), arr)); + } + + @Test + public void testZeroOneEmpty(){ + NDArray ones = NDArray.ones(new Context("cpu", 0), new int[]{100, 100}); + NDArray zeros = NDArray.zeros(new Context("cpu", 0), new int[]{100, 100}); + NDArray empty = NDArray.empty(new Context("cpu", 0), new int[]{100, 100}); + int[] arr = new int[]{100, 100}; + assertTrue(Arrays.equals(ones.shape().toArray(), arr)); + assertTrue(Arrays.equals(zeros.shape().toArray(), arr)); + assertTrue(Arrays.equals(empty.shape().toArray(), arr)); + } + + @Test + public void testComparison(){ + NDArray nd = new NDArray(new float[]{1.0f, 2.0f, 3.0f}, new Shape(new int[]{3}), new Context("cpu", 0)); + NDArray nd2 = new NDArray(new float[]{3.0f, 4.0f, 5.0f}, new Shape(new int[]{3}), new Context("cpu", 0)); + nd = nd.add(nd2); + float[] greater = new float[]{1, 1, 1}; + assertTrue(Arrays.equals(nd.greater(nd2).toArray(), greater)); + nd = nd.subtract(nd2); + nd = nd.subtract(nd2); + float[] lesser = new float[]{0, 0, 0}; + assertTrue(Arrays.equals(nd.greater(nd2).toArray(), lesser)); + } + + @Test + public void testGenerated(){ + NDArray$ NDArray = NDArray$.MODULE$; + float[] arr = new float[]{1.0f, 2.0f, 3.0f}; + NDArray nd = new NDArray(arr, new Shape(new int[]{3}), new Context("cpu", 0)); + float result = NDArray.norm(nd).invoke().get().toArray()[0]; + float cal = 0.0f; + for (float ele : arr) { + cal += ele * ele; + } + cal = (float) Math.sqrt(cal); + assertTrue(Math.abs(result - cal) < 1e-5); + NDArray dotResult = new NDArray(new float[]{0}, new Shape(new int[]{1}), new Context("cpu", 0)); + NDArray.dot(nd, nd).setout(dotResult).invoke().get(); + assertTrue(Arrays.equals(dotResult.toArray(), new float[]{14.0f})); + } +} diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala new file mode 100644 index 000000000000..c530c730a449 --- /dev/null +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala @@ -0,0 +1,203 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.mxnet.javaapi + +import org.apache.mxnet.init.Base._ +import org.apache.mxnet.utils.CToScalaUtils + +import scala.annotation.StaticAnnotation +import scala.collection.mutable.ListBuffer +import scala.language.experimental.macros +import scala.reflect.macros.blackbox + +private[mxnet] class AddJNDArrayAPIs(isContrib: Boolean) extends StaticAnnotation { + private[mxnet] def macroTransform(annottees: Any*) = macro JavaNDArrayMacro.typeSafeAPIDefs +} + +private[mxnet] object JavaNDArrayMacro { + case class NDArrayArg(argName: String, argType: String, isOptional : Boolean) + case class NDArrayFunction(name: String, listOfArgs: List[NDArrayArg]) + + // scalastyle:off havetype + def typeSafeAPIDefs(c: blackbox.Context)(annottees: c.Expr[Any]*) = { + typeSafeAPIImpl(c)(annottees: _*) + } + // scalastyle:off havetype + + private val ndarrayFunctions: List[NDArrayFunction] = initNDArrayModule() + + private def typeSafeAPIImpl(c: blackbox.Context)(annottees: c.Expr[Any]*) : c.Expr[Any] = { + import c.universe._ + + val isContrib: Boolean = c.prefix.tree match { + case q"new AddJNDArrayAPIs($b)" => c.eval[Boolean](c.Expr(b)) + } + // Defines Operators that should not generated + val notGenerated = Set("Custom") + + val newNDArrayFunctions = { + if (isContrib) ndarrayFunctions.filter( + func => func.name.startsWith("_contrib_") || !func.name.startsWith("_")) + else ndarrayFunctions.filterNot(_.name.startsWith("_")) + }.filterNot(ele => notGenerated.contains(ele.name)).groupBy(_.name.toLowerCase).map(ele => { + // Pattern matching for not generating depreciated method + if (ele._2.length == 1) ele._2.head + else { + if (ele._2.head.name.head.isLower) ele._2.head + else ele._2.last + } + }) + + val functionDefs = ListBuffer[DefDef]() + val classDefs = ListBuffer[ClassDef]() + + newNDArrayFunctions.foreach { ndarrayfunction => + + // Construct argument field with all required args + var argDef = ListBuffer[String]() + // Construct Optional Arg + var OptionArgDef = ListBuffer[String]() + // Construct function Implementation field (e.g norm) + var impl = ListBuffer[String]() + impl += "val map = scala.collection.mutable.Map[String, Any]()" + // scalastyle:off + impl += "val args= scala.collection.mutable.ArrayBuffer.empty[org.apache.mxnet.NDArray]" + // scalastyle:on + // Construct Class Implementation (e.g normBuilder) + var classImpl = ListBuffer[String]() + ndarrayfunction.listOfArgs.foreach({ ndarrayArg => + // var is a special word used to define variable in Scala, + // need to changed to something else in order to make it work + var currArgName = ndarrayArg.argName match { + case "var" => "vari" + case "type" => "typeOf" + case _ => ndarrayArg.argName + } + if (ndarrayArg.isOptional) { + OptionArgDef += s"private var $currArgName : ${ndarrayArg.argType} = null" + val tempDef = s"def set$currArgName($currArgName : ${ndarrayArg.argType})" + val tempImpl = s"this.$currArgName = $currArgName\nthis" + classImpl += s"$tempDef = {$tempImpl}" + } else { + argDef += s"$currArgName : ${ndarrayArg.argType}" + } + // NDArray arg implementation + val returnType = "org.apache.mxnet.javaapi.NDArray" + val base = + if (ndarrayArg.argType.equals(returnType)) { + s"args += this.$currArgName" + } else if (ndarrayArg.argType.equals(s"Array[$returnType]")){ + s"this.$currArgName.foreach(args+=_)" + } else { + "map(\"" + ndarrayArg.argName + "\") = this." 
+ currArgName + } + impl.append( + if (ndarrayArg.isOptional) s"if (this.$currArgName != null) $base" + else base + ) + }) + // add default out parameter + classImpl += + "def setout(out : org.apache.mxnet.javaapi.NDArray) = {this.out = out\nthis}" + impl += "if (this.out != null) map(\"out\") = this.out" + OptionArgDef += "private var out : org.apache.mxnet.NDArray = null" + val returnType = "org.apache.mxnet.javaapi.NDArrayFuncReturn" + // scalastyle:off + // Combine and build the function string + impl += "org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke(\"" + ndarrayfunction.name + "\", args.toSeq, map.toMap)" + val classDef = s"class ${ndarrayfunction.name}Builder(${argDef.mkString(",")})" + val classBody = s"${OptionArgDef.mkString("\n")}\n${classImpl.mkString("\n")}\ndef invoke() : $returnType = {${impl.mkString("\n")}}" + val classFinal = s"$classDef {$classBody}" + val functionDef = s"def ${ndarrayfunction.name} (${argDef.mkString(",")})" + val functionBody = s"new ${ndarrayfunction.name}Builder(${argDef.map(_.split(":")(0)).mkString(",")})" + val functionFinal = s"$functionDef = $functionBody" + // scalastyle:on + functionDefs += c.parse(functionFinal).asInstanceOf[DefDef] + classDefs += c.parse(classFinal).asInstanceOf[ClassDef] + } + + structGeneration(c)(functionDefs.toList, classDefs.toList, annottees : _*) + } + + private def structGeneration(c: blackbox.Context) + (funcDef : List[c.universe.DefDef], + classDef : List[c.universe.ClassDef], + annottees: c.Expr[Any]*) + : c.Expr[Any] = { + import c.universe._ + val inputs = annottees.map(_.tree).toList + // pattern match on the inputs + var modDefs = inputs map { + case ClassDef(mods, name, something, template) => + val q = template match { + case Template(superMaybe, emptyValDef, defs) => + Template(superMaybe, emptyValDef, defs ++ funcDef ++ classDef) + case ex => + throw new IllegalArgumentException(s"Invalid template: $ex") + } + ClassDef(mods, name, something, q) + case ModuleDef(mods, name, template) => + val q = template match { + case Template(superMaybe, emptyValDef, defs) => + Template(superMaybe, emptyValDef, defs ++ funcDef ++ classDef) + case ex => + throw new IllegalArgumentException(s"Invalid template: $ex") + } + ModuleDef(mods, name, q) + case ex => + throw new IllegalArgumentException(s"Invalid macro input: $ex") + } + // modDefs ++= classDef + // wrap the result up in an Expr, and return it + val result = c.Expr(Block(modDefs, Literal(Constant()))) + result + } + + // List and add all the atomic symbol functions to current module. + private def initNDArrayModule(): List[NDArrayFunction] = { + val opNames = ListBuffer.empty[String] + _LIB.mxListAllOpNames(opNames) + opNames.map(opName => { + val opHandle = new RefLong + _LIB.nnGetOpHandle(opName, opHandle) + makeNDArrayFunction(opHandle.value, opName) + }).toList + } + + // Create an atomic symbol function by handle and function name. 
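+  // For illustration (example values assumed, not exhaustive): for an operator such as
+  // "dot", mxSymbolGetAtomicSymbolInfo reports argument names like (lhs, rhs, transpose_a)
+  // together with raw C type strings such as "NDArray-or-Symbol" or "boolean, optional, default=0".
+  // argumentCleaner maps these to ("org.apache.mxnet.javaapi.NDArray", isOptional = false)
+  // and ("java.lang.Boolean", isOptional = true), which determines whether the generated
+  // Java builder takes the argument as a constructor parameter or exposes a set* method.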
+ private def makeNDArrayFunction(handle: NDArrayHandle, aliasName: String) + : NDArrayFunction = { + val name = new RefString + val desc = new RefString + val keyVarNumArgs = new RefString + val numArgs = new RefInt + val argNames = ListBuffer.empty[String] + val argTypes = ListBuffer.empty[String] + val argDescs = ListBuffer.empty[String] + + _LIB.mxSymbolGetAtomicSymbolInfo( + handle, name, desc, numArgs, argNames, argTypes, argDescs, keyVarNumArgs) + val argList = argNames zip argTypes map { case (argName, argType) => + val typeAndOption = + CToScalaUtils.argumentCleaner(argName, argType, + "org.apache.mxnet.javaapi.NDArray", "javaapi.Shape") + new NDArrayArg(argName, typeAndOption._1, typeAndOption._2) + } + new NDArrayFunction(aliasName, argList.toList) + } +} diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala index d0ebe5b1d2cb..48d8fdf38bc4 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala @@ -21,19 +21,19 @@ private[mxnet] object CToScalaUtils { // Convert C++ Types to Scala Types - def typeConversion(in : String, argType : String = "", - argName : String, returnType : String) : String = { + def typeConversion(in : String, argType : String = "", argName : String, + returnType : String, shapeType : String = "Shape") : String = { in match { - case "Shape(tuple)" | "ShapeorNone" => "org.apache.mxnet.Shape" + case "Shape(tuple)" | "ShapeorNone" => s"org.apache.mxnet.$shapeType" case "Symbol" | "NDArray" | "NDArray-or-Symbol" => returnType case "Symbol[]" | "NDArray[]" | "NDArray-or-Symbol[]" | "SymbolorSymbol[]" => s"Array[$returnType]" - case "float" | "real_t" | "floatorNone" => "org.apache.mxnet.Base.MXFloat" - case "int" | "intorNone" | "int(non-negative)" => "Int" - case "long" | "long(non-negative)" => "Long" - case "double" | "doubleorNone" => "Double" + case "float" | "real_t" | "floatorNone" => "java.lang.Float" + case "int" | "intorNone" | "int(non-negative)" => "java.lang.Integer" + case "long" | "long(non-negative)" => "java.lang.Long" + case "double" | "doubleorNone" => "java.lang.Double" case "string" => "String" - case "boolean" | "booleanorNone" => "Boolean" + case "boolean" | "booleanorNone" => "java.lang.Boolean" case "tupleof" | "tupleof" | "tupleof<>" | "ptr" | "" => "Any" case default => throw new IllegalArgumentException( s"Invalid type for args: $default\nString argType: $argType\nargName: $argName") @@ -52,8 +52,8 @@ private[mxnet] object CToScalaUtils { * @param argType Raw arguement Type description * @return (Scala_Type, isOptional) */ - def argumentCleaner(argName: String, - argType : String, returnType : String) : (String, Boolean) = { + def argumentCleaner(argName: String, argType : String, + returnType : String, shapeType : String = "Shape") : (String, Boolean) = { val spaceRemoved = argType.replaceAll("\\s+", "") var commaRemoved : Array[String] = new Array[String](0) // Deal with the case e.g: stype : {'csr', 'default', 'row_sparse'} @@ -73,7 +73,7 @@ private[mxnet] object CToScalaUtils { s"""expected "default=..." 
got ${commaRemoved(2)}""") (typeConversion(commaRemoved(0), argType, argName, returnType), true) } else if (commaRemoved.length == 2 || commaRemoved.length == 1) { - val tempType = typeConversion(commaRemoved(0), argType, argName, returnType) + val tempType = typeConversion(commaRemoved(0), argType, argName, returnType, shapeType) val tempOptional = tempType.equals("org.apache.mxnet.Symbol") (tempType, tempOptional) } else { diff --git a/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala b/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala index c3a7c58c1afc..4404b0885d57 100644 --- a/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala +++ b/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala @@ -36,7 +36,7 @@ class MacrosSuite extends FunSuite with BeforeAndAfterAll { ) val output = List( ("org.apache.mxnet.Symbol", true), - ("Int", false), + ("java.lang.Integer", false), ("org.apache.mxnet.Shape", true), ("String", true), ("Any", false) From 2bc818e72a3f7029c210bf5860573f11ff421886 Mon Sep 17 00:00:00 2001 From: Andrew Ayres Date: Fri, 19 Oct 2018 15:47:14 -0700 Subject: [PATCH 02/38] Java Inference api and SSD example (#12830) * New Java inference API and SSD example * Adding license to java files and fixing SSD example * Fixing SSD example to point to ObjectDetector instead of ImageClassifier * Make scripts for object detector independent to os and hw cpu/gpu * Added API Docs to Java Inference API. Small fixes for PR * Cosmetic updates for API DOCS requested during PR * Attempt to fix the CI Javafx compiler issue * Migrate from Javafx to apache commons for Pair implementation * Removing javafx from pom file * Fixes to appease the ScalaStyle deity * Minor fix in SSD script and Readme * Added ObjectDetectorOutput which is a POJO for Object Detector to simplify the return type * Removing Apache Commons Immutable Pair * Adding license to new file * Minor style fixes * minor style fix * Updating to be in scala style and not explicitly declare some unnecessary variables --- .../infer/objectdetector/run_ssd_example.sh | 14 +- .../objectdetector/run_ssd_java_example.sh | 47 +++++ .../infer/javapi/objectdetector/README.md | 116 ++++++++++ .../objectdetector/SSDClassifierExample.java | 199 ++++++++++++++++++ .../infer/objectdetector/README.md | 4 +- .../mxnet/infer/javaapi/ObjectDetector.scala | 106 ++++++++++ .../infer/javaapi/ObjectDetectorOutput.scala | 34 +++ .../mxnet/infer/javaapi/Predictor.scala | 69 ++++++ 8 files changed, 586 insertions(+), 3 deletions(-) create mode 100755 scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh create mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/README.md create mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/SSDClassifierExample.java create mode 100644 scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala create mode 100644 scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetectorOutput.scala create mode 100644 scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala diff --git a/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh b/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh index 8cea892b5809..adb8830de06e 100755 --- a/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh +++ 
b/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh @@ -17,9 +17,21 @@ # specific language governing permissions and limitations # under the License. +hw_type=cpu +if [[ $1 = gpu ]] +then + hw_type=gpu +fi + +platform=linux-x86_64 + +if [[ $OSTYPE = [darwin]* ]] +then + platform=osx-x86_64 +fi MXNET_ROOT=$(cd "$(dirname $0)/../../../../../"; pwd) -CLASS_PATH=$MXNET_ROOT/scala-package/assembly/osx-x86_64-cpu/target/*:$MXNET_ROOT/scala-package/examples/target/*:$MXNET_ROOT/scala-package/examples/target/classes/lib/*:$MXNET_ROOT/scala-package/infer/target/* +CLASS_PATH=$MXNET_ROOT/scala-package/assembly/$platform-$hw_type/target/*:$MXNET_ROOT/scala-package/examples/target/*:$MXNET_ROOT/scala-package/examples/target/classes/lib/*:$MXNET_ROOT/scala-package/infer/target/* # model dir and prefix MODEL_DIR=$1 diff --git a/scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh b/scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh new file mode 100755 index 000000000000..f444a3a59af7 --- /dev/null +++ b/scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +hw_type=cpu +if [[ $4 = gpu ]] +then + hw_type=gpu +fi + +platform=linux-x86_64 + +if [[ $OSTYPE = [darwin]* ]] +then + platform=osx-x86_64 +fi + +MXNET_ROOT=$(cd "$(dirname $0)/../../../../../"; pwd) +CLASS_PATH=$MXNET_ROOT/scala-package/assembly/$platform-$hw_type/target/*:$MXNET_ROOT/scala-package/examples/target/*:$MXNET_ROOT/scala-package/examples/target/classes/lib/*:$MXNET_ROOT/scala-package/infer/target/*:$MXNET_ROOT/scala-package/examples/src/main/scala/org/apache/mxnetexamples/api/java/infer/imageclassifier/* + +# model dir and prefix +MODEL_DIR=$1 +# input image +INPUT_IMG=$2 +# which input image dir +INPUT_DIR=$3 + +java -Xmx8G -cp $CLASS_PATH \ + org.apache.mxnetexamples.infer.javapi.objectdetector.SSDClassifierExample \ + --model-path-prefix $MODEL_DIR \ + --input-image $INPUT_IMG \ + --input-dir $INPUT_DIR diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/README.md b/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/README.md new file mode 100644 index 000000000000..63b9f929a82e --- /dev/null +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/README.md @@ -0,0 +1,116 @@ +# Single Shot Multi Object Detection using Scala Inference API + +In this example, you will learn how to use Scala Inference API to run Inference on pre-trained Single Shot Multi Object Detection (SSD) MXNet model. + +The model is trained on the [Pascal VOC 2012 dataset](http://host.robots.ox.ac.uk/pascal/VOC/voc2012/index.html). 
The network is a SSD model built on Resnet50 as base network to extract image features. The model is trained to detect the following entities (classes): ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor']. For more details about the model, you can refer to the [MXNet SSD example](https://github.com/apache/incubator-mxnet/tree/master/example/ssd). + + +## Contents + +1. [Prerequisites](#prerequisites) +2. [Download artifacts](#download-artifacts) +3. [Setup datapath and parameters](#setup-datapath-and-parameters) +4. [Run the image inference example](#run-the-image-inference-example) +5. [Infer APIs](#infer-api-details) +6. [Next steps](#next-steps) + + +## Prerequisites + +1. MXNet +2. MXNet Scala Package +3. [IntelliJ IDE (or alternative IDE) project setup](http://mxnet.incubator.apache.org/tutorials/scala/mxnet_scala_on_intellij.html) with the MXNet Scala Package +4. wget + + +## Setup Guide + +### Download Artifacts +#### Step 1 +You can download the files using the script `get_ssd_data.sh`. It will download and place the model files in a `model` folder and the test image files in a `image` folder in the current directory. +From the `scala-package/examples/scripts/infer/imageclassifier/` folder run: + +```bash +./get_ssd_data.sh +``` + +**Note**: You may need to run `chmod +x get_resnet_data.sh` before running this script. + +Alternatively use the following links to download the Symbol and Params files via your browser: +- [resnet50_ssd_model-symbol.json](https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-symbol.json) +- [resnet50_ssd_model-0000.params](https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-0000.params) +- [synset.txt](https://github.com/awslabs/mxnet-model-server/blob/master/examples/ssd/synset.txt) + +In the pre-trained model, the `input_name` is `data` and shape is `(1, 3, 512, 512)`. +This shape translates to: a batch of `1` image, the image has color and uses `3` channels (RGB), and the image has the dimensions of `512` pixels in height by `512` pixels in width. + +`image/jpeg` is the expected input type, since this example's image pre-processor only supports the handling of binary JPEG images. + +The output shape is `(1, 6132, 6)`. As with the input, the `1` is the number of images. `6132` is the number of prediction results, and `6` is for the size of each prediction. Each prediction contains the following components: +- `Class` +- `Accuracy` +- `Xmin` +- `Ymin` +- `Xmax` +- `Ymax` + + +### Setup Datapath and Parameters +#### Step 2 +The code `Line 31: val baseDir = System.getProperty("user.dir")` in the example will automatically searches the work directory you have defined. Please put the files in your [work directory](https://stackoverflow.com/questions/16239130/java-user-dir-property-what-exactly-does-it-mean). + +Alternatively, if you would like to use your own path, please change line 31 into your own path +```scala +val baseDir = +``` + +The followings is the parameters defined for this example, you can find more information in the `class SSDClassifierExample`. + +| Argument | Comments | +| ----------------------------- | ---------------------------------------- | +| `model-path-prefix` | Folder path with prefix to the model (including json, params, and any synset file). | +| `input-image` | The image to run inference on. 
| +| `input-dir` | The directory of images to run inference on. | + + +## How to Run Inference +After the previous steps, you should be able to run the code using the following script that will pass all of the required parameters to the Infer API. + +From the `scala-package/examples/scripts/inferexample/objectdetector/` folder run: + +```bash +./run_ssd_example.sh ../models/resnet50_ssd/resnet50_ssd/resnet50_ssd_model ../images/dog.jpg ../images +``` + +**Notes**: +* These are relative paths to this script. +* You may need to run `chmod +x run_ssd_example.sh` before running this script. + +The example should give expected output as shown below: +``` +Class: car +Probabilties: 0.99847263 +(Coord:,312.21335,72.0291,456.01443,150.66176) +Class: bicycle +Probabilties: 0.90473825 +(Coord:,155.95807,149.96362,383.8369,418.94513) +Class: dog +Probabilties: 0.8226818 +(Coord:,83.82353,179.13998,206.63783,476.7875) +``` +the outputs come from the the input image, with top3 predictions picked. + + +## Infer API Details +This example uses ObjectDetector class provided by MXNet's scala package Infer APIs. It provides methods to load the images, create NDArray out of Java BufferedImage and run prediction using Classifier and Predictor APIs. + + +## References +This documentation used the model and inference setup guide from the [MXNet Model Server SSD example](https://github.com/awslabs/mxnet-model-server/blob/master/examples/ssd/README.md). + + +## Next Steps + +Check out the following related tutorials and examples for the Infer API: + +* [Image Classification with the MXNet Scala Infer API](../imageclassifier/README.md) diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/SSDClassifierExample.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/SSDClassifierExample.java new file mode 100644 index 000000000000..13f9d2d9a3e5 --- /dev/null +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/SSDClassifierExample.java @@ -0,0 +1,199 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.mxnetexamples.infer.javapi.objectdetector; + +import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput; +import org.kohsuke.args4j.CmdLineParser; +import org.kohsuke.args4j.Option; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.mxnet.javaapi.*; +import org.apache.mxnet.infer.javaapi.ObjectDetector; + +// scalastyle:off +import java.awt.image.BufferedImage; +// scalastyle:on + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import java.io.File; + +public class SSDClassifierExample { + @Option(name = "--model-path-prefix", usage = "input model directory and prefix of the model") + private String modelPathPrefix = "/model/ssd_resnet50_512"; + @Option(name = "--input-image", usage = "the input image") + private String inputImagePath = "/images/dog.jpg"; + @Option(name = "--input-dir", usage = "the input batch of images directory") + private String inputImageDir = "/images/"; + + final static Logger logger = LoggerFactory.getLogger(SSDClassifierExample.class); + + static List> + runObjectDetectionSingle(String modelPathPrefix, String inputImagePath, List context) { + Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); + List inputDescriptors = new ArrayList(); + inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); + BufferedImage img = ObjectDetector.loadImageFromFile(inputImagePath); + ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); + return objDet.imageObjectDetect(img, 3); + } + + static List>> + runObjectDetectionBatch(String modelPathPrefix, String inputImageDir, List context) { + Shape inputShape = new Shape(new int[]{1, 3, 512, 512}); + List inputDescriptors = new ArrayList(); + inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); + ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); + + // Loading batch of images from the directory path + List> batchFiles = generateBatches(inputImageDir, 20); + List>> outputList + = new ArrayList>>(); + + for (List batchFile : batchFiles) { + List imgList = ObjectDetector.loadInputBatch(batchFile); + // Running inference on batch of images loaded in previous step + List> tmp + = objDet.imageBatchObjectDetect(imgList, 5); + outputList.add(tmp); + } + return outputList; + } + + static List> generateBatches(String inputImageDirPath, int batchSize) { + File dir = new File(inputImageDirPath); + + List> output = new ArrayList>(); + List batch = new ArrayList(); + for (File imgFile : dir.listFiles()) { + batch.add(imgFile.getPath()); + if (batch.size() == batchSize) { + output.add(batch); + batch = new ArrayList(); + } + } + if (batch.size() > 0) { + output.add(batch); + } + return output; + } + + public static void main(String[] args) { + SSDClassifierExample inst = new SSDClassifierExample(); + CmdLineParser parser = new CmdLineParser(inst); + try { + parser.parseArgument(args); + } catch (Exception e) { + logger.error(e.getMessage(), e); + parser.printUsage(System.err); + System.exit(1); + } + + String mdprefixDir = inst.modelPathPrefix; + String imgPath = inst.inputImagePath; + String imgDir = inst.inputImageDir; + + if (!checkExist(Arrays.asList(mdprefixDir + "-symbol.json", imgDir, imgPath))) { + logger.error("Model or input image path does not exist"); + System.exit(1); + } + + List context = new ArrayList(); + if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && + 
Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { + context.add(Context.gpu()); + } else { + context.add(Context.cpu()); + } + + try { + Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); + Shape outputShape = new Shape(new int[] {1, 6132, 6}); + + + int width = inputShape.get(2); + int height = inputShape.get(3); + String outputStr = "\n"; + + List> output + = runObjectDetectionSingle(mdprefixDir, imgPath, context); + + for (List ele : output) { + for (ObjectDetectorOutput i : ele) { + outputStr += "Class: " + i.getClassName() + "\n"; + outputStr += "Probabilties: " + i.getProbability() + "\n"; + + List coord = Arrays.asList(i.getXMin() * width, + i.getXMax() * height, i.getYMin() * width, i.getYMax() * height); + StringBuilder sb = new StringBuilder(); + for (float c: coord) { + sb.append(", ").append(c); + } + outputStr += "Coord:" + sb.substring(2)+ "\n"; + } + } + logger.info(outputStr); + + List>> outputList = + runObjectDetectionBatch(mdprefixDir, imgDir, context); + + outputStr = "\n"; + int index = 0; + for (List> i: outputList) { + for (List j : i) { + outputStr += "*** Image " + (index + 1) + "***" + "\n"; + for (ObjectDetectorOutput k : j) { + outputStr += "Class: " + k.getClassName() + "\n"; + outputStr += "Probabilties: " + k.getProbability() + "\n"; + List coord = Arrays.asList(k.getXMin() * width, + k.getXMax() * height, k.getYMin() * width, k.getYMax() * height); + + StringBuilder sb = new StringBuilder(); + for (float c : coord) { + sb.append(", ").append(c); + } + outputStr += "Coord:" + sb.substring(2) + "\n"; + } + index++; + } + } + logger.info(outputStr); + + } catch (Exception e) { + logger.error(e.getMessage(), e); + parser.printUsage(System.err); + System.exit(1); + } + System.exit(0); + } + + static Boolean checkExist(List arr) { + Boolean exist = true; + for (String item : arr) { + exist = new File(item).exists() && exist; + if (!exist) { + logger.error("Cannot find: " + item); + } + } + return exist; + } +} diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md index 69328a44bab6..bf4a44a76d00 100644 --- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md +++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md @@ -31,7 +31,7 @@ You can download the files using the script `get_ssd_data.sh`. It will download From the `scala-package/examples/scripts/infer/imageclassifier/` folder run: ```bash -./get_resnet_data.sh +./get_ssd_data.sh ``` **Note**: You may need to run `chmod +x get_resnet_data.sh` before running this script. 
@@ -79,7 +79,7 @@ After the previous steps, you should be able to run the code using the following From the `scala-package/examples/scripts/inferexample/objectdetector/` folder run: ```bash -./run_ssd_example.sh ../models/resnet50_ssd_model ../images/dog.jpg ../images +./run_ssd_example.sh ../models/resnet50_ssd/resnet50_ssd_model ../images/dog.jpg ../images ``` **Notes**: diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala new file mode 100644 index 000000000000..6cd3df6b896b --- /dev/null +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.mxnet.infer.javaapi + +// scalastyle:off +import java.awt.image.BufferedImage +// scalastyle:on + +import org.apache.mxnet.javaapi.{Context, DataDesc, NDArray} + +import scala.collection.JavaConverters +import scala.collection.JavaConverters._ + + +class ObjectDetector(val objDetector: org.apache.mxnet.infer.ObjectDetector){ + + def this(modelPathPrefix: String, inputDescriptors: java.util.List[DataDesc], contexts: + java.util.List[Context], epoch: Int) + = this { + val informationDesc = JavaConverters.asScalaIteratorConverter(inputDescriptors.iterator) + .asScala.toIndexedSeq map {a => a: org.apache.mxnet.DataDesc} + val inContexts = (contexts.asScala.toList map {a => a: org.apache.mxnet.Context}).toArray + // scalastyle:off + new org.apache.mxnet.infer.ObjectDetector(modelPathPrefix, informationDesc, inContexts, Some(epoch)) + // scalastyle:on + } + + /** + * Detects objects and returns bounding boxes with corresponding class/label + * + * @param inputImage Path prefix of the input image + * @param topK Number of result elements to return, sorted by probability + * @return List of list of tuples of + * (class, [probability, xmin, ymin, xmax, ymax]) + */ + def imageObjectDetect(inputImage: BufferedImage, topK: Int): + java.util.List[java.util.List[ObjectDetectorOutput]] = { + val ret = objDetector.imageObjectDetect(inputImage, Some(topK)) + (ret map {a => (a map {e => new ObjectDetectorOutput(e._1, e._2)}).asJava}).asJava + } + + /** + * Takes input images as NDArrays. Useful when you want to perform multiple operations on + * the input array, or when you want to pass a batch of input images. + * + * @param input Indexed Sequence of NDArrays + * @param topK (Optional) How many top_k (sorting will be based on the last axis) + * elements to return. If not passed, returns all unsorted output. 
+ * @return List of list of tuples of + * (class, [probability, xmin, ymin, xmax, ymax]) + */ + def objectDetectWithNDArray(input: java.util.List[NDArray], topK: Int): + java.util.List[java.util.List[ObjectDetectorOutput]] = { + val ret = objDetector.objectDetectWithNDArray(convert(input.asScala.toIndexedSeq), Some(topK)) + (ret map {a => (a map {e => new ObjectDetectorOutput(e._1, e._2)}).asJava}).asJava + } + + /** + * To classify batch of input images according to the provided model + * + * @param inputBatch Input array of buffered images + * @param topK Number of result elements to return, sorted by probability + * @return List of list of tuples of (class, probability) + */ + def imageBatchObjectDetect(inputBatch: java.util.List[BufferedImage], topK: Int): + java.util.List[java.util.List[ObjectDetectorOutput]] = { + val ret = objDetector.imageBatchObjectDetect(inputBatch.asScala, Some(topK)) + (ret map {a => (a map {e => new ObjectDetectorOutput(e._1, e._2)}).asJava}).asJava + } + + def convert[B, A <% B](l: IndexedSeq[A]): IndexedSeq[B] = l map { a => a: B } + +} + + +object ObjectDetector { + implicit def fromObjectDetector(OD: org.apache.mxnet.infer.ObjectDetector): + ObjectDetector = new ObjectDetector(OD) + + implicit def toObjectDetector(jOD: ObjectDetector): + org.apache.mxnet.infer.ObjectDetector = jOD.objDetector + + def loadImageFromFile(inputImagePath: String): BufferedImage = { + org.apache.mxnet.infer.ImageClassifier.loadImageFromFile(inputImagePath) + } + + def loadInputBatch(inputImagePaths: java.util.List[String]): java.util.List[BufferedImage] = { + org.apache.mxnet.infer.ImageClassifier + .loadInputBatch(inputImagePaths.asScala.toList).toList.asJava + } +} diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetectorOutput.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetectorOutput.scala new file mode 100644 index 000000000000..13369c8fcef5 --- /dev/null +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetectorOutput.scala @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.mxnet.infer.javaapi + +class ObjectDetectorOutput (className: String, args: Array[Float]){ + + def getClassName: String = className + + def getProbability: Float = args(0) + + def getXMin: Float = args(1) + + def getXMax: Float = args(2) + + def getYMin: Float = args(3) + + def getYMax: Float = args(4) + +} diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala new file mode 100644 index 000000000000..26ccd06cf466 --- /dev/null +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.mxnet.infer.javaapi + +import org.apache.mxnet.javaapi.{Context, DataDesc, NDArray} + +import scala.collection.JavaConverters +import scala.collection.JavaConverters._ + +class Predictor(val predictor: org.apache.mxnet.infer.Predictor){ + def this(modelPathPrefix: String, inputDescriptors: java.util.List[DataDesc], + contexts: java.util.List[Context], epoch: Int) + = this { + val informationDesc = JavaConverters.asScalaIteratorConverter(inputDescriptors.iterator) + .asScala.toIndexedSeq map {a => a: org.apache.mxnet.DataDesc} + val inContexts = (contexts.asScala.toList map {a => a: org.apache.mxnet.Context}).toArray + new org.apache.mxnet.infer.Predictor(modelPathPrefix, informationDesc, inContexts, Some(epoch)) + } + + + /** + * Takes input as List of one dimensional arrays and creates the NDArray needed for inference + * The array will be reshaped based on the input descriptors. + * + * @param input: A List of a one-dimensional array. + An extra List is needed for when the model has more than one input. + * @return Indexed sequence array of outputs + */ + def predict(input: java.util.List[java.util.List[Float]]): + java.util.List[java.util.List[Float]] = { + val in = JavaConverters.asScalaIteratorConverter(input.iterator).asScala.toIndexedSeq + (predictor.predict(in map {a => a.asScala.toArray}) map {b => b.toList.asJava}).asJava + } + + + /** + * Predict using NDArray as input + * This method is useful when the input is a batch of data + * Note: User is responsible for managing allocation/deallocation of input/output NDArrays. + * + * @param input List of NDArrays + * @return Output of predictions as NDArrays + */ + def predictWithNDArray(input: java.util.List[NDArray]): + java.util.List[NDArray] = { + val ret = predictor.predictWithNDArray(convert(JavaConverters + .asScalaIteratorConverter(input.iterator).asScala.toIndexedSeq)) + // TODO: For some reason the implicit wasn't working here when trying to use convert. + // So did it this way. 
Needs to be figured out + (ret map {a => new NDArray(a)}).asJava + } + + private def convert[B, A <% B](l: IndexedSeq[A]): IndexedSeq[B] = l map { a => a: B } +} From 94f36651233258217d5005fe0e2894a7e88ffa21 Mon Sep 17 00:00:00 2001 From: Andrew Ayres Date: Fri, 19 Oct 2018 16:21:06 -0700 Subject: [PATCH 03/38] NativeResource Management in Scala (#12647) (#12883) * add Generic MXNetHandle trait and MXNetHandlePhantomRef class that will be used by all MXNetObjects * Generic Handle with AutoCloseable * add NativeResource and NativeResourceManager with Periodic GC calling * use NativeResource trait in NDArray, Symbol and Executor * add run train mnist script * create a Generic ResourceScope that can collect all NativeResources to dispose at the end * modify NativeResource and ResourceScope, extend NativeResource in NDArray, Symbol and Executor * remove GCExecutor * deRegister PhantomReferences by when calling dispose() * add Finalizer(temporary) to NativeResource * refactor NativeResource.dispose() method * update NativeResource/add Unit Test for NativeResource * updates to NativeResource/NativeResourceRef and unit tests to NativeResource * remove redundant code added because of the object equality that was needed * add ResourceScope * Fix NativeResource to not remove from Scope, add Unit Tests to ResourceScope * cleanup log/print debug statements * use TreeSet inplace of ArrayBuffer to speedup removal of resources from ResourceScope Fix Executor dispose and make KVStore a NativeResource * fix segfault that was happening because of NDArray creation on the fly in Optimizer * Add comments for dispose(param:Boolean) --- scala-package/core/pom.xml | 7 + .../scala/org/apache/mxnet/Executor.scala | 20 +- .../main/scala/org/apache/mxnet/KVStore.scala | 21 +- .../main/scala/org/apache/mxnet/Model.scala | 122 +++++------ .../main/scala/org/apache/mxnet/NDArray.scala | 18 +- .../org/apache/mxnet/NativeResource.scala | 189 +++++++++++++++++ .../scala/org/apache/mxnet/Optimizer.scala | 22 +- .../org/apache/mxnet/ResourceScope.scala | 196 ++++++++++++++++++ .../main/scala/org/apache/mxnet/Symbol.scala | 25 +-- .../org/apache/mxnet/io/MXDataIter.scala | 19 +- .../org/apache/mxnet/optimizer/SGD.scala | 10 +- .../apache/mxnet/NativeResourceSuite.scala | 69 ++++++ .../org/apache/mxnet/ResourceScopeSuite.scala | 151 ++++++++++++++ .../examples/scripts/run_train_mnist.sh | 33 +++ 14 files changed, 778 insertions(+), 124 deletions(-) create mode 100644 scala-package/core/src/main/scala/org/apache/mxnet/NativeResource.scala create mode 100644 scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala create mode 100644 scala-package/core/src/test/scala/org/apache/mxnet/NativeResourceSuite.scala create mode 100644 scala-package/core/src/test/scala/org/apache/mxnet/ResourceScopeSuite.scala create mode 100755 scala-package/examples/scripts/run_train_mnist.sh diff --git a/scala-package/core/pom.xml b/scala-package/core/pom.xml index 6e2d8d6e9cc7..d5396dab1e67 100644 --- a/scala-package/core/pom.xml +++ b/scala-package/core/pom.xml @@ -126,5 +126,12 @@ commons-io 2.1 + + + org.mockito + mockito-all + 1.10.19 + test + diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala b/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala index fc791d5cd9a3..19fb6fe5cee5 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala @@ -45,7 +45,7 @@ object Executor { * @see Symbol.bind : to 
create executor */ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle, - private[mxnet] val symbol: Symbol) extends WarnIfNotDisposed { + private[mxnet] val symbol: Symbol) extends NativeResource { private[mxnet] var argArrays: Array[NDArray] = null private[mxnet] var gradArrays: Array[NDArray] = null private[mxnet] var auxArrays: Array[NDArray] = null @@ -59,14 +59,15 @@ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle, private[mxnet] var _group2ctx: Map[String, Context] = null private val logger: Logger = LoggerFactory.getLogger(classOf[Executor]) - private var disposed = false - protected def isDisposed = disposed - - def dispose(): Unit = { - if (!disposed) { - outputs.foreach(_.dispose()) - _LIB.mxExecutorFree(handle) - disposed = true + override def nativeAddress: CPtrAddress = handle + override def nativeDeAllocator: (CPtrAddress => Int) = _LIB.mxExecutorFree + // cannot determine the off-heap size of this object + override val bytesAllocated: Long = 0 + override val ref: NativeResourceRef = super.register() + override def dispose(): Unit = { + if (!super.isDisposed) { + super.dispose() + outputs.foreach(o => o.dispose()) } } @@ -305,4 +306,5 @@ class Executor private[mxnet](private[mxnet] val handle: ExecutorHandle, checkCall(_LIB.mxExecutorPrint(handle, str)) str.value } + } diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala b/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala index 8e89ce76b877..45189a13aefc 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala @@ -52,22 +52,17 @@ object KVStore { } } -class KVStore(private[mxnet] val handle: KVStoreHandle) extends WarnIfNotDisposed { +class KVStore(private[mxnet] val handle: KVStoreHandle) extends NativeResource { private val logger: Logger = LoggerFactory.getLogger(classOf[KVStore]) private var updaterFunc: MXKVStoreUpdater = null - private var disposed = false - protected def isDisposed = disposed - /** - * Release the native memory. - * The object shall never be used after it is disposed. - */ - def dispose(): Unit = { - if (!disposed) { - _LIB.mxKVStoreFree(handle) - disposed = true - } - } + override def nativeAddress: CPtrAddress = handle + + override def nativeDeAllocator: CPtrAddress => MXUint = _LIB.mxKVStoreFree + + override val ref: NativeResourceRef = super.register() + + override val bytesAllocated: Long = 0L /** * Initialize a single or a sequence of key-value pairs into the store. 
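A note on the pattern above: every native-backed wrapper in this change supplies the same four members — the handle address, the JNI free function, an off-heap size estimate (0 when unknown), and a registration call. The following is a minimal sketch of a hypothetical wrapper, not part of the patch: MyNativeHandle and its no-op deallocator are invented for illustration (mirroring the doNothingDeAllocator trick used later for MXKVStoreUpdater), and the class is assumed to sit in the org.apache.mxnet package because NativeResource is private[mxnet].

package org.apache.mxnet

import org.apache.mxnet.Base.CPtrAddress

// Hypothetical example: the four members a native-backed class provides
// in order to participate in NativeResource tracking.
class MyNativeHandle(private val handle: CPtrAddress) extends NativeResource {
  // address of the underlying native object (CPtrAddress is a Long alias)
  override def nativeAddress: CPtrAddress = handle
  // stand-in deallocator; a real wrapper would point at a _LIB.mx*Free call
  override def nativeDeAllocator: CPtrAddress => Int = (_: CPtrAddress) => 0
  // off-heap size is unknown here, as for Executor, KVStore and Symbol
  override val bytesAllocated: Long = 0L
  // register with PhantomReference tracking and the current ResourceScope, if any
  override val ref: NativeResourceRef = super.register()
}

Inside a scope the object is then released automatically when the block ends, unless it is the value returned from the block:

ResourceScope.using() {
  val res = new MyNativeHandle(42L)   // 42L is a dummy handle for the sketch
  // ... use res ...
}                                     // the scope closes here and res is disposed
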
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Model.scala b/scala-package/core/src/main/scala/org/apache/mxnet/Model.scala index 4bb9cdd331a6..b835c4964dd0 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/Model.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/Model.scala @@ -259,7 +259,9 @@ object Model { workLoadList: Seq[Float] = Nil, monitor: Option[Monitor] = None, symGen: SymbolGenerator = null): Unit = { - val executorManager = new DataParallelExecutorManager( + ResourceScope.using() { + + val executorManager = new DataParallelExecutorManager( symbol = symbol, symGen = symGen, ctx = ctx, @@ -269,17 +271,17 @@ object Model { auxNames = auxNames, workLoadList = workLoadList) - monitor.foreach(executorManager.installMonitor) - executorManager.setParams(argParams, auxParams) + monitor.foreach(executorManager.installMonitor) + executorManager.setParams(argParams, auxParams) - // updater for updateOnKVStore = false - val updaterLocal = Optimizer.getUpdater(optimizer) + // updater for updateOnKVStore = false + val updaterLocal = Optimizer.getUpdater(optimizer) - kvStore.foreach(initializeKVStore(_, executorManager.paramArrays, - argParams, executorManager.paramNames, updateOnKVStore)) - if (updateOnKVStore) { - kvStore.foreach(_.setOptimizer(optimizer)) - } + kvStore.foreach(initializeKVStore(_, executorManager.paramArrays, + argParams, executorManager.paramNames, updateOnKVStore)) + if (updateOnKVStore) { + kvStore.foreach(_.setOptimizer(optimizer)) + } // Now start training for (epoch <- beginEpoch until endEpoch) { @@ -290,45 +292,46 @@ object Model { var epochDone = false // Iterate over training data. trainData.reset() - while (!epochDone) { - var doReset = true - while (doReset && trainData.hasNext) { - val dataBatch = trainData.next() - executorManager.loadDataBatch(dataBatch) - monitor.foreach(_.tic()) - executorManager.forward(isTrain = true) - executorManager.backward() - if (updateOnKVStore) { - updateParamsOnKVStore(executorManager.paramArrays, - executorManager.gradArrays, - kvStore, executorManager.paramNames) - } else { - updateParams(executorManager.paramArrays, - executorManager.gradArrays, - updaterLocal, ctx.length, - executorManager.paramNames, - kvStore) - } - monitor.foreach(_.tocPrint()) - // evaluate at end, so out_cpu_array can lazy copy - executorManager.updateMetric(evalMetric, dataBatch.label) + ResourceScope.using() { + while (!epochDone) { + var doReset = true + while (doReset && trainData.hasNext) { + val dataBatch = trainData.next() + executorManager.loadDataBatch(dataBatch) + monitor.foreach(_.tic()) + executorManager.forward(isTrain = true) + executorManager.backward() + if (updateOnKVStore) { + updateParamsOnKVStore(executorManager.paramArrays, + executorManager.gradArrays, + kvStore, executorManager.paramNames) + } else { + updateParams(executorManager.paramArrays, + executorManager.gradArrays, + updaterLocal, ctx.length, + executorManager.paramNames, + kvStore) + } + monitor.foreach(_.tocPrint()) + // evaluate at end, so out_cpu_array can lazy copy + executorManager.updateMetric(evalMetric, dataBatch.label) - nBatch += 1 - batchEndCallback.foreach(_.invoke(epoch, nBatch, evalMetric)) + nBatch += 1 + batchEndCallback.foreach(_.invoke(epoch, nBatch, evalMetric)) - // this epoch is done possibly earlier - if (epochSize != -1 && nBatch >= epochSize) { - doReset = false + // this epoch is done possibly earlier + if (epochSize != -1 && nBatch >= epochSize) { + doReset = false + } + } + if (doReset) { + 
trainData.reset() } - } - if (doReset) { - trainData.reset() - } - // this epoch is done - epochDone = (epochSize == -1 || nBatch >= epochSize) + // this epoch is done + epochDone = (epochSize == -1 || nBatch >= epochSize) + } } - val (name, value) = evalMetric.get name.zip(value).foreach { case (n, v) => logger.info(s"Epoch[$epoch] Train-$n=$v") @@ -336,20 +339,22 @@ object Model { val toc = System.currentTimeMillis logger.info(s"Epoch[$epoch] Time cost=${toc - tic}") - evalData.foreach { evalDataIter => - evalMetric.reset() - evalDataIter.reset() - // TODO: make DataIter implement Iterator - while (evalDataIter.hasNext) { - val evalBatch = evalDataIter.next() - executorManager.loadDataBatch(evalBatch) - executorManager.forward(isTrain = false) - executorManager.updateMetric(evalMetric, evalBatch.label) - } + ResourceScope.using() { + evalData.foreach { evalDataIter => + evalMetric.reset() + evalDataIter.reset() + // TODO: make DataIter implement Iterator + while (evalDataIter.hasNext) { + val evalBatch = evalDataIter.next() + executorManager.loadDataBatch(evalBatch) + executorManager.forward(isTrain = false) + executorManager.updateMetric(evalMetric, evalBatch.label) + } - val (name, value) = evalMetric.get - name.zip(value).foreach { case (n, v) => - logger.info(s"Epoch[$epoch] Train-$n=$v") + val (name, value) = evalMetric.get + name.zip(value).foreach { case (n, v) => + logger.info(s"Epoch[$epoch] Validation-$n=$v") + } } } @@ -359,8 +364,7 @@ object Model { epochEndCallback.foreach(_.invoke(epoch, symbol, argParams, auxParams)) } - updaterLocal.dispose() - executorManager.dispose() + } } // scalastyle:on parameterNum } diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala b/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala index 9b6a7dc66540..f2a7603caa85 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala @@ -562,16 +562,20 @@ object NDArray extends NDArrayBase { */ class NDArray private[mxnet](private[mxnet] val handle: NDArrayHandle, val writable: Boolean = true, - addToCollector: Boolean = true) extends WarnIfNotDisposed { + addToCollector: Boolean = true) extends NativeResource { if (addToCollector) { NDArrayCollector.collect(this) } + override def nativeAddress: CPtrAddress = handle + override def nativeDeAllocator: (CPtrAddress => Int) = _LIB.mxNDArrayFree + override val bytesAllocated: Long = DType.numOfBytes(this.dtype) * this.shape.product + + override val ref: NativeResourceRef = super.register() + // record arrays who construct this array instance // we use weak reference to prevent gc blocking private[mxnet] val dependencies = mutable.HashMap.empty[Long, WeakReference[NDArray]] - @volatile private var disposed = false - def isDisposed: Boolean = disposed def serialize(): Array[Byte] = { val buf = ArrayBuffer.empty[Byte] @@ -584,11 +588,10 @@ class NDArray private[mxnet](private[mxnet] val handle: NDArrayHandle, * The NDArrays it depends on will NOT be disposed.
* The object shall never be used after it is disposed. */ - def dispose(): Unit = { - if (!disposed) { - _LIB.mxNDArrayFree(handle) + override def dispose(): Unit = { + if (!super.isDisposed) { + super.dispose() dependencies.clear() - disposed = true } } @@ -1034,6 +1037,7 @@ class NDArray private[mxnet](private[mxnet] val handle: NDArrayHandle, // TODO: naive implementation shape.hashCode + toArray.hashCode } + } private[mxnet] object NDArrayConversions { diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/NativeResource.scala b/scala-package/core/src/main/scala/org/apache/mxnet/NativeResource.scala new file mode 100644 index 000000000000..48d4b0c193b1 --- /dev/null +++ b/scala-package/core/src/main/scala/org/apache/mxnet/NativeResource.scala @@ -0,0 +1,189 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.mxnet + +import org.apache.mxnet.Base.CPtrAddress +import java.lang.ref.{PhantomReference, ReferenceQueue, WeakReference} +import java.util.concurrent._ + +import org.apache.mxnet.Base.checkCall +import java.util.concurrent.atomic.AtomicLong + + +/** + * NativeResource trait is used to manage MXNet Objects + * such as NDArray, Symbol, Executor, etc., + * The MXNet Object calls NativeResource.register + * and assign the returned NativeResourceRef to PhantomReference + * NativeResource also implements AutoCloseable so MXNetObjects + * can be used like Resources in try-with-resources paradigm + */ +private[mxnet] trait NativeResource + extends AutoCloseable with WarnIfNotDisposed { + + /** + * native Address associated with this object + */ + def nativeAddress: CPtrAddress + + /** + * Function Pointer to the NativeDeAllocator of nativeAddress + */ + def nativeDeAllocator: (CPtrAddress => Int) + + /** Call NativeResource.register to get the reference + */ + val ref: NativeResourceRef + + /** + * Off-Heap Bytes Allocated for this object + */ + // intentionally making it a val, so it gets evaluated when defined + val bytesAllocated: Long + + private[mxnet] var scope: Option[ResourceScope] = None + + @volatile private var disposed = false + + override def isDisposed: Boolean = disposed || isDeAllocated + + /** + * Register this object for PhantomReference tracking and in + * ResourceScope if used inside ResourceScope. 
+ * @return NativeResourceRef that tracks reachability of this object + * using PhantomReference + */ + def register(): NativeResourceRef = { + scope = ResourceScope.getCurrentScope() + if (scope.isDefined) scope.get.add(this) + + NativeResource.totalBytesAllocated.getAndAdd(bytesAllocated) + // register with PhantomRef tracking to release incase the objects go + // out of reference within scope but are held for long time + NativeResourceRef.register(this, nativeDeAllocator) + } + + // Implements [[@link AutoCloseable.close]] + override def close(): Unit = { + dispose() + } + + // Implements [[@link WarnIfNotDisposed.dispose]] + def dispose(): Unit = dispose(true) + + /** + * This method deAllocates nativeResource and deRegisters + * from PhantomRef and removes from Scope if + * removeFromScope is set to true. + * @param removeFromScope remove from the currentScope if true + */ + // the parameter here controls whether to remove from current scope. + // [[ResourceScope.close]] calls NativeResource.dispose + // if we remove from the ResourceScope ie., from the container in ResourceScope. + // while iterating on the container, calling iterator.next is undefined and not safe. + // Note that ResourceScope automatically disposes all the resources within. + private[mxnet] def dispose(removeFromScope: Boolean = true): Unit = { + if (!disposed) { + checkCall(nativeDeAllocator(this.nativeAddress)) + NativeResourceRef.deRegister(ref) // removes from PhantomRef tracking + if (removeFromScope && scope.isDefined) scope.get.remove(this) + NativeResource.totalBytesAllocated.getAndAdd(-1*bytesAllocated) + disposed = true + } + } + + /* + this is used by the WarnIfNotDisposed finalizer, + the object could be disposed by the GC without the need for explicit disposal + but the finalizer might not have run, then the WarnIfNotDisposed throws a warning + */ + private[mxnet] def isDeAllocated(): Boolean = NativeResourceRef.isDeAllocated(ref) + +} + +private[mxnet] object NativeResource { + var totalBytesAllocated : AtomicLong = new AtomicLong(0) +} + +// Do not make [[NativeResource.resource]] a member of the class, +// this will hold reference and GC will not clear the object. +private[mxnet] class NativeResourceRef(resource: NativeResource, + val resourceDeAllocator: CPtrAddress => Int) + extends PhantomReference[NativeResource](resource, NativeResourceRef.refQ) {} + +private[mxnet] object NativeResourceRef { + + private[mxnet] val refQ: ReferenceQueue[NativeResource] + = new ReferenceQueue[NativeResource] + + private[mxnet] val refMap = new ConcurrentHashMap[NativeResourceRef, CPtrAddress]() + + private[mxnet] val cleaner = new ResourceCleanupThread() + + cleaner.start() + + def register(resource: NativeResource, nativeDeAllocator: (CPtrAddress => Int)): + NativeResourceRef = { + val ref = new NativeResourceRef(resource, nativeDeAllocator) + refMap.put(ref, resource.nativeAddress) + ref + } + + // remove from PhantomRef tracking + def deRegister(ref: NativeResourceRef): Unit = refMap.remove(ref) + + /** + * This method will check if the cleaner ran and deAllocated the object + * As a part of GC, when the object is unreachable GC inserts a phantomRef + * to the ReferenceQueue which the cleaner thread will deallocate, however + * the finalizer runs much later depending on the GC. 
+ * @param resource resource to verify if it has been deAllocated + * @return true if already deAllocated + */ + def isDeAllocated(ref: NativeResourceRef): Boolean = { + !refMap.containsKey(ref) + } + + def cleanup: Unit = { + // remove is a blocking call + val ref: NativeResourceRef = refQ.remove().asInstanceOf[NativeResourceRef] + // phantomRef will be removed from the map when NativeResource.close is called. + val resource = refMap.get(ref) + if (resource != 0L) { // since CPtrAddress is Scala a Long, it cannot be null + ref.resourceDeAllocator(resource) + refMap.remove(ref) + } + } + + protected class ResourceCleanupThread extends Thread { + setPriority(Thread.MAX_PRIORITY) + setName("NativeResourceDeAllocatorThread") + setDaemon(true) + + override def run(): Unit = { + while (true) { + try { + NativeResourceRef.cleanup + } + catch { + case _: InterruptedException => Thread.currentThread().interrupt() + } + } + } + } +} diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala b/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala index 758cbc829618..c3f8aaec6d60 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala @@ -19,6 +19,8 @@ package org.apache.mxnet import java.io._ +import org.apache.mxnet.Base.CPtrAddress + import scala.collection.mutable import scala.util.Either @@ -38,8 +40,10 @@ object Optimizer { } override def dispose(): Unit = { - states.values.foreach(optimizer.disposeState) - states.clear() + if (!super.isDisposed) { + states.values.foreach(optimizer.disposeState) + states.clear() + } } override def serializeState(): Array[Byte] = { @@ -285,7 +289,8 @@ abstract class Optimizer extends Serializable { } } -trait MXKVStoreUpdater { +trait MXKVStoreUpdater extends + NativeResource { /** * user-defined updater for the kvstore * It's this updater's responsibility to delete recv and local @@ -294,9 +299,14 @@ trait MXKVStoreUpdater { * @param local the value stored on local on this key */ def update(key: Int, recv: NDArray, local: NDArray): Unit - def dispose(): Unit - // def serializeState(): Array[Byte] - // def deserializeState(bytes: Array[Byte]): Unit + + // This is a hack to make Optimizers work with ResourceScope + // otherwise the user has to manage calling dispose on this object. + override def nativeAddress: CPtrAddress = hashCode() + override def nativeDeAllocator: CPtrAddress => Int = doNothingDeAllocator + private def doNothingDeAllocator(dummy: CPtrAddress): Int = 0 + override val ref: NativeResourceRef = super.register() + override val bytesAllocated: Long = 0L } trait MXKVStoreCachedStates { diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala b/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala new file mode 100644 index 000000000000..1c5782d873a9 --- /dev/null +++ b/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala @@ -0,0 +1,196 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.mxnet + +import java.util.HashSet + +import org.slf4j.LoggerFactory + +import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer +import scala.util.Try +import scala.util.control.{ControlThrowable, NonFatal} + +/** + * This class manages automatically releasing of [[NativeResource]]s + */ +class ResourceScope extends AutoCloseable { + + // HashSet does not take a custom comparator + private[mxnet] val resourceQ = new mutable.TreeSet[NativeResource]()(nativeAddressOrdering) + + private object nativeAddressOrdering extends Ordering[NativeResource] { + def compare(a: NativeResource, b: NativeResource): Int = { + a.nativeAddress compare b.nativeAddress + } + } + + ResourceScope.addToThreadLocal(this) + + /** + * Releases all the [[NativeResource]] by calling + * the associated [[NativeResource.close()]] method + */ + override def close(): Unit = { + ResourceScope.removeFromThreadLocal(this) + resourceQ.foreach(resource => if (resource != null) resource.dispose(false) ) + resourceQ.clear() + } + + /** + * Add a NativeResource to the scope + * @param resource + */ + def add(resource: NativeResource): Unit = { + resourceQ.+=(resource) + } + + /** + * Remove NativeResource from the Scope, this uses + * object equality to find the resource in the stack. + * @param resource + */ + def remove(resource: NativeResource): Unit = { + resourceQ.-=(resource) + } +} + +object ResourceScope { + + private val logger = LoggerFactory.getLogger(classOf[ResourceScope]) + + /** + * Captures all Native Resources created using the ResourceScope and + * at the end of the body, de allocates all the Native resources by calling close on them. + * This method will not deAllocate NativeResources returned from the block. + * @param scope (Optional). Scope in which to capture the native resources + * @param body block of code to execute in this scope + * @tparam A return type + * @return result of the operation, if the result is of type NativeResource, it is not + * de allocated so the user can use it and then de allocate manually by calling + * close or enclose in another resourceScope. 
+ */ + // inspired from slide 21 of https://www.slideshare.net/Odersky/fosdem-2009-1013261 + // and https://github.com/scala/scala/blob/2.13.x/src/library/scala/util/Using.scala + // TODO: we should move to the Scala util's Using method when we move to Scala 2.13 + def using[A](scope: ResourceScope = null)(body: => A): A = { + + val curScope = if (scope != null) scope else new ResourceScope() + + val prevScope: Option[ResourceScope] = ResourceScope.getPrevScope() + + @inline def resourceInGeneric(g: scala.collection.Iterable[_]) = { + g.foreach( n => + n match { + case nRes: NativeResource => { + removeAndAddToPrevScope(nRes) + } + case kv: scala.Tuple2[_, _] => { + if (kv._1.isInstanceOf[NativeResource]) removeAndAddToPrevScope( + kv._1.asInstanceOf[NativeResource]) + if (kv._2.isInstanceOf[NativeResource]) removeAndAddToPrevScope( + kv._2.asInstanceOf[NativeResource]) + } + } + ) + } + + @inline def removeAndAddToPrevScope(r: NativeResource) = { + curScope.remove(r) + if (prevScope.isDefined) { + prevScope.get.add(r) + r.scope = prevScope + } + } + + @inline def safeAddSuppressed(t: Throwable, suppressed: Throwable): Unit = { + if (!t.isInstanceOf[ControlThrowable]) t.addSuppressed(suppressed) + } + + var retThrowable: Throwable = null + + try { + val ret = body + ret match { + // don't de-allocate if returning any collection that contains NativeResource. + case resInGeneric: scala.collection.Iterable[_] => resourceInGeneric(resInGeneric) + case nRes: NativeResource => removeAndAddToPrevScope(nRes) + case ndRet: NDArrayFuncReturn => ndRet.arr.foreach( nd => removeAndAddToPrevScope(nd) ) + case _ => // do nothing + } + ret + } catch { + case t: Throwable => + retThrowable = t + null.asInstanceOf[A] // we'll throw in finally + } finally { + var toThrow: Throwable = retThrowable + if (retThrowable eq null) curScope.close() + else { + try { + curScope.close + } catch { + case closeThrowable: Throwable => + if (NonFatal(retThrowable) && !NonFatal(closeThrowable)) toThrow = closeThrowable + else safeAddSuppressed(retThrowable, closeThrowable) + } finally { + throw toThrow + } + } + } + } + + // thread local Scopes + private[mxnet] val threadLocalScopes = new ThreadLocal[ArrayBuffer[ResourceScope]] { + override def initialValue(): ArrayBuffer[ResourceScope] = + new ArrayBuffer[ResourceScope]() + } + + /** + * Add resource to current ThreadLocal DataStructure + * @param r ResourceScope to add. + */ + private[mxnet] def addToThreadLocal(r: ResourceScope): Unit = { + threadLocalScopes.get() += r + } + + /** + * Remove resource from current ThreadLocal DataStructure + * @param r ResourceScope to remove + */ + private[mxnet] def removeFromThreadLocal(r: ResourceScope): Unit = { + threadLocalScopes.get() -= r + } + + /** + * Get the latest Scope in the stack + * @return + */ + private[mxnet] def getCurrentScope(): Option[ResourceScope] = { + Try(Some(threadLocalScopes.get().last)).getOrElse(None) + } + + /** + * Get the Last but one Scope from threadLocal Scopes. 
+ * @return n-1th scope or None when not found + */ + private[mxnet] def getPrevScope(): Option[ResourceScope] = { + val scopes = threadLocalScopes.get() + Try(Some(scopes(scopes.size - 2))).getOrElse(None) + } +} diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala b/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala index b1a3e392f41e..a009e7e343f2 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala @@ -29,21 +29,15 @@ import scala.collection.mutable.{ArrayBuffer, ListBuffer} * WARNING: it is your responsibility to clear this object through dispose(). * */ -class Symbol private(private[mxnet] val handle: SymbolHandle) extends WarnIfNotDisposed { +class Symbol private(private[mxnet] val handle: SymbolHandle) extends NativeResource { private val logger: Logger = LoggerFactory.getLogger(classOf[Symbol]) - private var disposed = false - protected def isDisposed = disposed - /** - * Release the native memory. - * The object shall never be used after it is disposed. - */ - def dispose(): Unit = { - if (!disposed) { - _LIB.mxSymbolFree(handle) - disposed = true - } - } + // unable to get the byteAllocated for Symbol + override val bytesAllocated: Long = 0L + override def nativeAddress: CPtrAddress = handle + override def nativeDeAllocator: (CPtrAddress => Int) = _LIB.mxSymbolFree + override val ref: NativeResourceRef = super.register() + def +(other: Symbol): Symbol = Symbol.createFromListedSymbols("_Plus")(Array(this, other)) def +[@specialized(Int, Float, Double) V](other: V): Symbol = { @@ -793,7 +787,7 @@ class Symbol private(private[mxnet] val handle: SymbolHandle) extends WarnIfNotD } val execHandle = new ExecutorHandleRef - val sharedHadle = if (sharedExec != null) sharedExec.handle else 0L + val sharedHandle = if (sharedExec != null) sharedExec.handle else 0L checkCall(_LIB.mxExecutorBindEX(handle, ctx.deviceTypeid, ctx.deviceId, @@ -806,7 +800,7 @@ class Symbol private(private[mxnet] val handle: SymbolHandle) extends WarnIfNotD argsGradHandle, reqsArray, auxArgsHandle, - sharedHadle, + sharedHandle, execHandle)) val executor = new Executor(execHandle.value, this.clone()) executor.argArrays = argsNDArray @@ -832,6 +826,7 @@ class Symbol private(private[mxnet] val handle: SymbolHandle) extends WarnIfNotD checkCall(_LIB.mxSymbolSaveToJSON(handle, jsonStr)) jsonStr.value } + } /** diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala b/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala index f7f858deb82d..998017750db2 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala @@ -33,7 +33,7 @@ import scala.collection.mutable.ListBuffer private[mxnet] class MXDataIter(private[mxnet] val handle: DataIterHandle, dataName: String = "data", labelName: String = "label") - extends DataIter with WarnIfNotDisposed { + extends DataIter with NativeResource { private val logger = LoggerFactory.getLogger(classOf[MXDataIter]) @@ -67,20 +67,13 @@ private[mxnet] class MXDataIter(private[mxnet] val handle: DataIterHandle, } } + override def nativeAddress: CPtrAddress = handle - private var disposed = false - protected def isDisposed = disposed + override def nativeDeAllocator: CPtrAddress => MXUint = _LIB.mxDataIterFree - /** - * Release the native memory. - * The object shall never be used after it is disposed. 
- */ - def dispose(): Unit = { - if (!disposed) { - _LIB.mxDataIterFree(handle) - disposed = true - } - } + override val ref: NativeResourceRef = super.register() + + override val bytesAllocated: Long = 0L /** * reset the iterator diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGD.scala b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGD.scala index e20b433ed1ed..d349feac3e93 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGD.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/optimizer/SGD.scala @@ -17,7 +17,7 @@ package org.apache.mxnet.optimizer -import org.apache.mxnet.{Optimizer, LRScheduler, NDArray} +import org.apache.mxnet._ import org.apache.mxnet.NDArrayConversions._ /** @@ -92,7 +92,13 @@ class SGD(val learningRate: Float = 0.01f, momentum: Float = 0.0f, if (momentum == 0.0f) { null } else { - NDArray.zeros(weight.shape, weight.context) + val s = NDArray.zeros(weight.shape, weight.context) + // this is created on the fly and shared between runs, + // we don't want it to be dispose from the scope + // and should be handled by the dispose + val scope = ResourceScope.getCurrentScope() + if (scope.isDefined) scope.get.remove(s) + s } } diff --git a/scala-package/core/src/test/scala/org/apache/mxnet/NativeResourceSuite.scala b/scala-package/core/src/test/scala/org/apache/mxnet/NativeResourceSuite.scala new file mode 100644 index 000000000000..81a9f605a887 --- /dev/null +++ b/scala-package/core/src/test/scala/org/apache/mxnet/NativeResourceSuite.scala @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.mxnet + +import java.lang.ref.ReferenceQueue +import java.util.concurrent.ConcurrentHashMap + +import org.apache.mxnet.Base.CPtrAddress +import org.mockito.Matchers.any +import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers, TagAnnotation} +import org.mockito.Mockito._ + +@TagAnnotation("resource") +class NativeResourceSuite extends FunSuite with BeforeAndAfterAll with Matchers { + + object TestRef { + def getRefQueue: ReferenceQueue[NativeResource] = { NativeResourceRef.refQ} + def getRefMap: ConcurrentHashMap[NativeResourceRef, CPtrAddress] + = {NativeResourceRef.refMap} + def getCleaner: Thread = { NativeResourceRef.cleaner } + } + + class TestRef(resource: NativeResource, + resourceDeAllocator: CPtrAddress => Int) + extends NativeResourceRef(resource, resourceDeAllocator) { + } + + test(testName = "test native resource setup/teardown") { + val a = spy(NDArray.ones(Shape(2, 3))) + val aRef = a.ref + val spyRef = spy(aRef) + + assert(TestRef.getRefMap.containsKey(aRef) == true) + a.close() + verify(a).dispose() + verify(a).nativeDeAllocator + // resourceDeAllocator does not get called when explicitly closing + verify(spyRef, times(0)).resourceDeAllocator + + assert(TestRef.getRefMap.containsKey(aRef) == false) + assert(a.isDisposed == true, "isDisposed should be set to true after calling close") + } + + test(testName = "test dispose") { + val a: NDArray = spy(NDArray.ones(Shape(3, 4))) + val aRef = a.ref + val spyRef = spy(aRef) + a.dispose() + verify(a).nativeDeAllocator + assert(TestRef.getRefMap.containsKey(aRef) == false) + assert(a.isDisposed == true, "isDisposed should be set to true after calling close") + } +} + diff --git a/scala-package/core/src/test/scala/org/apache/mxnet/ResourceScopeSuite.scala b/scala-package/core/src/test/scala/org/apache/mxnet/ResourceScopeSuite.scala new file mode 100644 index 000000000000..41dfa7d0ead2 --- /dev/null +++ b/scala-package/core/src/test/scala/org/apache/mxnet/ResourceScopeSuite.scala @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.mxnet + +import java.lang.ref.ReferenceQueue +import java.util.concurrent.ConcurrentHashMap + +import org.apache.mxnet.Base.CPtrAddress +import org.apache.mxnet.ResourceScope.logger +import org.mockito.Matchers.any +import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers} +import org.mockito.Mockito._ +import scala.collection.mutable.HashMap + +class ResourceScopeSuite extends FunSuite with BeforeAndAfterAll with Matchers { + + class TestNativeResource extends NativeResource { + /** + * native Address associated with this object + */ + override def nativeAddress: CPtrAddress = hashCode() + + /** + * Function Pointer to the NativeDeAllocator of nativeAddress + */ + override def nativeDeAllocator: CPtrAddress => Int = TestNativeResource.deAllocator + + /** Call NativeResource.register to get the reference + */ + override val ref: NativeResourceRef = super.register() + /** + * Off-Heap Bytes Allocated for this object + */ + override val bytesAllocated: Long = 0 + } + object TestNativeResource { + def deAllocator(handle: CPtrAddress): Int = 0 + } + + object TestPhantomRef { + def getRefQueue: ReferenceQueue[NativeResource] = { NativeResourceRef.refQ} + def getRefMap: ConcurrentHashMap[NativeResourceRef, CPtrAddress] + = {NativeResourceRef.refMap} + def getCleaner: Thread = { NativeResourceRef.cleaner } + + } + + class TestPhantomRef(resource: NativeResource, + resourceDeAllocator: CPtrAddress => Int) + extends NativeResourceRef(resource, resourceDeAllocator) { + } + + test(testName = "test NDArray Auto Release") { + var a: NDArray = null + var aRef: NativeResourceRef = null + var b: NDArray = null + + ResourceScope.using() { + b = ResourceScope.using() { + a = NDArray.ones(Shape(3, 4)) + aRef = a.ref + val x = NDArray.ones(Shape(3, 4)) + x + } + val bRef: NativeResourceRef = b.ref + assert(a.isDisposed == true, + "objects created within scope should have isDisposed set to true") + assert(b.isDisposed == false, + "returned NativeResource should not be released") + assert(TestPhantomRef.getRefMap.containsKey(aRef) == false, + "reference of resource in Scope should be removed refMap") + assert(TestPhantomRef.getRefMap.containsKey(bRef) == true, + "reference of resource outside scope should be not removed refMap") + } + assert(b.isDisposed, "resource returned from inner scope should be released in outer scope") + } + + test("test return object release from outer scope") { + var a: TestNativeResource = null + ResourceScope.using() { + a = ResourceScope.using() { + new TestNativeResource() + } + assert(a.isDisposed == false, "returned object should not be disposed within Using") + } + assert(a.isDisposed == true, "returned object should be disposed in the outer scope") + } + + test(testName = "test NativeResources in returned Lists are not disposed") { + var ndListRet: IndexedSeq[TestNativeResource] = null + ResourceScope.using() { + ndListRet = ResourceScope.using() { + val ndList: IndexedSeq[TestNativeResource] = + IndexedSeq(new TestNativeResource(), new TestNativeResource()) + ndList + } + ndListRet.foreach(nd => assert(nd.isDisposed == false, + "NativeResources within a returned collection should not be disposed")) + } + ndListRet.foreach(nd => assert(nd.isDisposed == true, + "NativeResources returned from inner scope should be disposed in outer scope")) + } + + test("test native resource inside a map") { + var nRInKeyOfMap: HashMap[TestNativeResource, String] = null + var nRInValOfMap: HashMap[String, TestNativeResource] = HashMap[String, TestNativeResource]() + 
+ ResourceScope.using() { + nRInKeyOfMap = ResourceScope.using() { + val ret = HashMap[TestNativeResource, String]() + ret.put(new TestNativeResource, "hello") + ret + } + assert(!nRInKeyOfMap.isEmpty) + + nRInKeyOfMap.keysIterator.foreach(it => assert(it.isDisposed == false, + "NativeResources returned in Traversable should not be disposed")) + } + + nRInKeyOfMap.keysIterator.foreach(it => assert(it.isDisposed)) + + ResourceScope.using() { + + nRInValOfMap = ResourceScope.using() { + val ret = HashMap[String, TestNativeResource]() + ret.put("world!", new TestNativeResource) + ret + } + assert(!nRInValOfMap.isEmpty) + nRInValOfMap.valuesIterator.foreach(it => assert(it.isDisposed == false, + "NativeResources returned in Collection should not be disposed")) + } + nRInValOfMap.valuesIterator.foreach(it => assert(it.isDisposed)) + } + +} diff --git a/scala-package/examples/scripts/run_train_mnist.sh b/scala-package/examples/scripts/run_train_mnist.sh new file mode 100755 index 000000000000..ea53c1ade66f --- /dev/null +++ b/scala-package/examples/scripts/run_train_mnist.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +set -e + +MXNET_ROOT=$(cd "$(dirname $0)/../../.."; pwd) +echo $MXNET_ROOT +CLASS_PATH=$MXNET_ROOT/scala-package/assembly/linux-x86_64-cpu/target/*:$MXNET_ROOT/scala-package/examples/target/*:$MXNET_ROOT/scala-package/examples/target/classes/lib/*:$MXNET_ROOT/scala-package/infer/target/* + +# model dir +DATA_PATH=$2 + +java -XX:+PrintGC -Xms256M -Xmx512M -Dmxnet.traceLeakedObjects=false -cp $CLASS_PATH \ + org.apache.mxnetexamples.imclassification.TrainMnist \ + --data-dir /home/ubuntu/mxnet_scala/scala-package/examples/mnist/ \ + --num-epochs 10000000 \ + --batch-size 1024 \ No newline at end of file From 58d4efbe06984b0fef23139b51171cbd56c45c06 Mon Sep 17 00:00:00 2001 From: Piyush Ghai Date: Wed, 24 Oct 2018 06:41:08 -0700 Subject: [PATCH 04/38] Added unit tests for Resource Scope in Java (#12955) --- .../mxnet/javaapi/ResourceScopeTestSuite.java | 104 ++++++++++++++++++ scala-package/pom.xml | 4 +- 2 files changed, 106 insertions(+), 2 deletions(-) create mode 100644 scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java diff --git a/scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java b/scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java new file mode 100644 index 000000000000..f570ba927faa --- /dev/null +++ b/scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +package org.apache.mxnet.javaapi; + +import org.apache.mxnet.NativeResourceRef; +import org.apache.mxnet.ResourceScope; +import org.junit.Test; + +import java.util.*; +import java.util.concurrent.Callable; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class ResourceScopeTestSuite { + + /** + * This is a placeholder class to test out whether NDArray References get collected or not when using + * try-with-resources in Java. + * + */ + class TestNDArray { + NDArray selfArray; + + public TestNDArray(Context context, int[] shape) { + this.selfArray = NDArray.ones(context, shape); + } + + public boolean verifyIsDisposed() { + return this.selfArray.nd().isDisposed(); + } + + public NativeResourceRef getNDArrayReference() { + return this.selfArray.nd().ref(); + } + } + + @Test + public void testNDArrayAutoRelease() { + TestNDArray test = null; + + try (ResourceScope scope = new ResourceScope()) { + test = new TestNDArray(Context.cpu(), new int[]{100, 100}); + } + + assertTrue(test.verifyIsDisposed()); + } + + @Test + public void testObjectReleaseFromList() { + List list = new ArrayList<>(); + + try (ResourceScope scope = new ResourceScope()) { + for (int i = 0;i < 10; i++) { + list.add(new TestNDArray(Context.cpu(), new int[] {100, 100})); + } + } + + assertEquals(list.size() , 10); + list.forEach(n -> assertTrue(n.verifyIsDisposed())); + } + + @Test + public void testObjectReleaseFromMap() { + Map stringToNDArrayMap = new HashMap<>(); + + try (ResourceScope scope = new ResourceScope()) { + for (int i = 0;i < 10; i++) { + stringToNDArrayMap.put(String.valueOf(i),new TestNDArray(Context.cpu(), new int[] {i, i})); + } + } + + assertEquals(stringToNDArrayMap.size(), 10); + stringToNDArrayMap.forEach((key, value) -> assertTrue(value.verifyIsDisposed())); + + Map ndArrayToStringMap = new HashMap<>(); + + try (ResourceScope scope = new ResourceScope()) { + for (int i = 0;i < 10; i++) { + ndArrayToStringMap.put(new TestNDArray(Context.cpu(), new int[] {i, i}), String.valueOf(i)); + } + } + + assertEquals(ndArrayToStringMap.size(), 10); + ndArrayToStringMap.forEach((key, value) -> assertTrue(key.verifyIsDisposed())); + + } +} diff --git a/scala-package/pom.xml b/scala-package/pom.xml index fe78a629ed20..eb3f6f0b3354 100644 --- a/scala-package/pom.xml +++ b/scala-package/pom.xml @@ -190,8 +190,8 @@ maven-compiler-plugin 3.3 - 1.6 - 1.6 + 1.8 + 1.8 UTF-8
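The test suite above also doubles as the basic usage pattern for the Java bindings: because ResourceScope implements AutoCloseable, a plain try-with-resources block releases every NDArray created inside it when the block exits. Below is a minimal, hypothetical sketch of that pattern in application code, kept to calls already exercised by the tests above; ScopeUsageSketch and the array names are placeholders, not part of the API.

import org.apache.mxnet.ResourceScope;
import org.apache.mxnet.javaapi.Context;
import org.apache.mxnet.javaapi.NDArray;

public class ScopeUsageSketch {
    public static void main(String[] args) {
        try (ResourceScope scope = new ResourceScope()) {
            // both arrays are tracked by the enclosing scope
            NDArray a = NDArray.ones(Context.cpu(), new int[]{2, 3});
            NDArray b = NDArray.ones(Context.cpu(), new int[]{2, 3});
            // ... use a and b ...
        }
        // a and b have been disposed at this point; anything needed after the
        // block must be created outside the scope, since the raw Java scope does
        // not move return values to a parent scope the way the Scala
        // ResourceScope.using block does.
    }
}
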
From f7599841411001778d50e7e076a3413a1c3d7a18 Mon Sep 17 00:00:00 2001 From: Piyush Ghai Date: Wed, 24 Oct 2018 14:45:27 -0700 Subject: [PATCH 05/38] Bumping down minimum java support from 8 to 7 (#12965) --- .../apache/mxnet/javaapi/ResourceScopeTestSuite.java | 12 +++++++++--- scala-package/pom.xml | 4 ++-- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java b/scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java index f570ba927faa..1c246d870e28 100644 --- a/scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java +++ b/scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java @@ -73,7 +73,9 @@ public void testObjectReleaseFromList() { } assertEquals(list.size() , 10); - list.forEach(n -> assertTrue(n.verifyIsDisposed())); + for (TestNDArray item : list) { + assertTrue(item.verifyIsDisposed()); + } } @Test @@ -87,7 +89,9 @@ public void testObjectReleaseFromMap() { } assertEquals(stringToNDArrayMap.size(), 10); - stringToNDArrayMap.forEach((key, value) -> assertTrue(value.verifyIsDisposed())); + for (Map.Entry entry : stringToNDArrayMap.entrySet()) { + assertTrue(entry.getValue().verifyIsDisposed()); + } Map ndArrayToStringMap = new HashMap<>(); @@ -98,7 +102,9 @@ public void testObjectReleaseFromMap() { } assertEquals(ndArrayToStringMap.size(), 10); - ndArrayToStringMap.forEach((key, value) -> assertTrue(key.verifyIsDisposed())); + for (Map.Entry entry : ndArrayToStringMap.entrySet()) { + assertTrue(entry.getKey().verifyIsDisposed()); + } } } diff --git a/scala-package/pom.xml b/scala-package/pom.xml index eb3f6f0b3354..9f7a498ee9b5 100644 --- a/scala-package/pom.xml +++ b/scala-package/pom.xml @@ -190,8 +190,8 @@ maven-compiler-plugin 3.3 - 1.8 - 1.8 + 1.7 + 1.7 UTF-8 From 5aaa72998e180e56d4b21c90d8791928661754c3 Mon Sep 17 00:00:00 2001 From: Lanking Date: Fri, 26 Oct 2018 11:53:34 -0700 Subject: [PATCH 06/38] [MXNET-984] Java NDArray Documentation Generation (#12835) * cherry pick javaDoc changes * update NDArray changes * refactoring change and merge all docGen in a single place * clean the scalastyle * take on Piyush nit * drop the comments --- .../org/apache/mxnet/javaapi/NDArray.scala | 2 +- .../org/apache/mxnet/APIDocGenerator.scala | 151 +++++++++++++----- .../mxnet/javaapi/JavaNDArrayMacro.scala | 6 +- .../apache/mxnet/utils/CToScalaUtils.scala | 9 +- 4 files changed, 124 insertions(+), 44 deletions(-) diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala index c77b440d8802..96119be84b91 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala @@ -22,7 +22,7 @@ import org.apache.mxnet.javaapi.DType.DType import collection.JavaConverters._ @AddJNDArrayAPIs(false) -object NDArray { +object NDArray extends NDArrayBase { implicit def fromNDArray(nd: org.apache.mxnet.NDArray): NDArray = new NDArray(nd) implicit def toNDArray(jnd: NDArray): org.apache.mxnet.NDArray = jnd.nd diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala index b4efa659443c..44d47a2099d5 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala +++ 
b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala @@ -42,6 +42,8 @@ private[mxnet] object APIDocGenerator{ hashCollector += absClassGen(FILE_PATH, false) hashCollector += nonTypeSafeClassGen(FILE_PATH, true) hashCollector += nonTypeSafeClassGen(FILE_PATH, false) + // Generate Java API documentation + hashCollector += javaClassGen(FILE_PATH + "javaapi/") val finalHash = hashCollector.mkString("\n") } @@ -52,8 +54,45 @@ private[mxnet] object APIDocGenerator{ org.apache.commons.codec.binary.Base64.encodeBase64URLSafeString(digest) } - def absClassGen(FILE_PATH : String, isSymbol : Boolean) : String = { - // scalastyle:off + def fileGen(filePath : String, packageName : String, packageDef : String, + absFuncs : List[String]) : String = { + val apacheLicense = + """/* + |* Licensed to the Apache Software Foundation (ASF) under one or more + |* contributor license agreements. See the NOTICE file distributed with + |* this work for additional information regarding copyright ownership. + |* The ASF licenses this file to You under the Apache License, Version 2.0 + |* (the "License"); you may not use this file except in compliance with + |* the License. You may obtain a copy of the License at + |* + |* http://www.apache.org/licenses/LICENSE-2.0 + |* + |* Unless required by applicable law or agreed to in writing, software + |* distributed under the License is distributed on an "AS IS" BASIS, + |* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + |* See the License for the specific language governing permissions and + |* limitations under the License. + |*/ + |""".stripMargin + val scalaStyle = "// scalastyle:off" + val imports = "import org.apache.mxnet.annotation.Experimental" + val absClassDef = s"abstract class $packageName" + + val finalStr = + s"""$apacheLicense + |$scalaStyle + |$packageDef + |$imports + |$absClassDef { + |${absFuncs.mkString("\n")} + |}""".stripMargin + val pw = new PrintWriter(new File(filePath + s"$packageName.scala")) + pw.write(finalStr) + pw.close() + MD5Generator(finalStr) + } + + def absClassGen(filePath : String, isSymbol : Boolean) : String = { val absClassFunctions = getSymbolNDArrayMethods(isSymbol) // Defines Operators that should not generated val notGenerated = Set("Custom") @@ -66,19 +105,27 @@ private[mxnet] object APIDocGenerator{ s"$scalaDoc\n$defBody" }) val packageName = if (isSymbol) "SymbolAPIBase" else "NDArrayAPIBase" - val apacheLicence = "/*\n* Licensed to the Apache Software Foundation (ASF) under one or more\n* contributor license agreements. See the NOTICE file distributed with\n* this work for additional information regarding copyright ownership.\n* The ASF licenses this file to You under the Apache License, Version 2.0\n* (the \"License\"); you may not use this file except in compliance with\n* the License. 
You may obtain a copy of the License at\n*\n* http://www.apache.org/licenses/LICENSE-2.0\n*\n* Unless required by applicable law or agreed to in writing, software\n* distributed under the License is distributed on an \"AS IS\" BASIS,\n* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n* See the License for the specific language governing permissions and\n* limitations under the License.\n*/\n" - val scalaStyle = "// scalastyle:off" val packageDef = "package org.apache.mxnet" - val imports = "import org.apache.mxnet.annotation.Experimental" - val absClassDef = s"abstract class $packageName" - val finalStr = s"$apacheLicence\n$scalaStyle\n$packageDef\n$imports\n$absClassDef {\n${absFuncs.mkString("\n")}\n}" - val pw = new PrintWriter(new File(FILE_PATH + s"$packageName.scala")) - pw.write(finalStr) - pw.close() - MD5Generator(finalStr) + fileGen(filePath, packageName, packageDef, absFuncs) + } + + def javaClassGen(filePath : String) : String = { + val notGenerated = Set("Custom") + val absClassFunctions = getSymbolNDArrayMethods(false, true) + // TODO: Add Filter to the same location in case of refactor + val absFuncs = absClassFunctions.filterNot(_.name.startsWith("_")) + .filterNot(ele => notGenerated.contains(ele.name)) + .map(absClassFunction => { + val scalaDoc = generateAPIDocFromBackend(absClassFunction) + val defBody = generateJavaAPISignature(absClassFunction) + s"$scalaDoc\n$defBody" + }) + val packageName = "NDArrayBase" + val packageDef = "package org.apache.mxnet.javaapi" + fileGen(filePath, packageName, packageDef, absFuncs) } - def nonTypeSafeClassGen(FILE_PATH : String, isSymbol : Boolean) : String = { + def nonTypeSafeClassGen(filePath : String, isSymbol : Boolean) : String = { // scalastyle:off val absClassFunctions = getSymbolNDArrayMethods(isSymbol) val absFuncs = absClassFunctions.map(absClassFunction => { @@ -93,17 +140,23 @@ private[mxnet] object APIDocGenerator{ } }) val packageName = if (isSymbol) "SymbolBase" else "NDArrayBase" - val apacheLicence = "/*\n* Licensed to the Apache Software Foundation (ASF) under one or more\n* contributor license agreements. See the NOTICE file distributed with\n* this work for additional information regarding copyright ownership.\n* The ASF licenses this file to You under the Apache License, Version 2.0\n* (the \"License\"); you may not use this file except in compliance with\n* the License. You may obtain a copy of the License at\n*\n* http://www.apache.org/licenses/LICENSE-2.0\n*\n* Unless required by applicable law or agreed to in writing, software\n* distributed under the License is distributed on an \"AS IS\" BASIS,\n* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n* See the License for the specific language governing permissions and\n* limitations under the License.\n*/\n" - val scalaStyle = "// scalastyle:off" val packageDef = "package org.apache.mxnet" - val imports = "import org.apache.mxnet.annotation.Experimental" - val absClassDef = s"abstract class $packageName" - val finalStr = s"$apacheLicence\n$scalaStyle\n$packageDef\n$imports\n$absClassDef {\n${absFuncs.mkString("\n")}\n}" - import java.io._ - val pw = new PrintWriter(new File(FILE_PATH + s"$packageName.scala")) - pw.write(finalStr) - pw.close() - MD5Generator(finalStr) + fileGen(filePath, packageName, packageDef, absFuncs) + } + + /** + * Some of the C++ type name is not valid in Scala + * such as var and type. 
This method is to convert + * them into other names to get it passed + * @param in the input String + * @return converted name string + */ + def safetyNameCheck(in : String) : String = { + in match { + case "var" => "vari" + case "type" => "typeOf" + case _ => in + } } // Generate ScalaDoc type @@ -115,11 +168,7 @@ private[mxnet] object APIDocGenerator{ }) desc += " * " val params = func.listOfArgs.map({ absClassArg => - val currArgName = absClassArg.argName match { - case "var" => "vari" - case "type" => "typeOf" - case _ => absClassArg.argName - } + val currArgName = safetyNameCheck(absClassArg.argName) s" * @param $currArgName\t\t${absClassArg.argDesc}" }) val returnType = s" * @return ${func.returnType}" @@ -133,11 +182,7 @@ private[mxnet] object APIDocGenerator{ def generateAPISignature(func : absClassFunction, isSymbol : Boolean) : String = { var argDef = ListBuffer[String]() func.listOfArgs.foreach(absClassArg => { - val currArgName = absClassArg.argName match { - case "var" => "vari" - case "type" => "typeOf" - case _ => absClassArg.argName - } + val currArgName = safetyNameCheck(absClassArg.argName) if (absClassArg.isOptional) { argDef += s"$currArgName : Option[${absClassArg.argType}] = None" } @@ -157,23 +202,57 @@ private[mxnet] object APIDocGenerator{ s"$experimentalTag\ndef ${func.name} (${argDef.mkString(", ")}) : $returnType" } + def generateJavaAPISignature(func : absClassFunction) : String = { + var argDef = ListBuffer[String]() + var classDef = ListBuffer[String]() + func.listOfArgs.foreach(absClassArg => { + val currArgName = safetyNameCheck(absClassArg.argName) + // scalastyle:off + if (absClassArg.isOptional) { + classDef += s"def set${absClassArg.argName}(${absClassArg.argName} : ${absClassArg.argType}) : ${func.name}BuilderBase" + } + else { + argDef += s"$currArgName : ${absClassArg.argType}" + } + // scalastyle:on + }) + classDef += s"def setout(out : NDArray) : ${func.name}BuilderBase" + classDef += s"def invoke() : org.apache.mxnet.javaapi.NDArrayFuncReturn" + val experimentalTag = "@Experimental" + // scalastyle:off + var finalStr = s"$experimentalTag\ndef ${func.name} (${argDef.mkString(", ")}) : ${func.name}BuilderBase\n" + // scalastyle:on + finalStr += s"abstract class ${func.name}BuilderBase {\n ${classDef.mkString("\n ")}\n}" + finalStr + } + // List and add all the atomic symbol functions to current module. - private def getSymbolNDArrayMethods(isSymbol : Boolean): List[absClassFunction] = { + private def getSymbolNDArrayMethods(isSymbol : Boolean, + isJava : Boolean = false): List[absClassFunction] = { val opNames = ListBuffer.empty[String] val returnType = if (isSymbol) "Symbol" else "NDArray" + val returnHeader = if (isJava) "org.apache.mxnet.javaapi." else "org.apache.mxnet." _LIB.mxListAllOpNames(opNames) // TODO: Add '_linalg_', '_sparse_', '_image_' support // TODO: Add Filter to the same location in case of refactor opNames.map(opName => { val opHandle = new RefLong _LIB.nnGetOpHandle(opName, opHandle) - makeAtomicSymbolFunction(opHandle.value, opName, "org.apache.mxnet." + returnType) - }).toList.filterNot(_.name.startsWith("_")) + makeAtomicSymbolFunction(opHandle.value, opName, returnHeader + returnType) + }).filterNot(_.name.startsWith("_")).groupBy(_.name.toLowerCase).map(ele => { + // Pattern matching for not generating depreciated method + if (ele._2.length == 1) ele._2.head + else { + if (ele._2.head.name.head.isLower) ele._2.head + else ele._2.last + } + }).toList } // Create an atomic symbol function by handle and function name. 
- private def makeAtomicSymbolFunction(handle: SymbolHandle, aliasName: String, returnType : String) + private def makeAtomicSymbolFunction(handle: SymbolHandle, + aliasName: String, returnType : String) : absClassFunction = { val name = new RefString val desc = new RefString diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala index c530c730a449..d5be97b501c5 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala @@ -120,12 +120,12 @@ private[mxnet] object JavaNDArrayMacro { // scalastyle:off // Combine and build the function string impl += "org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke(\"" + ndarrayfunction.name + "\", args.toSeq, map.toMap)" - val classDef = s"class ${ndarrayfunction.name}Builder(${argDef.mkString(",")})" + val classDef = s"class ${ndarrayfunction.name}Builder(${argDef.mkString(",")}) extends ${ndarrayfunction.name}BuilderBase" val classBody = s"${OptionArgDef.mkString("\n")}\n${classImpl.mkString("\n")}\ndef invoke() : $returnType = {${impl.mkString("\n")}}" val classFinal = s"$classDef {$classBody}" val functionDef = s"def ${ndarrayfunction.name} (${argDef.mkString(",")})" val functionBody = s"new ${ndarrayfunction.name}Builder(${argDef.map(_.split(":")(0)).mkString(",")})" - val functionFinal = s"$functionDef = $functionBody" + val functionFinal = s"$functionDef : ${ndarrayfunction.name}BuilderBase = $functionBody" // scalastyle:on functionDefs += c.parse(functionFinal).asInstanceOf[DefDef] classDefs += c.parse(classFinal).asInstanceOf[ClassDef] @@ -195,7 +195,7 @@ private[mxnet] object JavaNDArrayMacro { val argList = argNames zip argTypes map { case (argName, argType) => val typeAndOption = CToScalaUtils.argumentCleaner(argName, argType, - "org.apache.mxnet.javaapi.NDArray", "javaapi.Shape") + "org.apache.mxnet.javaapi.NDArray") new NDArrayArg(argName, typeAndOption._1, typeAndOption._2) } new NDArrayFunction(aliasName, argList.toList) diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala index 48d8fdf38bc4..2fd8b2e73c7a 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala @@ -22,9 +22,10 @@ private[mxnet] object CToScalaUtils { // Convert C++ Types to Scala Types def typeConversion(in : String, argType : String = "", argName : String, - returnType : String, shapeType : String = "Shape") : String = { + returnType : String) : String = { + val header = returnType.split("\\.").dropRight(1) in match { - case "Shape(tuple)" | "ShapeorNone" => s"org.apache.mxnet.$shapeType" + case "Shape(tuple)" | "ShapeorNone" => s"${header.mkString(".")}.Shape" case "Symbol" | "NDArray" | "NDArray-or-Symbol" => returnType case "Symbol[]" | "NDArray[]" | "NDArray-or-Symbol[]" | "SymbolorSymbol[]" => s"Array[$returnType]" @@ -53,7 +54,7 @@ private[mxnet] object CToScalaUtils { * @return (Scala_Type, isOptional) */ def argumentCleaner(argName: String, argType : String, - returnType : String, shapeType : String = "Shape") : (String, Boolean) = { + returnType : String) : (String, Boolean) = { val spaceRemoved = argType.replaceAll("\\s+", "") var commaRemoved : Array[String] = new 
Array[String](0) // Deal with the case e.g: stype : {'csr', 'default', 'row_sparse'} @@ -73,7 +74,7 @@ private[mxnet] object CToScalaUtils { s"""expected "default=..." got ${commaRemoved(2)}""") (typeConversion(commaRemoved(0), argType, argName, returnType), true) } else if (commaRemoved.length == 2 || commaRemoved.length == 1) { - val tempType = typeConversion(commaRemoved(0), argType, argName, returnType, shapeType) + val tempType = typeConversion(commaRemoved(0), argType, argName, returnType) val tempOptional = tempType.equals("org.apache.mxnet.Symbol") (tempType, tempOptional) } else { From 743301ccfe53ae5bee7debc4b3486f080a45291f Mon Sep 17 00:00:00 2001 From: Andrew Ayres Date: Fri, 26 Oct 2018 16:52:19 -0700 Subject: [PATCH 07/38] First pass at adding JavaDocs for new java api classes (#12963) * First pass at adding JavaDocs for new java api classes * Fix a scalastyle issue * Updating JavaDoc based on feedback --- .../org/apache/mxnet/javaapi/Context.scala | 12 ++ .../scala/org/apache/mxnet/javaapi/IO.scala | 8 + .../org/apache/mxnet/javaapi/NDArray.scala | 199 ++++++++++++++++++ .../mxnet/infer/javaapi/ObjectDetector.scala | 16 +- .../mxnet/infer/javaapi/Predictor.scala | 17 ++ 5 files changed, 251 insertions(+), 1 deletion(-) diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala index 2f4f3e6409ed..ac3517b151f1 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala @@ -18,6 +18,13 @@ package org.apache.mxnet.javaapi import collection.JavaConverters._ +/** + * Constructing a context which is used to specify the device and device type that will + * be utilized by the engine. + * + * @param deviceTypeName {'cpu', 'gpu'} String representing the device type + * @param deviceId The device id of the device, needed for GPU + */ class Context(val context: org.apache.mxnet.Context) { val deviceTypeid: Int = context.deviceTypeid @@ -26,6 +33,11 @@ class Context(val context: org.apache.mxnet.Context) { = this(new org.apache.mxnet.Context(deviceTypeName, deviceId)) def withScope[T](body: => T): T = context.withScope(body) + + /** + * Return device type of current context. + * @return device_type + */ def deviceType: String = context.deviceType override def toString: String = context.toString diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala index 47b1c367c1c2..bf961b2bd529 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala @@ -30,5 +30,13 @@ object DataDesc{ implicit def toDataDesc(dataDesc: DataDesc): org.apache.mxnet.DataDesc = dataDesc.dataDesc + /** + * Get the dimension that corresponds to the batch size. + * @param layout layout string. For example, "NCHW". + * @return An axis indicating the batch_size dimension. When data-parallelism is used, + * the data will be automatically split and concatenate along the batch_size dimension. + * Axis can be -1, which means the whole array will be copied + * for each data-parallelism device. 
+ */ def getBatchAxis(layout: String): Int = org.apache.mxnet.DataDesc.getBatchAxis(Some(layout)) } diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala index 96119be84b91..d4e67f73408e 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala @@ -29,27 +29,64 @@ object NDArray extends NDArrayBase { def waitall(): Unit = org.apache.mxnet.NDArray.waitall() + /** + * One hot encoding indices into matrix out. + * @param indices An NDArray containing indices of the categorical features. + * @param out The result holder of the encoding. + * @return Same as out. + */ def onehotEncode(indices: NDArray, out: NDArray): NDArray = org.apache.mxnet.NDArray.onehotEncode(indices, out) + /** + * Create an empty uninitialized new NDArray, with specified shape. + * + * @param shape shape of the NDArray. + * @param ctx The context of the NDArray. + * + * @return The created NDArray. + */ def empty(shape: Shape, ctx: Context, dtype: DType.DType): NDArray = org.apache.mxnet.NDArray.empty(shape, ctx, dtype) def empty(ctx: Context, shape: Array[Int]): NDArray = org.apache.mxnet.NDArray.empty(new Shape(shape), ctx) def empty(ctx : Context, shape : java.util.List[java.lang.Integer]) : NDArray = org.apache.mxnet.NDArray.empty(new Shape(shape), ctx) + + /** + * Create a new NDArray filled with 0, with specified shape. + * + * @param shape shape of the NDArray. + * @param ctx The context of the NDArray. + * + * @return The created NDArray. + */ def zeros(shape: Shape, ctx: Context, dtype: DType.DType): NDArray = org.apache.mxnet.NDArray.zeros(shape, ctx, dtype) def zeros(ctx: Context, shape: Array[Int]): NDArray = org.apache.mxnet.NDArray.zeros(new Shape(shape), ctx) def zeros(ctx : Context, shape : java.util.List[java.lang.Integer]) : NDArray = org.apache.mxnet.NDArray.zeros(new Shape(shape), ctx) + + /** + * Create a new NDArray filled with 1, with specified shape. + * @param shape shape of the NDArray. + * @param ctx The context of the NDArray. + * @return The created NDArray. + */ def ones(shape: Shape, ctx: Context, dtype: DType.DType): NDArray = org.apache.mxnet.NDArray.ones(shape, ctx, dtype) def ones(ctx: Context, shape: Array[Int]): NDArray = org.apache.mxnet.NDArray.ones(new Shape(shape), ctx) def ones(ctx : Context, shape : java.util.List[java.lang.Integer]) : NDArray = org.apache.mxnet.NDArray.ones(new Shape(shape), ctx) + + /** + * Create a new NDArray filled with given value, with specified shape. + * @param shape shape of the NDArray. + * @param value value to be filled with + * @param ctx The context of the NDArray + */ def full(shape: Shape, value: Float, ctx: Context): NDArray = org.apache.mxnet.NDArray.full(shape, value, ctx) @@ -65,37 +102,102 @@ object NDArray extends NDArrayBase { def minimum(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.minimum(lhs, rhs) def minimum(lhs: Float, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.minimum(lhs, rhs) + + /** + * Returns the result of element-wise **equal to** (==) comparison operation with broadcasting. + * For each element in input arrays, return 1(true) if corresponding elements are same, + * otherwise return 0(false). 
+ */ def equal(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.equal(lhs, rhs) def equal(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.equal(lhs, rhs) + /** + * Returns the result of element-wise **not equal to** (!=) comparison operation + * with broadcasting. + * For each element in input arrays, return 1(true) if corresponding elements are different, + * otherwise return 0(false). + */ def notEqual(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.notEqual(lhs, rhs) def notEqual(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.notEqual(lhs, rhs) + /** + * Returns the result of element-wise **greater than** (>) comparison operation + * with broadcasting. + * For each element in input arrays, return 1(true) if lhs elements are greater than rhs, + * otherwise return 0(false). + */ def greater(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.greater(lhs, rhs) def greater(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.greater(lhs, rhs) + /** + * Returns the result of element-wise **greater than or equal to** (>=) comparison + * operation with broadcasting. + * For each element in input arrays, return 1(true) if lhs elements are greater than equal to rhs + * otherwise return 0(false). + */ def greaterEqual(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.greaterEqual(lhs, rhs) def greaterEqual(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.greaterEqual(lhs, rhs) + /** + * Returns the result of element-wise **lesser than** (<) comparison operation + * with broadcasting. + * For each element in input arrays, return 1(true) if lhs elements are less than rhs, + * otherwise return 0(false). + */ def lesser(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.lesser(lhs, rhs) def lesser(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.lesser(lhs, rhs) + /** + * Returns the result of element-wise **lesser than or equal to** (<=) comparison + * operation with broadcasting. + * For each element in input arrays, return 1(true) if lhs elements are + * lesser than equal to rhs, otherwise return 0(false). + */ def lesserEqual(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.lesserEqual(lhs, rhs) def lesserEqual(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.lesserEqual(lhs, rhs) + /** + * Create a new NDArray that copies content from source_array. + * @param sourceArr Source data to create NDArray from. + * @param shape shape of the NDArray + * @param ctx The context of the NDArray, default to current default context. + * @return The created NDArray. + */ def array(sourceArr: java.util.List[java.lang.Float], shape: Shape, ctx: Context = null): NDArray = org.apache.mxnet.NDArray.array( sourceArr.asScala.map(ele => Float.unbox(ele)).toArray, shape, ctx) + /** + * Returns evenly spaced values within a given interval. + * Values are generated within the half-open interval [`start`, `stop`). In other + * words, the interval includes `start` but excludes `stop`. + * @param start Start of interval. + * @param stop End of interval. + * @param step Spacing between values. + * @param repeat Number of times to repeat each element. + * @param ctx Device context. + * @param dType The data type of the `NDArray`. + * @return NDArray of evenly spaced values in the specified range. 
+ */ def arange(start: Float, stop: Float, step: Float, repeat: Int, ctx: Context, dType: DType.DType): NDArray = org.apache.mxnet.NDArray.arange(start, Some(stop), step, repeat, ctx, dType) } +/** + * NDArray object in mxnet. + * NDArray is the basic ndarray/Tensor-like data structure in mxnet.
+ * + * NOTE: NDArray is stored in native memory. Use NDArray in a try-with-resources construct, + * or within a [[ResourceScope]], to have it automatically disposed. You can + * explicitly control the lifetime of an NDArray by calling dispose manually. Failure to do this + * will result in leaking native memory. + * + */ class NDArray(val nd : org.apache.mxnet.NDArray ) { def this(arr : Array[Float], shape : Shape, ctx : Context) = { @@ -108,28 +210,88 @@ class NDArray(val nd : org.apache.mxnet.NDArray ) { def serialize() : Array[Byte] = nd.serialize() + /** + * Release the native memory.
+ * The NDArrays it depends on will NOT be disposed.
+ * The object shall never be used after it is disposed. + */ def dispose() : Unit = nd.dispose() + + /** + * Dispose all NDArrays that were used to construct this array.
+ * e.g. (a * b + c).disposeDeps() will dispose a, b, c (including their deps) and a * b + * @return this array + */ def disposeDeps() : NDArray = nd.disposeDepsExcept() // def disposeDepsExcept(arr : Array[NDArray]) : NDArray = nd.disposeDepsExcept() + /** + * Return a sliced NDArray that shares memory with current one. + * NDArray only support continuous slicing on axis 0 + * + * @param start Starting index of slice. + * @param stop Finishing index of slice. + * + * @return a sliced NDArray that shares memory with current one. + */ def slice(start : Int, stop : Int) : NDArray = nd.slice(start, stop) + /** + * Return a sliced NDArray at the ith position of axis0 + * @param i + * @return a sliced NDArray that shares memory with current one. + */ def slice (i : Int) : NDArray = nd.slice(i) + /** + * Return a sub NDArray that shares memory with current one. + * the first axis will be rolled up, which causes its shape different from slice(i, i+1) + * @param idx index of sub array. + */ def at(idx : Int) : NDArray = nd.at(idx) def T : NDArray = nd.T + /** + * Get data type of current NDArray. + * @return class representing type of current ndarray + */ def dtype : DType = nd.dtype + /** + * Return a copied numpy array of current array with specified type. + * @param dtype Desired type of result array. + * @return A copy of array content. + */ def asType(dtype : DType) : NDArray = nd.asType(dtype) + /** + * Return a reshaped NDArray that shares memory with current one. + * @param dims New shape. + * + * @return a reshaped NDArray that shares memory with current one. + */ def reshape(dims : Array[Int]) : NDArray = nd.reshape(dims) + /** + * Block until all pending writes operations on current NDArray are finished. + * This function will return when all the pending writes to the current + * NDArray finishes. There can still be pending read going on when the + * function returns. + */ def waitToRead(): Unit = nd.waitToRead() + /** + * Get context of current NDArray. + * @return The context of current NDArray. + */ def context : Context = nd.context + /** + * Set the values of the NDArray + * @param value Value to set + * @return Current NDArray + */ def set(value : Float) : NDArray = nd.set(value) def set(other : NDArray) : NDArray = nd.set(other) def set(other : Array[Float]) : NDArray = nd.set(other) @@ -167,20 +329,57 @@ class NDArray(val nd : org.apache.mxnet.NDArray ) { def lesserEqual(other : NDArray) : NDArray = this.nd <= other def lesserEqual(other : Float) : NDArray = this.nd <= other + /** + * Return a copied flat java array of current array (row-major). + * @return A copy of array content. + */ def toArray : Array[Float] = nd.toArray + /** + * Return a CPU scalar(float) of current ndarray. + * This ndarray must have shape (1,) + * + * @return The scalar representation of the ndarray. + */ def toScalar : Float = nd.toScalar + /** + * Copy the content of current array to other. + * + * @param other Target NDArray or context we want to copy data to. + * @return The copy target NDArray + */ def copyTo(other : NDArray) : NDArray = nd.copyTo(other) + /** + * Copy the content of current array to a new NDArray in the context. + * + * @param ctx Target context we want to copy data to. + * @return The copy target NDArray + */ def copyTo(ctx : Context) : NDArray = nd.copyTo(ctx) + /** + * Clone the current array + * @return the copied NDArray in the same context + */ def copy() : NDArray = copyTo(this.context) + /** + * Get shape of current NDArray. 
+ * @return an array representing shape of current ndarray + */ def shape : Shape = nd.shape + def size : Int = shape.product + /** + * Return an `NDArray` that lives in the target context. If the array + * is already in that context, `self` is returned. Otherwise, a copy is made. + * @param context The target context we want the return value to live in. + * @return A copy or `self` as an `NDArray` that lives in the target context. + */ def asInContext(context: Context): NDArray = nd.asInContext(context) override def equals(obj: Any): Boolean = nd.equals(obj) diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala index 6cd3df6b896b..f48375ffe4a7 100644 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala @@ -26,7 +26,21 @@ import org.apache.mxnet.javaapi.{Context, DataDesc, NDArray} import scala.collection.JavaConverters import scala.collection.JavaConverters._ - +/** + * The ObjectDetector class helps to run ObjectDetection tasks where the goal + * is to find bounding boxes and corresponding labels for objects in a image. + * + * @param modelPathPrefix Path prefix from where to load the model artifacts. + * These include the symbol, parameters, and synset.txt. + * Example: file://model-dir/ssd_resnet50_512 (containing + * ssd_resnet50_512-symbol.json, ssd_resnet50_512-0000.params, + * and synset.txt) + * @param inputDescriptors Descriptors defining the input node names, shape, + * layout and type parameters + * @param contexts Device contexts on which you want to run inference. + * Defaults to CPU. + * @param epoch Model epoch to load; defaults to 0 + */ class ObjectDetector(val objDetector: org.apache.mxnet.infer.ObjectDetector){ def this(modelPathPrefix: String, inputDescriptors: java.util.List[DataDesc], contexts: diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala index 26ccd06cf466..3e0fcb7b507a 100644 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala @@ -22,6 +22,23 @@ import org.apache.mxnet.javaapi.{Context, DataDesc, NDArray} import scala.collection.JavaConverters import scala.collection.JavaConverters._ +/** + * Implementation of prediction routines. + * + * @param modelPathPrefix Path prefix from where to load the model artifacts. + * These include the symbol, parameters, and synset.txt + * Example: file://model-dir/resnet-152 (containing + * resnet-152-symbol.json, resnet-152-0000.params, and synset.txt). + * @param inputDescriptors Descriptors defining the input node names, shape, + * layout and type parameters + *
Note: If the input Descriptors is missing batchSize + * ('N' in layout), a batchSize of 1 is assumed for the model. + * @param contexts Device contexts on which you want to run inference; defaults to CPU + * @param epoch Model epoch to load; defaults to 0 + + */ + +// JavaDoc description of class to be updated in https://issues.apache.org/jira/browse/MXNET-1178 class Predictor(val predictor: org.apache.mxnet.infer.Predictor){ def this(modelPathPrefix: String, inputDescriptors: java.util.List[DataDesc], contexts: java.util.List[Context], epoch: Int) From 7e776c930f0fe7a394ec30db1c575bf0a30c8569 Mon Sep 17 00:00:00 2001 From: Lanking Date: Mon, 29 Oct 2018 11:49:28 -0700 Subject: [PATCH 08/38] [MXNET-1160] add Java build/run example (#12969) * add example * clean up nit * find the pain point * add java tut into whitelist * Trigger CI * add java demo and split scala demo * address the comments * change the examples * fix the wrong configuration --- .../mxnet_java_install_and_run_examples.md | 123 ++++++++++++++++++ scala-package/mxnet-demo/java-demo/Makefile | 54 ++++++++ scala-package/mxnet-demo/java-demo/README.md | 76 +++++++++++ .../mxnet-demo/java-demo/bin/java_sample.sh | 20 +++ .../mxnet-demo/java-demo/bin/run_od.sh | 21 +++ scala-package/mxnet-demo/java-demo/pom.xml | 25 ++++ .../src/main/java/sample/HelloWorld.java | 28 ++++ .../src/main/java/sample/ObjectDetection.java | 101 ++++++++++++++ .../mxnet-demo/{ => scala-demo}/Makefile | 2 +- .../mxnet-demo/{ => scala-demo}/README.md | 12 +- .../mxnet-demo/{ => scala-demo}/bin/demo.sh | 0 .../mxnet-demo/{ => scala-demo}/bin/run_im.sh | 0 .../mxnet-demo/{ => scala-demo}/pom.xml | 0 .../src/main/scala/sample/HelloWorld.scala | 0 .../sample/ImageClassificationExample.scala | 0 tests/tutorials/test_sanity_tutorials.py | 1 + 16 files changed, 457 insertions(+), 6 deletions(-) create mode 100644 docs/tutorials/scala/mxnet_java_install_and_run_examples.md create mode 100644 scala-package/mxnet-demo/java-demo/Makefile create mode 100644 scala-package/mxnet-demo/java-demo/README.md create mode 100644 scala-package/mxnet-demo/java-demo/bin/java_sample.sh create mode 100644 scala-package/mxnet-demo/java-demo/bin/run_od.sh create mode 100644 scala-package/mxnet-demo/java-demo/pom.xml create mode 100644 scala-package/mxnet-demo/java-demo/src/main/java/sample/HelloWorld.java create mode 100644 scala-package/mxnet-demo/java-demo/src/main/java/sample/ObjectDetection.java rename scala-package/mxnet-demo/{ => scala-demo}/Makefile (98%) rename scala-package/mxnet-demo/{ => scala-demo}/README.md (88%) rename scala-package/mxnet-demo/{ => scala-demo}/bin/demo.sh (100%) rename scala-package/mxnet-demo/{ => scala-demo}/bin/run_im.sh (100%) rename scala-package/mxnet-demo/{ => scala-demo}/pom.xml (100%) rename scala-package/mxnet-demo/{ => scala-demo}/src/main/scala/sample/HelloWorld.scala (100%) rename scala-package/mxnet-demo/{ => scala-demo}/src/main/scala/sample/ImageClassificationExample.scala (100%) diff --git a/docs/tutorials/scala/mxnet_java_install_and_run_examples.md b/docs/tutorials/scala/mxnet_java_install_and_run_examples.md new file mode 100644 index 000000000000..83e1ec5b2daa --- /dev/null +++ b/docs/tutorials/scala/mxnet_java_install_and_run_examples.md @@ -0,0 +1,123 @@ +# Install and run Java Examples + +## Prerequisites: +Please follow the Step 1 in the [Scala configuration](http://mxnet.incubator.apache.org/install/scala_setup.html#setup-instructions) +These should help you install the correct Java version and all dependencies. 
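+As an optional sanity check (not an MXNet-specific step), you can confirm that the JDK and Maven installed in Step 1 are visible on your `PATH` before building anything:
+```
+# Print the installed Java and Maven versions; both commands should succeed
+# before you try to build or run the demo projects below.
+java -version
+mvn -version
+```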
+ +## Run the Java example project +We have provided a general MXNet Java template under `scala-package/mxnet-demo/java-demo` which contains the necessary project files for you to get started. It contains a simple Hello world! equivalent program `JavaSample.java` and a full fledged `ObjectDetection.java `that shows how to run Object Detection on images using MXNet and pre-trained SSD model. + +Alternatively you could build project from scratch following the below instructions. + +## Import and run the Java package +For users using a desktop/laptop, we recommend using IntelliJ IDE as it is tested and supported to provide the necessary documentation for the Java API. + +Alternatively, users can follow the second instruction to set up an empty Maven project for Java. + +### IntelliJ instruction +If you are using a computer with Ubuntu16.04 or Mac, you can install IntelliJ to run the Java package. Please follow the instruction below: + +1. Create a new Java project in IntelliJ. Fire up IntelliJ and click `Create New Project`. + +2. Click `Next`, and in the `Create project from template` window, do not select anything and click `Next` again. + +3. In the next window choose your `Project name` and the `Project location` and click on `Finish`. + +4. Let's add the Java Inference API jars that we build from source. At the top of the window, Go to the `File -> Project Structure`. In the popup window that opens up, click on `Libraries -> +` and select the path to the jar files downloaded. Click `Apply` and then click `OK`. + +6. Create a new Java class under the folder `your-project-name/src`. Let's call this class `JavaSample.java`. Type in the following code snippet and run it. In this code snippet, we create an NDArray object in Java and print its shape. +```java +import org.apache.mxnet.javaapi.Context; +import org.apache.mxnet.javaapi.NDArray; + +public class JavaSample { +public static void main(String[] args) { + System.out.println("Hello"); + NDArray nd = NDArray.ones(Context.cpu(), new int[] {10, 20}); + + System.out.println("Shape of NDarray is : " + nd.shape()); +} +} +``` + +7. If all went well, you should see an output like this : +``` +Hello +SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder". +SLF4J: Defaulting to no-operation (NOP) logger implementation +SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details. +Shape of NDarray is : (10,20) +Process finished with exit code 0 +``` +This means you have successfully set it up on your machine + +### Run the project manually in Maven +In this example, Maven is being used to create the project. This tutorial referred the [Maven in 5 min](https://maven.apache.org/guides/getting-started/maven-in-five-minutes.html) tutorial. + +1. Create a new folder and run the following commands +``` +mvn archetype:generate -DgroupId=com.mycompany.app -DartifactId=my-app -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false +``` +You can specify the `groupId` and `artifactId` to your favourite names. You can also create a maven project using empty archetype. + +2. then go to `pom.xml` file in your project folder and add the following content. + +- Change the `osx-x86_64` to `linux-x86_64` if your platform is linux. +- Change `cpu` into `gpu` if you are using gpu +- Change the version of your package from `1.3.1-SNAPSHOT` to the matched jar version. 
+```xml + + org.apache.mxnet + mxnet-full_2.11-osx-x86_64-cpu + 1.3.1-SNAPSHOT + system + path-to-your-jar/jarName.jar + + + args4j + args4j + 2.0.29 + + + org.slf4j + slf4j-api + 1.7.7 + + + org.slf4j + slf4j-log4j12 + 1.7.7 + +``` +3. Finally you can replace the code in `App.java` +```java +import org.apache.mxnet.javaapi.Context; +import org.apache.mxnet.javaapi.NDArray; + +public class App { +public static void main(String[] args) { + System.out.println("Hello"); + NDArray nd = NDArray.ones(Context.cpu(), new int[] {10, 20}); + + System.out.println("Shape of NDarray is : " + nd.shape()); + +} +} +``` +make the package by +``` +mvn package +``` + +and run it by +``` +java -cp target/my-app-1.0-SNAPSHOT.jar:/.jar com.mycompany.app.App +``` +The result looks like this: +``` +Hello +SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder". +SLF4J: Defaulting to no-operation (NOP) logger implementation +SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details. +Shape of NDarray is : (10,20) +``` \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/Makefile b/scala-package/mxnet-demo/java-demo/Makefile new file mode 100644 index 000000000000..340a50f75965 --- /dev/null +++ b/scala-package/mxnet-demo/java-demo/Makefile @@ -0,0 +1,54 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +SCALA_VERSION_PROFILE := 2.11 +SCALA_VERSION := 2.11.8 +MXNET_VERSION := 1.3.1-SNAPSHOT + +ifeq ($(OS),Windows_NT) + UNAME_S := Windows +else + UNAME_S := $(shell uname -s) +endif + +ifeq ($(UNAME_S), Windows) + # TODO: currently scala package does not support windows + SCALA_PKG_PROFILE := windows +else + ifeq ($(UNAME_S), Darwin) + SCALA_PKG_PROFILE := osx-x86_64-cpu + else + SCALA_PKG_PROFILE := linux-x86_64 + ifeq ($(USE_CUDA), 1) + SCALA_PKG_PROFILE := $(SCALA_PKG_PROFILE)-gpu + else + SCALA_PKG_PROFILE := $(SCALA_PKG_PROFILE)-cpu + endif + endif +endif + +javademo: + (mvn package -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION)) + +javaclean: + (mvn clean -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION)) \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/README.md b/scala-package/mxnet-demo/java-demo/README.md new file mode 100644 index 000000000000..ffe614a29287 --- /dev/null +++ b/scala-package/mxnet-demo/java-demo/README.md @@ -0,0 +1,76 @@ +# MXNet Java Sample Project +This is an project created to use Maven-published Scala/Java package with two Java examples. +## Setup +Please copy the downloaded MXNet Java package jar file under the `java-demo` folder. 
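+For example (illustrative only; the exact jar name depends on the platform profile and version you built, see the `Makefile` defaults, so adjust the path and file name accordingly):
+```Bash
+# Place the MXNet jar directly in the java-demo folder so the system-scoped
+# dependency in pom.xml (mxnet-full_<scala>-<profile>-<version>.jar) can find it.
+# A macOS CPU build with the default settings would look like:
+cp /path/to/mxnet-full_2.11-osx-x86_64-cpu-1.3.1-SNAPSHOT.jar ./
+```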
+ +User are required to use `mvn package` to build the package, + which are shown below: +```Bash +export SCALA_VERSION_PROFILE=2.11 SCALA_VERSION=2.11.8 MXNET_VERSION=1.3.1-SNAPSHOT +export SCALA_PKG_PROFILE= +mvn package -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION) +``` +These environment variable (`SCALA_PKG_PROFILE`, `SCALA_VERSION_PROFILE`, `MXNET_VERSION`, `SCALA_VERSION`) +should be set before executing the line above. + +You can also use the `Makefile` as an alternative to do the same thing. Simply do the following: +```Bash +make javademo +``` +This will load the default parameter for all the environment variable. + If you want to run with GPU on Linux, just simply add `USE_CUDA=1` when you run the make file + +## Run +### Hello World +The Scala file is being executed using Java. You can execute the helloWorld example as follows: +```Bash +java -cp $CLASSPATH sample.HelloWorld +``` +However, you have to define the Classpath before you run the demo code. More information can be found in the `demo.sh` And you can run the bash script as follows: +```Bash +bash bin/java_sample.sh +``` +It will load the library automatically and run the example +### Object Detection using Inference API +We also provide an example to do object detection, which downloads a ImageNet trained resnet50 model and runs inference on an image to return the classification result as +```Bash +Class: car +Probabilties: 0.99847263 +Coord:312.21335, 72.02908, 456.01443, 150.66176 +Class: bicycle +Probabilties: 0.9047381 +Coord:155.9581, 149.96365, 383.83694, 418.94516 +Class: dog +Probabilties: 0.82268167 +Coord:83.82356, 179.14001, 206.63783, 476.78754 +``` + +you can run using the command shown below: +```Bash +java -cp $CLASSPATH sample.ObjectDetection +``` +or script as follows: +```Bash +bash bin/run_od.sh +``` + +If you want to test run on GPU, you can set a environment variable as follows: +```Bash +export SCALA_TEST_ON_GPU=1 +``` +## Clean up +Clean up for Maven package is simple, you can run the pre-configed `Makefile` as: +```Bash +make javaclean +``` + +## Q & A +If you are facing opencv issue on Ubuntu, please try as follows to install opencv 3.4 (required by 1.2.0 package and above) +```Bash +sudo add-apt-repository ppa:timsc/opencv-3.4 +sudo apt-get update +sudo apt install libopencv-imgcodecs3.4 +``` \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/bin/java_sample.sh b/scala-package/mxnet-demo/java-demo/bin/java_sample.sh new file mode 100644 index 000000000000..50e7fb9eb97d --- /dev/null +++ b/scala-package/mxnet-demo/java-demo/bin/java_sample.sh @@ -0,0 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+#!/bin/bash +CURR_DIR=$(cd $(dirname $0)/../; pwd) +CLASSPATH=$CLASSPATH:$CURR_DIR/target/*:$CLASSPATH:$CURR_DIR/* +java -Xmx8G -cp $CLASSPATH sample.HelloWorld \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/bin/run_od.sh b/scala-package/mxnet-demo/java-demo/bin/run_od.sh new file mode 100644 index 000000000000..5cbc53fbcefe --- /dev/null +++ b/scala-package/mxnet-demo/java-demo/bin/run_od.sh @@ -0,0 +1,21 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +#!/bin/bash +CURR_DIR=$(cd $(dirname $0)/../; pwd) + +CLASSPATH=$CLASSPATH:$CURR_DIR/target/*:$CLASSPATH:$CURR_DIR/* +java -Xmx8G -cp $CLASSPATH sample.ObjectDetection \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/pom.xml b/scala-package/mxnet-demo/java-demo/pom.xml new file mode 100644 index 000000000000..5014d2e09f55 --- /dev/null +++ b/scala-package/mxnet-demo/java-demo/pom.xml @@ -0,0 +1,25 @@ + + + 4.0.0 + Demo + mxnet-java-demo + 1.0-SNAPSHOT + MXNet Java Demo + + + + org.apache.mxnet + mxnet-full_${mxnet.scalaprofile}-${mxnet.profile} + ${mxnet.version} + system + ${project.basedir}/mxnet-full_${mxnet.scalaprofile}-${mxnet.profile}-${mxnet.version}.jar + + + commons-io + commons-io + 2.4 + + + \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/src/main/java/sample/HelloWorld.java b/scala-package/mxnet-demo/java-demo/src/main/java/sample/HelloWorld.java new file mode 100644 index 000000000000..60619dc8a806 --- /dev/null +++ b/scala-package/mxnet-demo/java-demo/src/main/java/sample/HelloWorld.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package sample; + +import org.apache.mxnet.javaapi.*; +import java.util.Arrays; + +public class HelloWorld { + public static void main(String[] args) { + System.out.println("Hello World!"); + NDArray nd = new NDArray(new float[]{2.0f, 3.0f}, new Shape(new int[]{1, 2}), Context.cpu()); + System.out.println(nd.shape()); + } +} diff --git a/scala-package/mxnet-demo/java-demo/src/main/java/sample/ObjectDetection.java b/scala-package/mxnet-demo/java-demo/src/main/java/sample/ObjectDetection.java new file mode 100644 index 000000000000..bf9a93ae8217 --- /dev/null +++ b/scala-package/mxnet-demo/java-demo/src/main/java/sample/ObjectDetection.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package sample; +import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput; +import org.apache.mxnet.javaapi.*; +import org.apache.mxnet.infer.javaapi.ObjectDetector; +import org.apache.commons.io.FileUtils; +import java.io.File; +import java.net.URL; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class ObjectDetection { + private static String modelPath; + private static String imagePath; + + private static void downloadUrl(String url, String filePath) { + File tmpFile = new File(filePath); + if (!tmpFile.exists()) { + try { + FileUtils.copyURLToFile(new URL(url), tmpFile); + } catch (Exception exception) { + System.err.println(exception); + } + } + } + + public static void downloadModelImage() { + String tempDirPath = System.getProperty("java.io.tmpdir"); + System.out.println("tempDirPath: %s".format(tempDirPath)); + imagePath = tempDirPath + "/inputImages/resnetssd/dog-ssd.jpg"; + String imgURL = "https://s3.amazonaws.com/model-server/inputs/dog-ssd.jpg"; + downloadUrl(imgURL, imagePath); + modelPath = tempDirPath + "resnetssd/resnet50_ssd_model"; + System.out.println("Download model files, this can take a while..."); + String modelURL = "https://s3.amazonaws.com/model-server/models/resnet50_ssd/"; + downloadUrl(modelURL + "resnet50_ssd_model-symbol.json", + tempDirPath + "/resnetssd/resnet50_ssd_model-symbol.json"); + downloadUrl(modelURL + "resnet50_ssd_model-0000.params", + tempDirPath + "/resnetssd/resnet50_ssd_model-0000.params"); + downloadUrl(modelURL + "synset.txt", + tempDirPath + "/resnetssd/synset.txt"); + } + + static List> + runObjectDetectionSingle(String modelPathPrefix, String inputImagePath, List context) { + Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); + List inputDescriptors = new ArrayList(); + inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); + ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); + return objDet.imageObjectDetect(ObjectDetector.loadImageFromFile(inputImagePath), 3); + } + 
+ public static void main(String[] args) { + List context = new ArrayList(); + if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && + Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { + context.add(Context.gpu()); + } else { + context.add(Context.cpu()); + } + downloadModelImage(); + Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); + Shape outputShape = new Shape(new int[] {1, 6132, 6}); + int width = inputShape.get(2); + int height = inputShape.get(3); + List> output + = runObjectDetectionSingle(modelPath, imagePath, context); + String outputStr = "\n"; + for (List ele : output) { + for (ObjectDetectorOutput i : ele) { + outputStr += "Class: " + i.getClassName() + "\n"; + outputStr += "Probabilties: " + i.getProbability() + "\n"; + + List coord = Arrays.asList(i.getXMin() * width, + i.getXMax() * height, i.getYMin() * width, i.getYMax() * height); + StringBuilder sb = new StringBuilder(); + for (float c: coord) { + sb.append(", ").append(c); + } + outputStr += "Coord:" + sb.substring(2)+ "\n"; + } + } + System.out.println(outputStr); + } +} \ No newline at end of file diff --git a/scala-package/mxnet-demo/Makefile b/scala-package/mxnet-demo/scala-demo/Makefile similarity index 98% rename from scala-package/mxnet-demo/Makefile rename to scala-package/mxnet-demo/scala-demo/Makefile index 227697ba2e8a..458077d13904 100644 --- a/scala-package/mxnet-demo/Makefile +++ b/scala-package/mxnet-demo/scala-demo/Makefile @@ -17,7 +17,7 @@ SCALA_VERSION_PROFILE := 2.11 SCALA_VERSION := 2.11.8 -MXNET_VERSION := 1.2.0 +MXNET_VERSION := 1.3.0 ifeq ($(OS),Windows_NT) UNAME_S := Windows diff --git a/scala-package/mxnet-demo/README.md b/scala-package/mxnet-demo/scala-demo/README.md similarity index 88% rename from scala-package/mxnet-demo/README.md rename to scala-package/mxnet-demo/scala-demo/README.md index e30a61a2fc13..300fc7b2e108 100644 --- a/scala-package/mxnet-demo/README.md +++ b/scala-package/mxnet-demo/scala-demo/README.md @@ -4,7 +4,7 @@ This is an project created to use Maven-published Scala package with two Scala e User are required to use `mvn package` to build the package, which are shown below: ```Bash -export SCALA_VERSION_PROFILE=2.11 SCALA_VERSION=2.11.8 MXNET_VERSION=1.2.0 +export SCALA_VERSION_PROFILE=2.11 SCALA_VERSION=2.11.8 MXNET_VERSION=1.3.0 export SCALA_PKG_PROFILE= mvn package -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ @@ -12,7 +12,9 @@ mvn package -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ -Dscala.version=$(SCALA_VERSION) ``` These environment variable (`SCALA_PKG_PROFILE`, `SCALA_VERSION_PROFILE`, `MXNET_VERSION`, `SCALA_VERSION`) -should be set before executing the line above. +should be set before executing the line above. + +To obtain the most recent MXNet version, please click [here](https://mvnrepository.com/search?q=org.apache.mxnet) You can also use the `Makefile` as an alternative to do the same thing. Simply do the following: ```Bash @@ -25,7 +27,7 @@ This will load the default parameter for all the environment variable. ### Hello World The Scala file is being executed using Java. You can execute the helloWorld example as follows: ```Bash -java -Xmx8G -cp $CLASSPATH sample.HelloWorld +java -cp $CLASSPATH sample.HelloWorld ``` However, you have to define the Classpath before you run the demo code. 
More information can be found in the `demo.sh` And you can run the bash script as follows: ```Bash @@ -41,7 +43,7 @@ You can review the complete example [here](https://github.com/apache/incubator-m you can run using the command shown below: ```Bash -java -Xmx8G -cp $CLASSPATH sample.ImageClassificationExample +java -cp $CLASSPATH sample.ImageClassificationExample ``` or script as follows: ```Bash @@ -59,7 +61,7 @@ make scalaclean ``` ## Q & A -If you are facing opencv issue on Ubuntu, please try as follows to install opencv 3.4 (required by 1.2.0 package) +If you are facing opencv issue on Ubuntu, please try as follows to install opencv 3.4 (required by 1.2.0 package and above) ```Bash sudo add-apt-repository ppa:timsc/opencv-3.4 sudo apt-get update diff --git a/scala-package/mxnet-demo/bin/demo.sh b/scala-package/mxnet-demo/scala-demo/bin/demo.sh similarity index 100% rename from scala-package/mxnet-demo/bin/demo.sh rename to scala-package/mxnet-demo/scala-demo/bin/demo.sh diff --git a/scala-package/mxnet-demo/bin/run_im.sh b/scala-package/mxnet-demo/scala-demo/bin/run_im.sh similarity index 100% rename from scala-package/mxnet-demo/bin/run_im.sh rename to scala-package/mxnet-demo/scala-demo/bin/run_im.sh diff --git a/scala-package/mxnet-demo/pom.xml b/scala-package/mxnet-demo/scala-demo/pom.xml similarity index 100% rename from scala-package/mxnet-demo/pom.xml rename to scala-package/mxnet-demo/scala-demo/pom.xml diff --git a/scala-package/mxnet-demo/src/main/scala/sample/HelloWorld.scala b/scala-package/mxnet-demo/scala-demo/src/main/scala/sample/HelloWorld.scala similarity index 100% rename from scala-package/mxnet-demo/src/main/scala/sample/HelloWorld.scala rename to scala-package/mxnet-demo/scala-demo/src/main/scala/sample/HelloWorld.scala diff --git a/scala-package/mxnet-demo/src/main/scala/sample/ImageClassificationExample.scala b/scala-package/mxnet-demo/scala-demo/src/main/scala/sample/ImageClassificationExample.scala similarity index 100% rename from scala-package/mxnet-demo/src/main/scala/sample/ImageClassificationExample.scala rename to scala-package/mxnet-demo/scala-demo/src/main/scala/sample/ImageClassificationExample.scala diff --git a/tests/tutorials/test_sanity_tutorials.py b/tests/tutorials/test_sanity_tutorials.py index cd3f6bfcbace..078e96b3b29e 100644 --- a/tests/tutorials/test_sanity_tutorials.py +++ b/tests/tutorials/test_sanity_tutorials.py @@ -49,6 +49,7 @@ 'scala/mnist.md', 'scala/index.md', 'scala/mxnet_scala_on_intellij.md', + 'scala/mxnet_java_install_and_run_examples.md', 'sparse/index.md', 'speech_recognition/index.md', 'unsupervised_learning/index.md', From 62d2800af08664abe322e32b7fe2f39c75b6c0c7 Mon Sep 17 00:00:00 2001 From: Zach Kimberg Date: Fri, 2 Nov 2018 16:24:07 -0700 Subject: [PATCH 09/38] Maven Surefire bug workaround (#13097) --- scala-package/pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/scala-package/pom.xml b/scala-package/pom.xml index 9f7a498ee9b5..daa2dff0bd52 100644 --- a/scala-package/pom.xml +++ b/scala-package/pom.xml @@ -215,6 +215,7 @@ 2.19 true + false From 2df7a611c112fefb3cd8f5eb63481db6fee616b2 Mon Sep 17 00:00:00 2001 From: Lanking Date: Wed, 7 Nov 2018 13:52:25 -0800 Subject: [PATCH 10/38] use ResourceScope in Model/Trainer/FeedForward.scala (#12882) (#13164) * use ResourceScope in Model/Trainer/FeedForward.scala * add moveToOuterScope public method to move resources to a outerScope if it exists * fix memory leak in FeedForward.scala by making it a native resource and disposing argparams, auxParams in dispose() 
method --- .../scala/org/apache/mxnet/FeedForward.scala | 152 +++++++++++------- .../org/apache/mxnet/NativeResource.scala | 8 +- .../org/apache/mxnet/ResourceScope.scala | 35 ++-- .../imclassification/TrainModel.scala | 80 ++++----- .../imclassification/util/Trainer.scala | 133 +++++++-------- 5 files changed, 230 insertions(+), 178 deletions(-) diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/FeedForward.scala b/scala-package/core/src/main/scala/org/apache/mxnet/FeedForward.scala index 00a1450089f7..2ed9d8cfbb84 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/FeedForward.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/FeedForward.scala @@ -17,9 +17,10 @@ package org.apache.mxnet +import org.apache.mxnet.Base.CPtrAddress import org.apache.mxnet.io.NDArrayIter import org.apache.mxnet.optimizer.SGD -import org.slf4j.{LoggerFactory, Logger} +import org.slf4j.{Logger, LoggerFactory} import scala.collection.mutable.ListBuffer @@ -55,7 +56,7 @@ class FeedForward private( argParams: Map[String, NDArray], auxParams: Map[String, NDArray], private val allowExtraParams: Boolean, - val beginEpoch: Int) { + val beginEpoch: Int) extends NativeResource { val logger: Logger = LoggerFactory.getLogger(classOf[FeedForward]) private var argumentChecked = false @@ -126,6 +127,8 @@ class FeedForward private( } // Initialize weight parameters and auxiliary states + // The NDArrays associated with the _argParms and _auxParams are not disposed instead + // they are passed a outer scope if available. private def initParams(inputShapes: Map[String, Shape], overwrite: Boolean = false) : (IndexedSeq[String], IndexedSeq[String], IndexedSeq[String]) = { val (argShapes, _, auxShapes) = symbol.inferShape(inputShapes) @@ -137,16 +140,26 @@ class FeedForward private( val paramNameShapes = (argNames zip argShapes).filter { case (name, _) => paramNames.contains(name) } - val argParams = paramNameShapes.map { case (name, shape) => - (name, NDArray.zeros(shape)) + val argParams = paramNameShapes.map { case (name, shape) => { + val param = NDArray.zeros(shape) + val curScope = ResourceScope.getCurrentScope() + if (curScope.isDefined) curScope.get.moveToOuterScope(param) + (name, param) + } }.toMap - val auxParams = (auxNames zip auxShapes).map { case (name, shape) => - (name, NDArray.zeros(shape)) + + val auxParams = (auxNames zip auxShapes).map { case (name, shape) => { + val param = NDArray.zeros(shape) + val curScope = ResourceScope.getCurrentScope() + if (curScope.isDefined) curScope.get.moveToOuterScope(param) + (name, param) + } }.toMap for ((k, v) <- argParams) { if (_argParams != null && _argParams.contains(k) && (!overwrite)) { argParams(k).set(_argParams(k)) + } else { initializer(k, v) } @@ -277,13 +290,15 @@ class FeedForward private( def fit(trainData: DataIter, evalData: DataIter, evalMetric: EvalMetric, kvStoreType: String, epochEndCallback: EpochEndCallback, batchEndCallback: BatchEndCallback, logger: Logger, workLoadList: Seq[Float]): Unit = { - // init params first to allow kv store use _argParams to decide its type - initSymbolParams(trainData) - // create kvstore - val (kvStore, updateOnKVStore) = Model.createKVStore(kvStoreType, ctx.length, _argParams) - fit(trainData, evalData, evalMetric, kvStore, updateOnKVStore, - epochEndCallback, batchEndCallback, logger, workLoadList) - kvStore.foreach(_.dispose()) + ResourceScope.using() { + // init params first to allow kv store use _argParams to decide its type + initSymbolParams(trainData) + // create kvstore + val 
(kvStore, updateOnKVStore) = Model.createKVStore(kvStoreType, ctx.length, _argParams) + fit(trainData, evalData, evalMetric, kvStore, updateOnKVStore, + epochEndCallback, batchEndCallback, logger, workLoadList) +// kvStore.foreach(_.dispose()) + } } def fit(trainData: DataIter, evalData: DataIter, evalMetric: EvalMetric, @@ -313,11 +328,13 @@ class FeedForward private( batchEndCallback: BatchEndCallback, logger: Logger, workLoadList: Seq[Float]): Unit = { // init params first to allow kv store use _argParams to decide its type - initSymbolParams(trainData) - // create kvstore - val (kvStore, updateOnKVStore) = Model.createKVStore(kv) - fit(trainData, evalData, evalMetric, kvStore, updateOnKVStore, - epochEndCallback, batchEndCallback, logger, workLoadList) + ResourceScope.using() { + initSymbolParams(trainData) + // create kvstore + val (kvStore, updateOnKVStore) = Model.createKVStore(kv) + fit(trainData, evalData, evalMetric, kvStore, updateOnKVStore, + epochEndCallback, batchEndCallback, logger, workLoadList) + } } def fit(trainData: DataIter, evalData: DataIter, evalMetric: EvalMetric, @@ -352,44 +369,49 @@ class FeedForward private( batchEndCallback: BatchEndCallback = null, logger: Logger = FeedForward.logger, workLoadList: Seq[Float] = null): Unit = { require(evalMetric != null, "evalMetric cannot be null") - val (argNames, paramNames, auxNames) = initSymbolParams(trainData) - - // init optimizer - val batchSizeMultiplier = kvStore.map { kv => - if (kv.`type` == "dist_sync") { - kv.numWorkers - } else { - 1 - } - } - val batchSize = trainData.batchSize * batchSizeMultiplier.getOrElse(1) - this.optimizer.setArgNames(argNames) - this.optimizer.setRescaleGrad(1f / batchSize) - this.optimizer.setSymbol(this.symbol) - val paramIdx2Name = - if (updateOnKVStore) { - paramNames.zipWithIndex.map { case (name, idx) => idx -> name }.toMap - } else { - paramNames.zipWithIndex.flatMap { case (name, idx) => - (0 until ctx.length).map(k => (idx * ctx.length + k) -> name).toMap - }.toMap + // TODO: https://issues.apache.org/jira/browse/MXNET-1171 + // this leaks memory, initSymbolParams->initParams is already called which allocates + // NDArray in argParams, auxParams and here we are overwriting it by calling again. + // PhantomRef should take care of releasing this when GC is called, however we have to + // wait for the GC call to happen. 
+ val (argNames, paramNames, auxNames) = initSymbolParams(trainData) + + // init optimizer + val batchSizeMultiplier = kvStore.map { kv => + if (kv.`type` == "dist_sync") { + kv.numWorkers + } else { + 1 + } } - this.optimizer.setIdx2Name(paramIdx2Name) - - logger.debug("Start training on multi-device") - Model.trainMultiDevice( - symbol, ctx, argNames, paramNames, auxNames, - _argParams, _auxParams, - this.beginEpoch, this.numEpoch, - this.epochSize, this.optimizer, - kvStore, updateOnKVStore, - trainData = trainData, evalData = Option(evalData), - evalMetric = evalMetric, - epochEndCallback = Option(epochEndCallback), - batchEndCallback = Option(batchEndCallback), - workLoadList = workLoadList, - monitor = monitor, - symGen = symGen) + val batchSize = trainData.batchSize * batchSizeMultiplier.getOrElse(1) + this.optimizer.setArgNames(argNames) + this.optimizer.setRescaleGrad(1f / batchSize) + this.optimizer.setSymbol(this.symbol) + val paramIdx2Name = + if (updateOnKVStore) { + paramNames.zipWithIndex.map { case (name, idx) => idx -> name }.toMap + } else { + paramNames.zipWithIndex.flatMap { case (name, idx) => + (0 until ctx.length).map(k => (idx * ctx.length + k) -> name).toMap + }.toMap + } + this.optimizer.setIdx2Name(paramIdx2Name) + + logger.debug("Start training on multi-device") + Model.trainMultiDevice( + symbol, ctx, argNames, paramNames, auxNames, + _argParams, _auxParams, + this.beginEpoch, this.numEpoch, + this.epochSize, this.optimizer, + kvStore, updateOnKVStore, + trainData = trainData, evalData = Option(evalData), + evalMetric = evalMetric, + epochEndCallback = Option(epochEndCallback), + batchEndCallback = Option(batchEndCallback), + workLoadList = workLoadList, + monitor = monitor, + symGen = symGen) } /** @@ -416,9 +438,29 @@ class FeedForward private( def serialize(): Array[Byte] = { Model.serialize(this.symbol, getArgParams, getAuxParams) } + + // hack to make the FeedForward.scala work with ResourceScope and + // automatically release _argParms and _auxParms + override def nativeAddress: CPtrAddress = hashCode() + + override def nativeDeAllocator: CPtrAddress => Int = FeedForward.doNothingDeAllocator + + override val ref: NativeResourceRef = super.register() + + override val bytesAllocated: Long = 0L + + override def dispose(): Unit = { + if (!super.isDisposed) { + _argParams.foreach { case (_, param) => param.dispose() } + _auxParams.foreach { case (_, param) => param.dispose() } + } + } } object FeedForward { + + private def doNothingDeAllocator(dummy: CPtrAddress): Int = 0 + private val logger: Logger = LoggerFactory.getLogger(classOf[FeedForward]) // Check if name is a data argument. 
private def isDataArg(name: String): Boolean = { diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/NativeResource.scala b/scala-package/core/src/main/scala/org/apache/mxnet/NativeResource.scala index 48d4b0c193b1..1806b8653376 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/NativeResource.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/NativeResource.scala @@ -46,7 +46,8 @@ private[mxnet] trait NativeResource */ def nativeDeAllocator: (CPtrAddress => Int) - /** Call NativeResource.register to get the reference + /** + * Call NativeResource.register to get the reference */ val ref: NativeResourceRef @@ -56,6 +57,7 @@ private[mxnet] trait NativeResource // intentionally making it a val, so it gets evaluated when defined val bytesAllocated: Long + // this is set and unset by [[ResourceScope.add]] and [[ResourceScope.remove]] private[mxnet] var scope: Option[ResourceScope] = None @volatile private var disposed = false @@ -69,11 +71,11 @@ private[mxnet] trait NativeResource * using PhantomReference */ def register(): NativeResourceRef = { - scope = ResourceScope.getCurrentScope() + val scope = ResourceScope.getCurrentScope() if (scope.isDefined) scope.get.add(this) NativeResource.totalBytesAllocated.getAndAdd(bytesAllocated) - // register with PhantomRef tracking to release incase the objects go + // register with PhantomRef tracking to release in case the objects go // out of reference within scope but are held for long time NativeResourceRef.register(this, nativeDeAllocator) } diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala b/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala index 1c5782d873a9..30fe1473a2cd 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala @@ -58,6 +58,7 @@ class ResourceScope extends AutoCloseable { */ def add(resource: NativeResource): Unit = { resourceQ.+=(resource) + resource.scope = Some(this) } /** @@ -67,7 +68,21 @@ class ResourceScope extends AutoCloseable { */ def remove(resource: NativeResource): Unit = { resourceQ.-=(resource) + resource.scope = None } + + /** + * Removes from current Scope and moves to outer scope if it exists + * @param resource Resource to be moved to an outer scope + */ + def moveToOuterScope(resource: NativeResource): Unit = { + val prevScope: Option[ResourceScope] = ResourceScope.getPrevScope() + if (prevScope.isDefined) { + this.remove(resource) + prevScope.get.add(resource) + } else this.remove(resource) + } + } object ResourceScope { @@ -92,32 +107,22 @@ object ResourceScope { val curScope = if (scope != null) scope else new ResourceScope() - val prevScope: Option[ResourceScope] = ResourceScope.getPrevScope() - @inline def resourceInGeneric(g: scala.collection.Iterable[_]) = { g.foreach( n => n match { case nRes: NativeResource => { - removeAndAddToPrevScope(nRes) + curScope.moveToOuterScope(nRes) } case kv: scala.Tuple2[_, _] => { - if (kv._1.isInstanceOf[NativeResource]) removeAndAddToPrevScope( + if (kv._1.isInstanceOf[NativeResource]) curScope.moveToOuterScope( kv._1.asInstanceOf[NativeResource]) - if (kv._2.isInstanceOf[NativeResource]) removeAndAddToPrevScope( + if (kv._2.isInstanceOf[NativeResource]) curScope.moveToOuterScope( kv._2.asInstanceOf[NativeResource]) } } ) } - @inline def removeAndAddToPrevScope(r: NativeResource) = { - curScope.remove(r) - if (prevScope.isDefined) { - prevScope.get.add(r) - r.scope = prevScope 
- } - } - @inline def safeAddSuppressed(t: Throwable, suppressed: Throwable): Unit = { if (!t.isInstanceOf[ControlThrowable]) t.addSuppressed(suppressed) } @@ -129,8 +134,8 @@ object ResourceScope { ret match { // don't de-allocate if returning any collection that contains NativeResource. case resInGeneric: scala.collection.Iterable[_] => resourceInGeneric(resInGeneric) - case nRes: NativeResource => removeAndAddToPrevScope(nRes) - case ndRet: NDArrayFuncReturn => ndRet.arr.foreach( nd => removeAndAddToPrevScope(nd) ) + case nRes: NativeResource => curScope.moveToOuterScope(nRes) + case ndRet: NDArrayFuncReturn => ndRet.arr.foreach( nd => curScope.moveToOuterScope(nd) ) case _ => // do nothing } ret diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/TrainModel.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/TrainModel.scala index 608e191e019f..f6c283c3dfb2 100644 --- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/TrainModel.scala +++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/TrainModel.scala @@ -43,7 +43,7 @@ object TrainModel { */ def test(model: String, dataPath: String, numExamples: Int = 60000, numEpochs: Int = 10, benchmark: Boolean = false): Float = { - NDArrayCollector.auto().withScope { + ResourceScope.using() { val devs = Array(Context.cpu(0)) val envs: mutable.Map[String, String] = mutable.HashMap.empty[String, String] val (dataLoader, net) = dataLoaderAndModel("mnist", model, dataPath, @@ -110,44 +110,46 @@ object TrainModel { val inst = new TrainModel val parser: CmdLineParser = new CmdLineParser(inst) try { - parser.parseArgument(args.toList.asJava) - - val dataPath = if (inst.dataDir == null) System.getenv("MXNET_HOME") - else inst.dataDir - - val (dataLoader, net) = dataLoaderAndModel(inst.dataset, inst.network, dataPath, - inst.numLayers, inst.numExamples, inst.benchmark) - - val devs = - if (inst.gpus != null) inst.gpus.split(',').map(id => Context.gpu(id.trim.toInt)) - else if (inst.cpus != null) inst.cpus.split(',').map(id => Context.cpu(id.trim.toInt)) - else Array(Context.cpu(0)) - - val envs: mutable.Map[String, String] = mutable.HashMap.empty[String, String] - envs.put("DMLC_ROLE", inst.role) - if (inst.schedulerHost != null) { - require(inst.schedulerPort > 0, "scheduler port not specified") - envs.put("DMLC_PS_ROOT_URI", inst.schedulerHost) - envs.put("DMLC_PS_ROOT_PORT", inst.schedulerPort.toString) - require(inst.numWorker > 0, "Num of workers must > 0") - envs.put("DMLC_NUM_WORKER", inst.numWorker.toString) - require(inst.numServer > 0, "Num of servers must > 0") - envs.put("DMLC_NUM_SERVER", inst.numServer.toString) - logger.info("Init PS environments") - KVStoreServer.init(envs.toMap) - } - - if (inst.role != "worker") { - logger.info("Start KVStoreServer for scheduler & servers") - KVStoreServer.start() - } else { - Trainer.fit(batchSize = inst.batchSize, numExamples = inst.numExamples, devs = devs, - network = net, dataLoader = dataLoader, - kvStore = inst.kvStore, numEpochs = inst.numEpochs, - modelPrefix = inst.modelPrefix, loadEpoch = inst.loadEpoch, - lr = inst.lr, lrFactor = inst.lrFactor, lrFactorEpoch = inst.lrFactorEpoch, - monitorSize = inst.monitor) - logger.info("Finish fit ...") + ResourceScope.using() { + parser.parseArgument(args.toList.asJava) + + val dataPath = if (inst.dataDir == null) System.getenv("MXNET_HOME") + else inst.dataDir + + val (dataLoader, net) = 
dataLoaderAndModel(inst.dataset, inst.network, dataPath, + inst.numLayers, inst.numExamples, inst.benchmark) + + val devs = + if (inst.gpus != null) inst.gpus.split(',').map(id => Context.gpu(id.trim.toInt)) + else if (inst.cpus != null) inst.cpus.split(',').map(id => Context.cpu(id.trim.toInt)) + else Array(Context.cpu(0)) + + val envs: mutable.Map[String, String] = mutable.HashMap.empty[String, String] + envs.put("DMLC_ROLE", inst.role) + if (inst.schedulerHost != null) { + require(inst.schedulerPort > 0, "scheduler port not specified") + envs.put("DMLC_PS_ROOT_URI", inst.schedulerHost) + envs.put("DMLC_PS_ROOT_PORT", inst.schedulerPort.toString) + require(inst.numWorker > 0, "Num of workers must > 0") + envs.put("DMLC_NUM_WORKER", inst.numWorker.toString) + require(inst.numServer > 0, "Num of servers must > 0") + envs.put("DMLC_NUM_SERVER", inst.numServer.toString) + logger.info("Init PS environments") + KVStoreServer.init(envs.toMap) + } + + if (inst.role != "worker") { + logger.info("Start KVStoreServer for scheduler & servers") + KVStoreServer.start() + } else { + Trainer.fit(batchSize = inst.batchSize, numExamples = inst.numExamples, devs = devs, + network = net, dataLoader = dataLoader, + kvStore = inst.kvStore, numEpochs = inst.numEpochs, + modelPrefix = inst.modelPrefix, loadEpoch = inst.loadEpoch, + lr = inst.lr, lrFactor = inst.lrFactor, lrFactorEpoch = inst.lrFactorEpoch, + monitorSize = inst.monitor) + logger.info("Finish fit ...") + } } } catch { case ex: Exception => { diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/util/Trainer.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/util/Trainer.scala index 9a54e58b653e..276816cf8c8c 100644 --- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/util/Trainer.scala +++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/imclassification/util/Trainer.scala @@ -50,83 +50,84 @@ object Trainer { lr: Float = 0.1f, lrFactor: Float = 1f, lrFactorEpoch: Float = 1f, clipGradient: Float = 0f, monitorSize: Int = -1): Accuracy = { // kvstore - var kv = KVStore.create(kvStore) + ResourceScope.using() { + var kv = KVStore.create(kvStore) - // load model - val modelPrefixWithRank = - if (modelPrefix == null) null - else modelPrefix + s"-${kv.rank}" + // load model + val modelPrefixWithRank = + if (modelPrefix == null) null + else modelPrefix + s"-${kv.rank}" - val (argParams, auxParams, beginEpoch) = - if (loadEpoch >= 0) { - require(modelPrefixWithRank != null) - val tmp = FeedForward.load(modelPrefix, loadEpoch) - (tmp.getArgParams, tmp.getAuxParams, loadEpoch) - } else { - (null, null, 0) - } + val (argParams, auxParams, beginEpoch) = + if (loadEpoch >= 0) { + require(modelPrefixWithRank != null) + val tmp = FeedForward.load(modelPrefix, loadEpoch) + (tmp.getArgParams, tmp.getAuxParams, loadEpoch) + } else { + (null, null, 0) + } - // save model - val checkpoint: EpochEndCallback = - if (modelPrefix == null) null - else new EpochEndCallback { - override def invoke(epoch: Int, symbol: Symbol, - argParams: Map[String, NDArray], - auxStates: Map[String, NDArray]): Unit = { - Model.saveCheckpoint(modelPrefix, epoch + 1, symbol, argParams, auxParams) + // save model + val checkpoint: EpochEndCallback = + if (modelPrefix == null) null + else new EpochEndCallback { + override def invoke(epoch: Int, symbol: Symbol, + argParams: Map[String, NDArray], + auxStates: Map[String, NDArray]): Unit = { + 
Model.saveCheckpoint(modelPrefix, epoch + 1, symbol, argParams, auxParams) + } } - } - // data - val (train, validation) = dataLoader(batchSize, kv) + // data + val (train, validation) = dataLoader(batchSize, kv) - // train - val epochSize = - if (kvStore == "dist_sync") numExamples / batchSize / kv.numWorkers - else numExamples / batchSize + // train + val epochSize = + if (kvStore == "dist_sync") numExamples / batchSize / kv.numWorkers + else numExamples / batchSize - val lrScheduler = - if (lrFactor < 1f) { - new FactorScheduler(step = Math.max((epochSize * lrFactorEpoch).toInt, 1), - factor = lrFactor) - } else { - null - } - val optimizer: Optimizer = new SGD(learningRate = lr, - lrScheduler = lrScheduler, clipGradient = clipGradient, - momentum = 0.9f, wd = 0.00001f) + val lrScheduler = + if (lrFactor < 1f) { + new FactorScheduler(step = Math.max((epochSize * lrFactorEpoch).toInt, 1), + factor = lrFactor) + } else { + null + } + val optimizer: Optimizer = new SGD(learningRate = lr, + lrScheduler = lrScheduler, clipGradient = clipGradient, + momentum = 0.9f, wd = 0.00001f) - // disable kvstore for single device - if (kv.`type`.contains("local") && (devs.length == 1 || devs(0).deviceType != "gpu")) { - kv.dispose() - kv = null - } + // disable kvstore for single device + if (kv.`type`.contains("local") && (devs.length == 1 || devs(0).deviceType != "gpu")) { + kv.dispose() + kv = null + } - val model = new FeedForward(ctx = devs, - symbol = network, - numEpoch = numEpochs, - optimizer = optimizer, - initializer = new Xavier(factorType = "in", magnitude = 2.34f), - argParams = argParams, - auxParams = auxParams, - beginEpoch = beginEpoch, - epochSize = epochSize) - if (monitorSize > 0) { - model.setMonitor(new Monitor(monitorSize)) - } - val acc = new Accuracy() - model.fit(trainData = train, - evalData = validation, - evalMetric = acc, - kvStore = kv, - batchEndCallback = new Speedometer(batchSize, 50), - epochEndCallback = checkpoint) - if (kv != null) { - kv.dispose() + val model = new FeedForward(ctx = devs, + symbol = network, + numEpoch = numEpochs, + optimizer = optimizer, + initializer = new Xavier(factorType = "in", magnitude = 2.34f), + argParams = argParams, + auxParams = auxParams, + beginEpoch = beginEpoch, + epochSize = epochSize) + if (monitorSize > 0) { + model.setMonitor(new Monitor(monitorSize)) + } + val acc = new Accuracy() + model.fit(trainData = train, + evalData = validation, + evalMetric = acc, + kvStore = kv, + batchEndCallback = new Speedometer(batchSize, 50), + epochEndCallback = checkpoint) + if (kv != null) { + kv.dispose() + } + acc } - acc } - // scalastyle:on parameterNum } From 149ea17d6bfdf1e20d5e3f7968b33fce43653f43 Mon Sep 17 00:00:00 2001 From: Piyush Ghai Date: Thu, 8 Nov 2018 11:05:19 -0800 Subject: [PATCH 11/38] [MXNET-1187] Added Tutorial for Java under mxnet.io/docs/tutorials (#13183) * Added tutorial for Java installation on IntelliJ for mxnet.io website * Added correct image resources * Removed spurious quotes * Added java tutorial to whitelisting * Added community download edition link to intelliJ section --- docs/tutorials/index.md | 6 + docs/tutorials/java/mxnet_java_on_intellij.md | 210 ++++++++++++++++++ tests/tutorials/test_sanity_tutorials.py | 3 +- 3 files changed, 218 insertions(+), 1 deletion(-) create mode 100644 docs/tutorials/java/mxnet_java_on_intellij.md diff --git a/docs/tutorials/index.md b/docs/tutorials/index.md index ef7306c14955..3c1841c19bdb 100644 --- a/docs/tutorials/index.md +++ b/docs/tutorials/index.md @@ -154,6 
+154,12 @@ Select API:
 * [MXNet-Scala Examples](https://github.com/apache/incubator-mxnet/tree/master/scala-package/examples/src/main/scala/org/apache/mxnetexamples)


+## Java Tutorials +* Getting Started + * [Developer Environment Setup on IntelliJ IDE](/tutorials/java/mxnet_java_on_intellij.html) +* [MXNet-Java Examples](https://github.com/apache/incubator-mxnet/tree/master/scala-package/examples/src/main/java/org/apache/mxnetexamples) +
+ ## C++ Tutorials * Models diff --git a/docs/tutorials/java/mxnet_java_on_intellij.md b/docs/tutorials/java/mxnet_java_on_intellij.md new file mode 100644 index 000000000000..b90a92b0a7b5 --- /dev/null +++ b/docs/tutorials/java/mxnet_java_on_intellij.md @@ -0,0 +1,210 @@ +# Run MXNet Java Examples Using the IntelliJ IDE (macOS) + +This tutorial guides you through setting up a simple Java project in IntelliJ IDE on macOS and demonstrates usage of the MXNet Java APIs. + +## Prerequisites: +To use this tutorial you need the following pre-requisites: + +- [Java 8 JDK](http://www.oracle.com/technetwork/java/javase/downloads/index.html) +- [Maven](https://maven.apache.org/install.html) +- [OpenCV](https://opencv.org/) +- [IntelliJ IDEA](https://www.jetbrains.com/idea/) (One can download the community edition from [here](https://www.jetbrains.com/idea/download)) + +### MacOS Prerequisites + +**Step 1.** Install brew: +``` +/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" +``` + +Or, if you already have brew, update it: +``` +brew update +``` + +**Step 2.** Install Java 8: +``` +brew tap caskroom/versions +brew cask install java8 +``` + +**Step 3.** Install maven: +``` +brew install maven +``` + +**Step 4.** Install OpenCV: +``` +brew install opencv +``` + +You can also run this tutorial on an Ubuntu machine after installing the following prerequisites. +### Ubuntu Prerequisites + +**Step 1.** Download the MXNet source. + +```bash +git clone --recursive https://github.com/apache/incubator-mxnet.git mxnet +cd mxnet +``` + +**Step 2.** Run the dependency installation scripts. + +```bash +sudo ./ci/docker/install/ubuntu_core.sh +sudo ./ci/docker/install/ubuntu_scala.sh +``` + +The `ubuntu_scala.sh` installs the common dependencies required for both MXNet Scala and MXNet Java packages. + +## Set Up Your Project + +**Step 1.** Install and setup [IntelliJ IDEA](https://www.jetbrains.com/idea/) + +**Step 2.** Create a new Project: + +![intellij welcome](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/scala/intellij-welcome.png) + +From the IntelliJ welcome screen, select "Create New Project". + +Choose the Maven project type. + +Select the checkbox for `Create from archetype`, then choose `org.apache.maven.archetypes:maven-archetype-quickstart` from the list below. More on this can be found on a Maven tutorial : [Maven in 5 Minutes](https://maven.apache.org/guides/getting-started/maven-in-five-minutes.html). + +![maven project type - archetype](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/project-archetype.png) + +click `Next`. + +![project metadata](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-metadata.png) + +Set the project's metadata. For this tutorial, use the following: + +**GroupId** +``` +mxnet +``` +**ArtifactId** +``` +ArtifactId: javaMXNet +``` +**Version** +``` +1.0-SNAPSHOT +``` + +TODO +![project properties](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-properties.png) + +Review the project's properties. The settings can be left as their default. + +TODO +![project location](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-location.png) + +Set the project's location. The rest of the settings can be left as their default. 
+ +TODO +![project 1](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-pom.png) + +After clicking Finish, you will be presented with the project's first view. +The project's `pom.xml` will be open for editing. + +**Step 3.** Add the following Maven dependency to your `pom.xml` file under the `dependencies` tag: + +```html +<dependency> +  <groupId>org.apache.mxnet</groupId> +  <artifactId>mxnet-full_2.11-osx-x86_64-cpu</artifactId> +  <version>1.4.0</version> +</dependency> +``` + +To view the latest MXNet Maven packages, you can check [MXNet Maven package repository](https://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.apache.mxnet%22) + + +**Step 4.** Import dependencies with Maven: + + - Note the prompt in the lower right corner that states "Maven projects need to be imported". If this is not visible, click on the little green balloon that appears in the lower right corner. + +![import_dependencies](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/project-import-changes.png) + +Click "Import Changes" in this prompt. + +**Step 5.** Build the project: +- To build the project, from the menu choose Build, and then choose Build Project. + +**Step 6.** Navigate to the App.java class in the project and paste the following code, overwriting the original hello world code. +```java +package mxnet; + +import org.apache.mxnet.javaapi.Context; +import org.apache.mxnet.javaapi.NDArray; + +public class App +{ + public static void main( String[] args ) + { + NDArray nd = NDArray.ones(Context.cpu(), new int[] {10, 20}); + System.out.println( "Testing MXNet by generating a 10x20 NDArray" ); + System.out.println("Shape of NDArray is : " + nd.shape()); + } +} +``` + +**Step 7.** Now run the App.java by clicking the green arrow as highlighted in the image below. + +![run hello mxnet](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-run-projects.png) + + +The result should be this output: + +``` +Testing MXNet by generating a 10x20 NDArray +Shape of NDArray is : (10,20) + +Process finished with exit code 0 +``` + + +### Troubleshooting + +If you get an error, check the dependencies at the beginning of this tutorial. For example, you might see the following in the middle of the error messages, where `x.x` would be the version it's looking for. + +``` +... +Library not loaded: /usr/local/opt/opencv/lib/libopencv_calib3d.x.x.dylib +... +``` + +This can be resolved by installing OpenCV. + + +### Command Line Build Option + +- You can also compile the project by using the following command at the command line. Change directories to this project's root folder then run the following: + +```bash +mvn clean install dependency:copy-dependencies +``` +If the command succeeds, you should see a lot of info and some warning messages, followed by: + +```bash +[INFO] ------------------------------------------------------------------------ +[INFO] BUILD SUCCESS +[INFO] ------------------------------------------------------------------------ +[INFO] Total time: 3.475 s +[INFO] Finished at: 2018-11-08T05:06:31-08:00 +[INFO] ------------------------------------------------------------------------ +``` +The build generates a new jar file in the `target` folder called `javaMXNet-1.0-SNAPSHOT.jar`. + +To run the App.java use the following command from the project's root folder and you should see the same output as we got when the project was run from IntelliJ.
+```bash +java -cp target/javaMXNet-1.0-SNAPSHOT.jar:target/dependency/* mxnet.App +``` + +## Next Steps +For more information about MXNet Java resources, see the following: + +* [Java Inference API](https://mxnet.incubator.apache.org/api/java/infer.html) +* [Java Inference Examples](https://github.com/apache/incubator-mxnet/tree/java-api/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/) +* [MXNet Tutorials Index](http://mxnet.io/tutorials/index.html) diff --git a/tests/tutorials/test_sanity_tutorials.py b/tests/tutorials/test_sanity_tutorials.py index 078e96b3b29e..446a6c27f627 100644 --- a/tests/tutorials/test_sanity_tutorials.py +++ b/tests/tutorials/test_sanity_tutorials.py @@ -55,7 +55,8 @@ 'unsupervised_learning/index.md', 'vision/index.md', 'tensorrt/index.md', - 'tensorrt/inference_with_trt.md'] + 'tensorrt/inference_with_trt.md', + 'java/mxnet_java_on_intellij.md'] whitelist_set = set(whitelist) def test_tutorial_downloadable(): From 3664a7cd3347969eabe77a086c75f7d597515df4 Mon Sep 17 00:00:00 2001 From: Lanking Date: Mon, 12 Nov 2018 15:58:09 -0800 Subject: [PATCH 12/38] [MXNET-1202] Change Builder class into a better way (#13159) * applying changes for Builder functions * simplify the code structure * update docgen * follow Naveen's suggestion * apply comments to Param * clean up param build * change on the comments * add one description line --- .../org/apache/mxnet/javaapi/NDArray.scala | 14 ---- .../org/apache/mxnet/javaapi/NDArrayTest.java | 6 +- .../org/apache/mxnet/APIDocGenerator.scala | 56 +++++++++++--- .../mxnet/javaapi/JavaNDArrayMacro.scala | 73 +++++++++---------- 4 files changed, 82 insertions(+), 67 deletions(-) diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala index d4e67f73408e..cdcc292ada63 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala @@ -385,17 +385,3 @@ class NDArray(val nd : org.apache.mxnet.NDArray ) { override def equals(obj: Any): Boolean = nd.equals(obj) override def hashCode(): Int = nd.hashCode } - -object NDArrayFuncReturn { - implicit def toNDFuncReturn(javaFunReturn : NDArrayFuncReturn) - : org.apache.mxnet.NDArrayFuncReturn = javaFunReturn.ndFuncReturn - implicit def toJavaNDFuncReturn(ndFuncReturn : org.apache.mxnet.NDArrayFuncReturn) - : NDArrayFuncReturn = new NDArrayFuncReturn(ndFuncReturn) -} - -private[mxnet] class NDArrayFuncReturn(val ndFuncReturn : org.apache.mxnet.NDArrayFuncReturn) { - def head : NDArray = ndFuncReturn.head - def get : NDArray = ndFuncReturn.get - def apply(i : Int) : NDArray = ndFuncReturn.apply(i) - // TODO: Add JavaNDArray operational stuff -} diff --git a/scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java b/scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java index a9bad83f62d6..2659b7848bc6 100644 --- a/scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java +++ b/scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java @@ -19,9 +19,9 @@ import org.junit.Test; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import org.apache.mxnet.javaapi.NDArrayBase.*; import static org.junit.Assert.assertTrue; @@ -71,7 +71,7 @@ public void testGenerated(){ NDArray$ NDArray = NDArray$.MODULE$; float[] arr = new float[]{1.0f, 2.0f, 3.0f}; NDArray nd = new NDArray(arr, new 
Shape(new int[]{3}), new Context("cpu", 0)); - float result = NDArray.norm(nd).invoke().get().toArray()[0]; + float result = NDArray.norm(NDArray.new normParam(nd))[0].toArray()[0]; float cal = 0.0f; for (float ele : arr) { cal += ele * ele; @@ -79,7 +79,7 @@ public void testGenerated(){ cal = (float) Math.sqrt(cal); assertTrue(Math.abs(result - cal) < 1e-5); NDArray dotResult = new NDArray(new float[]{0}, new Shape(new int[]{1}), new Context("cpu", 0)); - NDArray.dot(nd, nd).setout(dotResult).invoke().get(); + NDArray.dot(NDArray.new dotParam(nd, nd).setOut(dotResult)); assertTrue(Arrays.equals(dotResult.toArray(), new float[]{14.0f})); } } diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala index 44d47a2099d5..f2326868e8e7 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala @@ -116,9 +116,7 @@ private[mxnet] object APIDocGenerator{ val absFuncs = absClassFunctions.filterNot(_.name.startsWith("_")) .filterNot(ele => notGenerated.contains(ele.name)) .map(absClassFunction => { - val scalaDoc = generateAPIDocFromBackend(absClassFunction) - val defBody = generateJavaAPISignature(absClassFunction) - s"$scalaDoc\n$defBody" + generateJavaAPISignature(absClassFunction) }) val packageName = "NDArrayBase" val packageDef = "package org.apache.mxnet.javaapi" @@ -203,27 +201,61 @@ private[mxnet] object APIDocGenerator{ } def generateJavaAPISignature(func : absClassFunction) : String = { + val useParamObject = func.listOfArgs.count(arg => arg.isOptional) >= 2 var argDef = ListBuffer[String]() var classDef = ListBuffer[String]() + var requiredParam = ListBuffer[String]() func.listOfArgs.foreach(absClassArg => { val currArgName = safetyNameCheck(absClassArg.argName) // scalastyle:off - if (absClassArg.isOptional) { - classDef += s"def set${absClassArg.argName}(${absClassArg.argName} : ${absClassArg.argType}) : ${func.name}BuilderBase" + if (absClassArg.isOptional && useParamObject) { + classDef += + s"""private var $currArgName: ${absClassArg.argType} = null + |/** + | * @param $currArgName\t\t${absClassArg.argDesc} + | */ + |def set${currArgName.capitalize}($currArgName : ${absClassArg.argType}): ${func.name}Param = { + | this.$currArgName = $currArgName + | this + | }""".stripMargin } else { + requiredParam += s" * @param $currArgName\t\t${absClassArg.argDesc}" argDef += s"$currArgName : ${absClassArg.argType}" } + classDef += s"def get${currArgName.capitalize}() = this.$currArgName" // scalastyle:on }) - classDef += s"def setout(out : NDArray) : ${func.name}BuilderBase" - classDef += s"def invoke() : org.apache.mxnet.javaapi.NDArrayFuncReturn" val experimentalTag = "@Experimental" - // scalastyle:off - var finalStr = s"$experimentalTag\ndef ${func.name} (${argDef.mkString(", ")}) : ${func.name}BuilderBase\n" - // scalastyle:on - finalStr += s"abstract class ${func.name}BuilderBase {\n ${classDef.mkString("\n ")}\n}" - finalStr + val returnType = "Array[NDArray]" + val scalaDoc = generateAPIDocFromBackend(func) + val scalaDocNoParam = generateAPIDocFromBackend(func, false) + if(useParamObject) { + classDef += + s"""private var out : org.apache.mxnet.NDArray = null + |def setOut(out : NDArray) : ${func.name}Param = { + | this.out = out + | this + | } + | def getOut() = this.out + | """.stripMargin + s"""$scalaDocNoParam + | $experimentalTag + | def ${func.name}(po: 
${func.name}Param) : $returnType + | /** + | * This Param Object is specifically used for ${func.name} + | ${requiredParam.mkString("\n")} + | */ + | class ${func.name}Param(${argDef.mkString(",")}) { + | ${classDef.mkString("\n ")} + | }""".stripMargin + } else { + argDef += "out : NDArray" + s"""$scalaDoc + |$experimentalTag + | def ${func.name}(${argDef.mkString(", ")}) : $returnType + | """.stripMargin + } } diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala index d5be97b501c5..2d1827038afc 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala @@ -68,18 +68,14 @@ private[mxnet] object JavaNDArrayMacro { newNDArrayFunctions.foreach { ndarrayfunction => + val useParamObject = ndarrayfunction.listOfArgs.count(arg => arg.isOptional) >= 2 // Construct argument field with all required args var argDef = ListBuffer[String]() - // Construct Optional Arg - var OptionArgDef = ListBuffer[String]() // Construct function Implementation field (e.g norm) var impl = ListBuffer[String]() impl += "val map = scala.collection.mutable.Map[String, Any]()" - // scalastyle:off - impl += "val args= scala.collection.mutable.ArrayBuffer.empty[org.apache.mxnet.NDArray]" - // scalastyle:on - // Construct Class Implementation (e.g normBuilder) - var classImpl = ListBuffer[String]() + impl += + "val args= scala.collection.mutable.ArrayBuffer.empty[org.apache.mxnet.NDArray]" ndarrayfunction.listOfArgs.foreach({ ndarrayArg => // var is a special word used to define variable in Scala, // need to changed to something else in order to make it work @@ -88,55 +84,56 @@ private[mxnet] object JavaNDArrayMacro { case "type" => "typeOf" case _ => ndarrayArg.argName } - if (ndarrayArg.isOptional) { - OptionArgDef += s"private var $currArgName : ${ndarrayArg.argType} = null" - val tempDef = s"def set$currArgName($currArgName : ${ndarrayArg.argType})" - val tempImpl = s"this.$currArgName = $currArgName\nthis" - classImpl += s"$tempDef = {$tempImpl}" - } else { - argDef += s"$currArgName : ${ndarrayArg.argType}" - } + if (useParamObject) currArgName = s"po.get${currArgName.capitalize}()" + argDef += s"$currArgName : ${ndarrayArg.argType}" // NDArray arg implementation val returnType = "org.apache.mxnet.javaapi.NDArray" val base = if (ndarrayArg.argType.equals(returnType)) { - s"args += this.$currArgName" + s"args += $currArgName" } else if (ndarrayArg.argType.equals(s"Array[$returnType]")){ - s"this.$currArgName.foreach(args+=_)" + s"$currArgName.foreach(args+=_)" } else { - "map(\"" + ndarrayArg.argName + "\") = this." 
+ currArgName + "map(\"" + ndarrayArg.argName + "\") = " + currArgName } impl.append( - if (ndarrayArg.isOptional) s"if (this.$currArgName != null) $base" + if (ndarrayArg.isOptional) s"if ($currArgName != null) $base" else base ) }) // add default out parameter - classImpl += - "def setout(out : org.apache.mxnet.javaapi.NDArray) = {this.out = out\nthis}" - impl += "if (this.out != null) map(\"out\") = this.out" - OptionArgDef += "private var out : org.apache.mxnet.NDArray = null" - val returnType = "org.apache.mxnet.javaapi.NDArrayFuncReturn" + argDef += s"out: org.apache.mxnet.javaapi.NDArray" + if (useParamObject) { + impl += "if (po.getOut() != null) map(\"out\") = po.getOut()" + } else { + impl += "if (out != null) map(\"out\") = out" + } + val returnType = "Array[org.apache.mxnet.javaapi.NDArray]" // scalastyle:off // Combine and build the function string - impl += "org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke(\"" + ndarrayfunction.name + "\", args.toSeq, map.toMap)" - val classDef = s"class ${ndarrayfunction.name}Builder(${argDef.mkString(",")}) extends ${ndarrayfunction.name}BuilderBase" - val classBody = s"${OptionArgDef.mkString("\n")}\n${classImpl.mkString("\n")}\ndef invoke() : $returnType = {${impl.mkString("\n")}}" - val classFinal = s"$classDef {$classBody}" - val functionDef = s"def ${ndarrayfunction.name} (${argDef.mkString(",")})" - val functionBody = s"new ${ndarrayfunction.name}Builder(${argDef.map(_.split(":")(0)).mkString(",")})" - val functionFinal = s"$functionDef : ${ndarrayfunction.name}BuilderBase = $functionBody" - // scalastyle:on - functionDefs += c.parse(functionFinal).asInstanceOf[DefDef] - classDefs += c.parse(classFinal).asInstanceOf[ClassDef] + impl += "val finalArr = org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke(\"" + + ndarrayfunction.name + "\", args.toSeq, map.toMap).arr" + impl += "finalArr.map(ele => new NDArray(ele))" + if (useParamObject) { + val funcDef = + s"""def ${ndarrayfunction.name}(po: ${ndarrayfunction.name}Param): $returnType = { + | ${impl.mkString("\n")} + | }""".stripMargin + functionDefs += c.parse(funcDef).asInstanceOf[DefDef] + } else { + val funcDef = + s"""def ${ndarrayfunction.name}(${argDef.mkString(",")}): $returnType = { + | ${impl.mkString("\n")} + | }""".stripMargin + functionDefs += c.parse(funcDef).asInstanceOf[DefDef] + } } - structGeneration(c)(functionDefs.toList, classDefs.toList, annottees : _*) + structGeneration(c)(functionDefs.toList, annottees : _*) } private def structGeneration(c: blackbox.Context) (funcDef : List[c.universe.DefDef], - classDef : List[c.universe.ClassDef], annottees: c.Expr[Any]*) : c.Expr[Any] = { import c.universe._ @@ -146,7 +143,7 @@ private[mxnet] object JavaNDArrayMacro { case ClassDef(mods, name, something, template) => val q = template match { case Template(superMaybe, emptyValDef, defs) => - Template(superMaybe, emptyValDef, defs ++ funcDef ++ classDef) + Template(superMaybe, emptyValDef, defs ++ funcDef) case ex => throw new IllegalArgumentException(s"Invalid template: $ex") } @@ -154,7 +151,7 @@ private[mxnet] object JavaNDArrayMacro { case ModuleDef(mods, name, template) => val q = template match { case Template(superMaybe, emptyValDef, defs) => - Template(superMaybe, emptyValDef, defs ++ funcDef ++ classDef) + Template(superMaybe, emptyValDef, defs ++ funcDef) case ex => throw new IllegalArgumentException(s"Invalid template: $ex") } From 1bb5b7f076c2ecd3f4edae8d517573efa4678c17 Mon Sep 17 00:00:00 2001 From: Lanking Date: Mon, 12 Nov 2018 16:02:34 -0800 Subject: 
[PATCH 13/38] [MXNET-1041] Add Java benchmark (#13095) * add java benchmark * applied changes based on Piyush comments * applies Andrew's change * fix clojure test issue * update the statistic names * follow Naveen's instruction --- .../benchmark/run_java_inference_bm.sh | 40 ++++++ .../objectdetector/run_ssd_java_example.sh | 2 +- .../javaapi/benchmark/InferBase.java | 35 +++++ .../javaapi/benchmark/JavaBenchmark.java | 135 ++++++++++++++++++ .../benchmark/ObjectDetectionBenchmark.java | 64 +++++++++ .../infer}/objectdetector/README.md | 0 .../objectdetector/SSDClassifierExample.java | 2 +- .../mxnet/infer/javaapi/ObjectDetector.scala | 10 +- 8 files changed, 285 insertions(+), 3 deletions(-) create mode 100644 scala-package/examples/scripts/benchmark/run_java_inference_bm.sh create mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/InferBase.java create mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java create mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java rename scala-package/examples/src/main/java/org/apache/mxnetexamples/{infer/javapi => javaapi/infer}/objectdetector/README.md (100%) rename scala-package/examples/src/main/java/org/apache/mxnetexamples/{infer/javapi => javaapi/infer}/objectdetector/SSDClassifierExample.java (99%) diff --git a/scala-package/examples/scripts/benchmark/run_java_inference_bm.sh b/scala-package/examples/scripts/benchmark/run_java_inference_bm.sh new file mode 100644 index 000000000000..5a468e344829 --- /dev/null +++ b/scala-package/examples/scripts/benchmark/run_java_inference_bm.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +set -e + +hw_type=cpu +if [ "$USE_GPU" = "1" ] +then + hw_type=gpu +fi + +platform=linux-x86_64 + +if [[ $OSTYPE = [darwin]* ]] +then + platform=osx-x86_64 +fi + +MXNET_ROOT=$(cd "$(dirname $0)/../../../.."; pwd) +CLASS_PATH=$MXNET_ROOT/scala-package/assembly/$platform-$hw_type/target/*:$MXNET_ROOT/scala-package/examples/target/* + +java -Xmx8G -Dmxnet.traceLeakedObjects=true -cp $CLASS_PATH \ + org.apache.mxnetexamples.javaapi.benchmark.JavaBenchmark $@ + diff --git a/scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh b/scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh index f444a3a59af7..00ed793a7bb5 100755 --- a/scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh +++ b/scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh @@ -41,7 +41,7 @@ INPUT_IMG=$2 INPUT_DIR=$3 java -Xmx8G -cp $CLASS_PATH \ - org.apache.mxnetexamples.infer.javapi.objectdetector.SSDClassifierExample \ + org.apache.mxnetexamples.javaapi.infer.objectdetector.SSDClassifierExample \ --model-path-prefix $MODEL_DIR \ --input-image $INPUT_IMG \ --input-dir $INPUT_DIR diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/InferBase.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/InferBase.java new file mode 100644 index 000000000000..fdcde6b4152c --- /dev/null +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/InferBase.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.mxnetexamples.javaapi.benchmark; + +import org.apache.mxnet.javaapi.Context; +import org.kohsuke.args4j.Option; + +import java.util.List; + +abstract class InferBase { + @Option(name = "--num-runs", usage = "Number of runs") + public int numRun = 1; + @Option(name = "--model-name", usage = "Name of the model") + public String modelName = ""; + @Option(name = "--batchsize", usage = "Size of the batch") + public int batchSize = 1; + + public abstract void preProcessModel(List context); + public abstract void runSingleInference(); + public abstract void runBatchInference(); +} diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java new file mode 100644 index 000000000000..1baca20fbe6d --- /dev/null +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.mxnetexamples.javaapi.benchmark; + +import org.apache.mxnet.javaapi.Context; +import org.kohsuke.args4j.CmdLineParser; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class JavaBenchmark { + + private static boolean runBatch = false; + + private static void parse(Object inst, String[] args) { + CmdLineParser parser = new CmdLineParser(inst); + try { + parser.parseArgument(args); + } catch (Exception e) { + System.err.println(e.getMessage() + e); + parser.printUsage(System.err); + System.exit(1); + } + } + + private static long percentile(int p, long[] seq) { + Arrays.sort(seq); + int k = (int) Math.ceil((seq.length - 1) * (p / 100.0)); + return seq[k]; + } + + private static void printStatistics(long[] inferenceTimesRaw, String metricsPrefix) { + long[] inferenceTimes = inferenceTimesRaw; + // remove head and tail + if (inferenceTimes.length > 2) { + inferenceTimes = Arrays.copyOfRange(inferenceTimesRaw, + 1, inferenceTimesRaw.length - 1); + } + double p50 = percentile(50, inferenceTimes) / 1.0e6; + double p99 = percentile(99, inferenceTimes) / 1.0e6; + double p90 = percentile(90, inferenceTimes) / 1.0e6; + long sum = 0; + for (long time: inferenceTimes) sum += time; + double average = sum / (inferenceTimes.length * 1.0e6); + + System.out.println( + String.format("\n%s_p99 %fms\n%s_p90 %fms\n%s_p50 %fms\n%s_average %1.2fms", + metricsPrefix, p99, metricsPrefix, p90, + metricsPrefix, p50, metricsPrefix, average) + ); + + } + + private static List getContext() { + List context = new ArrayList(); + if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && + Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { + context.add(Context.gpu()); + } else { + context.add(Context.cpu()); + } + return context; + } + + public static void main(String[] args) { + if (args.length < 2) { + StringBuilder sb = new StringBuilder(); + sb.append("Please follow the format:"); + sb.append("\n --model-name "); + sb.append("\n --num-runs "); + sb.append("\n --batchsize "); + System.out.println(sb.toString()); + return; + } + String modelName = args[1]; + InferBase model = null; + switch(modelName) { + case "ObjectDetection": + runBatch = true; + ObjectDetectionBenchmark inst = new ObjectDetectionBenchmark(); + parse(inst, args); + model = inst; + default: + System.err.println("Model name not found! 
" + modelName); + System.exit(1); + } + List context = getContext(); + if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && + Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { + context.add(Context.gpu()); + } else { + context.add(Context.cpu()); + } + + long[] result = new long[model.numRun]; + model.preProcessModel(context); + if (runBatch) { + for (int i =0;i < model.numRun; i++) { + long currTime = System.nanoTime(); + model.runBatchInference(); + result[i] = System.nanoTime() - currTime; + } + System.out.println("Batchsize: " + model.batchSize); + System.out.println("Num of runs: " + model.numRun); + printStatistics(result, modelName +"batch_inference"); + } + + model.batchSize = 1; + model.preProcessModel(context); + result = new long[model.numRun]; + for (int i = 0; i < model.numRun; i++) { + long currTime = System.nanoTime(); + model.runSingleInference(); + result[i] = System.nanoTime() - currTime; + } + System.out.println("Num of runs: " + model.numRun); + printStatistics(result, modelName + "single_inference"); + } +} diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java new file mode 100644 index 000000000000..485e0afa3e46 --- /dev/null +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.mxnetexamples.javaapi.benchmark; + +import org.apache.mxnet.infer.javaapi.ObjectDetector; +import org.apache.mxnet.javaapi.*; +import org.kohsuke.args4j.Option; + +import java.util.ArrayList; +import java.util.List; + +class ObjectDetectionBenchmark extends InferBase { + @Option(name = "--model-path-prefix", usage = "input model directory and prefix of the model") + public String modelPathPrefix = "/model/ssd_resnet50_512"; + @Option(name = "--input-image", usage = "the input image") + public String inputImagePath = "/images/dog.jpg"; + + private ObjectDetector objDet; + private NDArray img; + private NDArray$ NDArray = NDArray$.MODULE$; + + public void preProcessModel(List context) { + Shape inputShape = new Shape(new int[] {this.batchSize, 3, 512, 512}); + List inputDescriptors = new ArrayList<>(); + inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); + objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); + img = ObjectDetector.bufferedImageToPixels( + ObjectDetector.reshapeImage( + ObjectDetector.loadImageFromFile(inputImagePath), 512, 512 + ), + new Shape(new int[] {1, 3, 512, 512}) + ); + } + + public void runSingleInference() { + List nd = new ArrayList<>(); + nd.add(img); + objDet.objectDetectWithNDArray(nd, 3); + } + + public void runBatchInference() { + List nd = new ArrayList<>(); + NDArray[] temp = new NDArray[batchSize]; + for (int i = 0; i < batchSize; i++) temp[i] = img.copy(); + NDArray batched = NDArray.concat(temp, batchSize).setdim(0).invoke().get(); + nd.add(batched); + objDet.objectDetectWithNDArray(nd, 3); + } +} diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/README.md b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/README.md similarity index 100% rename from scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/README.md rename to scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/README.md diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/SSDClassifierExample.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java similarity index 99% rename from scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/SSDClassifierExample.java rename to scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java index 13f9d2d9a3e5..4befc8edde6b 100644 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/javapi/objectdetector/SSDClassifierExample.java +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.mxnetexamples.infer.javapi.objectdetector; +package org.apache.mxnetexamples.javaapi.infer.objectdetector; import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput; import org.kohsuke.args4j.CmdLineParser; diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala index f48375ffe4a7..447518b5a89c 100644 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala @@ -21,7 +21,7 @@ package org.apache.mxnet.infer.javaapi import java.awt.image.BufferedImage // scalastyle:on -import org.apache.mxnet.javaapi.{Context, DataDesc, NDArray} +import org.apache.mxnet.javaapi.{Context, DataDesc, NDArray, Shape} import scala.collection.JavaConverters import scala.collection.JavaConverters._ @@ -113,6 +113,14 @@ object ObjectDetector { org.apache.mxnet.infer.ImageClassifier.loadImageFromFile(inputImagePath) } + def reshapeImage(img : BufferedImage, newWidth: Int, newHeight: Int): BufferedImage = { + org.apache.mxnet.infer.ImageClassifier.reshapeImage(img, newWidth, newHeight) + } + + def bufferedImageToPixels(resizedImage: BufferedImage, inputImageShape: Shape): NDArray = { + org.apache.mxnet.infer.ImageClassifier.bufferedImageToPixels(resizedImage, inputImageShape) + } + def loadInputBatch(inputImagePaths: java.util.List[String]): java.util.List[BufferedImage] = { org.apache.mxnet.infer.ImageClassifier .loadInputBatch(inputImagePaths.asScala.toList).toList.asJava From fb4cad9dadac1010a16435ccb88f82b784d6cd91 Mon Sep 17 00:00:00 2001 From: Lanking Date: Tue, 13 Nov 2018 13:57:18 -0800 Subject: [PATCH 14/38] [MXNET-918] [Introduce Random module / Refact code generation (#13038)][Cherry pick] (#13242) * [MXNET-918] Introduce Random module / Refact code generation (#13038) * refactor code gen * remove xxxAPIMacroBase (overkill) * CI errors / scala-style * PR review comments * clean up the duplicated code * add comments --- .../benchmark/ObjectDetectionBenchmark.java | 2 +- .../org/apache/mxnet/APIDocGenerator.scala | 315 ++++++++---------- .../org/apache/mxnet/GeneratorBase.scala | 163 +++++++++ .../scala/org/apache/mxnet/NDArrayMacro.scala | 263 +++++---------- .../scala/org/apache/mxnet/SymbolMacro.scala | 250 ++++---------- .../mxnet/javaapi/JavaNDArrayMacro.scala | 95 +----- 6 files changed, 454 insertions(+), 634 deletions(-) create mode 100644 scala-package/macros/src/main/scala/org/apache/mxnet/GeneratorBase.scala diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java index 485e0afa3e46..257ea3241626 100644 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java @@ -57,7 +57,7 @@ public void runBatchInference() { List nd = new ArrayList<>(); NDArray[] temp = new NDArray[batchSize]; for (int i = 0; i < batchSize; i++) temp[i] = img.copy(); - NDArray batched = NDArray.concat(temp, batchSize).setdim(0).invoke().get(); + NDArray batched = NDArray.concat(temp, batchSize, 0, null)[0]; nd.add(batched); objDet.objectDetectWithNDArray(nd, 3); } diff --git 
a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala index f2326868e8e7..0c12e1f1c674 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala @@ -17,196 +17,151 @@ package org.apache.mxnet -import org.apache.mxnet.init.Base._ -import org.apache.mxnet.utils.CToScalaUtils import java.io._ import java.security.MessageDigest -import scala.collection.mutable.{ArrayBuffer, ListBuffer} +import scala.collection.mutable.ListBuffer /** * This object will generate the Scala documentation of the new Scala API * Two file namely: SymbolAPIBase.scala and NDArrayAPIBase.scala * The code will be executed during Macros stage and file live in Core stage */ -private[mxnet] object APIDocGenerator{ - case class absClassArg(argName : String, argType : String, argDesc : String, isOptional : Boolean) - case class absClassFunction(name : String, desc : String, - listOfArgs: List[absClassArg], returnType : String) +private[mxnet] object APIDocGenerator extends GeneratorBase { - - def main(args: Array[String]) : Unit = { + def main(args: Array[String]): Unit = { val FILE_PATH = args(0) val hashCollector = ListBuffer[String]() - hashCollector += absClassGen(FILE_PATH, true) - hashCollector += absClassGen(FILE_PATH, false) + hashCollector += typeSafeClassGen(FILE_PATH, true) + hashCollector += typeSafeClassGen(FILE_PATH, false) hashCollector += nonTypeSafeClassGen(FILE_PATH, true) hashCollector += nonTypeSafeClassGen(FILE_PATH, false) - // Generate Java API documentation - hashCollector += javaClassGen(FILE_PATH + "javaapi/") + hashCollector += javaClassGen(FILE_PATH) val finalHash = hashCollector.mkString("\n") } - def MD5Generator(input : String) : String = { + def MD5Generator(input: String): String = { val md = MessageDigest.getInstance("MD5") md.update(input.getBytes("UTF-8")) val digest = md.digest() org.apache.commons.codec.binary.Base64.encodeBase64URLSafeString(digest) } - def fileGen(filePath : String, packageName : String, packageDef : String, - absFuncs : List[String]) : String = { - val apacheLicense = - """/* - |* Licensed to the Apache Software Foundation (ASF) under one or more - |* contributor license agreements. See the NOTICE file distributed with - |* this work for additional information regarding copyright ownership. - |* The ASF licenses this file to You under the Apache License, Version 2.0 - |* (the "License"); you may not use this file except in compliance with - |* the License. You may obtain a copy of the License at - |* - |* http://www.apache.org/licenses/LICENSE-2.0 - |* - |* Unless required by applicable law or agreed to in writing, software - |* distributed under the License is distributed on an "AS IS" BASIS, - |* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - |* See the License for the specific language governing permissions and - |* limitations under the License. 
- |*/ - |""".stripMargin - val scalaStyle = "// scalastyle:off" - val imports = "import org.apache.mxnet.annotation.Experimental" - val absClassDef = s"abstract class $packageName" + def typeSafeClassGen(FILE_PATH: String, isSymbol: Boolean): String = { + val generated = typeSafeFunctionsToGenerate(isSymbol, isContrib = false) + .map { func => + val scalaDoc = generateAPIDocFromBackend(func) + val decl = generateAPISignature(func, isSymbol) + s"$scalaDoc\n$decl" + } - val finalStr = - s"""$apacheLicense - |$scalaStyle - |$packageDef - |$imports - |$absClassDef { - |${absFuncs.mkString("\n")} - |}""".stripMargin - val pw = new PrintWriter(new File(filePath + s"$packageName.scala")) - pw.write(finalStr) - pw.close() - MD5Generator(finalStr) + writeFile( + FILE_PATH, + if (isSymbol) "SymbolAPIBase" else "NDArrayAPIBase", + "package org.apache.mxnet", + generated) } - def absClassGen(filePath : String, isSymbol : Boolean) : String = { - val absClassFunctions = getSymbolNDArrayMethods(isSymbol) - // Defines Operators that should not generated - val notGenerated = Set("Custom") - // TODO: Add Filter to the same location in case of refactor - val absFuncs = absClassFunctions.filterNot(_.name.startsWith("_")) - .filterNot(ele => notGenerated.contains(ele.name)) - .map(absClassFunction => { - val scalaDoc = generateAPIDocFromBackend(absClassFunction) - val defBody = generateAPISignature(absClassFunction, isSymbol) - s"$scalaDoc\n$defBody" - }) - val packageName = if (isSymbol) "SymbolAPIBase" else "NDArrayAPIBase" - val packageDef = "package org.apache.mxnet" - fileGen(filePath, packageName, packageDef, absFuncs) + def nonTypeSafeClassGen(FILE_PATH: String, isSymbol: Boolean): String = { + val absFuncs = functionsToGenerate(isSymbol, isContrib = false) + .map { func => + val scalaDoc = generateAPIDocFromBackend(func, false) + if (isSymbol) { + s"""$scalaDoc + |def ${func.name}(name : String = null, attr : Map[String, String] = null) + | (args : org.apache.mxnet.Symbol*)(kwargs : Map[String, Any] = null): + | org.apache.mxnet.Symbol + """.stripMargin + } else { + s"""$scalaDoc + |def ${func.name}(kwargs: Map[String, Any] = null) + | (args: Any*): org.apache.mxnet.NDArrayFuncReturn + | + |$scalaDoc + |def ${func.name}(args: Any*): org.apache.mxnet.NDArrayFuncReturn + """.stripMargin + } + } + + writeFile( + FILE_PATH, + if (isSymbol) "SymbolBase" else "NDArrayBase", + "package org.apache.mxnet", + absFuncs) } def javaClassGen(filePath : String) : String = { val notGenerated = Set("Custom") - val absClassFunctions = getSymbolNDArrayMethods(false, true) - // TODO: Add Filter to the same location in case of refactor - val absFuncs = absClassFunctions.filterNot(_.name.startsWith("_")) - .filterNot(ele => notGenerated.contains(ele.name)) - .map(absClassFunction => { + val absClassFunctions = functionsToGenerate(false, false, true) + val absFuncs = absClassFunctions.filterNot(ele => notGenerated.contains(ele.name)) + .groupBy(_.name.toLowerCase).map(ele => { + /* Pattern matching for not generating deprecated method + * Group all method name in lowercase + * Kill the capital lettered method such as Cast vs cast + * As it defined by default it deprecated + */ + if (ele._2.length == 1) ele._2.head + else { + if (ele._2.head.name.head.isLower) ele._2.head + else ele._2.last + } + }).map(absClassFunction => { generateJavaAPISignature(absClassFunction) - }) + }).toSeq val packageName = "NDArrayBase" val packageDef = "package org.apache.mxnet.javaapi" - fileGen(filePath, packageName, packageDef, absFuncs) + 
writeFile(filePath + "javaapi/", packageName, packageDef, absFuncs) } - def nonTypeSafeClassGen(filePath : String, isSymbol : Boolean) : String = { - // scalastyle:off - val absClassFunctions = getSymbolNDArrayMethods(isSymbol) - val absFuncs = absClassFunctions.map(absClassFunction => { - val scalaDoc = generateAPIDocFromBackend(absClassFunction, false) - if (isSymbol) { - val defBody = s"def ${absClassFunction.name}(name : String = null, attr : Map[String, String] = null)(args : org.apache.mxnet.Symbol*)(kwargs : Map[String, Any] = null): org.apache.mxnet.Symbol" - s"$scalaDoc\n$defBody" - } else { - val defBodyWithKwargs = s"def ${absClassFunction.name}(kwargs: Map[String, Any] = null)(args: Any*) : org.apache.mxnet.NDArrayFuncReturn" - val defBody = s"def ${absClassFunction.name}(args: Any*) : org.apache.mxnet.NDArrayFuncReturn" - s"$scalaDoc\n$defBodyWithKwargs\n$scalaDoc\n$defBody" - } - }) - val packageName = if (isSymbol) "SymbolBase" else "NDArrayBase" - val packageDef = "package org.apache.mxnet" - fileGen(filePath, packageName, packageDef, absFuncs) - } + def generateAPIDocFromBackend(func: Func, withParam: Boolean = true): String = { + val desc = func.desc.split("\n") + .mkString(" *
\n", "\n  * ", "  * 
\n") - /** - * Some of the C++ type name is not valid in Scala - * such as var and type. This method is to convert - * them into other names to get it passed - * @param in the input String - * @return converted name string - */ - def safetyNameCheck(in : String) : String = { - in match { - case "var" => "vari" - case "type" => "typeOf" - case _ => in + val params = func.listOfArgs.map { absClassArg => + s" * @param ${absClassArg.safeArgName}\t\t${absClassArg.argDesc}" } - } - // Generate ScalaDoc type - def generateAPIDocFromBackend(func : absClassFunction, withParam : Boolean = true) : String = { - val desc = ArrayBuffer[String]() - desc += " *
"
-      func.desc.split("\n").foreach({ currStr =>
-      desc += s"  * $currStr"
-    })
-    desc += "  * 
" - val params = func.listOfArgs.map({ absClassArg => - val currArgName = safetyNameCheck(absClassArg.argName) - s" * @param $currArgName\t\t${absClassArg.argDesc}" - }) val returnType = s" * @return ${func.returnType}" + if (withParam) { - s" /**\n${desc.mkString("\n")}\n${params.mkString("\n")}\n$returnType\n */" + s""" /** + |$desc + |${params.mkString("\n")} + |$returnType + | */""".stripMargin } else { - s" /**\n${desc.mkString("\n")}\n$returnType\n */" + s""" /** + |$desc + |$returnType + | */""".stripMargin } } - def generateAPISignature(func : absClassFunction, isSymbol : Boolean) : String = { - var argDef = ListBuffer[String]() - func.listOfArgs.foreach(absClassArg => { - val currArgName = safetyNameCheck(absClassArg.argName) - if (absClassArg.isOptional) { - argDef += s"$currArgName : Option[${absClassArg.argType}] = None" - } - else { - argDef += s"$currArgName : ${absClassArg.argType}" - } - }) - var returnType = func.returnType + def generateAPISignature(func: Func, isSymbol: Boolean): String = { + val argDef = ListBuffer[String]() + + argDef ++= typedFunctionCommonArgDef(func) + if (isSymbol) { argDef += "name : String = null" argDef += "attr : Map[String, String] = null" } else { argDef += "out : Option[NDArray] = None" - returnType = "org.apache.mxnet.NDArrayFuncReturn" } - val experimentalTag = "@Experimental" - s"$experimentalTag\ndef ${func.name} (${argDef.mkString(", ")}) : $returnType" + + val returnType = func.returnType + + s"""@Experimental + |def ${func.name} (${argDef.mkString(", ")}): $returnType""".stripMargin } - def generateJavaAPISignature(func : absClassFunction) : String = { + def generateJavaAPISignature(func : Func) : String = { val useParamObject = func.listOfArgs.count(arg => arg.isOptional) >= 2 var argDef = ListBuffer[String]() var classDef = ListBuffer[String]() var requiredParam = ListBuffer[String]() func.listOfArgs.foreach(absClassArg => { - val currArgName = safetyNameCheck(absClassArg.argName) + val currArgName = absClassArg.safeArgName // scalastyle:off if (absClassArg.isOptional && useParamObject) { classDef += @@ -240,15 +195,15 @@ private[mxnet] object APIDocGenerator{ | def getOut() = this.out | """.stripMargin s"""$scalaDocNoParam - | $experimentalTag - | def ${func.name}(po: ${func.name}Param) : $returnType - | /** - | * This Param Object is specifically used for ${func.name} - | ${requiredParam.mkString("\n")} - | */ - | class ${func.name}Param(${argDef.mkString(",")}) { - | ${classDef.mkString("\n ")} - | }""".stripMargin + | $experimentalTag + | def ${func.name}(po: ${func.name}Param) : $returnType + | /** + | * This Param Object is specifically used for ${func.name} + | ${requiredParam.mkString("\n")} + | */ + | class ${func.name}Param(${argDef.mkString(",")}) { + | ${classDef.mkString("\n ")} + | }""".stripMargin } else { argDef += "out : NDArray" s"""$scalaDoc @@ -258,48 +213,40 @@ private[mxnet] object APIDocGenerator{ } } + def writeFile(FILE_PATH: String, className: String, packageDef: String, + absFuncs: Seq[String]): String = { - // List and add all the atomic symbol functions to current module. - private def getSymbolNDArrayMethods(isSymbol : Boolean, - isJava : Boolean = false): List[absClassFunction] = { - val opNames = ListBuffer.empty[String] - val returnType = if (isSymbol) "Symbol" else "NDArray" - val returnHeader = if (isJava) "org.apache.mxnet.javaapi." else "org.apache.mxnet." 
- _LIB.mxListAllOpNames(opNames) - // TODO: Add '_linalg_', '_sparse_', '_image_' support - // TODO: Add Filter to the same location in case of refactor - opNames.map(opName => { - val opHandle = new RefLong - _LIB.nnGetOpHandle(opName, opHandle) - makeAtomicSymbolFunction(opHandle.value, opName, returnHeader + returnType) - }).filterNot(_.name.startsWith("_")).groupBy(_.name.toLowerCase).map(ele => { - // Pattern matching for not generating depreciated method - if (ele._2.length == 1) ele._2.head - else { - if (ele._2.head.name.head.isLower) ele._2.head - else ele._2.last - } - }).toList - } - - // Create an atomic symbol function by handle and function name. - private def makeAtomicSymbolFunction(handle: SymbolHandle, - aliasName: String, returnType : String) - : absClassFunction = { - val name = new RefString - val desc = new RefString - val keyVarNumArgs = new RefString - val numArgs = new RefInt - val argNames = ListBuffer.empty[String] - val argTypes = ListBuffer.empty[String] - val argDescs = ListBuffer.empty[String] + val finalStr = + s"""/* + |* Licensed to the Apache Software Foundation (ASF) under one or more + |* contributor license agreements. See the NOTICE file distributed with + |* this work for additional information regarding copyright ownership. + |* The ASF licenses this file to You under the Apache License, Version 2.0 + |* (the "License"); you may not use this file except in compliance with + |* the License. You may obtain a copy of the License at + |* + |* http://www.apache.org/licenses/LICENSE-2.0 + |* + |* Unless required by applicable law or agreed to in writing, software + |* distributed under the License is distributed on an "AS IS" BASIS, + |* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + |* See the License for the specific language governing permissions and + |* limitations under the License. + |*/ + | + |$packageDef + | + |import org.apache.mxnet.annotation.Experimental + | + |// scalastyle:off + |abstract class $className { + |${absFuncs.mkString("\n")} + |}""".stripMargin - _LIB.mxSymbolGetAtomicSymbolInfo( - handle, name, desc, numArgs, argNames, argTypes, argDescs, keyVarNumArgs) - val argList = argNames zip argTypes zip argDescs map { case ((argName, argType), argDesc) => - val typeAndOption = CToScalaUtils.argumentCleaner(argName, argType, returnType) - new absClassArg(argName, typeAndOption._1, argDesc, typeAndOption._2) - } - new absClassFunction(aliasName, desc.value, argList.toList, returnType) + val pw = new PrintWriter(new File(FILE_PATH + s"$className.scala")) + pw.write(finalStr) + pw.close() + MD5Generator(finalStr) } -} + +} \ No newline at end of file diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/GeneratorBase.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/GeneratorBase.scala new file mode 100644 index 000000000000..9245ef1b437f --- /dev/null +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/GeneratorBase.scala @@ -0,0 +1,163 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.mxnet + +import org.apache.mxnet.init.Base.{RefInt, RefLong, RefString, _LIB} +import org.apache.mxnet.utils.{CToScalaUtils, OperatorBuildUtils} + +import scala.collection.mutable.ListBuffer +import scala.reflect.macros.blackbox + +abstract class GeneratorBase { + type Handle = Long + + case class Arg(argName: String, argType: String, argDesc: String, isOptional: Boolean) { + def safeArgName: String = argName match { + case "var" => "vari" + case "type" => "typeOf" + case _ => argName + } + } + + case class Func(name: String, desc: String, listOfArgs: List[Arg], returnType: String) + + def functionsToGenerate(isSymbol: Boolean, isContrib: Boolean, + isJava: Boolean = false): List[Func] = { + val l = getBackEndFunctions(isSymbol, isJava) + if (isContrib) { + l.filter(func => func.name.startsWith("_contrib_") || !func.name.startsWith("_")) + } else { + l.filterNot(_.name.startsWith("_")) + } + } + + def typeSafeFunctionsToGenerate(isSymbol: Boolean, isContrib: Boolean): List[Func] = { + // Operators that should not be generated + val notGenerated = Set("Custom") + + val l = getBackEndFunctions(isSymbol) + val res = if (isContrib) { + l.filter(func => func.name.startsWith("_contrib_") || !func.name.startsWith("_")) + } else { + l.filterNot(_.name.startsWith("_")) + } + res.filterNot(ele => notGenerated.contains(ele.name)) + } + + protected def getBackEndFunctions(isSymbol: Boolean, isJava: Boolean = false): List[Func] = { + val opNames = ListBuffer.empty[String] + _LIB.mxListAllOpNames(opNames) + opNames.map(opName => { + val opHandle = new RefLong + _LIB.nnGetOpHandle(opName, opHandle) + makeAtomicFunction(opHandle.value, opName, isSymbol, isJava) + }).toList + } + + private def makeAtomicFunction(handle: Handle, aliasName: String, + isSymbol: Boolean, isJava: Boolean): Func = { + val name = new RefString + val desc = new RefString + val keyVarNumArgs = new RefString + val numArgs = new RefInt + val argNames = ListBuffer.empty[String] + val argTypes = ListBuffer.empty[String] + val argDescs = ListBuffer.empty[String] + + _LIB.mxSymbolGetAtomicSymbolInfo( + handle, name, desc, numArgs, argNames, argTypes, argDescs, keyVarNumArgs) + val paramStr = OperatorBuildUtils.ctypes2docstring(argNames, argTypes, argDescs) + val extraDoc: String = if (keyVarNumArgs.value != null && keyVarNumArgs.value.length > 0) { + s"This function support variable length of positional input (${keyVarNumArgs.value})." 
+ } else { + "" + } + val realName = if (aliasName == name.value) "" else s"(a.k.a., ${name.value})" + val docStr = s"$aliasName $realName\n${desc.value}\n\n$paramStr\n$extraDoc\n" + + val argList = argNames zip argTypes zip argDescs map { case ((argName, argType), argDesc) => + val family = if (isJava) "org.apache.mxnet.javaapi.NDArray" + else if (isSymbol) "org.apache.mxnet.Symbol" + else "org.apache.mxnet.NDArray" + val typeAndOption = + CToScalaUtils.argumentCleaner(argName, argType, family) + Arg(argName, typeAndOption._1, argDesc, typeAndOption._2) + } + val returnType = + if (isJava) "Array[org.apache.mxnet.javaapi.NDArray]" + else if (isSymbol) "org.apache.mxnet.Symbol" + else "org.apache.mxnet.NDArrayFuncReturn" + Func(aliasName, desc.value, argList.toList, returnType) + } + + /** + * Generate class structure for all function APIs + * + * @param c + * @param funcDef DefDef type of function definitions + * @param annottees + * @return + */ + protected def structGeneration(c: blackbox.Context) + (funcDef: List[c.universe.DefDef], annottees: c.Expr[Any]*) + : c.Expr[Any] = { + import c.universe._ + val inputs = annottees.map(_.tree).toList + // pattern match on the inputs + val modDefs = inputs map { + case ClassDef(mods, name, something, template) => + val q = template match { + case Template(superMaybe, emptyValDef, defs) => + Template(superMaybe, emptyValDef, defs ++ funcDef) + case ex => + throw new IllegalArgumentException(s"Invalid template: $ex") + } + ClassDef(mods, name, something, q) + case ModuleDef(mods, name, template) => + val q = template match { + case Template(superMaybe, emptyValDef, defs) => + Template(superMaybe, emptyValDef, defs ++ funcDef) + case ex => + throw new IllegalArgumentException(s"Invalid template: $ex") + } + ModuleDef(mods, name, q) + case ex => + throw new IllegalArgumentException(s"Invalid macro input: $ex") + } + // wrap the result up in an Expr, and return it + val result = c.Expr(Block(modDefs, Literal(Constant()))) + result + } + + protected def typedFunctionCommonArgDef(func: Func): List[String] = { + // build function argument definition, with optionality, and safe names + func.listOfArgs.map(arg => + if (arg.isOptional) { + // let's avoid a stupid Option[Array[...]] + if (arg.argType.startsWith("Array[")) { + s"${arg.safeArgName} : ${arg.argType} = Array.empty" + } else { + s"${arg.safeArgName} : Option[${arg.argType}] = None" + } + } + else { + s"${arg.safeArgName} : ${arg.argType}" + } + ) + } +} diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala index 2d3a1c7ec5af..d85abe1ecc4f 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/NDArrayMacro.scala @@ -17,11 +17,8 @@ package org.apache.mxnet -import org.apache.mxnet.init.Base._ -import org.apache.mxnet.utils.{CToScalaUtils, OperatorBuildUtils} - import scala.annotation.StaticAnnotation -import scala.collection.mutable.{ArrayBuffer, ListBuffer} +import scala.collection.mutable.ListBuffer import scala.language.experimental.macros import scala.reflect.macros.blackbox @@ -30,207 +27,111 @@ private[mxnet] class AddNDArrayFunctions(isContrib: Boolean) extends StaticAnnot } private[mxnet] class AddNDArrayAPIs(isContrib: Boolean) extends StaticAnnotation { - private[mxnet] def macroTransform(annottees: Any*) = macro NDArrayMacro.typeSafeAPIDefs + private[mxnet] def macroTransform(annottees: Any*) = 
macro TypedNDArrayAPIMacro.typeSafeAPIDefs } -private[mxnet] object NDArrayMacro { - case class NDArrayArg(argName: String, argType: String, isOptional : Boolean) - case class NDArrayFunction(name: String, listOfArgs: List[NDArrayArg]) - - // scalastyle:off havetype - def addDefs(c: blackbox.Context)(annottees: c.Expr[Any]*) = { - impl(c)(annottees: _*) - } - def typeSafeAPIDefs(c: blackbox.Context)(annottees: c.Expr[Any]*) = { - typeSafeAPIImpl(c)(annottees: _*) - } - // scalastyle:off havetype - - private val ndarrayFunctions: List[NDArrayFunction] = initNDArrayModule() +private[mxnet] object NDArrayMacro extends GeneratorBase { - private def impl(c: blackbox.Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { + def addDefs(c: blackbox.Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { import c.universe._ - val isContrib: Boolean = c.prefix.tree match { case q"new AddNDArrayFunctions($b)" => c.eval[Boolean](c.Expr(b)) } - val newNDArrayFunctions = { - if (isContrib) ndarrayFunctions.filter(_.name.startsWith("_contrib_")) - else ndarrayFunctions.filterNot(_.name.startsWith("_")) - } - - val functionDefs = newNDArrayFunctions flatMap { NDArrayfunction => - val funcName = NDArrayfunction.name - val termName = TermName(funcName) - Seq( - // scalastyle:off - // (yizhi) We are investigating a way to make these functions type-safe - // and waiting to see the new approach is stable enough. - // Thus these functions may be deprecated in the future. - // e.g def transpose(kwargs: Map[String, Any] = null)(args: Any*) - q"def $termName(kwargs: Map[String, Any] = null)(args: Any*) = {genericNDArrayFunctionInvoke($funcName, args, kwargs)}".asInstanceOf[DefDef], - // e.g def transpose(args: Any*) - q"def $termName(args: Any*) = {genericNDArrayFunctionInvoke($funcName, args, null)}".asInstanceOf[DefDef] - // scalastyle:on - ) - } - - structGeneration(c)(functionDefs, annottees : _*) + impl(c)(isContrib, annottees: _*) } - private def typeSafeAPIImpl(c: blackbox.Context)(annottees: c.Expr[Any]*) : c.Expr[Any] = { + private def impl(c: blackbox.Context) + (isContrib: Boolean, annottees: c.Expr[Any]*): c.Expr[Any] = { import c.universe._ - val isContrib: Boolean = c.prefix.tree match { - case q"new AddNDArrayAPIs($b)" => c.eval[Boolean](c.Expr(b)) - } - // Defines Operators that should not generated - val notGenerated = Set("Custom") - - val newNDArrayFunctions = { - if (isContrib) ndarrayFunctions.filter( - func => func.name.startsWith("_contrib_") || !func.name.startsWith("_")) - else ndarrayFunctions.filterNot(_.name.startsWith("_")) - }.filterNot(ele => notGenerated.contains(ele.name)) - - val functionDefs = newNDArrayFunctions.map { ndarrayfunction => - - // Construct argument field - var argDef = ListBuffer[String]() - // Construct Implementation field - var impl = ListBuffer[String]() - impl += "val map = scala.collection.mutable.Map[String, Any]()" - impl += "val args = scala.collection.mutable.ArrayBuffer.empty[NDArray]" - ndarrayfunction.listOfArgs.foreach({ ndarrayarg => - // var is a special word used to define variable in Scala, - // need to changed to something else in order to make it work - val currArgName = ndarrayarg.argName match { - case "var" => "vari" - case "type" => "typeOf" - case default => ndarrayarg.argName - } - if (ndarrayarg.isOptional) { - argDef += s"${currArgName} : Option[${ndarrayarg.argType}] = None" - } - else { - argDef += s"${currArgName} : ${ndarrayarg.argType}" - } - // NDArray arg implementation - val returnType = "org.apache.mxnet.NDArray" - - // TODO: Currently 
we do not add place holder for NDArray - // Example: an NDArray operator like the following format - // nd.foo(arg1: NDArray(required), arg2: NDArray(Optional), arg3: NDArray(Optional) - // If we place nd.foo(arg1, arg3 = arg3), do we need to add place holder for arg2? - // What it should be? - val base = - if (ndarrayarg.argType.equals(returnType)) { - s"args += $currArgName" - } else if (ndarrayarg.argType.equals(s"Array[$returnType]")){ - s"args ++= $currArgName" - } else { - "map(\"" + ndarrayarg.argName + "\") = " + currArgName - } - impl.append( - if (ndarrayarg.isOptional) s"if (!$currArgName.isEmpty) $base.get" - else base - ) - }) - // add default out parameter - argDef += "out : Option[NDArray] = None" - impl += "if (!out.isEmpty) map(\"out\") = out.get" - // scalastyle:off - impl += "org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke(\"" + ndarrayfunction.name + "\", args.toSeq, map.toMap)" - // scalastyle:on - // Combine and build the function string - val returnType = "org.apache.mxnet.NDArrayFuncReturn" - var finalStr = s"def ${ndarrayfunction.name}" - finalStr += s" (${argDef.mkString(",")}) : $returnType" - finalStr += s" = {${impl.mkString("\n")}}" - c.parse(finalStr).asInstanceOf[DefDef] + val functions = functionsToGenerate(isSymbol = false, isContrib) + + val functionDefs = functions.flatMap { NDArrayfunction => + val funcName = NDArrayfunction.name + val termName = TermName(funcName) + Seq( + // e.g def transpose(kwargs: Map[String, Any] = null)(args: Any*) + q""" + def $termName(kwargs: Map[String, Any] = null)(args: Any*) = { + genericNDArrayFunctionInvoke($funcName, args, kwargs) + } + """.asInstanceOf[DefDef], + // e.g def transpose(args: Any*) + q""" + def $termName(args: Any*) = { + genericNDArrayFunctionInvoke($funcName, args, null) + } + """.asInstanceOf[DefDef] + ) } - structGeneration(c)(functionDefs, annottees : _*) + structGeneration(c)(functionDefs, annottees: _*) } +} - private def structGeneration(c: blackbox.Context) - (funcDef : List[c.universe.DefDef], annottees: c.Expr[Any]*) - : c.Expr[Any] = { +private[mxnet] object TypedNDArrayAPIMacro extends GeneratorBase { + + def typeSafeAPIDefs(c: blackbox.Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { import c.universe._ - val inputs = annottees.map(_.tree).toList - // pattern match on the inputs - val modDefs = inputs map { - case ClassDef(mods, name, something, template) => - val q = template match { - case Template(superMaybe, emptyValDef, defs) => - Template(superMaybe, emptyValDef, defs ++ funcDef) - case ex => - throw new IllegalArgumentException(s"Invalid template: $ex") - } - ClassDef(mods, name, something, q) - case ModuleDef(mods, name, template) => - val q = template match { - case Template(superMaybe, emptyValDef, defs) => - Template(superMaybe, emptyValDef, defs ++ funcDef) - case ex => - throw new IllegalArgumentException(s"Invalid template: $ex") - } - ModuleDef(mods, name, q) - case ex => - throw new IllegalArgumentException(s"Invalid macro input: $ex") + val isContrib: Boolean = c.prefix.tree match { + case q"new AddNDArrayAPIs($b)" => c.eval[Boolean](c.Expr(b)) } - // wrap the result up in an Expr, and return it - val result = c.Expr(Block(modDefs, Literal(Constant()))) - result + + val functions = typeSafeFunctionsToGenerate(isSymbol = false, isContrib) + + val functionDefs = functions.map(f => buildTypedFunction(c)(f)) + structGeneration(c)(functionDefs, annottees: _*) } + protected def buildTypedFunction(c: blackbox.Context) + (function: Func): c.universe.DefDef = { + import 
c.universe._ + val returnType = "org.apache.mxnet.NDArrayFuncReturn" + val ndarrayType = "org.apache.mxnet.NDArray" + // Construct argument field + val argDef = ListBuffer[String]() + argDef ++= typedFunctionCommonArgDef(function) + argDef += "out : Option[NDArray] = None" - // List and add all the atomic symbol functions to current module. - private def initNDArrayModule(): List[NDArrayFunction] = { - val opNames = ListBuffer.empty[String] - _LIB.mxListAllOpNames(opNames) - opNames.map(opName => { - val opHandle = new RefLong - _LIB.nnGetOpHandle(opName, opHandle) - makeNDArrayFunction(opHandle.value, opName) - }).toList - } + // Construct Implementation field + var impl = ListBuffer[String]() + impl += "val map = scala.collection.mutable.Map[String, Any]()" + impl += s"val args = scala.collection.mutable.ArrayBuffer.empty[$ndarrayType]" - // Create an atomic symbol function by handle and function name. - private def makeNDArrayFunction(handle: NDArrayHandle, aliasName: String) - : NDArrayFunction = { - val name = new RefString - val desc = new RefString - val keyVarNumArgs = new RefString - val numArgs = new RefInt - val argNames = ListBuffer.empty[String] - val argTypes = ListBuffer.empty[String] - val argDescs = ListBuffer.empty[String] - - _LIB.mxSymbolGetAtomicSymbolInfo( - handle, name, desc, numArgs, argNames, argTypes, argDescs, keyVarNumArgs) - val paramStr = OperatorBuildUtils.ctypes2docstring(argNames, argTypes, argDescs) - val extraDoc: String = if (keyVarNumArgs.value != null && keyVarNumArgs.value.length > 0) { - s"This function support variable length of positional input (${keyVarNumArgs.value})." - } else { - "" - } - val realName = if (aliasName == name.value) "" else s"(a.k.a., ${name.value})" - val docStr = s"$aliasName $realName\n${desc.value}\n\n$paramStr\n$extraDoc\n" - // scalastyle:off println - if (System.getenv("MXNET4J_PRINT_OP_DEF") != null - && System.getenv("MXNET4J_PRINT_OP_DEF").toLowerCase == "true") { - println("NDArray function definition:\n" + docStr) - } - // scalastyle:on println - val argList = argNames zip argTypes map { case (argName, argType) => - val typeAndOption = - CToScalaUtils.argumentCleaner(argName, argType, "org.apache.mxnet.NDArray") - new NDArrayArg(argName, typeAndOption._1, typeAndOption._2) + // NDArray arg implementation + impl ++= function.listOfArgs.map { arg => + if (arg.argType.equals(s"Array[$ndarrayType]")) { + s"args ++= ${arg.safeArgName}" + } else { + val base = + if (arg.argType.equals(ndarrayType)) { + // ndarrays go to args + s"args += ${arg.safeArgName}" + } else { + // other types go to kwargs + s"""map("${arg.argName}") = ${arg.safeArgName}""" + } + if (arg.isOptional) s"if (!${arg.safeArgName}.isEmpty) $base.get" + else base + } } - new NDArrayFunction(aliasName, argList.toList) + + impl += + s"""if (!out.isEmpty) map("out") = out.get + |org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke( + | "${function.name}", args.toSeq, map.toMap) + """.stripMargin + + // Combine and build the function string + val finalStr = + s"""def ${function.name} + | (${argDef.mkString(",")}) : $returnType + | = {${impl.mkString("\n")}} + """.stripMargin + + c.parse(finalStr).asInstanceOf[DefDef] } } diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala index 42aa11781d8f..ab864e1ef195 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala +++ 
b/scala-package/macros/src/main/scala/org/apache/mxnet/SymbolMacro.scala @@ -21,222 +21,106 @@ import scala.annotation.StaticAnnotation import scala.collection.mutable.ListBuffer import scala.language.experimental.macros import scala.reflect.macros.blackbox -import org.apache.mxnet.init.Base._ -import org.apache.mxnet.utils.{CToScalaUtils, OperatorBuildUtils} private[mxnet] class AddSymbolFunctions(isContrib: Boolean) extends StaticAnnotation { - private[mxnet] def macroTransform(annottees: Any*) = macro SymbolImplMacros.addDefs + private[mxnet] def macroTransform(annottees: Any*) = macro SymbolMacro.addDefs } private[mxnet] class AddSymbolAPIs(isContrib: Boolean) extends StaticAnnotation { - private[mxnet] def macroTransform(annottees: Any*) = macro SymbolImplMacros.typeSafeAPIDefs + private[mxnet] def macroTransform(annottees: Any*) = macro TypedSymbolAPIMacro.typeSafeAPIDefs } -private[mxnet] object SymbolImplMacros { - case class SymbolArg(argName: String, argType: String, isOptional : Boolean) - case class SymbolFunction(name: String, listOfArgs: List[SymbolArg]) +private[mxnet] object SymbolMacro extends GeneratorBase { - // scalastyle:off havetype - def addDefs(c: blackbox.Context)(annottees: c.Expr[Any]*) = { - impl(c)(annottees: _*) - } - def typeSafeAPIDefs(c: blackbox.Context)(annottees: c.Expr[Any]*) = { - typedAPIImpl(c)(annottees: _*) - } - // scalastyle:on havetype - - private val symbolFunctions: List[SymbolFunction] = initSymbolModule() - - /** - * Implementation for fixed input API structure - */ - private def impl(c: blackbox.Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { + def addDefs(c: blackbox.Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { import c.universe._ - val isContrib: Boolean = c.prefix.tree match { case q"new AddSymbolFunctions($b)" => c.eval[Boolean](c.Expr(b)) } - val newSymbolFunctions = { - if (isContrib) symbolFunctions.filter( - func => func.name.startsWith("_contrib_") || !func.name.startsWith("_")) - else symbolFunctions.filter(!_.name.startsWith("_")) - } + impl(c)(isContrib, annottees: _*) + } + + private def impl(c: blackbox.Context) + (isContrib: Boolean, annottees: c.Expr[Any]*): c.Expr[Any] = { + import c.universe._ + val functions = functionsToGenerate(isSymbol = false, isContrib) - val functionDefs = newSymbolFunctions map { symbolfunction => - val funcName = symbolfunction.name - val tName = TermName(funcName) - q""" + val functionDefs = functions.map { symbolfunction => + val funcName = symbolfunction.name + val tName = TermName(funcName) + q""" def $tName(name : String = null, attr : Map[String, String] = null) - (args : org.apache.mxnet.Symbol*)(kwargs : Map[String, Any] = null) - : org.apache.mxnet.Symbol = { - createSymbolGeneral($funcName,name,attr,args,kwargs) - } + (args : org.apache.mxnet.Symbol*)(kwargs : Map[String, Any] = null) + : org.apache.mxnet.Symbol = { + createSymbolGeneral($funcName,name,attr,args,kwargs) + } """.asInstanceOf[DefDef] - } + } - structGeneration(c)(functionDefs, annottees : _*) + structGeneration(c)(functionDefs, annottees: _*) } +} - /** - * Implementation for Dynamic typed API Symbol.api. 
- */ - private def typedAPIImpl(c: blackbox.Context)(annottees: c.Expr[Any]*) : c.Expr[Any] = { - import c.universe._ +private[mxnet] object TypedSymbolAPIMacro extends GeneratorBase { + def typeSafeAPIDefs(c: blackbox.Context)(annottees: c.Expr[Any]*): c.Expr[Any] = { + import c.universe._ val isContrib: Boolean = c.prefix.tree match { case q"new AddSymbolAPIs($b)" => c.eval[Boolean](c.Expr(b)) } - // Defines Operators that should not generated - val notGenerated = Set("Custom") - - // TODO: Put Symbol.api.foo --> Stable APIs - // Symbol.contrib.bar--> Contrib APIs - val newSymbolFunctions = { - if (isContrib) symbolFunctions.filter( - func => func.name.startsWith("_contrib_") || !func.name.startsWith("_")) - else symbolFunctions.filter(!_.name.startsWith("_")) - }.filterNot(ele => notGenerated.contains(ele.name)) - - val functionDefs = newSymbolFunctions map { symbolfunction => - - // Construct argument field - var argDef = ListBuffer[String]() - // Construct Implementation field - var impl = ListBuffer[String]() - impl += "val map = scala.collection.mutable.Map[String, Any]()" - impl += "var args = Seq[org.apache.mxnet.Symbol]()" - symbolfunction.listOfArgs.foreach({ symbolarg => - // var is a special word used to define variable in Scala, - // need to changed to something else in order to make it work - val currArgName = symbolarg.argName match { - case "var" => "vari" - case "type" => "typeOf" - case default => symbolarg.argName - } - if (symbolarg.isOptional) { - argDef += s"${currArgName} : Option[${symbolarg.argType}] = None" - } - else { - argDef += s"${currArgName} : ${symbolarg.argType}" - } - // Symbol arg implementation - val returnType = "org.apache.mxnet.Symbol" - val base = - if (symbolarg.argType.equals(s"Array[$returnType]")) { - if (symbolarg.isOptional) s"if (!$currArgName.isEmpty) args = $currArgName.get.toSeq" - else s"args = $currArgName.toSeq" - } else { - if (symbolarg.isOptional) { - // scalastyle:off - s"if (!$currArgName.isEmpty) map(" + "\"" + symbolarg.argName + "\"" + s") = $currArgName.get" - // scalastyle:on - } - else "map(\"" + symbolarg.argName + "\"" + s") = $currArgName" - } + val functions = typeSafeFunctionsToGenerate(isSymbol = true, isContrib) - impl += base - }) - argDef += "name : String = null" - argDef += "attr : Map[String, String] = null" - // scalastyle:off - // TODO: Seq() here allows user to place Symbols rather than normal arguments to run, need to fix if old API deprecated - impl += "org.apache.mxnet.Symbol.createSymbolGeneral(\"" + symbolfunction.name + "\", name, attr, args, map.toMap)" - // scalastyle:on - // Combine and build the function string - val returnType = "org.apache.mxnet.Symbol" - var finalStr = s"def ${symbolfunction.name}" - finalStr += s" (${argDef.mkString(",")}) : $returnType" - finalStr += s" = {${impl.mkString("\n")}}" - c.parse(finalStr).asInstanceOf[DefDef] - } - structGeneration(c)(functionDefs, annottees : _*) + val functionDefs = functions.map(f => buildTypedFunction(c)(f)) + structGeneration(c)(functionDefs, annottees: _*) } - /** - * Generate class structure for all function APIs - * @param c - * @param funcDef DefDef type of function definitions - * @param annottees - * @return - */ - private def structGeneration(c: blackbox.Context) - (funcDef : List[c.universe.DefDef], annottees: c.Expr[Any]*) - : c.Expr[Any] = { + protected def buildTypedFunction(c: blackbox.Context) + (function: Func): c.universe.DefDef = { import c.universe._ - val inputs = annottees.map(_.tree).toList - // pattern match on the inputs - 
val modDefs = inputs map { - case ClassDef(mods, name, something, template) => - val q = template match { - case Template(superMaybe, emptyValDef, defs) => - Template(superMaybe, emptyValDef, defs ++ funcDef) - case ex => - throw new IllegalArgumentException(s"Invalid template: $ex") - } - ClassDef(mods, name, something, q) - case ModuleDef(mods, name, template) => - val q = template match { - case Template(superMaybe, emptyValDef, defs) => - Template(superMaybe, emptyValDef, defs ++ funcDef) - case ex => - throw new IllegalArgumentException(s"Invalid template: $ex") - } - ModuleDef(mods, name, q) - case ex => - throw new IllegalArgumentException(s"Invalid macro input: $ex") - } - // wrap the result up in an Expr, and return it - val result = c.Expr(Block(modDefs, Literal(Constant()))) - result - } - // List and add all the atomic symbol functions to current module. - private def initSymbolModule(): List[SymbolFunction] = { - val opNames = ListBuffer.empty[String] - _LIB.mxListAllOpNames(opNames) - // TODO: Add '_linalg_', '_sparse_', '_image_' support - opNames.map(opName => { - val opHandle = new RefLong - _LIB.nnGetOpHandle(opName, opHandle) - makeAtomicSymbolFunction(opHandle.value, opName) - }).toList - } + val returnType = "org.apache.mxnet.Symbol" + val symbolType = "org.apache.mxnet.Symbol" - // Create an atomic symbol function by handle and function name. - private def makeAtomicSymbolFunction(handle: SymbolHandle, aliasName: String) - : SymbolFunction = { - val name = new RefString - val desc = new RefString - val keyVarNumArgs = new RefString - val numArgs = new RefInt - val argNames = ListBuffer.empty[String] - val argTypes = ListBuffer.empty[String] - val argDescs = ListBuffer.empty[String] - - _LIB.mxSymbolGetAtomicSymbolInfo( - handle, name, desc, numArgs, argNames, argTypes, argDescs, keyVarNumArgs) - val paramStr = OperatorBuildUtils.ctypes2docstring(argNames, argTypes, argDescs) - val extraDoc: String = if (keyVarNumArgs.value != null && keyVarNumArgs.value.length > 0) { - s"This function support variable length of positional input (${keyVarNumArgs.value})." 
+ // Construct argument field + val argDef = ListBuffer[String]() + argDef ++= typedFunctionCommonArgDef(function) + argDef += "name : String = null" + argDef += "attr : Map[String, String] = null" + + // Construct Implementation field + val impl = ListBuffer[String]() + impl += "val map = scala.collection.mutable.Map[String, Any]()" + impl += s"var args = scala.collection.Seq[$symbolType]()" + + // Symbol arg implementation + impl ++= function.listOfArgs.map { arg => + if (arg.argType.equals(s"Array[$symbolType]")) { + s"if (!${arg.safeArgName}.isEmpty) args = ${arg.safeArgName}.toSeq" } else { - "" + // all go in kwargs + if (arg.isOptional) { + s"""if (!${arg.safeArgName}.isEmpty) map("${arg.argName}") = ${arg.safeArgName}.get""" + } else { + s"""map("${arg.argName}") = ${arg.safeArgName}""" + } } - val realName = if (aliasName == name.value) "" else s"(a.k.a., ${name.value})" - val docStr = s"$aliasName $realName\n${desc.value}\n\n$paramStr\n$extraDoc\n" - // scalastyle:off println - if (System.getenv("MXNET4J_PRINT_OP_DEF") != null - && System.getenv("MXNET4J_PRINT_OP_DEF").toLowerCase == "true") { - println("Symbol function definition:\n" + docStr) } - // scalastyle:on println - val argList = argNames zip argTypes map { case (argName, argType) => - val typeAndOption = - CToScalaUtils.argumentCleaner(argName, argType, "org.apache.mxnet.Symbol") - new SymbolArg(argName, typeAndOption._1, typeAndOption._2) - } - new SymbolFunction(aliasName, argList.toList) + + impl += + s"""org.apache.mxnet.Symbol.createSymbolGeneral( + | "${function.name}", name, attr, args, map.toMap) + """.stripMargin + + // Combine and build the function string + val finalStr = + s"""def ${function.name} + | (${argDef.mkString(",")}) : $returnType + | = {${impl.mkString("\n")}} + """.stripMargin + + c.parse(finalStr).asInstanceOf[DefDef] } } diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala index 2d1827038afc..4dfd6eb044a1 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala @@ -17,8 +17,7 @@ package org.apache.mxnet.javaapi -import org.apache.mxnet.init.Base._ -import org.apache.mxnet.utils.CToScalaUtils +import org.apache.mxnet.GeneratorBase import scala.annotation.StaticAnnotation import scala.collection.mutable.ListBuffer @@ -29,9 +28,7 @@ private[mxnet] class AddJNDArrayAPIs(isContrib: Boolean) extends StaticAnnotatio private[mxnet] def macroTransform(annottees: Any*) = macro JavaNDArrayMacro.typeSafeAPIDefs } -private[mxnet] object JavaNDArrayMacro { - case class NDArrayArg(argName: String, argType: String, isOptional : Boolean) - case class NDArrayFunction(name: String, listOfArgs: List[NDArrayArg]) +private[mxnet] object JavaNDArrayMacro extends GeneratorBase { // scalastyle:off havetype def typeSafeAPIDefs(c: blackbox.Context)(annottees: c.Expr[Any]*) = { @@ -39,8 +36,6 @@ private[mxnet] object JavaNDArrayMacro { } // scalastyle:off havetype - private val ndarrayFunctions: List[NDArrayFunction] = initNDArrayModule() - private def typeSafeAPIImpl(c: blackbox.Context)(annottees: c.Expr[Any]*) : c.Expr[Any] = { import c.universe._ @@ -50,12 +45,13 @@ private[mxnet] object JavaNDArrayMacro { // Defines Operators that should not generated val notGenerated = Set("Custom") - val newNDArrayFunctions = { - if (isContrib) ndarrayFunctions.filter( - func 
=> func.name.startsWith("_contrib_") || !func.name.startsWith("_")) - else ndarrayFunctions.filterNot(_.name.startsWith("_")) - }.filterNot(ele => notGenerated.contains(ele.name)).groupBy(_.name.toLowerCase).map(ele => { - // Pattern matching for not generating depreciated method + val newNDArrayFunctions = functionsToGenerate(false, false, true) + .filterNot(ele => notGenerated.contains(ele.name)).groupBy(_.name.toLowerCase).map(ele => { + /* Pattern matching for not generating deprecated method + * Group all method name in lowercase + * Kill the capital lettered method such as Cast vs cast + * As it defined by default it deprecated + */ if (ele._2.length == 1) ele._2.head else { if (ele._2.head.name.head.isLower) ele._2.head @@ -79,11 +75,7 @@ private[mxnet] object JavaNDArrayMacro { ndarrayfunction.listOfArgs.foreach({ ndarrayArg => // var is a special word used to define variable in Scala, // need to changed to something else in order to make it work - var currArgName = ndarrayArg.argName match { - case "var" => "vari" - case "type" => "typeOf" - case _ => ndarrayArg.argName - } + var currArgName = ndarrayArg.safeArgName if (useParamObject) currArgName = s"po.get${currArgName.capitalize}()" argDef += s"$currArgName : ${ndarrayArg.argType}" // NDArray arg implementation @@ -128,73 +120,6 @@ private[mxnet] object JavaNDArrayMacro { functionDefs += c.parse(funcDef).asInstanceOf[DefDef] } } - structGeneration(c)(functionDefs.toList, annottees : _*) } - - private def structGeneration(c: blackbox.Context) - (funcDef : List[c.universe.DefDef], - annottees: c.Expr[Any]*) - : c.Expr[Any] = { - import c.universe._ - val inputs = annottees.map(_.tree).toList - // pattern match on the inputs - var modDefs = inputs map { - case ClassDef(mods, name, something, template) => - val q = template match { - case Template(superMaybe, emptyValDef, defs) => - Template(superMaybe, emptyValDef, defs ++ funcDef) - case ex => - throw new IllegalArgumentException(s"Invalid template: $ex") - } - ClassDef(mods, name, something, q) - case ModuleDef(mods, name, template) => - val q = template match { - case Template(superMaybe, emptyValDef, defs) => - Template(superMaybe, emptyValDef, defs ++ funcDef) - case ex => - throw new IllegalArgumentException(s"Invalid template: $ex") - } - ModuleDef(mods, name, q) - case ex => - throw new IllegalArgumentException(s"Invalid macro input: $ex") - } - // modDefs ++= classDef - // wrap the result up in an Expr, and return it - val result = c.Expr(Block(modDefs, Literal(Constant()))) - result - } - - // List and add all the atomic symbol functions to current module. - private def initNDArrayModule(): List[NDArrayFunction] = { - val opNames = ListBuffer.empty[String] - _LIB.mxListAllOpNames(opNames) - opNames.map(opName => { - val opHandle = new RefLong - _LIB.nnGetOpHandle(opName, opHandle) - makeNDArrayFunction(opHandle.value, opName) - }).toList - } - - // Create an atomic symbol function by handle and function name. 
- private def makeNDArrayFunction(handle: NDArrayHandle, aliasName: String) - : NDArrayFunction = { - val name = new RefString - val desc = new RefString - val keyVarNumArgs = new RefString - val numArgs = new RefInt - val argNames = ListBuffer.empty[String] - val argTypes = ListBuffer.empty[String] - val argDescs = ListBuffer.empty[String] - - _LIB.mxSymbolGetAtomicSymbolInfo( - handle, name, desc, numArgs, argNames, argTypes, argDescs, keyVarNumArgs) - val argList = argNames zip argTypes map { case (argName, argType) => - val typeAndOption = - CToScalaUtils.argumentCleaner(argName, argType, - "org.apache.mxnet.javaapi.NDArray") - new NDArrayArg(argName, typeAndOption._1, typeAndOption._2) - } - new NDArrayFunction(aliasName, argList.toList) - } } From 6b39c6b332c7ef0d4ed9639747deb06a48d7cfe8 Mon Sep 17 00:00:00 2001 From: Piyush Ghai Date: Tue, 13 Nov 2018 18:25:35 -0800 Subject: [PATCH 15/38] Fixed missing break statement (#13257) --- .../apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java | 1 + 1 file changed, 1 insertion(+) diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java index 1baca20fbe6d..2fd9fd5f7cbf 100644 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java @@ -96,6 +96,7 @@ public static void main(String[] args) { ObjectDetectionBenchmark inst = new ObjectDetectionBenchmark(); parse(inst, args); model = inst; + break; default: System.err.println("Model name not found! " + modelName); System.exit(1); From 6f940cf070f6db2eda6844b3a5c3562ebed9844d Mon Sep 17 00:00:00 2001 From: Lanking Date: Wed, 14 Nov 2018 17:09:37 -0800 Subject: [PATCH 16/38] Java Benchmark failure (#13258) * patch fix * update ignore * rename getContext to bindToDevice * Update JavaBenchmark.java --- scala-package/.gitignore | 3 +++ .../javaapi/benchmark/JavaBenchmark.java | 11 ++--------- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/scala-package/.gitignore b/scala-package/.gitignore index 0f860e62836a..6aa4da6b1cfc 100644 --- a/scala-package/.gitignore +++ b/scala-package/.gitignore @@ -1,5 +1,8 @@ .flattened-pom.xml core/src/main/scala/org/apache/mxnet/NDArrayAPIBase.scala core/src/main/scala/org/apache/mxnet/NDArrayBase.scala +core/src/main/scala/org/apache/mxnet/javaapi/NDArrayBase.scala core/src/main/scala/org/apache/mxnet/SymbolAPIBase.scala core/src/main/scala/org/apache/mxnet/SymbolBase.scala +examples/scripts/infer/images/ +examples/scripts/infer/models/ diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java index 2fd9fd5f7cbf..4a6bb2dd38bf 100644 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java @@ -67,7 +67,7 @@ private static void printStatistics(long[] inferenceTimesRaw, String metricsPref } - private static List getContext() { + private static List bindToDevice() { List context = new ArrayList(); if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { @@ -101,14 +101,7 @@ public static 
void main(String[] args) { System.err.println("Model name not found! " + modelName); System.exit(1); } - List context = getContext(); - if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && - Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { - context.add(Context.gpu()); - } else { - context.add(Context.cpu()); - } - + List context = bindToDevice(); long[] result = new long[model.numRun]; model.preProcessModel(context); if (runBatch) { From 218a7a93c239dce7a9ce33fc2cc4f58e473e3da6 Mon Sep 17 00:00:00 2001 From: Andrew Ayres Date: Wed, 14 Nov 2018 17:51:25 -0800 Subject: [PATCH 17/38] Addressing PR feedback for merging Java API into master (#13277) * Addressing PR feedback for merging Java API into master * Changed constructors to package private instead of private --- .../org/apache/mxnet/javaapi/Context.scala | 2 +- .../scala/org/apache/mxnet/javaapi/IO.scala | 2 +- .../org/apache/mxnet/javaapi/NDArray.scala | 2 +- .../org/apache/mxnet/javaapi/Shape.scala | 2 +- .../infer/objectdetector/run_ssd_example.sh | 2 +- .../javaapi/infer/objectdetector/README.md | 31 +- .../objectdetector/SSDClassifierExample.java | 316 +++++++++--------- .../infer/objectdetector/README.md | 16 +- .../objectdetector/SSDClassifierExample.scala | 4 +- .../mxnet/infer/javaapi/ObjectDetector.scala | 2 +- .../mxnet/infer/javaapi/Predictor.scala | 2 +- 11 files changed, 175 insertions(+), 206 deletions(-) diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala index ac3517b151f1..d2d9b27ada91 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala @@ -25,7 +25,7 @@ import collection.JavaConverters._ * @param deviceTypeName {'cpu', 'gpu'} String representing the device type * @param deviceId The device id of the device, needed for GPU */ -class Context(val context: org.apache.mxnet.Context) { +class Context private[mxnet] (val context: org.apache.mxnet.Context) { val deviceTypeid: Int = context.deviceTypeid diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala index bf961b2bd529..6ba355d6510a 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala @@ -17,7 +17,7 @@ package org.apache.mxnet.javaapi -class DataDesc(val dataDesc: org.apache.mxnet.DataDesc) { +class DataDesc private[mxnet] (val dataDesc: org.apache.mxnet.DataDesc) { def this(name: String, shape: Shape, dType: DType.DType, layout: String) = this(new org.apache.mxnet.DataDesc(name, shape, dType, layout)) diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala index cdcc292ada63..446df257e20b 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala @@ -198,7 +198,7 @@ object NDArray extends NDArrayBase { * will result in leaking native memory. 
* */ -class NDArray(val nd : org.apache.mxnet.NDArray ) { +class NDArray private[mxnet] (val nd : org.apache.mxnet.NDArray ) { def this(arr : Array[Float], shape : Shape, ctx : Context) = { this(org.apache.mxnet.NDArray.array(arr, shape, ctx)) diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala index 594e3a60578f..9ed45e8d6778 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala @@ -23,7 +23,7 @@ import collection.JavaConverters._ * Shape of [[NDArray]] or other data */ -class Shape(val shape: org.apache.mxnet.Shape) { +class Shape private[mxnet] (val shape: org.apache.mxnet.Shape) { def this(dims: java.util.List[java.lang.Integer]) = this(new org.apache.mxnet.Shape(dims.asScala.map(Int.unbox))) def this(dims: Array[Int]) = this(new org.apache.mxnet.Shape(dims)) diff --git a/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh b/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh index adb8830de06e..6b4edb7c4c94 100755 --- a/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh +++ b/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh @@ -18,7 +18,7 @@ # under the License. hw_type=cpu -if [[ $1 = gpu ]] +if [[ $4 = gpu ]] then hw_type=gpu fi diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/README.md b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/README.md index 63b9f929a82e..681253f39a88 100644 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/README.md +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/README.md @@ -1,6 +1,6 @@ -# Single Shot Multi Object Detection using Scala Inference API +# Single Shot Multi Object Detection using Java Inference API -In this example, you will learn how to use Scala Inference API to run Inference on pre-trained Single Shot Multi Object Detection (SSD) MXNet model. +In this example, you will learn how to use Java Inference API to run Inference on pre-trained Single Shot Multi Object Detection (SSD) MXNet model. The model is trained on the [Pascal VOC 2012 dataset](http://host.robots.ox.ac.uk/pascal/VOC/voc2012/index.html). The network is a SSD model built on Resnet50 as base network to extract image features. The model is trained to detect the following entities (classes): ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor']. For more details about the model, you can refer to the [MXNet SSD example](https://github.com/apache/incubator-mxnet/tree/master/example/ssd). @@ -19,7 +19,7 @@ The model is trained on the [Pascal VOC 2012 dataset](http://host.robots.ox.ac.u 1. MXNet 2. MXNet Scala Package -3. [IntelliJ IDE (or alternative IDE) project setup](http://mxnet.incubator.apache.org/tutorials/scala/mxnet_scala_on_intellij.html) with the MXNet Scala Package +3. [IntelliJ IDE (or alternative IDE) project setup](http://mxnet.incubator.apache.org/tutorials/java/mxnet_java_on_intellij.html) with the MXNet Scala/Java Package 4. 
wget @@ -28,18 +28,13 @@ The model is trained on the [Pascal VOC 2012 dataset](http://host.robots.ox.ac.u ### Download Artifacts #### Step 1 You can download the files using the script `get_ssd_data.sh`. It will download and place the model files in a `model` folder and the test image files in a `image` folder in the current directory. -From the `scala-package/examples/scripts/infer/imageclassifier/` folder run: +From the `scala-package/examples/scripts/infer/objectdetector/` folder run: ```bash ./get_ssd_data.sh ``` -**Note**: You may need to run `chmod +x get_resnet_data.sh` before running this script. - -Alternatively use the following links to download the Symbol and Params files via your browser: -- [resnet50_ssd_model-symbol.json](https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-symbol.json) -- [resnet50_ssd_model-0000.params](https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-0000.params) -- [synset.txt](https://github.com/awslabs/mxnet-model-server/blob/master/examples/ssd/synset.txt) +**Note**: You may need to run `chmod +x get_ssd_data.sh` before running this script. In the pre-trained model, the `input_name` is `data` and shape is `(1, 3, 512, 512)`. This shape translates to: a batch of `1` image, the image has color and uses `3` channels (RGB), and the image has the dimensions of `512` pixels in height by `512` pixels in width. @@ -57,13 +52,6 @@ The output shape is `(1, 6132, 6)`. As with the input, the `1` is the number of ### Setup Datapath and Parameters #### Step 2 -The code `Line 31: val baseDir = System.getProperty("user.dir")` in the example will automatically searches the work directory you have defined. Please put the files in your [work directory](https://stackoverflow.com/questions/16239130/java-user-dir-property-what-exactly-does-it-mean). - -Alternatively, if you would like to use your own path, please change line 31 into your own path -```scala -val baseDir = -``` - The followings is the parameters defined for this example, you can find more information in the `class SSDClassifierExample`. | Argument | Comments | @@ -102,15 +90,8 @@ the outputs come from the the input image, with top3 predictions picked. ## Infer API Details -This example uses ObjectDetector class provided by MXNet's scala package Infer APIs. It provides methods to load the images, create NDArray out of Java BufferedImage and run prediction using Classifier and Predictor APIs. +This example uses ObjectDetector class provided by MXNet's Java Infer APIs. It provides methods to load the images, create NDArray out of Java BufferedImage and run prediction using Classifier and Predictor APIs. ## References This documentation used the model and inference setup guide from the [MXNet Model Server SSD example](https://github.com/awslabs/mxnet-model-server/blob/master/examples/ssd/README.md). 
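
For reference, the core flow of the Java example added in this patch can be condensed into the short sketch below. It is only an illustrative rearrangement of calls that already appear in `SSDClassifierExample.java` (`ObjectDetector`, `DataDesc`, `Shape`, `Context`); the model prefix and image path are placeholders for the files fetched by `get_ssd_data.sh`, and the imports assume the `org.apache.mxnet.javaapi` / `org.apache.mxnet.infer.javaapi` package layout used in this patch.

```java
import org.apache.mxnet.infer.javaapi.ObjectDetector;
import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput;
import org.apache.mxnet.javaapi.Context;
import org.apache.mxnet.javaapi.DType;
import org.apache.mxnet.javaapi.DataDesc;
import org.apache.mxnet.javaapi.Shape;

import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;

public class MinimalSSDSketch {
    public static void main(String[] args) {
        // Placeholder paths: point these at the files downloaded by get_ssd_data.sh
        String modelPathPrefix = "model/ssd_resnet50_512";
        String inputImagePath = "images/dog.jpg";

        // The pre-trained SSD model expects one 3-channel 512x512 image in NCHW layout
        Shape inputShape = new Shape(new int[]{1, 3, 512, 512});
        List<DataDesc> inputDescriptors = new ArrayList<>();
        inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW"));

        // Run on CPU here; the full example switches to Context.gpu() when SCALA_TEST_ON_GPU is set
        List<Context> context = new ArrayList<>();
        context.add(Context.cpu());

        // Load the image, bind the model, and ask for the top 3 detections
        BufferedImage img = ObjectDetector.loadImageFromFile(inputImagePath);
        ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0);
        List<List<ObjectDetectorOutput>> output = objDet.imageObjectDetect(img, 3);

        for (List<ObjectDetectorOutput> detections : output) {
            for (ObjectDetectorOutput d : detections) {
                System.out.println(d.getClassName() + " : " + d.getProbability());
            }
        }
    }
}
```

The full `SSDClassifierExample` additionally batches images from a directory via `imageBatchObjectDetect` and prints bounding-box coordinates scaled by the input width and height.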
- - -## Next Steps - -Check out the following related tutorials and examples for the Infer API: - -* [Image Classification with the MXNet Scala Infer API](../imageclassifier/README.md) diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java index 4befc8edde6b..a9c00f7f1d81 100644 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java @@ -38,162 +38,162 @@ import java.io.File; public class SSDClassifierExample { - @Option(name = "--model-path-prefix", usage = "input model directory and prefix of the model") - private String modelPathPrefix = "/model/ssd_resnet50_512"; - @Option(name = "--input-image", usage = "the input image") - private String inputImagePath = "/images/dog.jpg"; - @Option(name = "--input-dir", usage = "the input batch of images directory") - private String inputImageDir = "/images/"; - - final static Logger logger = LoggerFactory.getLogger(SSDClassifierExample.class); - - static List> - runObjectDetectionSingle(String modelPathPrefix, String inputImagePath, List context) { - Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); - List inputDescriptors = new ArrayList(); - inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); - BufferedImage img = ObjectDetector.loadImageFromFile(inputImagePath); - ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); - return objDet.imageObjectDetect(img, 3); - } - - static List>> - runObjectDetectionBatch(String modelPathPrefix, String inputImageDir, List context) { - Shape inputShape = new Shape(new int[]{1, 3, 512, 512}); - List inputDescriptors = new ArrayList(); - inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); - ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); - - // Loading batch of images from the directory path - List> batchFiles = generateBatches(inputImageDir, 20); - List>> outputList - = new ArrayList>>(); - - for (List batchFile : batchFiles) { - List imgList = ObjectDetector.loadInputBatch(batchFile); - // Running inference on batch of images loaded in previous step - List> tmp - = objDet.imageBatchObjectDetect(imgList, 5); - outputList.add(tmp); - } - return outputList; - } - - static List> generateBatches(String inputImageDirPath, int batchSize) { - File dir = new File(inputImageDirPath); - - List> output = new ArrayList>(); - List batch = new ArrayList(); - for (File imgFile : dir.listFiles()) { - batch.add(imgFile.getPath()); - if (batch.size() == batchSize) { - output.add(batch); - batch = new ArrayList(); - } - } - if (batch.size() > 0) { - output.add(batch); - } - return output; - } - - public static void main(String[] args) { - SSDClassifierExample inst = new SSDClassifierExample(); - CmdLineParser parser = new CmdLineParser(inst); - try { - parser.parseArgument(args); - } catch (Exception e) { - logger.error(e.getMessage(), e); - parser.printUsage(System.err); - System.exit(1); - } - - String mdprefixDir = inst.modelPathPrefix; - String imgPath = inst.inputImagePath; - String imgDir = inst.inputImageDir; - - if (!checkExist(Arrays.asList(mdprefixDir + "-symbol.json", imgDir, imgPath))) { - 
logger.error("Model or input image path does not exist"); - System.exit(1); - } - - List context = new ArrayList(); - if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && - Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { - context.add(Context.gpu()); - } else { - context.add(Context.cpu()); - } - - try { - Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); - Shape outputShape = new Shape(new int[] {1, 6132, 6}); - - - int width = inputShape.get(2); - int height = inputShape.get(3); - String outputStr = "\n"; - - List> output - = runObjectDetectionSingle(mdprefixDir, imgPath, context); - - for (List ele : output) { - for (ObjectDetectorOutput i : ele) { - outputStr += "Class: " + i.getClassName() + "\n"; - outputStr += "Probabilties: " + i.getProbability() + "\n"; - - List coord = Arrays.asList(i.getXMin() * width, - i.getXMax() * height, i.getYMin() * width, i.getYMax() * height); - StringBuilder sb = new StringBuilder(); - for (float c: coord) { - sb.append(", ").append(c); - } - outputStr += "Coord:" + sb.substring(2)+ "\n"; - } - } - logger.info(outputStr); - - List>> outputList = - runObjectDetectionBatch(mdprefixDir, imgDir, context); - - outputStr = "\n"; - int index = 0; - for (List> i: outputList) { - for (List j : i) { - outputStr += "*** Image " + (index + 1) + "***" + "\n"; - for (ObjectDetectorOutput k : j) { - outputStr += "Class: " + k.getClassName() + "\n"; - outputStr += "Probabilties: " + k.getProbability() + "\n"; - List coord = Arrays.asList(k.getXMin() * width, - k.getXMax() * height, k.getYMin() * width, k.getYMax() * height); - - StringBuilder sb = new StringBuilder(); - for (float c : coord) { - sb.append(", ").append(c); - } - outputStr += "Coord:" + sb.substring(2) + "\n"; - } - index++; - } - } - logger.info(outputStr); - - } catch (Exception e) { - logger.error(e.getMessage(), e); - parser.printUsage(System.err); - System.exit(1); - } - System.exit(0); - } - - static Boolean checkExist(List arr) { - Boolean exist = true; - for (String item : arr) { - exist = new File(item).exists() && exist; - if (!exist) { - logger.error("Cannot find: " + item); - } - } - return exist; - } + @Option(name = "--model-path-prefix", usage = "input model directory and prefix of the model") + private String modelPathPrefix = "/model/ssd_resnet50_512"; + @Option(name = "--input-image", usage = "the input image") + private String inputImagePath = "/images/dog.jpg"; + @Option(name = "--input-dir", usage = "the input batch of images directory") + private String inputImageDir = "/images/"; + + final static Logger logger = LoggerFactory.getLogger(SSDClassifierExample.class); + + static List> + runObjectDetectionSingle(String modelPathPrefix, String inputImagePath, List context) { + Shape inputShape = new Shape(new int[]{1, 3, 512, 512}); + List inputDescriptors = new ArrayList(); + inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); + BufferedImage img = ObjectDetector.loadImageFromFile(inputImagePath); + ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); + return objDet.imageObjectDetect(img, 3); + } + + static List>> + runObjectDetectionBatch(String modelPathPrefix, String inputImageDir, List context) { + Shape inputShape = new Shape(new int[]{1, 3, 512, 512}); + List inputDescriptors = new ArrayList(); + inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); + ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); + + // Loading batch of 
images from the directory path + List> batchFiles = generateBatches(inputImageDir, 20); + List>> outputList + = new ArrayList>>(); + + for (List batchFile : batchFiles) { + List imgList = ObjectDetector.loadInputBatch(batchFile); + // Running inference on batch of images loaded in previous step + List> tmp + = objDet.imageBatchObjectDetect(imgList, 5); + outputList.add(tmp); + } + return outputList; + } + + static List> generateBatches(String inputImageDirPath, int batchSize) { + File dir = new File(inputImageDirPath); + + List> output = new ArrayList>(); + List batch = new ArrayList(); + for (File imgFile : dir.listFiles()) { + batch.add(imgFile.getPath()); + if (batch.size() == batchSize) { + output.add(batch); + batch = new ArrayList(); + } + } + if (batch.size() > 0) { + output.add(batch); + } + return output; + } + + public static void main(String[] args) { + SSDClassifierExample inst = new SSDClassifierExample(); + CmdLineParser parser = new CmdLineParser(inst); + try { + parser.parseArgument(args); + } catch (Exception e) { + logger.error(e.getMessage(), e); + parser.printUsage(System.err); + System.exit(1); + } + + String mdprefixDir = inst.modelPathPrefix; + String imgPath = inst.inputImagePath; + String imgDir = inst.inputImageDir; + + if (!checkExist(Arrays.asList(mdprefixDir + "-symbol.json", imgDir, imgPath))) { + logger.error("Model or input image path does not exist"); + System.exit(1); + } + + List context = new ArrayList(); + if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && + Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { + context.add(Context.gpu()); + } else { + context.add(Context.cpu()); + } + + try { + Shape inputShape = new Shape(new int[]{1, 3, 512, 512}); + Shape outputShape = new Shape(new int[]{1, 6132, 6}); + + + int width = inputShape.get(2); + int height = inputShape.get(3); + StringBuilder outputStr = new StringBuilder().append("\n"); + + List> output + = runObjectDetectionSingle(mdprefixDir, imgPath, context); + + for (List ele : output) { + for (ObjectDetectorOutput i : ele) { + outputStr.append("Class: " + i.getClassName() + "\n"); + outputStr.append("Probabilties: " + i.getProbability() + "\n"); + + List coord = Arrays.asList(i.getXMin() * width, + i.getXMax() * height, i.getYMin() * width, i.getYMax() * height); + StringBuilder sb = new StringBuilder(); + for (float c : coord) { + sb.append(", ").append(c); + } + outputStr.append("Coord:" + sb.substring(2) + "\n"); + } + } + logger.info(outputStr.toString()); + + List>> outputList = + runObjectDetectionBatch(mdprefixDir, imgDir, context); + + outputStr = new StringBuilder().append("\n"); + int index = 0; + for (List> i : outputList) { + for (List j : i) { + outputStr.append("*** Image " + (index + 1) + "***" + "\n"); + for (ObjectDetectorOutput k : j) { + outputStr.append("Class: " + k.getClassName() + "\n"); + outputStr.append("Probabilties: " + k.getProbability() + "\n"); + List coord = Arrays.asList(k.getXMin() * width, + k.getXMax() * height, k.getYMin() * width, k.getYMax() * height); + + StringBuilder sb = new StringBuilder(); + for (float c : coord) { + sb.append(", ").append(c); + } + outputStr.append("Coord:" + sb.substring(2) + "\n"); + } + index++; + } + } + logger.info(outputStr.toString()); + + } catch (Exception e) { + logger.error(e.getMessage(), e); + parser.printUsage(System.err); + System.exit(1); + } + System.exit(0); + } + + static Boolean checkExist(List arr) { + Boolean exist = true; + for (String item : arr) { + if (!(new File(item).exists())) { + 
logger.error("Cannot find: " + item); + exist = false; + } + } + return exist; + } } diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md index bf4a44a76d00..77aec7bb5dee 100644 --- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md +++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md @@ -28,18 +28,13 @@ The model is trained on the [Pascal VOC 2012 dataset](http://host.robots.ox.ac.u ### Download Artifacts #### Step 1 You can download the files using the script `get_ssd_data.sh`. It will download and place the model files in a `model` folder and the test image files in a `image` folder in the current directory. -From the `scala-package/examples/scripts/infer/imageclassifier/` folder run: +From the `scala-package/examples/scripts/infer/objectdetector/` folder run: ```bash ./get_ssd_data.sh ``` -**Note**: You may need to run `chmod +x get_resnet_data.sh` before running this script. - -Alternatively use the following links to download the Symbol and Params files via your browser: -- [resnet50_ssd_model-symbol.json](https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-symbol.json) -- [resnet50_ssd_model-0000.params](https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-0000.params) -- [synset.txt](https://github.com/awslabs/mxnet-model-server/blob/master/examples/ssd/synset.txt) +**Note**: You may need to run `chmod +x get_ssd_data.sh` before running this script. In the pre-trained model, the `input_name` is `data` and shape is `(1, 3, 512, 512)`. This shape translates to: a batch of `1` image, the image has color and uses `3` channels (RGB), and the image has the dimensions of `512` pixels in height by `512` pixels in width. @@ -57,13 +52,6 @@ The output shape is `(1, 6132, 6)`. As with the input, the `1` is the number of ### Setup Datapath and Parameters #### Step 2 -The code `Line 31: val baseDir = System.getProperty("user.dir")` in the example will automatically searches the work directory you have defined. Please put the files in your [work directory](https://stackoverflow.com/questions/16239130/java-user-dir-property-what-exactly-does-it-mean). - -Alternatively, if you would like to use your own path, please change line 31 into your own path -```scala -val baseDir = -``` - The followings is the parameters defined for this example, you can find more information in the `class SSDClassifierExample`. 
| Argument | Comments | diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/SSDClassifierExample.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/SSDClassifierExample.scala index f752ef6dab58..07d1cc82e927 100644 --- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/SSDClassifierExample.scala +++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/SSDClassifierExample.scala @@ -182,9 +182,9 @@ object SSDClassifierExample { def checkExist(arr : Array[String]) : Boolean = { var exist : Boolean = true for (item <- arr) { - exist = Files.exists(Paths.get(item)) && exist - if (!exist) { + if (!(Files.exists(Paths.get(item)))) { logger.error("Cannot find: " + item) + exist = false } } exist diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala index 447518b5a89c..08fffb410adf 100644 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala @@ -41,7 +41,7 @@ import scala.collection.JavaConverters._ * Defaults to CPU. * @param epoch Model epoch to load; defaults to 0 */ -class ObjectDetector(val objDetector: org.apache.mxnet.infer.ObjectDetector){ +class ObjectDetector private[mxnet] (val objDetector: org.apache.mxnet.infer.ObjectDetector){ def this(modelPathPrefix: String, inputDescriptors: java.util.List[DataDesc], contexts: java.util.List[Context], epoch: Int) diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala index 3e0fcb7b507a..21e62b3aa55f 100644 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala @@ -39,7 +39,7 @@ import scala.collection.JavaConverters._ */ // JavaDoc description of class to be updated in https://issues.apache.org/jira/browse/MXNET-1178 -class Predictor(val predictor: org.apache.mxnet.infer.Predictor){ +class Predictor private[mxnet] (val predictor: org.apache.mxnet.infer.Predictor){ def this(modelPathPrefix: String, inputDescriptors: java.util.List[DataDesc], contexts: java.util.List[Context], epoch: Int) = this { From 52bead068c8b120c0eccd42d46a0b0afab5420cd Mon Sep 17 00:00:00 2001 From: Lanking Date: Thu, 15 Nov 2018 11:36:33 -0800 Subject: [PATCH 18/38] clean up the NDArray follow the comments (#13281) --- .../org/apache/mxnet/javaapi/NDArray.scala | 138 ++++++++++-------- 1 file changed, 74 insertions(+), 64 deletions(-) diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala index 446df257e20b..6b4f4bdebda5 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala @@ -50,7 +50,7 @@ object NDArray extends NDArrayBase { = org.apache.mxnet.NDArray.empty(shape, ctx, dtype) def empty(ctx: Context, shape: Array[Int]): NDArray = org.apache.mxnet.NDArray.empty(new Shape(shape), ctx) - def empty(ctx : Context, shape : java.util.List[java.lang.Integer]) : NDArray + def empty(ctx: Context, shape: 
java.util.List[java.lang.Integer]): NDArray = org.apache.mxnet.NDArray.empty(new Shape(shape), ctx) /** @@ -65,7 +65,7 @@ object NDArray extends NDArrayBase { = org.apache.mxnet.NDArray.zeros(shape, ctx, dtype) def zeros(ctx: Context, shape: Array[Int]): NDArray = org.apache.mxnet.NDArray.zeros(new Shape(shape), ctx) - def zeros(ctx : Context, shape : java.util.List[java.lang.Integer]) : NDArray + def zeros(ctx: Context, shape: java.util.List[java.lang.Integer]): NDArray = org.apache.mxnet.NDArray.zeros(new Shape(shape), ctx) /** @@ -78,7 +78,7 @@ object NDArray extends NDArrayBase { = org.apache.mxnet.NDArray.ones(shape, ctx, dtype) def ones(ctx: Context, shape: Array[Int]): NDArray = org.apache.mxnet.NDArray.ones(new Shape(shape), ctx) - def ones(ctx : Context, shape : java.util.List[java.lang.Integer]) : NDArray + def ones(ctx: Context, shape: java.util.List[java.lang.Integer]): NDArray = org.apache.mxnet.NDArray.ones(new Shape(shape), ctx) /** @@ -193,37 +193,47 @@ object NDArray extends NDArrayBase { * NDArray is basic ndarray/Tensor like data structure in mxnet.
* * NOTE: NDArray is stored in native memory. Use NDArray in a try-with-resources() construct - * or a [[ResourceScope]] in a try-with-resource to have them automatically disposed. You can - * explicitly control the lifetime of NDArray by calling dispose manually. Failure to do this - * will result in leaking native memory. + * or a [[org.apache.mxnet.ResourceScope]] in a try-with-resource to have them + * automatically disposed. You can explicitly control the lifetime of NDArray + * by calling dispose manually. Failure to do this will result in leaking native memory. * */ -class NDArray private[mxnet] (val nd : org.apache.mxnet.NDArray ) { +class NDArray private[mxnet] (val nd: org.apache.mxnet.NDArray ) { - def this(arr : Array[Float], shape : Shape, ctx : Context) = { + def this(arr: Array[Float], shape: Shape, ctx: Context) = { this(org.apache.mxnet.NDArray.array(arr, shape, ctx)) } - def this(arr : java.util.List[java.lang.Float], shape : Shape, ctx : Context) = { + def this(arr: java.util.List[java.lang.Float], shape: Shape, ctx: Context) = { this(NDArray.array(arr, shape, ctx)) } - def serialize() : Array[Byte] = nd.serialize() + def serialize(): Array[Byte] = nd.serialize() /** * Release the native memory.
* The NDArrays it depends on will NOT be disposed.
* The object shall never be used after it is disposed. */ - def dispose() : Unit = nd.dispose() + def dispose(): Unit = nd.dispose() /** * Dispose all NDArrays who help to construct this array.
* e.g. (a * b + c).disposeDeps() will dispose a, b, c (including their deps) and a * b * @return this array */ - def disposeDeps() : NDArray = nd.disposeDepsExcept() - // def disposeDepsExcept(arr : Array[NDArray]) : NDArray = nd.disposeDepsExcept() + def disposeDeps(): NDArray = nd.disposeDepsExcept() + + /** + * Dispose all NDArrays who help to construct this array, excepts those in the arguments.
+ * e.g. (a * b + c).disposeDepsExcept(a, b) + * will dispose c and a * b. + * Note that a, b's dependencies will not be disposed either. + * @param arr the Array of NDArray not to dispose + * @return this array + */ + def disposeDepsExcept(arr: Array[NDArray]): NDArray = + nd.disposeDepsExcept(arr.map(NDArray.toNDArray): _*) /** * Return a sliced NDArray that shares memory with current one. @@ -234,36 +244,36 @@ class NDArray private[mxnet] (val nd : org.apache.mxnet.NDArray ) { * * @return a sliced NDArray that shares memory with current one. */ - def slice(start : Int, stop : Int) : NDArray = nd.slice(start, stop) + def slice(start: Int, stop: Int): NDArray = nd.slice(start, stop) /** * Return a sliced NDArray at the ith position of axis0 * @param i * @return a sliced NDArray that shares memory with current one. */ - def slice (i : Int) : NDArray = nd.slice(i) + def slice (i: Int): NDArray = nd.slice(i) /** * Return a sub NDArray that shares memory with current one. * the first axis will be rolled up, which causes its shape different from slice(i, i+1) * @param idx index of sub array. */ - def at(idx : Int) : NDArray = nd.at(idx) + def at(idx: Int): NDArray = nd.at(idx) - def T : NDArray = nd.T + def T: NDArray = nd.T /** * Get data type of current NDArray. * @return class representing type of current ndarray */ - def dtype : DType = nd.dtype + def dtype: DType = nd.dtype /** * Return a copied numpy array of current array with specified type. * @param dtype Desired type of result array. * @return A copy of array content. */ - def asType(dtype : DType) : NDArray = nd.asType(dtype) + def asType(dtype: DType): NDArray = nd.asType(dtype) /** * Return a reshaped NDArray that shares memory with current one. @@ -271,7 +281,7 @@ class NDArray private[mxnet] (val nd : org.apache.mxnet.NDArray ) { * * @return a reshaped NDArray that shares memory with current one. */ - def reshape(dims : Array[Int]) : NDArray = nd.reshape(dims) + def reshape(dims: Array[Int]): NDArray = nd.reshape(dims) /** * Block until all pending writes operations on current NDArray are finished. @@ -285,55 +295,55 @@ class NDArray private[mxnet] (val nd : org.apache.mxnet.NDArray ) { * Get context of current NDArray. * @return The context of current NDArray. 
*/ - def context : Context = nd.context + def context: Context = nd.context /** * Set the values of the NDArray * @param value Value to set * @return Current NDArray */ - def set(value : Float) : NDArray = nd.set(value) - def set(other : NDArray) : NDArray = nd.set(other) - def set(other : Array[Float]) : NDArray = nd.set(other) - - def add(other : NDArray) : NDArray = this.nd + other.nd - def add(other : Float) : NDArray = this.nd + other - def _add(other : NDArray) : NDArray = this.nd += other - def _add(other : Float) : NDArray = this.nd += other - def subtract(other : NDArray) : NDArray = this.nd - other - def subtract(other : Float) : NDArray = this.nd - other - def _subtract(other : NDArray) : NDArray = this.nd -= other - def _subtract(other : Float) : NDArray = this.nd -= other - def multiply(other : NDArray) : NDArray = this.nd * other - def multiply(other : Float) : NDArray = this.nd * other - def _multiply(other : NDArray) : NDArray = this.nd *= other - def _multiply(other : Float) : NDArray = this.nd *= other - def div(other : NDArray) : NDArray = this.nd / other - def div(other : Float) : NDArray = this.nd / other - def _div(other : NDArray) : NDArray = this.nd /= other - def _div(other : Float) : NDArray = this.nd /= other - def pow(other : NDArray) : NDArray = this.nd ** other - def pow(other : Float) : NDArray = this.nd ** other - def _pow(other : NDArray) : NDArray = this.nd **= other - def _pow(other : Float) : NDArray = this.nd **= other - def mod(other : NDArray) : NDArray = this.nd % other - def mod(other : Float) : NDArray = this.nd % other - def _mod(other : NDArray) : NDArray = this.nd %= other - def _mod(other : Float) : NDArray = this.nd %= other - def greater(other : NDArray) : NDArray = this.nd > other - def greater(other : Float) : NDArray = this.nd > other - def greaterEqual(other : NDArray) : NDArray = this.nd >= other - def greaterEqual(other : Float) : NDArray = this.nd >= other - def lesser(other : NDArray) : NDArray = this.nd < other - def lesser(other : Float) : NDArray = this.nd < other - def lesserEqual(other : NDArray) : NDArray = this.nd <= other - def lesserEqual(other : Float) : NDArray = this.nd <= other + def set(value: Float): NDArray = nd.set(value) + def set(other: NDArray): NDArray = nd.set(other) + def set(other: Array[Float]): NDArray = nd.set(other) + + def add(other: NDArray): NDArray = this.nd + other.nd + def add(other: Float): NDArray = this.nd + other + def addInplace(other: NDArray): NDArray = this.nd += other + def addInplace(other: Float): NDArray = this.nd += other + def subtract(other: NDArray): NDArray = this.nd - other + def subtract(other: Float): NDArray = this.nd - other + def subtractInplace(other: NDArray): NDArray = this.nd -= other + def subtractInplace(other: Float): NDArray = this.nd -= other + def multiply(other: NDArray): NDArray = this.nd * other + def multiply(other: Float): NDArray = this.nd * other + def multiplyInplace(other: NDArray): NDArray = this.nd *= other + def multiplyInplace(other: Float): NDArray = this.nd *= other + def div(other: NDArray): NDArray = this.nd / other + def div(other: Float): NDArray = this.nd / other + def divInplace(other: NDArray): NDArray = this.nd /= other + def divInplace(other: Float): NDArray = this.nd /= other + def pow(other: NDArray): NDArray = this.nd ** other + def pow(other: Float): NDArray = this.nd ** other + def powInplace(other: NDArray): NDArray = this.nd **= other + def powInplace(other: Float): NDArray = this.nd **= other + def mod(other: NDArray): NDArray = this.nd 
% other + def mod(other: Float): NDArray = this.nd % other + def modInplace(other: NDArray): NDArray = this.nd %= other + def modInplace(other: Float): NDArray = this.nd %= other + def greater(other: NDArray): NDArray = this.nd > other + def greater(other: Float): NDArray = this.nd > other + def greaterEqual(other: NDArray): NDArray = this.nd >= other + def greaterEqual(other: Float): NDArray = this.nd >= other + def lesser(other: NDArray): NDArray = this.nd < other + def lesser(other: Float): NDArray = this.nd < other + def lesserEqual(other: NDArray): NDArray = this.nd <= other + def lesserEqual(other: Float): NDArray = this.nd <= other /** * Return a copied flat java array of current array (row-major). * @return A copy of array content. */ - def toArray : Array[Float] = nd.toArray + def toArray: Array[Float] = nd.toArray /** * Return a CPU scalar(float) of current ndarray. @@ -341,7 +351,7 @@ class NDArray private[mxnet] (val nd : org.apache.mxnet.NDArray ) { * * @return The scalar representation of the ndarray. */ - def toScalar : Float = nd.toScalar + def toScalar: Float = nd.toScalar /** * Copy the content of current array to other. @@ -349,7 +359,7 @@ class NDArray private[mxnet] (val nd : org.apache.mxnet.NDArray ) { * @param other Target NDArray or context we want to copy data to. * @return The copy target NDArray */ - def copyTo(other : NDArray) : NDArray = nd.copyTo(other) + def copyTo(other: NDArray): NDArray = nd.copyTo(other) /** * Copy the content of current array to a new NDArray in the context. @@ -357,22 +367,22 @@ class NDArray private[mxnet] (val nd : org.apache.mxnet.NDArray ) { * @param ctx Target context we want to copy data to. * @return The copy target NDArray */ - def copyTo(ctx : Context) : NDArray = nd.copyTo(ctx) + def copyTo(ctx: Context): NDArray = nd.copyTo(ctx) /** * Clone the current array * @return the copied NDArray in the same context */ - def copy() : NDArray = copyTo(this.context) + def copy(): NDArray = copyTo(this.context) /** * Get shape of current NDArray. * @return an array representing shape of current ndarray */ - def shape : Shape = nd.shape + def shape: Shape = nd.shape - def size : Int = shape.product + def size: Int = shape.product /** * Return an `NDArray` that lives in the target context. If the array From 7d512410671f45a8ab043962cbeacc4b27901250 Mon Sep 17 00:00:00 2001 From: Piyush Ghai Date: Thu, 15 Nov 2018 13:31:13 -0800 Subject: [PATCH 19/38] [MXNET-1181] Added command line alternative to IntelliJ in install instructions (#13267) * Added command line alternative to IntelliJ * Removed the duplicate file * Fixed typos * Fixed minor command issue --- docs/tutorials/java/mxnet_java_on_intellij.md | 15 ++- .../mxnet_java_install_and_run_examples.md | 123 ------------------ 2 files changed, 14 insertions(+), 124 deletions(-) delete mode 100644 docs/tutorials/scala/mxnet_java_install_and_run_examples.md diff --git a/docs/tutorials/java/mxnet_java_on_intellij.md b/docs/tutorials/java/mxnet_java_on_intellij.md index b90a92b0a7b5..7f02853660d9 100644 --- a/docs/tutorials/java/mxnet_java_on_intellij.md +++ b/docs/tutorials/java/mxnet_java_on_intellij.md @@ -1,6 +1,6 @@ # Run MXNet Java Examples Using the IntelliJ IDE (macOS) -This tutorial guides you through setting up a simple Java project in IntelliJ IDE on macOS and demonstrates usage of the MXNet Java APIs. +This tutorial guides you through setting up a simple Java project in IntelliJ IDE on macOS and demonstrates usage of the MXNet Java APIs. 
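For quick reference while reading this patch series, a minimal sketch of the arithmetic and renamed in-place operators from the `javaapi` NDArray cleanup in the commit above. The class name `NDArrayOpsSketch` is illustrative only; the `NDArray` and `Context` calls are the ones defined in the diff, and this is a usage sketch rather than code that belongs to any file in the patch.

```java
import org.apache.mxnet.javaapi.Context;
import org.apache.mxnet.javaapi.NDArray;

public class NDArrayOpsSketch {
    public static void main(String[] args) {
        // Create a 2x3 NDArray of ones on the CPU.
        NDArray a = NDArray.ones(Context.cpu(), new int[]{2, 3});

        // `add` returns a new NDArray and leaves `a` unchanged.
        NDArray b = a.add(1f);

        // `addInplace` (renamed from `_add` in the commit above) mutates `a` directly.
        a.addInplace(1f);

        System.out.println("a shape: " + a.shape() + ", b shape: " + b.shape());
    }
}
```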
## Prerequisites: To use this tutorial you need the following pre-requisites: @@ -108,6 +108,15 @@ TODO After clicking Finish, you will be presented with the project's first view. The project's `pom.xml` will be open for editing. +**IntelliJ IDEA Alternative** If you want to use only Maven to create the project, you can create a new folder and run the following in the newly created folder : +```bash +mkdir java-proj +cd java-proj/ +mvn archetype:generate -DgroupId=mxnet -DartifactId=mxnetJava -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false +``` +This command will create a new Java project folder with name `mxnetJava` inside `java-proj` folder. +More on creating Maven projects can be found on this Maven tutorial : [Maven in 5 Minutes](https://maven.apache.org/guides/getting-started/maven-in-five-minutes.html). + **Step 3.** Add the following Maven dependency to your `pom.xml` file under the `dependencies` tag: ```html @@ -120,6 +129,10 @@ The project's `pom.xml` will be open for editing. To view the latest MXNet Maven packages, you can check [MXNet Maven package repository](https://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.apache.mxnet%22) +Note : +- Change the osx-x86_64 to linux-x86_64 if your platform is linux. +- Change cpu into gpu if you have a gpu backed machine and want to use gpu. + **Step 4.** Import dependencies with Maven: diff --git a/docs/tutorials/scala/mxnet_java_install_and_run_examples.md b/docs/tutorials/scala/mxnet_java_install_and_run_examples.md deleted file mode 100644 index 83e1ec5b2daa..000000000000 --- a/docs/tutorials/scala/mxnet_java_install_and_run_examples.md +++ /dev/null @@ -1,123 +0,0 @@ -# Install and run Java Examples - -## Prerequisites: -Please follow the Step 1 in the [Scala configuration](http://mxnet.incubator.apache.org/install/scala_setup.html#setup-instructions) -These should help you install the correct Java version and all dependencies. - -## Run the Java example project -We have provided a general MXNet Java template under `scala-package/mxnet-demo/java-demo` which contains the necessary project files for you to get started. It contains a simple Hello world! equivalent program `JavaSample.java` and a full fledged `ObjectDetection.java `that shows how to run Object Detection on images using MXNet and pre-trained SSD model. - -Alternatively you could build project from scratch following the below instructions. - -## Import and run the Java package -For users using a desktop/laptop, we recommend using IntelliJ IDE as it is tested and supported to provide the necessary documentation for the Java API. - -Alternatively, users can follow the second instruction to set up an empty Maven project for Java. - -### IntelliJ instruction -If you are using a computer with Ubuntu16.04 or Mac, you can install IntelliJ to run the Java package. Please follow the instruction below: - -1. Create a new Java project in IntelliJ. Fire up IntelliJ and click `Create New Project`. - -2. Click `Next`, and in the `Create project from template` window, do not select anything and click `Next` again. - -3. In the next window choose your `Project name` and the `Project location` and click on `Finish`. - -4. Let's add the Java Inference API jars that we build from source. At the top of the window, Go to the `File -> Project Structure`. In the popup window that opens up, click on `Libraries -> +` and select the path to the jar files downloaded. Click `Apply` and then click `OK`. - -6. Create a new Java class under the folder `your-project-name/src`. 
Let's call this class `JavaSample.java`. Type in the following code snippet and run it. In this code snippet, we create an NDArray object in Java and print its shape. -```java -import org.apache.mxnet.javaapi.Context; -import org.apache.mxnet.javaapi.NDArray; - -public class JavaSample { -public static void main(String[] args) { - System.out.println("Hello"); - NDArray nd = NDArray.ones(Context.cpu(), new int[] {10, 20}); - - System.out.println("Shape of NDarray is : " + nd.shape()); -} -} -``` - -7. If all went well, you should see an output like this : -``` -Hello -SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder". -SLF4J: Defaulting to no-operation (NOP) logger implementation -SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details. -Shape of NDarray is : (10,20) -Process finished with exit code 0 -``` -This means you have successfully set it up on your machine - -### Run the project manually in Maven -In this example, Maven is being used to create the project. This tutorial referred the [Maven in 5 min](https://maven.apache.org/guides/getting-started/maven-in-five-minutes.html) tutorial. - -1. Create a new folder and run the following commands -``` -mvn archetype:generate -DgroupId=com.mycompany.app -DartifactId=my-app -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false -``` -You can specify the `groupId` and `artifactId` to your favourite names. You can also create a maven project using empty archetype. - -2. then go to `pom.xml` file in your project folder and add the following content. - -- Change the `osx-x86_64` to `linux-x86_64` if your platform is linux. -- Change `cpu` into `gpu` if you are using gpu -- Change the version of your package from `1.3.1-SNAPSHOT` to the matched jar version. -```xml - - org.apache.mxnet - mxnet-full_2.11-osx-x86_64-cpu - 1.3.1-SNAPSHOT - system - path-to-your-jar/jarName.jar - - - args4j - args4j - 2.0.29 - - - org.slf4j - slf4j-api - 1.7.7 - - - org.slf4j - slf4j-log4j12 - 1.7.7 - -``` -3. Finally you can replace the code in `App.java` -```java -import org.apache.mxnet.javaapi.Context; -import org.apache.mxnet.javaapi.NDArray; - -public class App { -public static void main(String[] args) { - System.out.println("Hello"); - NDArray nd = NDArray.ones(Context.cpu(), new int[] {10, 20}); - - System.out.println("Shape of NDarray is : " + nd.shape()); - -} -} -``` -make the package by -``` -mvn package -``` - -and run it by -``` -java -cp target/my-app-1.0-SNAPSHOT.jar:/.jar com.mycompany.app.App -``` -The result looks like this: -``` -Hello -SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder". -SLF4J: Defaulting to no-operation (NOP) logger implementation -SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details. 
-Shape of NDarray is : (10,20) -``` \ No newline at end of file From 3ec9030fc68e895b53761b88b968b16bb450938e Mon Sep 17 00:00:00 2001 From: Lanking Date: Thu, 15 Nov 2018 17:22:18 -0800 Subject: [PATCH 20/38] add defaults and clean up the tests (#13295) --- Makefile | 2 +- scala-package/core/pom.xml | 10 ++++------ scala-package/examples/pom.xml | 10 ++++------ scala-package/infer/pom.xml | 10 ++++------ 4 files changed, 13 insertions(+), 19 deletions(-) diff --git a/Makefile b/Makefile index 638a0864dc57..b12311736647 100644 --- a/Makefile +++ b/Makefile @@ -605,7 +605,7 @@ scalaclean: scalapkg: (cd $(ROOTDIR)/scala-package; \ - mvn package -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE),integrationtest -Dcxx="$(CXX)" \ + mvn package -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -Dcxx="$(CXX)" \ -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ -Dcurrent_libdir="$(ROOTDIR)/lib" \ diff --git a/scala-package/core/pom.xml b/scala-package/core/pom.xml index d5396dab1e67..1445c0bf270a 100644 --- a/scala-package/core/pom.xml +++ b/scala-package/core/pom.xml @@ -10,6 +10,10 @@ ../pom.xml + + true + + mxnet-core_2.11 MXNet Scala Package - Core @@ -20,12 +24,6 @@ false - - integrationtest - - true - - osx-x86_64-cpu diff --git a/scala-package/examples/pom.xml b/scala-package/examples/pom.xml index 436f2992768b..72a40dc01f01 100644 --- a/scala-package/examples/pom.xml +++ b/scala-package/examples/pom.xml @@ -13,13 +13,11 @@ mxnet-examples_2.11 MXNet Scala Package - Examples + + true + + - - unittest - - true - - integrationtest diff --git a/scala-package/infer/pom.xml b/scala-package/infer/pom.xml index e50100169328..91a1e1b30d2f 100644 --- a/scala-package/infer/pom.xml +++ b/scala-package/infer/pom.xml @@ -13,6 +13,10 @@ mxnet-infer_2.11 MXNet Scala Package - Inference + + true + + unittest @@ -20,12 +24,6 @@ false - - integrationtest - - true - - osx-x86_64-cpu From f52b9aa59cab07b9b1b7ed764c4459ad67fc6a9e Mon Sep 17 00:00:00 2001 From: Piyush Ghai Date: Thu, 15 Nov 2018 17:27:19 -0800 Subject: [PATCH 21/38] [MXNET-1187] Added Java SSD Inference Tutorial for website (#13201) * Added Java SSD Inference Tutorial for website * Added whitelisting to SSD tutorial * Address PR feedback * Marking intelliJ as optional --- docs/tutorials/index.md | 1 + docs/tutorials/java/ssd_inference.md | 186 +++++++++++++++++++++++ tests/tutorials/test_sanity_tutorials.py | 3 +- 3 files changed, 189 insertions(+), 1 deletion(-) create mode 100644 docs/tutorials/java/ssd_inference.md diff --git a/docs/tutorials/index.md b/docs/tutorials/index.md index e5e26772064b..7ed34fc9d180 100644 --- a/docs/tutorials/index.md +++ b/docs/tutorials/index.md @@ -158,6 +158,7 @@ Select API:  ## Java Tutorials * Getting Started * [Developer Environment Setup on IntelliJ IDE](/tutorials/java/mxnet_java_on_intellij.html) +* [Multi Object Detection using pre-trained Single Shot Detector (SSD) Model](/tutorials/java/ssd_inference.html) * [MXNet-Java Examples](https://github.com/apache/incubator-mxnet/tree/master/scala-package/examples/src/main/java/org/apache/mxnetexamples)
diff --git a/docs/tutorials/java/ssd_inference.md b/docs/tutorials/java/ssd_inference.md new file mode 100644 index 000000000000..6bcaaa2504a4 --- /dev/null +++ b/docs/tutorials/java/ssd_inference.md @@ -0,0 +1,186 @@ +# Multi Object Detection using pre-trained SSD Model via Java Inference APIs + +This tutorial shows how to use MXNet Java Inference APIs to run inference on a pre-trained Single Shot Detector (SSD) Model. + +The SSD model is trained on the Pascal VOC 2012 dataset. The network is a SSD model built on Resnet50 as the base network to extract image features. The model is trained to detect the following entities (classes): ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor']. For more details about the model, you can refer to the [MXNet SSD example](https://github.com/apache/incubator-mxnet/tree/master/example/ssd). + +## Prerequisites + +To complete this tutorial, you need the following: +* [MXNet Java Setup on IntelliJ IDEA](/java/mxnet_java_on_intellij.html) (Optional) +* [wget](https://www.gnu.org/software/wget/) To download model artifacts +* SSD Model artifacts + * Use the following script to get the SSD Model files : +```bash +data_path=/tmp/resnet50_ssd +mkdir -p "$data_path" +wget https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-symbol.json -P $data_path +wget https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-0000.params -P $data_path +wget https://s3.amazonaws.com/model-server/models/resnet50_ssd/synset.txt -P $data_path +``` +* Test images : A few sample images to run inference on. + * Use the following script to download sample images : +```bash +image_path=/tmp/resnet50_ssd/images +mkdir -p "$image_path" +cd $image_path +wget https://cloud.githubusercontent.com/assets/3307514/20012567/cbb60336-a27d-11e6-93ff-cbc3f09f5c9e.jpg -O dog.jpg +wget https://cloud.githubusercontent.com/assets/3307514/20012563/cbb41382-a27d-11e6-92a9-18dab4fd1ad3.jpg -O person.jpg +``` + +Alternately, you can get the entire SSD Model artifacts + images in one single script from the MXNet Repository by running [get_ssd_data.sh script](https://github.com/apache/incubator-mxnet/blob/master/scala-package/examples/scripts/infer/objectdetector/get_ssd_data.sh) + +## Time to code! +1\. Following the [MXNet Java Setup on IntelliJ IDEA](/java/mxnet_java_on_intellij.html) tutorial, in the same project `JavaMXNet`, create a new empty class called : `ObjectDetectionTutorial.java`. + +2\. In the `main` function of `ObjectDetectionTutorial.java` define the downloaded model path and the image data paths. This is the same path where we downloaded the model artifacts and images in a previous step. + +```java +String modelPathPrefix = "/tmp/resnet50_ssd/resnet50_ssd_model"; +String inputImagePath = "/tmp/resnet50_ssd/images/dog.jpg"; +``` + +3\. We can run the inference code in this example on either CPU or GPU (if you have a GPU backed machine) by choosing the appropriate context. + +```java + +List context = getContext(); +... + +private static List getContext() { +List ctx = new ArrayList<>(); +ctx.add(Context.cpu()); // Choosing CPU Context here + +return ctx; +} +``` + +4\. 
To provide an input to the model, define the input shape to the model and the Input Data Descriptor (DataDesc) as shown below : + +```java +Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); +List inputDescriptors = new ArrayList(); +inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); +``` + +The input shape can be interpreted as follows : The input has a batch size of 1, with 3 RGB channels in the image, and the height and width of the image is 512 each. + +5\. To run an actual inference on the given image, add the following lines to the `ObjectDetectionTutorial.java` class : + +```java +BufferedImage img = ObjectDetector.loadImageFromFile(inputImagePath); +ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); +List> output = objDet.imageObjectDetect(img, 3); // Top 3 objects detected will be returned +``` + +6\. Let's piece all of the above steps together by showing the final contents of the `ObjectDetectionTutorial.java`. + +```java +package mxnet; + +import org.apache.mxnet.infer.javaapi.ObjectDetector; +import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput; +import org.apache.mxnet.javaapi.Context; +import org.apache.mxnet.javaapi.DType; +import org.apache.mxnet.javaapi.DataDesc; +import org.apache.mxnet.javaapi.Shape; + +import java.awt.image.BufferedImage; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class ObjectDetectionTutorial { + + public static void main(String[] args) { + + String modelPathPrefix = "/tmp/resnet50_ssd/resnet50_ssd_model"; + + String inputImagePath = "/tmp/resnet50_ssd/images/dog.jpg"; + + List context = getContext(); + + Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); + + List inputDescriptors = new ArrayList(); + inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); + + BufferedImage img = ObjectDetector.loadImageFromFile(inputImagePath); + ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); + List> output = objDet.imageObjectDetect(img, 3); + + printOutput(output, inputShape); + } + + + private static List getContext() { + List ctx = new ArrayList<>(); + ctx.add(Context.cpu()); + + return ctx; + } + + private static void printOutput(List> output, Shape inputShape) { + + StringBuilder outputStr = new StringBuilder(); + + int width = inputShape.get(3); + int height = inputShape.get(2); + + for (List ele : output) { + for (ObjectDetectorOutput i : ele) { + outputStr.append("Class: " + i.getClassName() + "\n"); + outputStr.append("Probabilties: " + i.getProbability() + "\n"); + + List coord = Arrays.asList(i.getXMin() * width, + i.getXMax() * height, i.getYMin() * width, i.getYMax() * height); + StringBuilder sb = new StringBuilder(); + for (float c: coord) { + sb.append(", ").append(c); + } + outputStr.append("Coord:" + sb.substring(2)+ "\n"); + } + } + System.out.println(outputStr); + + } +} +``` + +7\. To compile and run this code, change directories to this project's root folder, then run the following: +```bash +mvn clean install dependency:copy-dependencies +``` + +The build generates a new jar file in the `target` folder called `javaMXNet-1.0-SNAPSHOT.jar`. + +To run the ObjectDetectionTutorial.java use the following command from the project's root folder. 
+```bash +java -cp target/javaMXNet-1.0-SNAPSHOT.jar:target/dependency/* mxnet.ObjectDetectionTutorial +``` + +You should see a similar output being generated for the dog image that we used: +```bash +Class: car +Probabilties: 0.99847263 +Coord:312.21335, 72.02908, 456.01443, 150.66176 +Class: bicycle +Probabilties: 0.9047381 +Coord:155.9581, 149.96365, 383.83694, 418.94516 +Class: dog +Probabilties: 0.82268167 +Coord:83.82356, 179.14001, 206.63783, 476.78754 +``` + +![dog_1](https://cloud.githubusercontent.com/assets/3307514/20012567/cbb60336-a27d-11e6-93ff-cbc3f09f5c9e.jpg) + +The results returned by the inference call translate into the regions in the image where the model detected objects. + +![dog_2](https://cloud.githubusercontent.com/assets/3307514/19171063/91ec2792-8be0-11e6-983c-773bd6868fa8.png) + +## Next Steps +For more information about MXNet Java resources, see the following: + +* [Java Inference API](/api/java/infer.html) +* [Java Inference Examples](https://github.com/apache/incubator-mxnet/tree/java-api/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/) +* [MXNet Tutorials Index](/tutorials/index.html) diff --git a/tests/tutorials/test_sanity_tutorials.py b/tests/tutorials/test_sanity_tutorials.py index dc5fbf5d83a6..9e5c38abc976 100644 --- a/tests/tutorials/test_sanity_tutorials.py +++ b/tests/tutorials/test_sanity_tutorials.py @@ -57,7 +57,8 @@ 'vision/index.md', 'tensorrt/index.md', 'tensorrt/inference_with_trt.md', - 'java/mxnet_java_on_intellij.md'] + 'java/mxnet_java_on_intellij.md', + 'java/ssd_inference.md'] whitelist_set = set(whitelist) def test_tutorial_downloadable(): From bb7bbaf5e82e4ca71c3be8a768ce49b0c1bfdc27 Mon Sep 17 00:00:00 2001 From: Lanking Date: Thu, 15 Nov 2018 17:42:17 -0800 Subject: [PATCH 22/38] [MXNET-1182] Predictor example (#13237) * add initial commit * push back predictor * name fix and bug fix * update readme and script to run * minor fix * minor fix * fix on doc * update predictor --- .../predictor/run_predictor_java_example.sh | 44 ++++ .../infer/predictor/PredictorExample.java | 200 ++++++++++++++++++ .../javaapi/infer/predictor/README.md | 61 ++++++ .../mxnet/infer/javaapi/Predictor.scala | 13 ++ 4 files changed, 318 insertions(+) create mode 100755 scala-package/examples/scripts/infer/predictor/run_predictor_java_example.sh create mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/PredictorExample.java create mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/README.md diff --git a/scala-package/examples/scripts/infer/predictor/run_predictor_java_example.sh b/scala-package/examples/scripts/infer/predictor/run_predictor_java_example.sh new file mode 100755 index 000000000000..4ebcc3076a78 --- /dev/null +++ b/scala-package/examples/scripts/infer/predictor/run_predictor_java_example.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +hw_type=cpu +if [[ $3 = gpu ]] +then + hw_type=gpu +fi + +platform=linux-x86_64 + +if [[ $OSTYPE = [darwin]* ]] +then + platform=osx-x86_64 +fi + +MXNET_ROOT=$(cd "$(dirname $0)/../../../../../"; pwd) +CLASS_PATH=$MXNET_ROOT/scala-package/assembly/$platform-$hw_type/target/*:$MXNET_ROOT/scala-package/examples/target/*:$MXNET_ROOT/scala-package/examples/target/classes/lib/*:$MXNET_ROOT/scala-package/infer/target/* + +# model dir and prefix +MODEL_DIR=$1 +# input image +INPUT_IMG=$2 + +java -Xmx8G -cp $CLASS_PATH \ + org.apache.mxnetexamples.javaapi.infer.predictor.PredictorExample \ + --model-path-prefix $MODEL_DIR \ + --input-image $INPUT_IMG diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/PredictorExample.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/PredictorExample.java new file mode 100644 index 000000000000..c9b4426f52b3 --- /dev/null +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/PredictorExample.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.mxnetexamples.javaapi.infer.predictor; + +import org.apache.mxnet.infer.javaapi.Predictor; +import org.apache.mxnet.javaapi.*; +import org.kohsuke.args4j.CmdLineParser; +import org.kohsuke.args4j.Option; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.imageio.ImageIO; +import java.awt.Graphics2D; +import java.awt.image.BufferedImage; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * This Class is a demo to show how users can use Predictor APIs to do + * Image Classification with all hand-crafted Pre-processing. + * All helper functions for image pre-processing are + * currently available in ObjectDetector class. 
+ */ +public class PredictorExample { + @Option(name = "--model-path-prefix", usage = "input model directory and prefix of the model") + private String modelPathPrefix = "/model/ssd_resnet50_512"; + @Option(name = "--input-image", usage = "the input image") + private String inputImagePath = "/images/dog.jpg"; + + final static Logger logger = LoggerFactory.getLogger(PredictorExample.class); + + /** + * Load the image from file to buffered image + * It can be replaced by loadImageFromFile from ObjectDetector + * @param inputImagePath input image Path in String + * @return Buffered image + */ + private static BufferedImage loadIamgeFromFile(String inputImagePath) { + BufferedImage buf = null; + try { + buf = ImageIO.read(new File(inputImagePath)); + } catch (IOException e) { + System.err.println(e); + } + return buf; + } + + /** + * Reshape the current image using ImageIO and Graph2D + * It can be replaced by reshapeImage from ObjectDetector + * @param buf Buffered image + * @param newWidth desired width + * @param newHeight desired height + * @return a reshaped bufferedImage + */ + private static BufferedImage reshapeImage(BufferedImage buf, int newWidth, int newHeight) { + BufferedImage resizedImage = new BufferedImage(newWidth, newHeight, BufferedImage.TYPE_INT_RGB); + Graphics2D g = resizedImage.createGraphics(); + g.drawImage(buf, 0, 0, newWidth, newHeight, null); + g.dispose(); + return resizedImage; + } + + /** + * Convert an image from a buffered image into pixels float array + * It can be replaced by bufferedImageToPixels from ObjectDetector + * @param buf buffered image + * @return Float array + */ + private static float[] imagePreprocess(BufferedImage buf) { + // Get height and width of the image + int w = buf.getWidth(); + int h = buf.getHeight(); + + // get an array of integer pixels in the default RGB color mode + int[] pixels = buf.getRGB(0, 0, w, h, null, 0, w); + + // 3 times height and width for R,G,B channels + float[] result = new float[3 * h * w]; + + int row = 0; + // copy pixels to array vertically + while (row < h) { + int col = 0; + // copy pixels to array horizontally + while (col < w) { + int rgb = pixels[row * w + col]; + // getting red color + result[0 * h * w + row * w + col] = (rgb >> 16) & 0xFF; + // getting green color + result[1 * h * w + row * w + col] = (rgb >> 8) & 0xFF; + // getting blue color + result[2 * h * w + row * w + col] = rgb & 0xFF; + col += 1; + } + row += 1; + } + buf.flush(); + return result; + } + + /** + * Helper class to print the maximum prediction result + * @param probabilities The float array of probability + * @param modelPathPrefix model Path needs to load the synset.txt + */ + private static String printMaximumClass(float[] probabilities, + String modelPathPrefix) throws IOException { + String synsetFilePath = modelPathPrefix.substring(0, + 1 + modelPathPrefix.lastIndexOf(File.separator)) + "/synset.txt"; + BufferedReader reader = new BufferedReader(new FileReader(synsetFilePath)); + ArrayList list = new ArrayList<>(); + String line = reader.readLine(); + + while (line != null){ + list.add(line); + line = reader.readLine(); + } + reader.close(); + + int maxIdx = 0; + for (int i = 1;i probabilities[maxIdx]) { + maxIdx = i; + } + } + + return "Probability : " + probabilities[maxIdx] + " Class : " + list.get(maxIdx) ; + } + + public static void main(String[] args) { + PredictorExample inst = new PredictorExample(); + CmdLineParser parser = new CmdLineParser(inst); + try { + parser.parseArgument(args); + } catch (Exception e) { + 
logger.error(e.getMessage(), e); + parser.printUsage(System.err); + System.exit(1); + } + // Prepare the model + List context = new ArrayList(); + if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && + Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { + context.add(Context.gpu()); + } else { + context.add(Context.cpu()); + } + List inputDesc = new ArrayList<>(); + Shape inputShape = new Shape(new int[]{1, 3, 224, 224}); + inputDesc.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); + Predictor predictor = new Predictor(inst.modelPathPrefix, inputDesc, context,0); + // Prepare data + BufferedImage img = loadIamgeFromFile(inst.inputImagePath); + + img = reshapeImage(img, 224, 224); + // predict + float[][] result = predictor.predict(new float[][]{imagePreprocess(img)}); + try { + System.out.println("Predict with Float input"); + System.out.println(printMaximumClass(result[0], inst.modelPathPrefix)); + } catch (IOException e) { + System.err.println(e); + } + // predict with NDArray + NDArray nd = new NDArray( + imagePreprocess(img), + new Shape(new int[]{1, 3, 224, 224}), + Context.cpu()); + List ndList = new ArrayList<>(); + ndList.add(nd); + List ndResult = predictor.predictWithNDArray(ndList); + try { + System.out.println("Predict with NDArray"); + System.out.println(printMaximumClass(ndResult.get(0).toArray(), inst.modelPathPrefix)); + } catch (IOException e) { + System.err.println(e); + } + } + +} diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/README.md b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/README.md new file mode 100644 index 000000000000..1f2c9e0e813c --- /dev/null +++ b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/README.md @@ -0,0 +1,61 @@ +# Image Classification using Java Predictor + +In this example, you will learn how to use Java Inference API to +build and run pre-trained Resnet 18 model. + +## Contents + +1. [Prerequisites](#prerequisites) +2. [Download artifacts](#download-artifacts) +3. [Setup datapath and parameters](#setup-datapath-and-parameters) +4. [Run the image classifier example](#run-the-image-inference-example) + +## Prerequisites + +1. Build from source with [MXNet](https://mxnet.incubator.apache.org/install/index.html) +2. [IntelliJ IDE (or alternative IDE) project setup](https://github.com/apache/incubator-mxnet/blob/master/docs/tutorials/java/mxnet_java_on_intellij.md) with the MXNet Java Package +3. wget + +## Download Artifacts + +For this tutorial, you can get the model and sample input image by running following bash file. This script will use `wget` to download these artifacts from AWS S3. + +From the `scala-package/examples/scripts/infer/imageclassifier/` folder run: + +```bash +./get_resnet_18_data.sh +``` + +**Note**: You may need to run `chmod +x get_resnet_18_data.sh` before running this script. + +### Setup Datapath and Parameters + +The available arguments are as follows: + +| Argument | Comments | +| ----------------------------- | ---------------------------------------- | +| `model-dir`                   | Folder path with prefix to the model (including json, params, and any synset file). | +| `input-image` | The image to run inference on. | + +## Run the image classifier example + +After the previous steps, you should be able to run the code using the following script that will pass all of the required parameters to the Predictor API. 
+ +From the `scala-package/examples/scripts/infer/predictor/` folder run: + +```bash +bash run_predictor_java_example.sh ../models/resnet-18/resnet-18 ../images/kitten.jpg +``` + +**Notes**: +* These are relative paths to this script. +* You may need to run `chmod +x run_predictor_java_example.sh` before running this script. + +The example should give an output similar to the one shown below: +``` +Predict with Float input +Probability : 0.30337515 Class : n02123159 tiger cat +Predict with NDArray +Probability : 0.30337515 Class : n02123159 tiger cat +``` +the outputs come from the the input image, with top1 predictions picked. \ No newline at end of file diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala index 21e62b3aa55f..a5428e1c8219 100644 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala @@ -49,6 +49,18 @@ class Predictor private[mxnet] (val predictor: org.apache.mxnet.infer.Predictor) new org.apache.mxnet.infer.Predictor(modelPathPrefix, informationDesc, inContexts, Some(epoch)) } + /** + * Takes input as Array of one dimensional arrays and creates the NDArray needed for inference + * The array will be reshaped based on the input descriptors. + * + * @param input: An Array of a one-dimensional array. + An extra Array is needed for when the model has more than one input. + * @return Indexed sequence array of outputs + */ + def predict(input: Array[Array[Float]]): + Array[Array[Float]] = { + predictor.predict(input).toArray + } /** * Takes input as List of one dimensional arrays and creates the NDArray needed for inference @@ -65,6 +77,7 @@ class Predictor private[mxnet] (val predictor: org.apache.mxnet.infer.Predictor) } + /** * Predict using NDArray as input * This method is useful when the input is a batch of data From ab8772c26e68f70ca0a46571dc48f96710fad2d6 Mon Sep 17 00:00:00 2001 From: Piyush Ghai Date: Fri, 16 Nov 2018 14:18:24 -0800 Subject: [PATCH 23/38] Reducing the length of setup tutorial (#13306) --- docs/tutorials/java/mxnet_java_on_intellij.md | 78 ++++--------------- 1 file changed, 13 insertions(+), 65 deletions(-) diff --git a/docs/tutorials/java/mxnet_java_on_intellij.md b/docs/tutorials/java/mxnet_java_on_intellij.md index 7f02853660d9..d9a215998005 100644 --- a/docs/tutorials/java/mxnet_java_on_intellij.md +++ b/docs/tutorials/java/mxnet_java_on_intellij.md @@ -12,48 +12,29 @@ To use this tutorial you need the following pre-requisites: ### MacOS Prerequisites -**Step 1.** Install brew: +You can run the following commands to install the prerequisites. ``` /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" -``` - -Or, if you already have brew, update it: -``` brew update -``` - -**Step 2.** Install Java 8: -``` brew tap caskroom/versions brew cask install java8 -``` - -**Step 3.** Install maven: -``` brew install maven -``` - -**Step 4.** Install OpenCV: -``` brew install opencv ``` You can also run this tutorial on an Ubuntu machine after installing the following prerequisites. ### Ubuntu Prerequisites -**Step 1.** Download the MXNet source. +Run the following commands to install the prerequisites. 
```bash -git clone --recursive https://github.com/apache/incubator-mxnet.git mxnet -cd mxnet +wget https://github.com/apache/incubator-mxnet/blob/master/ci/docker/install/ubuntu_core.sh +sudo ./ubuntu_core.sh +wget https://github.com/apache/incubator-mxnet/blob/master/ci/docker/install/ubuntu_scala.sh +sudo ./ubuntu_scala.sh ``` -**Step 2.** Run the dependency installation scripts. - -```bash -sudo ./ci/docker/install/ubuntu_core.sh -sudo ./ci/docker/install/ubuntu_scala.sh -``` +Note : You might need to run `chmod u+x ubuntu_core.sh` and `chmod u+x ubuntu_scala` before running the scripts. The `ubuntu_scala.sh` installs the common dependencies required for both MXNet Scala and MXNet Java packages. @@ -92,31 +73,19 @@ ArtifactId: javaMXNet 1.0-SNAPSHOT ``` -TODO ![project properties](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-properties.png) Review the project's properties. The settings can be left as their default. -TODO ![project location](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-location.png) Set the project's location. The rest of the settings can be left as their default. -TODO ![project 1](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-pom.png) After clicking Finish, you will be presented with the project's first view. The project's `pom.xml` will be open for editing. -**IntelliJ IDEA Alternative** If you want to use only Maven to create the project, you can create a new folder and run the following in the newly created folder : -```bash -mkdir java-proj -cd java-proj/ -mvn archetype:generate -DgroupId=mxnet -DartifactId=mxnetJava -DarchetypeArtifactId=maven-archetype-quickstart -DinteractiveMode=false -``` -This command will create a new Java project folder with name `mxnetJava` inside `java-proj` folder. -More on creating Maven projects can be found on this Maven tutorial : [Maven in 5 Minutes](https://maven.apache.org/guides/getting-started/maven-in-five-minutes.html). - **Step 3.** Add the following Maven dependency to your `pom.xml` file under the `dependencies` tag: ```html @@ -145,39 +114,19 @@ Click "Import Changes" in this prompt. **Step 5.** Build the project: - To build the project, from the menu choose Build, and then choose Build Project. -**Step 6.** Navigate to the App.java class in the project and paste the following code, overwriting the original hello world code. -```java -package mxnet; - -import org.apache.mxnet.javaapi.Context; -import org.apache.mxnet.javaapi.NDArray; - -public class App -{ - public static void main( String[] args ) - { - NDArray nd = NDArray.ones(Context.cpu(), new int[] {10, 20}); - System.out.println( "Testing MXNet by generating a 10x20 NDArray" ); - System.out.println("Shape of NDArray is : " + nd.shape()); - } -} -``` - -**Step 7.** Now run the App.java by clicking the green arrow as highlighted in the image below. - -![run hello mxnet](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-run-projects.png) +**Step 6.** Navigate to the App.java class in the project and paste the code from HelloWorld.java from [Java Demo project](https://github.com/apache/incubator-mxnet/blob/java-api/scala-package/mxnet-demo/java-demo/src/main/java/sample/HelloWorld.java) on MXNet repository, overwriting the original hello world code. 
+You can also grab the entire [Java Demo project](https://github.com/apache/incubator-mxnet/tree/java-api/scala-package/mxnet-demo/java-demo) and run it by following the instructions on the [README](https://github.com/apache/incubator-mxnet/blob/java-api/scala-package/mxnet-demo/java-demo/README.md) +**Step 7.** Now run the App.java. -The result should be this output: +The result should be something similar to this: ``` -Testing MXNet by generating a 10x20 NDArray -Shape of NDArray is : (10,20) - +Hello World! +(1,2) Process finished with exit code 0 ``` - ### Troubleshooting If you get an error, check the dependencies at the beginning of this tutorial. For example, you might see the following in the middle of the error messages, where `x.x` would the version it's looking for. @@ -190,7 +139,6 @@ Library not loaded: /usr/local/opt/opencv/lib/libopencv_calib3d.x.x.dylib This can be resolved be installing OpenCV. - ### Command Line Build Option - You can also compile the project by using the following command at the command line. Change directories to this project's root folder then run the following: From daf35298883ba9acdc82c7ce39b2f89896ed22a3 Mon Sep 17 00:00:00 2001 From: Lanking Date: Fri, 16 Nov 2018 16:12:55 -0800 Subject: [PATCH 24/38] Revert "[MXNET-1198] MXNet Java API (#13162)" This reverts commit 0e9a1ff917d6e8ac54968ace807d1b8c33c214c1. --- docs/tutorials/index.md | 7 - docs/tutorials/java/mxnet_java_on_intellij.md | 171 -------- docs/tutorials/java/ssd_inference.md | 186 -------- scala-package/.gitignore | 3 - scala-package/core/pom.xml | 15 +- .../org/apache/mxnet/javaapi/Context.scala | 15 +- .../scala/org/apache/mxnet/javaapi/IO.scala | 11 +- .../org/apache/mxnet/javaapi/NDArray.scala | 397 ------------------ .../org/apache/mxnet/javaapi/Shape.scala | 2 +- .../org/apache/mxnet/javaapi/NDArrayTest.java | 85 ---- .../mxnet/javaapi/ResourceScopeTestSuite.java | 110 ----- scala-package/examples/pom.xml | 10 +- .../benchmark/run_java_inference_bm.sh | 40 -- .../infer/objectdetector/run_ssd_example.sh | 14 +- .../objectdetector/run_ssd_java_example.sh | 47 --- .../predictor/run_predictor_java_example.sh | 44 -- .../javaapi/benchmark/InferBase.java | 35 -- .../javaapi/benchmark/JavaBenchmark.java | 129 ------ .../benchmark/ObjectDetectionBenchmark.java | 64 --- .../javaapi/infer/objectdetector/README.md | 97 ----- .../objectdetector/SSDClassifierExample.java | 199 --------- .../infer/predictor/PredictorExample.java | 200 --------- .../javaapi/infer/predictor/README.md | 61 --- .../infer/objectdetector/README.md | 20 +- .../objectdetector/SSDClassifierExample.scala | 4 +- scala-package/infer/pom.xml | 10 +- .../mxnet/infer/javaapi/ObjectDetector.scala | 128 ------ .../infer/javaapi/ObjectDetectorOutput.scala | 34 -- .../mxnet/infer/javaapi/Predictor.scala | 99 ----- .../org/apache/mxnet/APIDocGenerator.scala | 84 ---- .../org/apache/mxnet/GeneratorBase.scala | 20 +- .../mxnet/javaapi/JavaNDArrayMacro.scala | 125 ------ .../apache/mxnet/utils/CToScalaUtils.scala | 21 +- .../scala/org/apache/mxnet/MacrosSuite.scala | 2 +- .../mxnet-demo/{scala-demo => }/Makefile | 2 +- .../mxnet-demo/{scala-demo => }/README.md | 12 +- .../mxnet-demo/{scala-demo => }/bin/demo.sh | 0 .../mxnet-demo/{scala-demo => }/bin/run_im.sh | 0 scala-package/mxnet-demo/java-demo/Makefile | 54 --- scala-package/mxnet-demo/java-demo/README.md | 76 ---- .../mxnet-demo/java-demo/bin/java_sample.sh | 20 - .../mxnet-demo/java-demo/bin/run_od.sh | 21 - scala-package/mxnet-demo/java-demo/pom.xml | 25 -- 
.../src/main/java/sample/HelloWorld.java | 28 -- .../src/main/java/sample/ObjectDetection.java | 101 ----- .../mxnet-demo/{scala-demo => }/pom.xml | 0 .../src/main/scala/sample/HelloWorld.scala | 0 .../sample/ImageClassificationExample.scala | 0 scala-package/pom.xml | 4 +- tests/tutorials/test_sanity_tutorials.py | 5 +- 50 files changed, 70 insertions(+), 2767 deletions(-) delete mode 100644 docs/tutorials/java/mxnet_java_on_intellij.md delete mode 100644 docs/tutorials/java/ssd_inference.md delete mode 100644 scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala delete mode 100644 scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java delete mode 100644 scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java delete mode 100644 scala-package/examples/scripts/benchmark/run_java_inference_bm.sh delete mode 100755 scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh delete mode 100755 scala-package/examples/scripts/infer/predictor/run_predictor_java_example.sh delete mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/InferBase.java delete mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java delete mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java delete mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/README.md delete mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java delete mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/PredictorExample.java delete mode 100644 scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/README.md delete mode 100644 scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala delete mode 100644 scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetectorOutput.scala delete mode 100644 scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala delete mode 100644 scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala rename scala-package/mxnet-demo/{scala-demo => }/Makefile (98%) rename scala-package/mxnet-demo/{scala-demo => }/README.md (88%) rename scala-package/mxnet-demo/{scala-demo => }/bin/demo.sh (100%) rename scala-package/mxnet-demo/{scala-demo => }/bin/run_im.sh (100%) delete mode 100644 scala-package/mxnet-demo/java-demo/Makefile delete mode 100644 scala-package/mxnet-demo/java-demo/README.md delete mode 100644 scala-package/mxnet-demo/java-demo/bin/java_sample.sh delete mode 100644 scala-package/mxnet-demo/java-demo/bin/run_od.sh delete mode 100644 scala-package/mxnet-demo/java-demo/pom.xml delete mode 100644 scala-package/mxnet-demo/java-demo/src/main/java/sample/HelloWorld.java delete mode 100644 scala-package/mxnet-demo/java-demo/src/main/java/sample/ObjectDetection.java rename scala-package/mxnet-demo/{scala-demo => }/pom.xml (100%) rename scala-package/mxnet-demo/{scala-demo => }/src/main/scala/sample/HelloWorld.scala (100%) rename scala-package/mxnet-demo/{scala-demo => }/src/main/scala/sample/ImageClassificationExample.scala (100%) diff --git a/docs/tutorials/index.md b/docs/tutorials/index.md index 23cf67529c19..a8f822030ff6 100644 --- a/docs/tutorials/index.md +++ 
b/docs/tutorials/index.md @@ -156,13 +156,6 @@ Select API:  * [MXNet-Scala Examples](https://github.com/apache/incubator-mxnet/tree/master/scala-package/examples/src/main/scala/org/apache/mxnetexamples)
-## Java Tutorials -* Getting Started - * [Developer Environment Setup on IntelliJ IDE](/tutorials/java/mxnet_java_on_intellij.html) -* [Multi Object Detection using pre-trained Single Shot Detector (SSD) Model](/tutorials/java/ssd_inference.html) -* [MXNet-Java Examples](https://github.com/apache/incubator-mxnet/tree/master/scala-package/examples/src/main/java/org/apache/mxnetexamples) -
- ## C++ Tutorials * Models diff --git a/docs/tutorials/java/mxnet_java_on_intellij.md b/docs/tutorials/java/mxnet_java_on_intellij.md deleted file mode 100644 index d9a215998005..000000000000 --- a/docs/tutorials/java/mxnet_java_on_intellij.md +++ /dev/null @@ -1,171 +0,0 @@ -# Run MXNet Java Examples Using the IntelliJ IDE (macOS) - -This tutorial guides you through setting up a simple Java project in IntelliJ IDE on macOS and demonstrates usage of the MXNet Java APIs. - -## Prerequisites: -To use this tutorial you need the following pre-requisites: - -- [Java 8 JDK](http://www.oracle.com/technetwork/java/javase/downloads/index.html) -- [Maven](https://maven.apache.org/install.html) -- [OpenCV](https://opencv.org/) -- [IntelliJ IDEA](https://www.jetbrains.com/idea/) (One can download the community edition from [here](https://www.jetbrains.com/idea/download)) - -### MacOS Prerequisites - -You can run the following commands to install the prerequisites. -``` -/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" -brew update -brew tap caskroom/versions -brew cask install java8 -brew install maven -brew install opencv -``` - -You can also run this tutorial on an Ubuntu machine after installing the following prerequisites. -### Ubuntu Prerequisites - -Run the following commands to install the prerequisites. - -```bash -wget https://github.com/apache/incubator-mxnet/blob/master/ci/docker/install/ubuntu_core.sh -sudo ./ubuntu_core.sh -wget https://github.com/apache/incubator-mxnet/blob/master/ci/docker/install/ubuntu_scala.sh -sudo ./ubuntu_scala.sh -``` - -Note : You might need to run `chmod u+x ubuntu_core.sh` and `chmod u+x ubuntu_scala` before running the scripts. - -The `ubuntu_scala.sh` installs the common dependencies required for both MXNet Scala and MXNet Java packages. - -## Set Up Your Project - -**Step 1.** Install and setup [IntelliJ IDEA](https://www.jetbrains.com/idea/) - -**Step 2.** Create a new Project: - -![intellij welcome](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/scala/intellij-welcome.png) - -From the IntelliJ welcome screen, select "Create New Project". - -Choose the Maven project type. - -Select the checkbox for `Create from archetype`, then choose `org.apache.maven.archetypes:maven-archetype-quickstart` from the list below. More on this can be found on a Maven tutorial : [Maven in 5 Minutes](https://maven.apache.org/guides/getting-started/maven-in-five-minutes.html). - -![maven project type - archetype](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/project-archetype.png) - -click `Next`. - -![project metadata](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-metadata.png) - -Set the project's metadata. For this tutorial, use the following: - -**GroupId** -``` -mxnet -``` -**ArtifactId** -``` -ArtifactId: javaMXNet -``` -**Version** -``` -1.0-SNAPSHOT -``` - -![project properties](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-properties.png) - -Review the project's properties. The settings can be left as their default. - -![project location](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-location.png) - -Set the project's location. The rest of the settings can be left as their default. - -![project 1](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/intellij-project-pom.png) - -After clicking Finish, you will be presented with the project's first view. 
-The project's `pom.xml` will be open for editing. - -**Step 3.** Add the following Maven dependency to your `pom.xml` file under the `dependencies` tag: - -```html - - org.apache.mxnet - mxnet-full_2.11-osx-x86_64-cpu - 1.4.0 - -``` - -To view the latest MXNet Maven packages, you can check [MXNet Maven package repository](https://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.apache.mxnet%22) - -Note : -- Change the osx-x86_64 to linux-x86_64 if your platform is linux. -- Change cpu into gpu if you have a gpu backed machine and want to use gpu. - - -**Step 4.** Import dependencies with Maven: - - - Note the prompt in the lower right corner that states "Maven projects need to be imported". If this is not visible, click on the little greed balloon that appears in the lower right corner. - -![import_dependencies](https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/java/project-import-changes.png) - -Click "Import Changes" in this prompt. - -**Step 5.** Build the project: -- To build the project, from the menu choose Build, and then choose Build Project. - -**Step 6.** Navigate to the App.java class in the project and paste the code from HelloWorld.java from [Java Demo project](https://github.com/apache/incubator-mxnet/blob/java-api/scala-package/mxnet-demo/java-demo/src/main/java/sample/HelloWorld.java) on MXNet repository, overwriting the original hello world code. -You can also grab the entire [Java Demo project](https://github.com/apache/incubator-mxnet/tree/java-api/scala-package/mxnet-demo/java-demo) and run it by following the instructions on the [README](https://github.com/apache/incubator-mxnet/blob/java-api/scala-package/mxnet-demo/java-demo/README.md) - -**Step 7.** Now run the App.java. - -The result should be something similar to this: - -``` -Hello World! -(1,2) -Process finished with exit code 0 -``` - -### Troubleshooting - -If you get an error, check the dependencies at the beginning of this tutorial. For example, you might see the following in the middle of the error messages, where `x.x` would the version it's looking for. - -``` -... -Library not loaded: /usr/local/opt/opencv/lib/libopencv_calib3d.x.x.dylib -... -``` - -This can be resolved be installing OpenCV. - -### Command Line Build Option - -- You can also compile the project by using the following command at the command line. Change directories to this project's root folder then run the following: - -```bash -mvn clean install dependency:copy-dependencies -``` -If the command succeeds, you should see a lot of info and some warning messages, followed by: - -```bash -[INFO] ------------------------------------------------------------------------ -[INFO] BUILD SUCCESS -[INFO] ------------------------------------------------------------------------ -[INFO] Total time: 3.475 s -[INFO] Finished at: 2018-11-08T05:06:31-08:00 -[INFO] ------------------------------------------------------------------------ -``` -The build generates a new jar file in the `target` folder called `javaMXNet-1.0-SNAPSHOT.jar`. - -To run the App.java use the following command from the project's root folder and you should see the same output as we got when the project was run from IntelliJ. 
-```bash -java -cp target/javaMXNet-1.0-SNAPSHOT.jar:target/dependency/* mxnet.App -``` - -## Next Steps -For more information about MXNet Java resources, see the following: - -* [Java Inference API](https://mxnet.incubator.apache.org/api/java/infer.html) -* [Java Inference Examples](https://github.com/apache/incubator-mxnet/tree/java-api/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/) -* [MXNet Tutorials Index](http://mxnet.io/tutorials/index.html) diff --git a/docs/tutorials/java/ssd_inference.md b/docs/tutorials/java/ssd_inference.md deleted file mode 100644 index 6bcaaa2504a4..000000000000 --- a/docs/tutorials/java/ssd_inference.md +++ /dev/null @@ -1,186 +0,0 @@ -# Multi Object Detection using pre-trained SSD Model via Java Inference APIs - -This tutorial shows how to use MXNet Java Inference APIs to run inference on a pre-trained Single Shot Detector (SSD) Model. - -The SSD model is trained on the Pascal VOC 2012 dataset. The network is a SSD model built on Resnet50 as the base network to extract image features. The model is trained to detect the following entities (classes): ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor']. For more details about the model, you can refer to the [MXNet SSD example](https://github.com/apache/incubator-mxnet/tree/master/example/ssd). - -## Prerequisites - -To complete this tutorial, you need the following: -* [MXNet Java Setup on IntelliJ IDEA](/java/mxnet_java_on_intellij.html) (Optional) -* [wget](https://www.gnu.org/software/wget/) To download model artifacts -* SSD Model artifacts - * Use the following script to get the SSD Model files : -```bash -data_path=/tmp/resnet50_ssd -mkdir -p "$data_path" -wget https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-symbol.json -P $data_path -wget https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-0000.params -P $data_path -wget https://s3.amazonaws.com/model-server/models/resnet50_ssd/synset.txt -P $data_path -``` -* Test images : A few sample images to run inference on. - * Use the following script to download sample images : -```bash -image_path=/tmp/resnet50_ssd/images -mkdir -p "$image_path" -cd $image_path -wget https://cloud.githubusercontent.com/assets/3307514/20012567/cbb60336-a27d-11e6-93ff-cbc3f09f5c9e.jpg -O dog.jpg -wget https://cloud.githubusercontent.com/assets/3307514/20012563/cbb41382-a27d-11e6-92a9-18dab4fd1ad3.jpg -O person.jpg -``` - -Alternately, you can get the entire SSD Model artifacts + images in one single script from the MXNet Repository by running [get_ssd_data.sh script](https://github.com/apache/incubator-mxnet/blob/master/scala-package/examples/scripts/infer/objectdetector/get_ssd_data.sh) - -## Time to code! -1\. Following the [MXNet Java Setup on IntelliJ IDEA](/java/mxnet_java_on_intellij.html) tutorial, in the same project `JavaMXNet`, create a new empty class called : `ObjectDetectionTutorial.java`. - -2\. In the `main` function of `ObjectDetectionTutorial.java` define the downloaded model path and the image data paths. This is the same path where we downloaded the model artifacts and images in a previous step. - -```java -String modelPathPrefix = "/tmp/resnet50_ssd/resnet50_ssd_model"; -String inputImagePath = "/tmp/resnet50_ssd/images/dog.jpg"; -``` - -3\. 
We can run the inference code in this example on either CPU or GPU (if you have a GPU backed machine) by choosing the appropriate context. - -```java - -List context = getContext(); -... - -private static List getContext() { -List ctx = new ArrayList<>(); -ctx.add(Context.cpu()); // Choosing CPU Context here - -return ctx; -} -``` - -4\. To provide an input to the model, define the input shape to the model and the Input Data Descriptor (DataDesc) as shown below : - -```java -Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); -List inputDescriptors = new ArrayList(); -inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); -``` - -The input shape can be interpreted as follows : The input has a batch size of 1, with 3 RGB channels in the image, and the height and width of the image is 512 each. - -5\. To run an actual inference on the given image, add the following lines to the `ObjectDetectionTutorial.java` class : - -```java -BufferedImage img = ObjectDetector.loadImageFromFile(inputImagePath); -ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); -List> output = objDet.imageObjectDetect(img, 3); // Top 3 objects detected will be returned -``` - -6\. Let's piece all of the above steps together by showing the final contents of the `ObjectDetectionTutorial.java`. - -```java -package mxnet; - -import org.apache.mxnet.infer.javaapi.ObjectDetector; -import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput; -import org.apache.mxnet.javaapi.Context; -import org.apache.mxnet.javaapi.DType; -import org.apache.mxnet.javaapi.DataDesc; -import org.apache.mxnet.javaapi.Shape; - -import java.awt.image.BufferedImage; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public class ObjectDetectionTutorial { - - public static void main(String[] args) { - - String modelPathPrefix = "/tmp/resnet50_ssd/resnet50_ssd_model"; - - String inputImagePath = "/tmp/resnet50_ssd/images/dog.jpg"; - - List context = getContext(); - - Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); - - List inputDescriptors = new ArrayList(); - inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); - - BufferedImage img = ObjectDetector.loadImageFromFile(inputImagePath); - ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); - List> output = objDet.imageObjectDetect(img, 3); - - printOutput(output, inputShape); - } - - - private static List getContext() { - List ctx = new ArrayList<>(); - ctx.add(Context.cpu()); - - return ctx; - } - - private static void printOutput(List> output, Shape inputShape) { - - StringBuilder outputStr = new StringBuilder(); - - int width = inputShape.get(3); - int height = inputShape.get(2); - - for (List ele : output) { - for (ObjectDetectorOutput i : ele) { - outputStr.append("Class: " + i.getClassName() + "\n"); - outputStr.append("Probabilties: " + i.getProbability() + "\n"); - - List coord = Arrays.asList(i.getXMin() * width, - i.getXMax() * height, i.getYMin() * width, i.getYMax() * height); - StringBuilder sb = new StringBuilder(); - for (float c: coord) { - sb.append(", ").append(c); - } - outputStr.append("Coord:" + sb.substring(2)+ "\n"); - } - } - System.out.println(outputStr); - - } -} -``` - -7\. 
To compile and run this code, change directories to this project's root folder, then run the following: -```bash -mvn clean install dependency:copy-dependencies -``` - -The build generates a new jar file in the `target` folder called `javaMXNet-1.0-SNAPSHOT.jar`. - -To run the ObjectDetectionTutorial.java use the following command from the project's root folder. -```bash -java -cp target/javaMXNet-1.0-SNAPSHOT.jar:target/dependency/* mxnet.ObjectDetectionTutorial -``` - -You should see a similar output being generated for the dog image that we used: -```bash -Class: car -Probabilties: 0.99847263 -Coord:312.21335, 72.02908, 456.01443, 150.66176 -Class: bicycle -Probabilties: 0.9047381 -Coord:155.9581, 149.96365, 383.83694, 418.94516 -Class: dog -Probabilties: 0.82268167 -Coord:83.82356, 179.14001, 206.63783, 476.78754 -``` - -![dog_1](https://cloud.githubusercontent.com/assets/3307514/20012567/cbb60336-a27d-11e6-93ff-cbc3f09f5c9e.jpg) - -The results returned by the inference call translate into the regions in the image where the model detected objects. - -![dog_2](https://cloud.githubusercontent.com/assets/3307514/19171063/91ec2792-8be0-11e6-983c-773bd6868fa8.png) - -## Next Steps -For more information about MXNet Java resources, see the following: - -* [Java Inference API](/api/java/infer.html) -* [Java Inference Examples](https://github.com/apache/incubator-mxnet/tree/java-api/scala-package/examples/src/main/java/org/apache/mxnetexamples/infer/) -* [MXNet Tutorials Index](/tutorials/index.html) diff --git a/scala-package/.gitignore b/scala-package/.gitignore index 6aa4da6b1cfc..0f860e62836a 100644 --- a/scala-package/.gitignore +++ b/scala-package/.gitignore @@ -1,8 +1,5 @@ .flattened-pom.xml core/src/main/scala/org/apache/mxnet/NDArrayAPIBase.scala core/src/main/scala/org/apache/mxnet/NDArrayBase.scala -core/src/main/scala/org/apache/mxnet/javaapi/NDArrayBase.scala core/src/main/scala/org/apache/mxnet/SymbolAPIBase.scala core/src/main/scala/org/apache/mxnet/SymbolBase.scala -examples/scripts/infer/images/ -examples/scripts/infer/models/ diff --git a/scala-package/core/pom.xml b/scala-package/core/pom.xml index daf8c389cbeb..56ff4db1408b 100644 --- a/scala-package/core/pom.xml +++ b/scala-package/core/pom.xml @@ -10,10 +10,6 @@ ../pom.xml - - true - - mxnet-core_2.11 MXNet Scala Package - Core @@ -24,6 +20,12 @@ false
+ + integrationtest + + true + + osx-x86_64-cpu @@ -84,10 +86,7 @@ maven-surefire-plugin 2.22.0 - - -Djava.library.path=${project.parent.basedir}/native/${platform}/target - - ${skipTests} + false diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala index 3d397e3fc496..acae8bf5994c 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala @@ -19,14 +19,7 @@ package org.apache.mxnet.javaapi import collection.JavaConverters._ import scala.language.implicitConversions -/** - * Constructing a context which is used to specify the device and device type that will - * be utilized by the engine. - * - * @param deviceTypeName {'cpu', 'gpu'} String representing the device type - * @param deviceId The device id of the device, needed for GPU - */ -class Context private[mxnet] (val context: org.apache.mxnet.Context) { +class Context(val context: org.apache.mxnet.Context) { val deviceTypeid: Int = context.deviceTypeid @@ -34,11 +27,6 @@ class Context private[mxnet] (val context: org.apache.mxnet.Context) { = this(new org.apache.mxnet.Context(deviceTypeName, deviceId)) def withScope[T](body: => T): T = context.withScope(body) - - /** - * Return device type of current context. - * @return device_type - */ def deviceType: String = context.deviceType override def toString: String = context.toString @@ -55,5 +43,6 @@ object Context { val gpu: Context = org.apache.mxnet.Context.gpu() val devtype2str = org.apache.mxnet.Context.devstr2type.asJava val devstr2type = org.apache.mxnet.Context.devstr2type.asJava + def defaultCtx: Context = org.apache.mxnet.Context.defaultCtx } diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala index d0e10815a1e6..888a5d812c7a 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala @@ -16,9 +16,10 @@ */ package org.apache.mxnet.javaapi + import scala.language.implicitConversions -class DataDesc private[mxnet] (val dataDesc: org.apache.mxnet.DataDesc) { +class DataDesc(val dataDesc: org.apache.mxnet.DataDesc) { def this(name: String, shape: Shape, dType: DType.DType, layout: String) = this(new org.apache.mxnet.DataDesc(name, shape, dType, layout)) @@ -31,13 +32,5 @@ object DataDesc{ implicit def toDataDesc(dataDesc: DataDesc): org.apache.mxnet.DataDesc = dataDesc.dataDesc - /** - * Get the dimension that corresponds to the batch size. - * @param layout layout string. For example, "NCHW". - * @return An axis indicating the batch_size dimension. When data-parallelism is used, - * the data will be automatically split and concatenate along the batch_size dimension. - * Axis can be -1, which means the whole array will be copied - * for each data-parallelism device. 
- */ def getBatchAxis(layout: String): Int = org.apache.mxnet.DataDesc.getBatchAxis(Some(layout)) } diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala deleted file mode 100644 index 6b4f4bdebda5..000000000000 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/NDArray.scala +++ /dev/null @@ -1,397 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.mxnet.javaapi - -import org.apache.mxnet.javaapi.DType.DType - -import collection.JavaConverters._ - -@AddJNDArrayAPIs(false) -object NDArray extends NDArrayBase { - implicit def fromNDArray(nd: org.apache.mxnet.NDArray): NDArray = new NDArray(nd) - - implicit def toNDArray(jnd: NDArray): org.apache.mxnet.NDArray = jnd.nd - - def waitall(): Unit = org.apache.mxnet.NDArray.waitall() - - /** - * One hot encoding indices into matrix out. - * @param indices An NDArray containing indices of the categorical features. - * @param out The result holder of the encoding. - * @return Same as out. - */ - def onehotEncode(indices: NDArray, out: NDArray): NDArray - = org.apache.mxnet.NDArray.onehotEncode(indices, out) - - /** - * Create an empty uninitialized new NDArray, with specified shape. - * - * @param shape shape of the NDArray. - * @param ctx The context of the NDArray. - * - * @return The created NDArray. - */ - def empty(shape: Shape, ctx: Context, dtype: DType.DType): NDArray - = org.apache.mxnet.NDArray.empty(shape, ctx, dtype) - def empty(ctx: Context, shape: Array[Int]): NDArray - = org.apache.mxnet.NDArray.empty(new Shape(shape), ctx) - def empty(ctx: Context, shape: java.util.List[java.lang.Integer]): NDArray - = org.apache.mxnet.NDArray.empty(new Shape(shape), ctx) - - /** - * Create a new NDArray filled with 0, with specified shape. - * - * @param shape shape of the NDArray. - * @param ctx The context of the NDArray. - * - * @return The created NDArray. - */ - def zeros(shape: Shape, ctx: Context, dtype: DType.DType): NDArray - = org.apache.mxnet.NDArray.zeros(shape, ctx, dtype) - def zeros(ctx: Context, shape: Array[Int]): NDArray - = org.apache.mxnet.NDArray.zeros(new Shape(shape), ctx) - def zeros(ctx: Context, shape: java.util.List[java.lang.Integer]): NDArray - = org.apache.mxnet.NDArray.zeros(new Shape(shape), ctx) - - /** - * Create a new NDArray filled with 1, with specified shape. - * @param shape shape of the NDArray. - * @param ctx The context of the NDArray. - * @return The created NDArray. 
- */ - def ones(shape: Shape, ctx: Context, dtype: DType.DType): NDArray - = org.apache.mxnet.NDArray.ones(shape, ctx, dtype) - def ones(ctx: Context, shape: Array[Int]): NDArray - = org.apache.mxnet.NDArray.ones(new Shape(shape), ctx) - def ones(ctx: Context, shape: java.util.List[java.lang.Integer]): NDArray - = org.apache.mxnet.NDArray.ones(new Shape(shape), ctx) - - /** - * Create a new NDArray filled with given value, with specified shape. - * @param shape shape of the NDArray. - * @param value value to be filled with - * @param ctx The context of the NDArray - */ - def full(shape: Shape, value: Float, ctx: Context): NDArray - = org.apache.mxnet.NDArray.full(shape, value, ctx) - - def power(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.power(lhs, rhs) - def power(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.power(lhs, rhs) - def power(lhs: Float, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.power(lhs, rhs) - - def maximum(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.maximum(lhs, rhs) - def maximum(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.maximum(lhs, rhs) - def maximum(lhs: Float, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.maximum(lhs, rhs) - - def minimum(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.minimum(lhs, rhs) - def minimum(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.minimum(lhs, rhs) - def minimum(lhs: Float, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.minimum(lhs, rhs) - - - /** - * Returns the result of element-wise **equal to** (==) comparison operation with broadcasting. - * For each element in input arrays, return 1(true) if corresponding elements are same, - * otherwise return 0(false). - */ - def equal(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.equal(lhs, rhs) - def equal(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.equal(lhs, rhs) - - /** - * Returns the result of element-wise **not equal to** (!=) comparison operation - * with broadcasting. - * For each element in input arrays, return 1(true) if corresponding elements are different, - * otherwise return 0(false). - */ - def notEqual(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.notEqual(lhs, rhs) - def notEqual(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.notEqual(lhs, rhs) - - /** - * Returns the result of element-wise **greater than** (>) comparison operation - * with broadcasting. - * For each element in input arrays, return 1(true) if lhs elements are greater than rhs, - * otherwise return 0(false). - */ - def greater(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.greater(lhs, rhs) - def greater(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.greater(lhs, rhs) - - /** - * Returns the result of element-wise **greater than or equal to** (>=) comparison - * operation with broadcasting. - * For each element in input arrays, return 1(true) if lhs elements are greater than equal to rhs - * otherwise return 0(false). - */ - def greaterEqual(lhs: NDArray, rhs: NDArray): NDArray - = org.apache.mxnet.NDArray.greaterEqual(lhs, rhs) - def greaterEqual(lhs: NDArray, rhs: Float): NDArray - = org.apache.mxnet.NDArray.greaterEqual(lhs, rhs) - - /** - * Returns the result of element-wise **lesser than** (<) comparison operation - * with broadcasting. - * For each element in input arrays, return 1(true) if lhs elements are less than rhs, - * otherwise return 0(false). 
- */ - def lesser(lhs: NDArray, rhs: NDArray): NDArray = org.apache.mxnet.NDArray.lesser(lhs, rhs) - def lesser(lhs: NDArray, rhs: Float): NDArray = org.apache.mxnet.NDArray.lesser(lhs, rhs) - - /** - * Returns the result of element-wise **lesser than or equal to** (<=) comparison - * operation with broadcasting. - * For each element in input arrays, return 1(true) if lhs elements are - * lesser than equal to rhs, otherwise return 0(false). - */ - def lesserEqual(lhs: NDArray, rhs: NDArray): NDArray - = org.apache.mxnet.NDArray.lesserEqual(lhs, rhs) - def lesserEqual(lhs: NDArray, rhs: Float): NDArray - = org.apache.mxnet.NDArray.lesserEqual(lhs, rhs) - - /** - * Create a new NDArray that copies content from source_array. - * @param sourceArr Source data to create NDArray from. - * @param shape shape of the NDArray - * @param ctx The context of the NDArray, default to current default context. - * @return The created NDArray. - */ - def array(sourceArr: java.util.List[java.lang.Float], shape: Shape, ctx: Context = null): NDArray - = org.apache.mxnet.NDArray.array( - sourceArr.asScala.map(ele => Float.unbox(ele)).toArray, shape, ctx) - - /** - * Returns evenly spaced values within a given interval. - * Values are generated within the half-open interval [`start`, `stop`). In other - * words, the interval includes `start` but excludes `stop`. - * @param start Start of interval. - * @param stop End of interval. - * @param step Spacing between values. - * @param repeat Number of times to repeat each element. - * @param ctx Device context. - * @param dType The data type of the `NDArray`. - * @return NDArray of evenly spaced values in the specified range. - */ - def arange(start: Float, stop: Float, step: Float, repeat: Int, - ctx: Context, dType: DType.DType): NDArray = - org.apache.mxnet.NDArray.arange(start, Some(stop), step, repeat, ctx, dType) -} - -/** - * NDArray object in mxnet. - * NDArray is basic ndarray/Tensor like data structure in mxnet.
- * - * NOTE: NDArray is stored in native memory. Use NDArray in a try-with-resources() construct - * or a [[org.apache.mxnet.ResourceScope]] in a try-with-resource to have them - * automatically disposed. You can explicitly control the lifetime of NDArray - * by calling dispose manually. Failure to do this will result in leaking native memory. - * - */ -class NDArray private[mxnet] (val nd: org.apache.mxnet.NDArray ) { - - def this(arr: Array[Float], shape: Shape, ctx: Context) = { - this(org.apache.mxnet.NDArray.array(arr, shape, ctx)) - } - - def this(arr: java.util.List[java.lang.Float], shape: Shape, ctx: Context) = { - this(NDArray.array(arr, shape, ctx)) - } - - def serialize(): Array[Byte] = nd.serialize() - - /** - * Release the native memory.
- * The NDArrays it depends on will NOT be disposed.
- * The object shall never be used after it is disposed. - */ - def dispose(): Unit = nd.dispose() - - /** - * Dispose all NDArrays who help to construct this array.
- * e.g. (a * b + c).disposeDeps() will dispose a, b, c (including their deps) and a * b - * @return this array - */ - def disposeDeps(): NDArray = nd.disposeDepsExcept() - - /** - * Dispose all NDArrays who help to construct this array, excepts those in the arguments.
- * e.g. (a * b + c).disposeDepsExcept(a, b) - * will dispose c and a * b. - * Note that a, b's dependencies will not be disposed either. - * @param arr the Array of NDArray not to dispose - * @return this array - */ - def disposeDepsExcept(arr: Array[NDArray]): NDArray = - nd.disposeDepsExcept(arr.map(NDArray.toNDArray): _*) - - /** - * Return a sliced NDArray that shares memory with current one. - * NDArray only support continuous slicing on axis 0 - * - * @param start Starting index of slice. - * @param stop Finishing index of slice. - * - * @return a sliced NDArray that shares memory with current one. - */ - def slice(start: Int, stop: Int): NDArray = nd.slice(start, stop) - - /** - * Return a sliced NDArray at the ith position of axis0 - * @param i - * @return a sliced NDArray that shares memory with current one. - */ - def slice (i: Int): NDArray = nd.slice(i) - - /** - * Return a sub NDArray that shares memory with current one. - * the first axis will be rolled up, which causes its shape different from slice(i, i+1) - * @param idx index of sub array. - */ - def at(idx: Int): NDArray = nd.at(idx) - - def T: NDArray = nd.T - - /** - * Get data type of current NDArray. - * @return class representing type of current ndarray - */ - def dtype: DType = nd.dtype - - /** - * Return a copied numpy array of current array with specified type. - * @param dtype Desired type of result array. - * @return A copy of array content. - */ - def asType(dtype: DType): NDArray = nd.asType(dtype) - - /** - * Return a reshaped NDArray that shares memory with current one. - * @param dims New shape. - * - * @return a reshaped NDArray that shares memory with current one. - */ - def reshape(dims: Array[Int]): NDArray = nd.reshape(dims) - - /** - * Block until all pending writes operations on current NDArray are finished. - * This function will return when all the pending writes to the current - * NDArray finishes. There can still be pending read going on when the - * function returns. - */ - def waitToRead(): Unit = nd.waitToRead() - - /** - * Get context of current NDArray. - * @return The context of current NDArray. 
- */ - def context: Context = nd.context - - /** - * Set the values of the NDArray - * @param value Value to set - * @return Current NDArray - */ - def set(value: Float): NDArray = nd.set(value) - def set(other: NDArray): NDArray = nd.set(other) - def set(other: Array[Float]): NDArray = nd.set(other) - - def add(other: NDArray): NDArray = this.nd + other.nd - def add(other: Float): NDArray = this.nd + other - def addInplace(other: NDArray): NDArray = this.nd += other - def addInplace(other: Float): NDArray = this.nd += other - def subtract(other: NDArray): NDArray = this.nd - other - def subtract(other: Float): NDArray = this.nd - other - def subtractInplace(other: NDArray): NDArray = this.nd -= other - def subtractInplace(other: Float): NDArray = this.nd -= other - def multiply(other: NDArray): NDArray = this.nd * other - def multiply(other: Float): NDArray = this.nd * other - def multiplyInplace(other: NDArray): NDArray = this.nd *= other - def multiplyInplace(other: Float): NDArray = this.nd *= other - def div(other: NDArray): NDArray = this.nd / other - def div(other: Float): NDArray = this.nd / other - def divInplace(other: NDArray): NDArray = this.nd /= other - def divInplace(other: Float): NDArray = this.nd /= other - def pow(other: NDArray): NDArray = this.nd ** other - def pow(other: Float): NDArray = this.nd ** other - def powInplace(other: NDArray): NDArray = this.nd **= other - def powInplace(other: Float): NDArray = this.nd **= other - def mod(other: NDArray): NDArray = this.nd % other - def mod(other: Float): NDArray = this.nd % other - def modInplace(other: NDArray): NDArray = this.nd %= other - def modInplace(other: Float): NDArray = this.nd %= other - def greater(other: NDArray): NDArray = this.nd > other - def greater(other: Float): NDArray = this.nd > other - def greaterEqual(other: NDArray): NDArray = this.nd >= other - def greaterEqual(other: Float): NDArray = this.nd >= other - def lesser(other: NDArray): NDArray = this.nd < other - def lesser(other: Float): NDArray = this.nd < other - def lesserEqual(other: NDArray): NDArray = this.nd <= other - def lesserEqual(other: Float): NDArray = this.nd <= other - - /** - * Return a copied flat java array of current array (row-major). - * @return A copy of array content. - */ - def toArray: Array[Float] = nd.toArray - - /** - * Return a CPU scalar(float) of current ndarray. - * This ndarray must have shape (1,) - * - * @return The scalar representation of the ndarray. - */ - def toScalar: Float = nd.toScalar - - /** - * Copy the content of current array to other. - * - * @param other Target NDArray or context we want to copy data to. - * @return The copy target NDArray - */ - def copyTo(other: NDArray): NDArray = nd.copyTo(other) - - /** - * Copy the content of current array to a new NDArray in the context. - * - * @param ctx Target context we want to copy data to. - * @return The copy target NDArray - */ - def copyTo(ctx: Context): NDArray = nd.copyTo(ctx) - - /** - * Clone the current array - * @return the copied NDArray in the same context - */ - def copy(): NDArray = copyTo(this.context) - - /** - * Get shape of current NDArray. - * @return an array representing shape of current ndarray - */ - def shape: Shape = nd.shape - - - def size: Int = shape.product - - /** - * Return an `NDArray` that lives in the target context. If the array - * is already in that context, `self` is returned. Otherwise, a copy is made. - * @param context The target context we want the return value to live in. 
- * @return A copy or `self` as an `NDArray` that lives in the target context. - */ - def asInContext(context: Context): NDArray = nd.asInContext(context) - - override def equals(obj: Any): Boolean = nd.equals(obj) - override def hashCode(): Int = nd.hashCode -} diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala index b795fe31f726..5c4464f84211 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala @@ -24,7 +24,7 @@ import scala.language.implicitConversions * Shape of [[NDArray]] or other data */ -class Shape private[mxnet] (val shape: org.apache.mxnet.Shape) { +class Shape(val shape: org.apache.mxnet.Shape) { def this(dims: java.util.List[java.lang.Integer]) = this(new org.apache.mxnet.Shape(dims.asScala.map(Int.unbox))) def this(dims: Array[Int]) = this(new org.apache.mxnet.Shape(dims)) diff --git a/scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java b/scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java deleted file mode 100644 index 2659b7848bc6..000000000000 --- a/scala-package/core/src/test/java/org/apache/mxnet/javaapi/NDArrayTest.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.mxnet.javaapi; - -import org.junit.Test; - -import java.util.Arrays; -import java.util.List; -import org.apache.mxnet.javaapi.NDArrayBase.*; - -import static org.junit.Assert.assertTrue; - -public class NDArrayTest { - @Test - public void testCreateNDArray() { - NDArray nd = new NDArray(new float[]{1.0f, 2.0f, 3.0f}, - new Shape(new int[]{1, 3}), - new Context("cpu", 0)); - int[] arr = new int[]{1, 3}; - assertTrue(Arrays.equals(nd.shape().toArray(), arr)); - assertTrue(nd.at(0).at(0).toArray()[0] == 1.0f); - List list = Arrays.asList(1.0f, 2.0f, 3.0f); - // Second way creating NDArray - nd = NDArray.array(list, - new Shape(new int[]{1, 3}), - new Context("cpu", 0)); - assertTrue(Arrays.equals(nd.shape().toArray(), arr)); - } - - @Test - public void testZeroOneEmpty(){ - NDArray ones = NDArray.ones(new Context("cpu", 0), new int[]{100, 100}); - NDArray zeros = NDArray.zeros(new Context("cpu", 0), new int[]{100, 100}); - NDArray empty = NDArray.empty(new Context("cpu", 0), new int[]{100, 100}); - int[] arr = new int[]{100, 100}; - assertTrue(Arrays.equals(ones.shape().toArray(), arr)); - assertTrue(Arrays.equals(zeros.shape().toArray(), arr)); - assertTrue(Arrays.equals(empty.shape().toArray(), arr)); - } - - @Test - public void testComparison(){ - NDArray nd = new NDArray(new float[]{1.0f, 2.0f, 3.0f}, new Shape(new int[]{3}), new Context("cpu", 0)); - NDArray nd2 = new NDArray(new float[]{3.0f, 4.0f, 5.0f}, new Shape(new int[]{3}), new Context("cpu", 0)); - nd = nd.add(nd2); - float[] greater = new float[]{1, 1, 1}; - assertTrue(Arrays.equals(nd.greater(nd2).toArray(), greater)); - nd = nd.subtract(nd2); - nd = nd.subtract(nd2); - float[] lesser = new float[]{0, 0, 0}; - assertTrue(Arrays.equals(nd.greater(nd2).toArray(), lesser)); - } - - @Test - public void testGenerated(){ - NDArray$ NDArray = NDArray$.MODULE$; - float[] arr = new float[]{1.0f, 2.0f, 3.0f}; - NDArray nd = new NDArray(arr, new Shape(new int[]{3}), new Context("cpu", 0)); - float result = NDArray.norm(NDArray.new normParam(nd))[0].toArray()[0]; - float cal = 0.0f; - for (float ele : arr) { - cal += ele * ele; - } - cal = (float) Math.sqrt(cal); - assertTrue(Math.abs(result - cal) < 1e-5); - NDArray dotResult = new NDArray(new float[]{0}, new Shape(new int[]{1}), new Context("cpu", 0)); - NDArray.dot(NDArray.new dotParam(nd, nd).setOut(dotResult)); - assertTrue(Arrays.equals(dotResult.toArray(), new float[]{14.0f})); - } -} diff --git a/scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java b/scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java deleted file mode 100644 index 1c246d870e28..000000000000 --- a/scala-package/core/src/test/java/org/apache/mxnet/javaapi/ResourceScopeTestSuite.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -package org.apache.mxnet.javaapi; - -import org.apache.mxnet.NativeResourceRef; -import org.apache.mxnet.ResourceScope; -import org.junit.Test; - -import java.util.*; -import java.util.concurrent.Callable; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class ResourceScopeTestSuite { - - /** - * This is a placeholder class to test out whether NDArray References get collected or not when using - * try-with-resources in Java. - * - */ - class TestNDArray { - NDArray selfArray; - - public TestNDArray(Context context, int[] shape) { - this.selfArray = NDArray.ones(context, shape); - } - - public boolean verifyIsDisposed() { - return this.selfArray.nd().isDisposed(); - } - - public NativeResourceRef getNDArrayReference() { - return this.selfArray.nd().ref(); - } - } - - @Test - public void testNDArrayAutoRelease() { - TestNDArray test = null; - - try (ResourceScope scope = new ResourceScope()) { - test = new TestNDArray(Context.cpu(), new int[]{100, 100}); - } - - assertTrue(test.verifyIsDisposed()); - } - - @Test - public void testObjectReleaseFromList() { - List list = new ArrayList<>(); - - try (ResourceScope scope = new ResourceScope()) { - for (int i = 0;i < 10; i++) { - list.add(new TestNDArray(Context.cpu(), new int[] {100, 100})); - } - } - - assertEquals(list.size() , 10); - for (TestNDArray item : list) { - assertTrue(item.verifyIsDisposed()); - } - } - - @Test - public void testObjectReleaseFromMap() { - Map stringToNDArrayMap = new HashMap<>(); - - try (ResourceScope scope = new ResourceScope()) { - for (int i = 0;i < 10; i++) { - stringToNDArrayMap.put(String.valueOf(i),new TestNDArray(Context.cpu(), new int[] {i, i})); - } - } - - assertEquals(stringToNDArrayMap.size(), 10); - for (Map.Entry entry : stringToNDArrayMap.entrySet()) { - assertTrue(entry.getValue().verifyIsDisposed()); - } - - Map ndArrayToStringMap = new HashMap<>(); - - try (ResourceScope scope = new ResourceScope()) { - for (int i = 0;i < 10; i++) { - ndArrayToStringMap.put(new TestNDArray(Context.cpu(), new int[] {i, i}), String.valueOf(i)); - } - } - - assertEquals(ndArrayToStringMap.size(), 10); - for (Map.Entry entry : ndArrayToStringMap.entrySet()) { - assertTrue(entry.getKey().verifyIsDisposed()); - } - - } -} diff --git a/scala-package/examples/pom.xml b/scala-package/examples/pom.xml index 72a40dc01f01..436f2992768b 100644 --- a/scala-package/examples/pom.xml +++ b/scala-package/examples/pom.xml @@ -13,11 +13,13 @@ mxnet-examples_2.11 MXNet Scala Package - Examples - - true - - + + unittest + + true + + integrationtest diff --git a/scala-package/examples/scripts/benchmark/run_java_inference_bm.sh b/scala-package/examples/scripts/benchmark/run_java_inference_bm.sh deleted file mode 100644 index 5a468e344829..000000000000 --- a/scala-package/examples/scripts/benchmark/run_java_inference_bm.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -set -e - -hw_type=cpu -if [ "$USE_GPU" = "1" ] -then - hw_type=gpu -fi - -platform=linux-x86_64 - -if [[ $OSTYPE = [darwin]* ]] -then - platform=osx-x86_64 -fi - -MXNET_ROOT=$(cd "$(dirname $0)/../../../.."; pwd) -CLASS_PATH=$MXNET_ROOT/scala-package/assembly/$platform-$hw_type/target/*:$MXNET_ROOT/scala-package/examples/target/* - -java -Xmx8G -Dmxnet.traceLeakedObjects=true -cp $CLASS_PATH \ - org.apache.mxnetexamples.javaapi.benchmark.JavaBenchmark $@ - diff --git a/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh b/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh index 6b4edb7c4c94..8cea892b5809 100755 --- a/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh +++ b/scala-package/examples/scripts/infer/objectdetector/run_ssd_example.sh @@ -17,21 +17,9 @@ # specific language governing permissions and limitations # under the License. -hw_type=cpu -if [[ $4 = gpu ]] -then - hw_type=gpu -fi - -platform=linux-x86_64 - -if [[ $OSTYPE = [darwin]* ]] -then - platform=osx-x86_64 -fi MXNET_ROOT=$(cd "$(dirname $0)/../../../../../"; pwd) -CLASS_PATH=$MXNET_ROOT/scala-package/assembly/$platform-$hw_type/target/*:$MXNET_ROOT/scala-package/examples/target/*:$MXNET_ROOT/scala-package/examples/target/classes/lib/*:$MXNET_ROOT/scala-package/infer/target/* +CLASS_PATH=$MXNET_ROOT/scala-package/assembly/osx-x86_64-cpu/target/*:$MXNET_ROOT/scala-package/examples/target/*:$MXNET_ROOT/scala-package/examples/target/classes/lib/*:$MXNET_ROOT/scala-package/infer/target/* # model dir and prefix MODEL_DIR=$1 diff --git a/scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh b/scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh deleted file mode 100755 index 00ed793a7bb5..000000000000 --- a/scala-package/examples/scripts/infer/objectdetector/run_ssd_java_example.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash - -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -hw_type=cpu -if [[ $4 = gpu ]] -then - hw_type=gpu -fi - -platform=linux-x86_64 - -if [[ $OSTYPE = [darwin]* ]] -then - platform=osx-x86_64 -fi - -MXNET_ROOT=$(cd "$(dirname $0)/../../../../../"; pwd) -CLASS_PATH=$MXNET_ROOT/scala-package/assembly/$platform-$hw_type/target/*:$MXNET_ROOT/scala-package/examples/target/*:$MXNET_ROOT/scala-package/examples/target/classes/lib/*:$MXNET_ROOT/scala-package/infer/target/*:$MXNET_ROOT/scala-package/examples/src/main/scala/org/apache/mxnetexamples/api/java/infer/imageclassifier/* - -# model dir and prefix -MODEL_DIR=$1 -# input image -INPUT_IMG=$2 -# which input image dir -INPUT_DIR=$3 - -java -Xmx8G -cp $CLASS_PATH \ - org.apache.mxnetexamples.javaapi.infer.objectdetector.SSDClassifierExample \ - --model-path-prefix $MODEL_DIR \ - --input-image $INPUT_IMG \ - --input-dir $INPUT_DIR diff --git a/scala-package/examples/scripts/infer/predictor/run_predictor_java_example.sh b/scala-package/examples/scripts/infer/predictor/run_predictor_java_example.sh deleted file mode 100755 index 4ebcc3076a78..000000000000 --- a/scala-package/examples/scripts/infer/predictor/run_predictor_java_example.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash - -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -hw_type=cpu -if [[ $3 = gpu ]] -then - hw_type=gpu -fi - -platform=linux-x86_64 - -if [[ $OSTYPE = [darwin]* ]] -then - platform=osx-x86_64 -fi - -MXNET_ROOT=$(cd "$(dirname $0)/../../../../../"; pwd) -CLASS_PATH=$MXNET_ROOT/scala-package/assembly/$platform-$hw_type/target/*:$MXNET_ROOT/scala-package/examples/target/*:$MXNET_ROOT/scala-package/examples/target/classes/lib/*:$MXNET_ROOT/scala-package/infer/target/* - -# model dir and prefix -MODEL_DIR=$1 -# input image -INPUT_IMG=$2 - -java -Xmx8G -cp $CLASS_PATH \ - org.apache.mxnetexamples.javaapi.infer.predictor.PredictorExample \ - --model-path-prefix $MODEL_DIR \ - --input-image $INPUT_IMG diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/InferBase.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/InferBase.java deleted file mode 100644 index fdcde6b4152c..000000000000 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/InferBase.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.mxnetexamples.javaapi.benchmark; - -import org.apache.mxnet.javaapi.Context; -import org.kohsuke.args4j.Option; - -import java.util.List; - -abstract class InferBase { - @Option(name = "--num-runs", usage = "Number of runs") - public int numRun = 1; - @Option(name = "--model-name", usage = "Name of the model") - public String modelName = ""; - @Option(name = "--batchsize", usage = "Size of the batch") - public int batchSize = 1; - - public abstract void preProcessModel(List context); - public abstract void runSingleInference(); - public abstract void runBatchInference(); -} diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java deleted file mode 100644 index 4a6bb2dd38bf..000000000000 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/JavaBenchmark.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
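The `InferBase` contract shown above is what `JavaBenchmark` drives: each benchmark wires up its resources in `preProcessModel` and exposes single-run and batch entry points that get timed. The sketch below is a hypothetical extra subclass, not part of this codebase; the model path is a placeholder, it assumes the removed `InferBase` with its `List<Context>` parameter, and it reuses the Java `Predictor` API that appears later in this patch.

```java
package org.apache.mxnetexamples.javaapi.benchmark;

// Hypothetical subclass of the (removed) InferBase contract; illustration only.
import org.apache.mxnet.infer.javaapi.Predictor;
import org.apache.mxnet.javaapi.Context;
import org.apache.mxnet.javaapi.DType;
import org.apache.mxnet.javaapi.DataDesc;
import org.apache.mxnet.javaapi.Shape;

import java.util.ArrayList;
import java.util.List;

class SimplePredictorBenchmark extends InferBase {
    private Predictor predictor;
    private float[][] input;

    // Build the Predictor once; JavaBenchmark calls this before timing anything.
    public void preProcessModel(List<Context> context) {
        Shape inputShape = new Shape(new int[]{this.batchSize, 3, 224, 224});
        List<DataDesc> descriptors = new ArrayList<>();
        descriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW"));
        // Placeholder model prefix; point this at a real symbol/params pair.
        predictor = new Predictor("/model/placeholder-prefix", descriptors, context, 0);
        input = new float[][]{new float[this.batchSize * 3 * 224 * 224]};
    }

    // Timed in a loop by JavaBenchmark; one forward pass per call.
    public void runSingleInference() {
        predictor.predict(input);
    }

    // Same forward pass; the batch dimension is already baked into the input shape.
    public void runBatchInference() {
        predictor.predict(input);
    }
}
```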
- */ - -package org.apache.mxnetexamples.javaapi.benchmark; - -import org.apache.mxnet.javaapi.Context; -import org.kohsuke.args4j.CmdLineParser; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public class JavaBenchmark { - - private static boolean runBatch = false; - - private static void parse(Object inst, String[] args) { - CmdLineParser parser = new CmdLineParser(inst); - try { - parser.parseArgument(args); - } catch (Exception e) { - System.err.println(e.getMessage() + e); - parser.printUsage(System.err); - System.exit(1); - } - } - - private static long percentile(int p, long[] seq) { - Arrays.sort(seq); - int k = (int) Math.ceil((seq.length - 1) * (p / 100.0)); - return seq[k]; - } - - private static void printStatistics(long[] inferenceTimesRaw, String metricsPrefix) { - long[] inferenceTimes = inferenceTimesRaw; - // remove head and tail - if (inferenceTimes.length > 2) { - inferenceTimes = Arrays.copyOfRange(inferenceTimesRaw, - 1, inferenceTimesRaw.length - 1); - } - double p50 = percentile(50, inferenceTimes) / 1.0e6; - double p99 = percentile(99, inferenceTimes) / 1.0e6; - double p90 = percentile(90, inferenceTimes) / 1.0e6; - long sum = 0; - for (long time: inferenceTimes) sum += time; - double average = sum / (inferenceTimes.length * 1.0e6); - - System.out.println( - String.format("\n%s_p99 %fms\n%s_p90 %fms\n%s_p50 %fms\n%s_average %1.2fms", - metricsPrefix, p99, metricsPrefix, p90, - metricsPrefix, p50, metricsPrefix, average) - ); - - } - - private static List bindToDevice() { - List context = new ArrayList(); - if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && - Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { - context.add(Context.gpu()); - } else { - context.add(Context.cpu()); - } - return context; - } - - public static void main(String[] args) { - if (args.length < 2) { - StringBuilder sb = new StringBuilder(); - sb.append("Please follow the format:"); - sb.append("\n --model-name "); - sb.append("\n --num-runs "); - sb.append("\n --batchsize "); - System.out.println(sb.toString()); - return; - } - String modelName = args[1]; - InferBase model = null; - switch(modelName) { - case "ObjectDetection": - runBatch = true; - ObjectDetectionBenchmark inst = new ObjectDetectionBenchmark(); - parse(inst, args); - model = inst; - break; - default: - System.err.println("Model name not found! 
" + modelName); - System.exit(1); - } - List context = bindToDevice(); - long[] result = new long[model.numRun]; - model.preProcessModel(context); - if (runBatch) { - for (int i =0;i < model.numRun; i++) { - long currTime = System.nanoTime(); - model.runBatchInference(); - result[i] = System.nanoTime() - currTime; - } - System.out.println("Batchsize: " + model.batchSize); - System.out.println("Num of runs: " + model.numRun); - printStatistics(result, modelName +"batch_inference"); - } - - model.batchSize = 1; - model.preProcessModel(context); - result = new long[model.numRun]; - for (int i = 0; i < model.numRun; i++) { - long currTime = System.nanoTime(); - model.runSingleInference(); - result[i] = System.nanoTime() - currTime; - } - System.out.println("Num of runs: " + model.numRun); - printStatistics(result, modelName + "single_inference"); - } -} diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java deleted file mode 100644 index 257ea3241626..000000000000 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/benchmark/ObjectDetectionBenchmark.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.mxnetexamples.javaapi.benchmark; - -import org.apache.mxnet.infer.javaapi.ObjectDetector; -import org.apache.mxnet.javaapi.*; -import org.kohsuke.args4j.Option; - -import java.util.ArrayList; -import java.util.List; - -class ObjectDetectionBenchmark extends InferBase { - @Option(name = "--model-path-prefix", usage = "input model directory and prefix of the model") - public String modelPathPrefix = "/model/ssd_resnet50_512"; - @Option(name = "--input-image", usage = "the input image") - public String inputImagePath = "/images/dog.jpg"; - - private ObjectDetector objDet; - private NDArray img; - private NDArray$ NDArray = NDArray$.MODULE$; - - public void preProcessModel(List context) { - Shape inputShape = new Shape(new int[] {this.batchSize, 3, 512, 512}); - List inputDescriptors = new ArrayList<>(); - inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); - objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); - img = ObjectDetector.bufferedImageToPixels( - ObjectDetector.reshapeImage( - ObjectDetector.loadImageFromFile(inputImagePath), 512, 512 - ), - new Shape(new int[] {1, 3, 512, 512}) - ); - } - - public void runSingleInference() { - List nd = new ArrayList<>(); - nd.add(img); - objDet.objectDetectWithNDArray(nd, 3); - } - - public void runBatchInference() { - List nd = new ArrayList<>(); - NDArray[] temp = new NDArray[batchSize]; - for (int i = 0; i < batchSize; i++) temp[i] = img.copy(); - NDArray batched = NDArray.concat(temp, batchSize, 0, null)[0]; - nd.add(batched); - objDet.objectDetectWithNDArray(nd, 3); - } -} diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/README.md b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/README.md deleted file mode 100644 index 681253f39a88..000000000000 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/README.md +++ /dev/null @@ -1,97 +0,0 @@ -# Single Shot Multi Object Detection using Java Inference API - -In this example, you will learn how to use Java Inference API to run Inference on pre-trained Single Shot Multi Object Detection (SSD) MXNet model. - -The model is trained on the [Pascal VOC 2012 dataset](http://host.robots.ox.ac.uk/pascal/VOC/voc2012/index.html). The network is a SSD model built on Resnet50 as base network to extract image features. The model is trained to detect the following entities (classes): ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor']. For more details about the model, you can refer to the [MXNet SSD example](https://github.com/apache/incubator-mxnet/tree/master/example/ssd). - - -## Contents - -1. [Prerequisites](#prerequisites) -2. [Download artifacts](#download-artifacts) -3. [Setup datapath and parameters](#setup-datapath-and-parameters) -4. [Run the image inference example](#run-the-image-inference-example) -5. [Infer APIs](#infer-api-details) -6. [Next steps](#next-steps) - - -## Prerequisites - -1. MXNet -2. MXNet Scala Package -3. [IntelliJ IDE (or alternative IDE) project setup](http://mxnet.incubator.apache.org/tutorials/java/mxnet_java_on_intellij.html) with the MXNet Scala/Java Package -4. wget - - -## Setup Guide - -### Download Artifacts -#### Step 1 -You can download the files using the script `get_ssd_data.sh`. 
It will download and place the model files in a `model` folder and the test image files in a `image` folder in the current directory. -From the `scala-package/examples/scripts/infer/objectdetector/` folder run: - -```bash -./get_ssd_data.sh -``` - -**Note**: You may need to run `chmod +x get_ssd_data.sh` before running this script. - -In the pre-trained model, the `input_name` is `data` and shape is `(1, 3, 512, 512)`. -This shape translates to: a batch of `1` image, the image has color and uses `3` channels (RGB), and the image has the dimensions of `512` pixels in height by `512` pixels in width. - -`image/jpeg` is the expected input type, since this example's image pre-processor only supports the handling of binary JPEG images. - -The output shape is `(1, 6132, 6)`. As with the input, the `1` is the number of images. `6132` is the number of prediction results, and `6` is for the size of each prediction. Each prediction contains the following components: -- `Class` -- `Accuracy` -- `Xmin` -- `Ymin` -- `Xmax` -- `Ymax` - - -### Setup Datapath and Parameters -#### Step 2 -The followings is the parameters defined for this example, you can find more information in the `class SSDClassifierExample`. - -| Argument | Comments | -| ----------------------------- | ---------------------------------------- | -| `model-path-prefix` | Folder path with prefix to the model (including json, params, and any synset file). | -| `input-image` | The image to run inference on. | -| `input-dir` | The directory of images to run inference on. | - - -## How to Run Inference -After the previous steps, you should be able to run the code using the following script that will pass all of the required parameters to the Infer API. - -From the `scala-package/examples/scripts/inferexample/objectdetector/` folder run: - -```bash -./run_ssd_example.sh ../models/resnet50_ssd/resnet50_ssd/resnet50_ssd_model ../images/dog.jpg ../images -``` - -**Notes**: -* These are relative paths to this script. -* You may need to run `chmod +x run_ssd_example.sh` before running this script. - -The example should give expected output as shown below: -``` -Class: car -Probabilties: 0.99847263 -(Coord:,312.21335,72.0291,456.01443,150.66176) -Class: bicycle -Probabilties: 0.90473825 -(Coord:,155.95807,149.96362,383.8369,418.94513) -Class: dog -Probabilties: 0.8226818 -(Coord:,83.82353,179.13998,206.63783,476.7875) -``` -the outputs come from the the input image, with top3 predictions picked. - - -## Infer API Details -This example uses ObjectDetector class provided by MXNet's Java Infer APIs. It provides methods to load the images, create NDArray out of Java BufferedImage and run prediction using Classifier and Predictor APIs. - - -## References -This documentation used the model and inference setup guide from the [MXNet Model Server SSD example](https://github.com/awslabs/mxnet-model-server/blob/master/examples/ssd/README.md). diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java deleted file mode 100644 index a9c00f7f1d81..000000000000 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/objectdetector/SSDClassifierExample.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
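The README walk-through above maps onto only a few Java calls. A minimal sketch of the single-image flow is shown below; the model and image paths are placeholders for the artifacts fetched by `get_ssd_data.sh`, and it assumes the MXNet assembly jar and native library are on the classpath.

```java
import org.apache.mxnet.infer.javaapi.ObjectDetector;
import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput;
import org.apache.mxnet.javaapi.Context;
import org.apache.mxnet.javaapi.DType;
import org.apache.mxnet.javaapi.DataDesc;
import org.apache.mxnet.javaapi.Shape;

import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;

// Minimal single-image SSD detection, mirroring the README steps above.
public class SsdQuickStart {
    public static void main(String[] args) {
        List<Context> contexts = new ArrayList<>();
        contexts.add(Context.cpu());

        // The SSD model expects one 3-channel 512x512 image in NCHW layout.
        Shape inputShape = new Shape(new int[]{1, 3, 512, 512});
        List<DataDesc> inputDescriptors = new ArrayList<>();
        inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW"));

        // Placeholder paths: point these at the downloaded model and test image.
        ObjectDetector detector = new ObjectDetector(
                "models/resnet50_ssd/resnet50_ssd_model", inputDescriptors, contexts, 0);
        BufferedImage image = ObjectDetector.loadImageFromFile("images/dog.jpg");

        // Top-3 detections, as in the sample output above.
        List<List<ObjectDetectorOutput>> results = detector.imageObjectDetect(image, 3);
        for (ObjectDetectorOutput det : results.get(0)) {
            System.out.println("Class: " + det.getClassName()
                    + " Probability: " + det.getProbability());
        }
    }
}
```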
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.mxnetexamples.javaapi.infer.objectdetector; - -import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput; -import org.kohsuke.args4j.CmdLineParser; -import org.kohsuke.args4j.Option; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.apache.mxnet.javaapi.*; -import org.apache.mxnet.infer.javaapi.ObjectDetector; - -// scalastyle:off -import java.awt.image.BufferedImage; -// scalastyle:on - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import java.io.File; - -public class SSDClassifierExample { - @Option(name = "--model-path-prefix", usage = "input model directory and prefix of the model") - private String modelPathPrefix = "/model/ssd_resnet50_512"; - @Option(name = "--input-image", usage = "the input image") - private String inputImagePath = "/images/dog.jpg"; - @Option(name = "--input-dir", usage = "the input batch of images directory") - private String inputImageDir = "/images/"; - - final static Logger logger = LoggerFactory.getLogger(SSDClassifierExample.class); - - static List> - runObjectDetectionSingle(String modelPathPrefix, String inputImagePath, List context) { - Shape inputShape = new Shape(new int[]{1, 3, 512, 512}); - List inputDescriptors = new ArrayList(); - inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); - BufferedImage img = ObjectDetector.loadImageFromFile(inputImagePath); - ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); - return objDet.imageObjectDetect(img, 3); - } - - static List>> - runObjectDetectionBatch(String modelPathPrefix, String inputImageDir, List context) { - Shape inputShape = new Shape(new int[]{1, 3, 512, 512}); - List inputDescriptors = new ArrayList(); - inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); - ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); - - // Loading batch of images from the directory path - List> batchFiles = generateBatches(inputImageDir, 20); - List>> outputList - = new ArrayList>>(); - - for (List batchFile : batchFiles) { - List imgList = ObjectDetector.loadInputBatch(batchFile); - // Running inference on batch of images loaded in previous step - List> tmp - = objDet.imageBatchObjectDetect(imgList, 5); - outputList.add(tmp); - } - return outputList; - } - - static List> generateBatches(String inputImageDirPath, int batchSize) { - File dir = new File(inputImageDirPath); - - List> output = new ArrayList>(); - List batch = new ArrayList(); - for (File imgFile : dir.listFiles()) { - batch.add(imgFile.getPath()); - if (batch.size() == batchSize) { - output.add(batch); - batch = new ArrayList(); - } - } - if (batch.size() > 0) { - output.add(batch); - } - return output; - } - - public static void main(String[] args) { - 
SSDClassifierExample inst = new SSDClassifierExample(); - CmdLineParser parser = new CmdLineParser(inst); - try { - parser.parseArgument(args); - } catch (Exception e) { - logger.error(e.getMessage(), e); - parser.printUsage(System.err); - System.exit(1); - } - - String mdprefixDir = inst.modelPathPrefix; - String imgPath = inst.inputImagePath; - String imgDir = inst.inputImageDir; - - if (!checkExist(Arrays.asList(mdprefixDir + "-symbol.json", imgDir, imgPath))) { - logger.error("Model or input image path does not exist"); - System.exit(1); - } - - List context = new ArrayList(); - if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && - Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { - context.add(Context.gpu()); - } else { - context.add(Context.cpu()); - } - - try { - Shape inputShape = new Shape(new int[]{1, 3, 512, 512}); - Shape outputShape = new Shape(new int[]{1, 6132, 6}); - - - int width = inputShape.get(2); - int height = inputShape.get(3); - StringBuilder outputStr = new StringBuilder().append("\n"); - - List> output - = runObjectDetectionSingle(mdprefixDir, imgPath, context); - - for (List ele : output) { - for (ObjectDetectorOutput i : ele) { - outputStr.append("Class: " + i.getClassName() + "\n"); - outputStr.append("Probabilties: " + i.getProbability() + "\n"); - - List coord = Arrays.asList(i.getXMin() * width, - i.getXMax() * height, i.getYMin() * width, i.getYMax() * height); - StringBuilder sb = new StringBuilder(); - for (float c : coord) { - sb.append(", ").append(c); - } - outputStr.append("Coord:" + sb.substring(2) + "\n"); - } - } - logger.info(outputStr.toString()); - - List>> outputList = - runObjectDetectionBatch(mdprefixDir, imgDir, context); - - outputStr = new StringBuilder().append("\n"); - int index = 0; - for (List> i : outputList) { - for (List j : i) { - outputStr.append("*** Image " + (index + 1) + "***" + "\n"); - for (ObjectDetectorOutput k : j) { - outputStr.append("Class: " + k.getClassName() + "\n"); - outputStr.append("Probabilties: " + k.getProbability() + "\n"); - List coord = Arrays.asList(k.getXMin() * width, - k.getXMax() * height, k.getYMin() * width, k.getYMax() * height); - - StringBuilder sb = new StringBuilder(); - for (float c : coord) { - sb.append(", ").append(c); - } - outputStr.append("Coord:" + sb.substring(2) + "\n"); - } - index++; - } - } - logger.info(outputStr.toString()); - - } catch (Exception e) { - logger.error(e.getMessage(), e); - parser.printUsage(System.err); - System.exit(1); - } - System.exit(0); - } - - static Boolean checkExist(List arr) { - Boolean exist = true; - for (String item : arr) { - if (!(new File(item).exists())) { - logger.error("Cannot find: " + item); - exist = false; - } - } - return exist; - } -} diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/PredictorExample.java b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/PredictorExample.java deleted file mode 100644 index c9b4426f52b3..000000000000 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/PredictorExample.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
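The coordinate handling in the removed example above is easy to miss: each `ObjectDetectorOutput` carries a probability plus corner coordinates that the example scales by the input dimensions before printing. A small formatting helper in the same spirit is sketched below; the scaling convention is taken from that example and should be treated as an assumption rather than a documented contract.

```java
import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput;

// Formats one detection roughly the way the removed example prints it.
// Assumption (based on the example above): the min/max accessors are fractions
// of the input size, so they are scaled by width/height here.
public class DetectionFormatter {
    public static String format(ObjectDetectorOutput det, int width, int height) {
        float xMin = det.getXMin() * width;
        float xMax = det.getXMax() * width;
        float yMin = det.getYMin() * height;
        float yMax = det.getYMax() * height;
        return String.format("Class: %s%nProbabilities: %f%n(Coord:,%f,%f,%f,%f)",
                det.getClassName(), det.getProbability(), xMin, yMin, xMax, yMax);
    }
}
```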
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.mxnetexamples.javaapi.infer.predictor; - -import org.apache.mxnet.infer.javaapi.Predictor; -import org.apache.mxnet.javaapi.*; -import org.kohsuke.args4j.CmdLineParser; -import org.kohsuke.args4j.Option; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.imageio.ImageIO; -import java.awt.Graphics2D; -import java.awt.image.BufferedImage; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -/** - * This Class is a demo to show how users can use Predictor APIs to do - * Image Classification with all hand-crafted Pre-processing. - * All helper functions for image pre-processing are - * currently available in ObjectDetector class. - */ -public class PredictorExample { - @Option(name = "--model-path-prefix", usage = "input model directory and prefix of the model") - private String modelPathPrefix = "/model/ssd_resnet50_512"; - @Option(name = "--input-image", usage = "the input image") - private String inputImagePath = "/images/dog.jpg"; - - final static Logger logger = LoggerFactory.getLogger(PredictorExample.class); - - /** - * Load the image from file to buffered image - * It can be replaced by loadImageFromFile from ObjectDetector - * @param inputImagePath input image Path in String - * @return Buffered image - */ - private static BufferedImage loadIamgeFromFile(String inputImagePath) { - BufferedImage buf = null; - try { - buf = ImageIO.read(new File(inputImagePath)); - } catch (IOException e) { - System.err.println(e); - } - return buf; - } - - /** - * Reshape the current image using ImageIO and Graph2D - * It can be replaced by reshapeImage from ObjectDetector - * @param buf Buffered image - * @param newWidth desired width - * @param newHeight desired height - * @return a reshaped bufferedImage - */ - private static BufferedImage reshapeImage(BufferedImage buf, int newWidth, int newHeight) { - BufferedImage resizedImage = new BufferedImage(newWidth, newHeight, BufferedImage.TYPE_INT_RGB); - Graphics2D g = resizedImage.createGraphics(); - g.drawImage(buf, 0, 0, newWidth, newHeight, null); - g.dispose(); - return resizedImage; - } - - /** - * Convert an image from a buffered image into pixels float array - * It can be replaced by bufferedImageToPixels from ObjectDetector - * @param buf buffered image - * @return Float array - */ - private static float[] imagePreprocess(BufferedImage buf) { - // Get height and width of the image - int w = buf.getWidth(); - int h = buf.getHeight(); - - // get an array of integer pixels in the default RGB color mode - int[] pixels = buf.getRGB(0, 0, w, h, null, 0, w); - - // 3 times height and width for R,G,B channels - float[] result = new float[3 * h * w]; - - int row = 0; - // copy pixels to array vertically - while (row < h) { - int col = 0; - // copy pixels to array horizontally - while (col < w) { - int rgb = 
pixels[row * w + col]; - // getting red color - result[0 * h * w + row * w + col] = (rgb >> 16) & 0xFF; - // getting green color - result[1 * h * w + row * w + col] = (rgb >> 8) & 0xFF; - // getting blue color - result[2 * h * w + row * w + col] = rgb & 0xFF; - col += 1; - } - row += 1; - } - buf.flush(); - return result; - } - - /** - * Helper class to print the maximum prediction result - * @param probabilities The float array of probability - * @param modelPathPrefix model Path needs to load the synset.txt - */ - private static String printMaximumClass(float[] probabilities, - String modelPathPrefix) throws IOException { - String synsetFilePath = modelPathPrefix.substring(0, - 1 + modelPathPrefix.lastIndexOf(File.separator)) + "/synset.txt"; - BufferedReader reader = new BufferedReader(new FileReader(synsetFilePath)); - ArrayList list = new ArrayList<>(); - String line = reader.readLine(); - - while (line != null){ - list.add(line); - line = reader.readLine(); - } - reader.close(); - - int maxIdx = 0; - for (int i = 1;i probabilities[maxIdx]) { - maxIdx = i; - } - } - - return "Probability : " + probabilities[maxIdx] + " Class : " + list.get(maxIdx) ; - } - - public static void main(String[] args) { - PredictorExample inst = new PredictorExample(); - CmdLineParser parser = new CmdLineParser(inst); - try { - parser.parseArgument(args); - } catch (Exception e) { - logger.error(e.getMessage(), e); - parser.printUsage(System.err); - System.exit(1); - } - // Prepare the model - List context = new ArrayList(); - if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && - Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { - context.add(Context.gpu()); - } else { - context.add(Context.cpu()); - } - List inputDesc = new ArrayList<>(); - Shape inputShape = new Shape(new int[]{1, 3, 224, 224}); - inputDesc.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); - Predictor predictor = new Predictor(inst.modelPathPrefix, inputDesc, context,0); - // Prepare data - BufferedImage img = loadIamgeFromFile(inst.inputImagePath); - - img = reshapeImage(img, 224, 224); - // predict - float[][] result = predictor.predict(new float[][]{imagePreprocess(img)}); - try { - System.out.println("Predict with Float input"); - System.out.println(printMaximumClass(result[0], inst.modelPathPrefix)); - } catch (IOException e) { - System.err.println(e); - } - // predict with NDArray - NDArray nd = new NDArray( - imagePreprocess(img), - new Shape(new int[]{1, 3, 224, 224}), - Context.cpu()); - List ndList = new ArrayList<>(); - ndList.add(nd); - List ndResult = predictor.predictWithNDArray(ndList); - try { - System.out.println("Predict with NDArray"); - System.out.println(printMaximumClass(ndResult.get(0).toArray(), inst.modelPathPrefix)); - } catch (IOException e) { - System.err.println(e); - } - } - -} diff --git a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/README.md b/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/README.md deleted file mode 100644 index 1f2c9e0e813c..000000000000 --- a/scala-package/examples/src/main/java/org/apache/mxnetexamples/javaapi/infer/predictor/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# Image Classification using Java Predictor - -In this example, you will learn how to use Java Inference API to -build and run pre-trained Resnet 18 model. - -## Contents - -1. [Prerequisites](#prerequisites) -2. [Download artifacts](#download-artifacts) -3. 
[Setup datapath and parameters](#setup-datapath-and-parameters) -4. [Run the image classifier example](#run-the-image-inference-example) - -## Prerequisites - -1. Build from source with [MXNet](https://mxnet.incubator.apache.org/install/index.html) -2. [IntelliJ IDE (or alternative IDE) project setup](https://github.com/apache/incubator-mxnet/blob/master/docs/tutorials/java/mxnet_java_on_intellij.md) with the MXNet Java Package -3. wget - -## Download Artifacts - -For this tutorial, you can get the model and sample input image by running following bash file. This script will use `wget` to download these artifacts from AWS S3. - -From the `scala-package/examples/scripts/infer/imageclassifier/` folder run: - -```bash -./get_resnet_18_data.sh -``` - -**Note**: You may need to run `chmod +x get_resnet_18_data.sh` before running this script. - -### Setup Datapath and Parameters - -The available arguments are as follows: - -| Argument | Comments | -| ----------------------------- | ---------------------------------------- | -| `model-dir`                   | Folder path with prefix to the model (including json, params, and any synset file). | -| `input-image` | The image to run inference on. | - -## Run the image classifier example - -After the previous steps, you should be able to run the code using the following script that will pass all of the required parameters to the Predictor API. - -From the `scala-package/examples/scripts/infer/predictor/` folder run: - -```bash -bash run_predictor_java_example.sh ../models/resnet-18/resnet-18 ../images/kitten.jpg -``` - -**Notes**: -* These are relative paths to this script. -* You may need to run `chmod +x run_predictor_java_example.sh` before running this script. - -The example should give an output similar to the one shown below: -``` -Predict with Float input -Probability : 0.30337515 Class : n02123159 tiger cat -Predict with NDArray -Probability : 0.30337515 Class : n02123159 tiger cat -``` -the outputs come from the the input image, with top1 predictions picked. \ No newline at end of file diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md index 77aec7bb5dee..69328a44bab6 100644 --- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md +++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/README.md @@ -28,13 +28,18 @@ The model is trained on the [Pascal VOC 2012 dataset](http://host.robots.ox.ac.u ### Download Artifacts #### Step 1 You can download the files using the script `get_ssd_data.sh`. It will download and place the model files in a `model` folder and the test image files in a `image` folder in the current directory. -From the `scala-package/examples/scripts/infer/objectdetector/` folder run: +From the `scala-package/examples/scripts/infer/imageclassifier/` folder run: ```bash -./get_ssd_data.sh +./get_resnet_data.sh ``` -**Note**: You may need to run `chmod +x get_ssd_data.sh` before running this script. +**Note**: You may need to run `chmod +x get_resnet_data.sh` before running this script. 
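The predictor README above boils down to constructing a `Predictor` with one `DataDesc` and feeding it a flattened NCHW float array. A compact sketch follows; the model prefix is a placeholder and a dummy input stands in for a decoded image.

```java
import org.apache.mxnet.infer.javaapi.Predictor;
import org.apache.mxnet.javaapi.Context;
import org.apache.mxnet.javaapi.DType;
import org.apache.mxnet.javaapi.DataDesc;
import org.apache.mxnet.javaapi.NDArray;
import org.apache.mxnet.javaapi.Shape;

import java.util.ArrayList;
import java.util.List;

// Minimal Predictor usage: one 1x3x224x224 input, float[][] in, float[][] out.
public class PredictorQuickStart {
    public static void main(String[] args) {
        List<Context> contexts = new ArrayList<>();
        contexts.add(Context.cpu());

        Shape inputShape = new Shape(new int[]{1, 3, 224, 224});
        List<DataDesc> inputDescriptors = new ArrayList<>();
        inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW"));

        // Placeholder prefix for resnet-18-symbol.json / resnet-18-0000.params / synset.txt.
        Predictor predictor = new Predictor("models/resnet-18/resnet-18",
                inputDescriptors, contexts, 0);

        // Dummy NCHW input; a real caller would fill this from a resized image.
        float[] pixels = new float[3 * 224 * 224];
        float[][] scores = predictor.predict(new float[][]{pixels});
        System.out.println("Output length: " + scores[0].length);

        // The same data can also go through the NDArray path.
        NDArray batch = new NDArray(pixels, inputShape, Context.cpu());
        List<NDArray> inputs = new ArrayList<>();
        inputs.add(batch);
        float[] ndScores = predictor.predictWithNDArray(inputs).get(0).toArray();
        System.out.println("NDArray output length: " + ndScores.length);
    }
}
```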
+ +Alternatively use the following links to download the Symbol and Params files via your browser: +- [resnet50_ssd_model-symbol.json](https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-symbol.json) +- [resnet50_ssd_model-0000.params](https://s3.amazonaws.com/model-server/models/resnet50_ssd/resnet50_ssd_model-0000.params) +- [synset.txt](https://github.com/awslabs/mxnet-model-server/blob/master/examples/ssd/synset.txt) In the pre-trained model, the `input_name` is `data` and shape is `(1, 3, 512, 512)`. This shape translates to: a batch of `1` image, the image has color and uses `3` channels (RGB), and the image has the dimensions of `512` pixels in height by `512` pixels in width. @@ -52,6 +57,13 @@ The output shape is `(1, 6132, 6)`. As with the input, the `1` is the number of ### Setup Datapath and Parameters #### Step 2 +The code `Line 31: val baseDir = System.getProperty("user.dir")` in the example will automatically searches the work directory you have defined. Please put the files in your [work directory](https://stackoverflow.com/questions/16239130/java-user-dir-property-what-exactly-does-it-mean). + +Alternatively, if you would like to use your own path, please change line 31 into your own path +```scala +val baseDir = +``` + The followings is the parameters defined for this example, you can find more information in the `class SSDClassifierExample`. | Argument | Comments | @@ -67,7 +79,7 @@ After the previous steps, you should be able to run the code using the following From the `scala-package/examples/scripts/inferexample/objectdetector/` folder run: ```bash -./run_ssd_example.sh ../models/resnet50_ssd/resnet50_ssd_model ../images/dog.jpg ../images +./run_ssd_example.sh ../models/resnet50_ssd_model ../images/dog.jpg ../images ``` **Notes**: diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/SSDClassifierExample.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/SSDClassifierExample.scala index 07d1cc82e927..f752ef6dab58 100644 --- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/SSDClassifierExample.scala +++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/infer/objectdetector/SSDClassifierExample.scala @@ -182,9 +182,9 @@ object SSDClassifierExample { def checkExist(arr : Array[String]) : Boolean = { var exist : Boolean = true for (item <- arr) { - if (!(Files.exists(Paths.get(item)))) { + exist = Files.exists(Paths.get(item)) && exist + if (!exist) { logger.error("Cannot find: " + item) - exist = false } } exist diff --git a/scala-package/infer/pom.xml b/scala-package/infer/pom.xml index 91a1e1b30d2f..e50100169328 100644 --- a/scala-package/infer/pom.xml +++ b/scala-package/infer/pom.xml @@ -13,10 +13,6 @@ mxnet-infer_2.11 MXNet Scala Package - Inference - - true - - unittest @@ -24,6 +20,12 @@ false + + integrationtest + + true + + osx-x86_64-cpu diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala deleted file mode 100644 index 08fffb410adf..000000000000 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetector.scala +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.mxnet.infer.javaapi - -// scalastyle:off -import java.awt.image.BufferedImage -// scalastyle:on - -import org.apache.mxnet.javaapi.{Context, DataDesc, NDArray, Shape} - -import scala.collection.JavaConverters -import scala.collection.JavaConverters._ - -/** - * The ObjectDetector class helps to run ObjectDetection tasks where the goal - * is to find bounding boxes and corresponding labels for objects in a image. - * - * @param modelPathPrefix Path prefix from where to load the model artifacts. - * These include the symbol, parameters, and synset.txt. - * Example: file://model-dir/ssd_resnet50_512 (containing - * ssd_resnet50_512-symbol.json, ssd_resnet50_512-0000.params, - * and synset.txt) - * @param inputDescriptors Descriptors defining the input node names, shape, - * layout and type parameters - * @param contexts Device contexts on which you want to run inference. - * Defaults to CPU. - * @param epoch Model epoch to load; defaults to 0 - */ -class ObjectDetector private[mxnet] (val objDetector: org.apache.mxnet.infer.ObjectDetector){ - - def this(modelPathPrefix: String, inputDescriptors: java.util.List[DataDesc], contexts: - java.util.List[Context], epoch: Int) - = this { - val informationDesc = JavaConverters.asScalaIteratorConverter(inputDescriptors.iterator) - .asScala.toIndexedSeq map {a => a: org.apache.mxnet.DataDesc} - val inContexts = (contexts.asScala.toList map {a => a: org.apache.mxnet.Context}).toArray - // scalastyle:off - new org.apache.mxnet.infer.ObjectDetector(modelPathPrefix, informationDesc, inContexts, Some(epoch)) - // scalastyle:on - } - - /** - * Detects objects and returns bounding boxes with corresponding class/label - * - * @param inputImage Path prefix of the input image - * @param topK Number of result elements to return, sorted by probability - * @return List of list of tuples of - * (class, [probability, xmin, ymin, xmax, ymax]) - */ - def imageObjectDetect(inputImage: BufferedImage, topK: Int): - java.util.List[java.util.List[ObjectDetectorOutput]] = { - val ret = objDetector.imageObjectDetect(inputImage, Some(topK)) - (ret map {a => (a map {e => new ObjectDetectorOutput(e._1, e._2)}).asJava}).asJava - } - - /** - * Takes input images as NDArrays. Useful when you want to perform multiple operations on - * the input array, or when you want to pass a batch of input images. - * - * @param input Indexed Sequence of NDArrays - * @param topK (Optional) How many top_k (sorting will be based on the last axis) - * elements to return. If not passed, returns all unsorted output. 
- * @return List of list of tuples of - * (class, [probability, xmin, ymin, xmax, ymax]) - */ - def objectDetectWithNDArray(input: java.util.List[NDArray], topK: Int): - java.util.List[java.util.List[ObjectDetectorOutput]] = { - val ret = objDetector.objectDetectWithNDArray(convert(input.asScala.toIndexedSeq), Some(topK)) - (ret map {a => (a map {e => new ObjectDetectorOutput(e._1, e._2)}).asJava}).asJava - } - - /** - * To classify batch of input images according to the provided model - * - * @param inputBatch Input array of buffered images - * @param topK Number of result elements to return, sorted by probability - * @return List of list of tuples of (class, probability) - */ - def imageBatchObjectDetect(inputBatch: java.util.List[BufferedImage], topK: Int): - java.util.List[java.util.List[ObjectDetectorOutput]] = { - val ret = objDetector.imageBatchObjectDetect(inputBatch.asScala, Some(topK)) - (ret map {a => (a map {e => new ObjectDetectorOutput(e._1, e._2)}).asJava}).asJava - } - - def convert[B, A <% B](l: IndexedSeq[A]): IndexedSeq[B] = l map { a => a: B } - -} - - -object ObjectDetector { - implicit def fromObjectDetector(OD: org.apache.mxnet.infer.ObjectDetector): - ObjectDetector = new ObjectDetector(OD) - - implicit def toObjectDetector(jOD: ObjectDetector): - org.apache.mxnet.infer.ObjectDetector = jOD.objDetector - - def loadImageFromFile(inputImagePath: String): BufferedImage = { - org.apache.mxnet.infer.ImageClassifier.loadImageFromFile(inputImagePath) - } - - def reshapeImage(img : BufferedImage, newWidth: Int, newHeight: Int): BufferedImage = { - org.apache.mxnet.infer.ImageClassifier.reshapeImage(img, newWidth, newHeight) - } - - def bufferedImageToPixels(resizedImage: BufferedImage, inputImageShape: Shape): NDArray = { - org.apache.mxnet.infer.ImageClassifier.bufferedImageToPixels(resizedImage, inputImageShape) - } - - def loadInputBatch(inputImagePaths: java.util.List[String]): java.util.List[BufferedImage] = { - org.apache.mxnet.infer.ImageClassifier - .loadInputBatch(inputImagePaths.asScala.toList).toList.asJava - } -} diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetectorOutput.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetectorOutput.scala deleted file mode 100644 index 13369c8fcef5..000000000000 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/ObjectDetectorOutput.scala +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
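The Java `ObjectDetector` wrapper above also exposes static image helpers and an NDArray entry point. A short sketch of that route, with placeholder paths and a CPU context, is:

```java
import org.apache.mxnet.infer.javaapi.ObjectDetector;
import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput;
import org.apache.mxnet.javaapi.Context;
import org.apache.mxnet.javaapi.DType;
import org.apache.mxnet.javaapi.DataDesc;
import org.apache.mxnet.javaapi.NDArray;
import org.apache.mxnet.javaapi.Shape;

import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;

// Detection through the NDArray path: load, resize, convert to pixels, then detect.
public class NDArrayDetectionSketch {
    public static void main(String[] args) {
        List<Context> contexts = new ArrayList<>();
        contexts.add(Context.cpu());

        Shape inputShape = new Shape(new int[]{1, 3, 512, 512});
        List<DataDesc> inputDescriptors = new ArrayList<>();
        inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW"));
        ObjectDetector detector = new ObjectDetector(
                "models/resnet50_ssd/resnet50_ssd_model", inputDescriptors, contexts, 0);

        // Static helpers on ObjectDetector handle the image plumbing.
        BufferedImage raw = ObjectDetector.loadImageFromFile("images/dog.jpg");
        BufferedImage resized = ObjectDetector.reshapeImage(raw, 512, 512);
        NDArray pixels = ObjectDetector.bufferedImageToPixels(resized, inputShape);

        List<NDArray> batch = new ArrayList<>();
        batch.add(pixels);
        List<List<ObjectDetectorOutput>> results = detector.objectDetectWithNDArray(batch, 3);
        System.out.println("Detections for image 0: " + results.get(0).size());
    }
}
```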
- */ - -package org.apache.mxnet.infer.javaapi - -class ObjectDetectorOutput (className: String, args: Array[Float]){ - - def getClassName: String = className - - def getProbability: Float = args(0) - - def getXMin: Float = args(1) - - def getXMax: Float = args(2) - - def getYMin: Float = args(3) - - def getYMax: Float = args(4) - -} diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala deleted file mode 100644 index a5428e1c8219..000000000000 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.mxnet.infer.javaapi - -import org.apache.mxnet.javaapi.{Context, DataDesc, NDArray} - -import scala.collection.JavaConverters -import scala.collection.JavaConverters._ - -/** - * Implementation of prediction routines. - * - * @param modelPathPrefix Path prefix from where to load the model artifacts. - * These include the symbol, parameters, and synset.txt - * Example: file://model-dir/resnet-152 (containing - * resnet-152-symbol.json, resnet-152-0000.params, and synset.txt). - * @param inputDescriptors Descriptors defining the input node names, shape, - * layout and type parameters - *
Note: If the input Descriptors is missing batchSize - * ('N' in layout), a batchSize of 1 is assumed for the model. - * @param contexts Device contexts on which you want to run inference; defaults to CPU - * @param epoch Model epoch to load; defaults to 0 - - */ - -// JavaDoc description of class to be updated in https://issues.apache.org/jira/browse/MXNET-1178 -class Predictor private[mxnet] (val predictor: org.apache.mxnet.infer.Predictor){ - def this(modelPathPrefix: String, inputDescriptors: java.util.List[DataDesc], - contexts: java.util.List[Context], epoch: Int) - = this { - val informationDesc = JavaConverters.asScalaIteratorConverter(inputDescriptors.iterator) - .asScala.toIndexedSeq map {a => a: org.apache.mxnet.DataDesc} - val inContexts = (contexts.asScala.toList map {a => a: org.apache.mxnet.Context}).toArray - new org.apache.mxnet.infer.Predictor(modelPathPrefix, informationDesc, inContexts, Some(epoch)) - } - - /** - * Takes input as Array of one dimensional arrays and creates the NDArray needed for inference - * The array will be reshaped based on the input descriptors. - * - * @param input: An Array of a one-dimensional array. - An extra Array is needed for when the model has more than one input. - * @return Indexed sequence array of outputs - */ - def predict(input: Array[Array[Float]]): - Array[Array[Float]] = { - predictor.predict(input).toArray - } - - /** - * Takes input as List of one dimensional arrays and creates the NDArray needed for inference - * The array will be reshaped based on the input descriptors. - * - * @param input: A List of a one-dimensional array. - An extra List is needed for when the model has more than one input. - * @return Indexed sequence array of outputs - */ - def predict(input: java.util.List[java.util.List[Float]]): - java.util.List[java.util.List[Float]] = { - val in = JavaConverters.asScalaIteratorConverter(input.iterator).asScala.toIndexedSeq - (predictor.predict(in map {a => a.asScala.toArray}) map {b => b.toList.asJava}).asJava - } - - - - /** - * Predict using NDArray as input - * This method is useful when the input is a batch of data - * Note: User is responsible for managing allocation/deallocation of input/output NDArrays. - * - * @param input List of NDArrays - * @return Output of predictions as NDArrays - */ - def predictWithNDArray(input: java.util.List[NDArray]): - java.util.List[NDArray] = { - val ret = predictor.predictWithNDArray(convert(JavaConverters - .asScalaIteratorConverter(input.iterator).asScala.toIndexedSeq)) - // TODO: For some reason the implicit wasn't working here when trying to use convert. - // So did it this way. 
Needs to be figured out - (ret map {a => new NDArray(a)}).asJava - } - - private def convert[B, A <% B](l: IndexedSeq[A]): IndexedSeq[B] = l map { a => a: B } -} diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala index ce12dc7cd5a0..64d665579d14 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/APIDocGenerator.scala @@ -36,7 +36,6 @@ private[mxnet] object APIDocGenerator extends GeneratorBase { hashCollector += typeSafeClassGen(FILE_PATH, false) hashCollector += nonTypeSafeClassGen(FILE_PATH, true) hashCollector += nonTypeSafeClassGen(FILE_PATH, false) - hashCollector += javaClassGen(FILE_PATH) val finalHash = hashCollector.mkString("\n") } @@ -90,29 +89,6 @@ private[mxnet] object APIDocGenerator extends GeneratorBase { absFuncs) } - def javaClassGen(filePath : String) : String = { - val notGenerated = Set("Custom") - val absClassFunctions = functionsToGenerate(false, false, true) - val absFuncs = absClassFunctions.filterNot(ele => notGenerated.contains(ele.name)) - .groupBy(_.name.toLowerCase).map(ele => { - /* Pattern matching for not generating deprecated method - * Group all method name in lowercase - * Kill the capital lettered method such as Cast vs cast - * As it defined by default it deprecated - */ - if (ele._2.length == 1) ele._2.head - else { - if (ele._2.head.name.head.isLower) ele._2.head - else ele._2.last - } - }).map(absClassFunction => { - generateJavaAPISignature(absClassFunction) - }).toSeq - val packageName = "NDArrayBase" - val packageDef = "package org.apache.mxnet.javaapi" - writeFile(filePath + "javaapi/", packageName, packageDef, absFuncs) - } - def generateAPIDocFromBackend(func: Func, withParam: Boolean = true): String = { def fixDesc(desc: String): String = { var curDesc = desc @@ -156,7 +132,6 @@ private[mxnet] object APIDocGenerator extends GeneratorBase { argDef += "attr : Map[String, String] = null" } else { argDef += "out : Option[NDArray] = None" - } val returnType = func.returnType @@ -165,64 +140,6 @@ private[mxnet] object APIDocGenerator extends GeneratorBase { |def ${func.name} (${argDef.mkString(", ")}): $returnType""".stripMargin } - def generateJavaAPISignature(func : Func) : String = { - val useParamObject = func.listOfArgs.count(arg => arg.isOptional) >= 2 - var argDef = ListBuffer[String]() - var classDef = ListBuffer[String]() - var requiredParam = ListBuffer[String]() - func.listOfArgs.foreach(absClassArg => { - val currArgName = absClassArg.safeArgName - // scalastyle:off - if (absClassArg.isOptional && useParamObject) { - classDef += - s"""private var $currArgName: ${absClassArg.argType} = null - |/** - | * @param $currArgName\t\t${absClassArg.argDesc} - | */ - |def set${currArgName.capitalize}($currArgName : ${absClassArg.argType}): ${func.name}Param = { - | this.$currArgName = $currArgName - | this - | }""".stripMargin - } - else { - requiredParam += s" * @param $currArgName\t\t${absClassArg.argDesc}" - argDef += s"$currArgName : ${absClassArg.argType}" - } - classDef += s"def get${currArgName.capitalize}() = this.$currArgName" - // scalastyle:on - }) - val experimentalTag = "@Experimental" - val returnType = "Array[NDArray]" - val scalaDoc = generateAPIDocFromBackend(func) - val scalaDocNoParam = generateAPIDocFromBackend(func, false) - if(useParamObject) { - classDef += - s"""private var out : org.apache.mxnet.NDArray = null - |def 
setOut(out : NDArray) : ${func.name}Param = { - | this.out = out - | this - | } - | def getOut() = this.out - | """.stripMargin - s"""$scalaDocNoParam - | $experimentalTag - | def ${func.name}(po: ${func.name}Param) : $returnType - | /** - | * This Param Object is specifically used for ${func.name} - | ${requiredParam.mkString("\n")} - | */ - | class ${func.name}Param(${argDef.mkString(",")}) { - | ${classDef.mkString("\n ")} - | }""".stripMargin - } else { - argDef += "out : NDArray" - s"""$scalaDoc - |$experimentalTag - | def ${func.name}(${argDef.mkString(", ")}) : $returnType - | """.stripMargin - } - } - def writeFile(FILE_PATH: String, className: String, packageDef: String, absFuncs: Seq[String]): String = { @@ -253,7 +170,6 @@ private[mxnet] object APIDocGenerator extends GeneratorBase { |${absFuncs.mkString("\n")} |}""".stripMargin - val pw = new PrintWriter(new File(FILE_PATH + s"$className.scala")) pw.write(finalStr) pw.close() diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/GeneratorBase.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/GeneratorBase.scala index 9245ef1b437f..f4c4a91bdf9a 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/GeneratorBase.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/GeneratorBase.scala @@ -36,9 +36,8 @@ abstract class GeneratorBase { case class Func(name: String, desc: String, listOfArgs: List[Arg], returnType: String) - def functionsToGenerate(isSymbol: Boolean, isContrib: Boolean, - isJava: Boolean = false): List[Func] = { - val l = getBackEndFunctions(isSymbol, isJava) + def functionsToGenerate(isSymbol: Boolean, isContrib: Boolean): List[Func] = { + val l = getBackEndFunctions(isSymbol) if (isContrib) { l.filter(func => func.name.startsWith("_contrib_") || !func.name.startsWith("_")) } else { @@ -59,18 +58,17 @@ abstract class GeneratorBase { res.filterNot(ele => notGenerated.contains(ele.name)) } - protected def getBackEndFunctions(isSymbol: Boolean, isJava: Boolean = false): List[Func] = { + protected def getBackEndFunctions(isSymbol: Boolean): List[Func] = { val opNames = ListBuffer.empty[String] _LIB.mxListAllOpNames(opNames) opNames.map(opName => { val opHandle = new RefLong _LIB.nnGetOpHandle(opName, opHandle) - makeAtomicFunction(opHandle.value, opName, isSymbol, isJava) + makeAtomicFunction(opHandle.value, opName, isSymbol) }).toList } - private def makeAtomicFunction(handle: Handle, aliasName: String, - isSymbol: Boolean, isJava: Boolean): Func = { + private def makeAtomicFunction(handle: Handle, aliasName: String, isSymbol: Boolean): Func = { val name = new RefString val desc = new RefString val keyVarNumArgs = new RefString @@ -91,17 +89,13 @@ abstract class GeneratorBase { val docStr = s"$aliasName $realName\n${desc.value}\n\n$paramStr\n$extraDoc\n" val argList = argNames zip argTypes zip argDescs map { case ((argName, argType), argDesc) => - val family = if (isJava) "org.apache.mxnet.javaapi.NDArray" - else if (isSymbol) "org.apache.mxnet.Symbol" - else "org.apache.mxnet.NDArray" + val family = if (isSymbol) "org.apache.mxnet.Symbol" else "org.apache.mxnet.NDArray" val typeAndOption = CToScalaUtils.argumentCleaner(argName, argType, family) Arg(argName, typeAndOption._1, argDesc, typeAndOption._2) } val returnType = - if (isJava) "Array[org.apache.mxnet.javaapi.NDArray]" - else if (isSymbol) "org.apache.mxnet.Symbol" - else "org.apache.mxnet.NDArrayFuncReturn" + if (isSymbol) "org.apache.mxnet.Symbol" else "org.apache.mxnet.NDArrayFuncReturn" Func(aliasName, desc.value, 
argList.toList, returnType) } diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala deleted file mode 100644 index 4dfd6eb044a1..000000000000 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/javaapi/JavaNDArrayMacro.scala +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.mxnet.javaapi - -import org.apache.mxnet.GeneratorBase - -import scala.annotation.StaticAnnotation -import scala.collection.mutable.ListBuffer -import scala.language.experimental.macros -import scala.reflect.macros.blackbox - -private[mxnet] class AddJNDArrayAPIs(isContrib: Boolean) extends StaticAnnotation { - private[mxnet] def macroTransform(annottees: Any*) = macro JavaNDArrayMacro.typeSafeAPIDefs -} - -private[mxnet] object JavaNDArrayMacro extends GeneratorBase { - - // scalastyle:off havetype - def typeSafeAPIDefs(c: blackbox.Context)(annottees: c.Expr[Any]*) = { - typeSafeAPIImpl(c)(annottees: _*) - } - // scalastyle:off havetype - - private def typeSafeAPIImpl(c: blackbox.Context)(annottees: c.Expr[Any]*) : c.Expr[Any] = { - import c.universe._ - - val isContrib: Boolean = c.prefix.tree match { - case q"new AddJNDArrayAPIs($b)" => c.eval[Boolean](c.Expr(b)) - } - // Defines Operators that should not generated - val notGenerated = Set("Custom") - - val newNDArrayFunctions = functionsToGenerate(false, false, true) - .filterNot(ele => notGenerated.contains(ele.name)).groupBy(_.name.toLowerCase).map(ele => { - /* Pattern matching for not generating deprecated method - * Group all method name in lowercase - * Kill the capital lettered method such as Cast vs cast - * As it defined by default it deprecated - */ - if (ele._2.length == 1) ele._2.head - else { - if (ele._2.head.name.head.isLower) ele._2.head - else ele._2.last - } - }) - - val functionDefs = ListBuffer[DefDef]() - val classDefs = ListBuffer[ClassDef]() - - newNDArrayFunctions.foreach { ndarrayfunction => - - val useParamObject = ndarrayfunction.listOfArgs.count(arg => arg.isOptional) >= 2 - // Construct argument field with all required args - var argDef = ListBuffer[String]() - // Construct function Implementation field (e.g norm) - var impl = ListBuffer[String]() - impl += "val map = scala.collection.mutable.Map[String, Any]()" - impl += - "val args= scala.collection.mutable.ArrayBuffer.empty[org.apache.mxnet.NDArray]" - ndarrayfunction.listOfArgs.foreach({ ndarrayArg => - // var is a special word used to define variable in Scala, - // need to changed to something else in order to make it work - var currArgName = ndarrayArg.safeArgName - if (useParamObject) currArgName = s"po.get${currArgName.capitalize}()" - argDef += 
s"$currArgName : ${ndarrayArg.argType}" - // NDArray arg implementation - val returnType = "org.apache.mxnet.javaapi.NDArray" - val base = - if (ndarrayArg.argType.equals(returnType)) { - s"args += $currArgName" - } else if (ndarrayArg.argType.equals(s"Array[$returnType]")){ - s"$currArgName.foreach(args+=_)" - } else { - "map(\"" + ndarrayArg.argName + "\") = " + currArgName - } - impl.append( - if (ndarrayArg.isOptional) s"if ($currArgName != null) $base" - else base - ) - }) - // add default out parameter - argDef += s"out: org.apache.mxnet.javaapi.NDArray" - if (useParamObject) { - impl += "if (po.getOut() != null) map(\"out\") = po.getOut()" - } else { - impl += "if (out != null) map(\"out\") = out" - } - val returnType = "Array[org.apache.mxnet.javaapi.NDArray]" - // scalastyle:off - // Combine and build the function string - impl += "val finalArr = org.apache.mxnet.NDArray.genericNDArrayFunctionInvoke(\"" + - ndarrayfunction.name + "\", args.toSeq, map.toMap).arr" - impl += "finalArr.map(ele => new NDArray(ele))" - if (useParamObject) { - val funcDef = - s"""def ${ndarrayfunction.name}(po: ${ndarrayfunction.name}Param): $returnType = { - | ${impl.mkString("\n")} - | }""".stripMargin - functionDefs += c.parse(funcDef).asInstanceOf[DefDef] - } else { - val funcDef = - s"""def ${ndarrayfunction.name}(${argDef.mkString(",")}): $returnType = { - | ${impl.mkString("\n")} - | }""".stripMargin - functionDefs += c.parse(funcDef).asInstanceOf[DefDef] - } - } - structGeneration(c)(functionDefs.toList, annottees : _*) - } -} diff --git a/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala b/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala index 2fd8b2e73c7a..d0ebe5b1d2cb 100644 --- a/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala +++ b/scala-package/macros/src/main/scala/org/apache/mxnet/utils/CToScalaUtils.scala @@ -21,20 +21,19 @@ private[mxnet] object CToScalaUtils { // Convert C++ Types to Scala Types - def typeConversion(in : String, argType : String = "", argName : String, - returnType : String) : String = { - val header = returnType.split("\\.").dropRight(1) + def typeConversion(in : String, argType : String = "", + argName : String, returnType : String) : String = { in match { - case "Shape(tuple)" | "ShapeorNone" => s"${header.mkString(".")}.Shape" + case "Shape(tuple)" | "ShapeorNone" => "org.apache.mxnet.Shape" case "Symbol" | "NDArray" | "NDArray-or-Symbol" => returnType case "Symbol[]" | "NDArray[]" | "NDArray-or-Symbol[]" | "SymbolorSymbol[]" => s"Array[$returnType]" - case "float" | "real_t" | "floatorNone" => "java.lang.Float" - case "int" | "intorNone" | "int(non-negative)" => "java.lang.Integer" - case "long" | "long(non-negative)" => "java.lang.Long" - case "double" | "doubleorNone" => "java.lang.Double" + case "float" | "real_t" | "floatorNone" => "org.apache.mxnet.Base.MXFloat" + case "int" | "intorNone" | "int(non-negative)" => "Int" + case "long" | "long(non-negative)" => "Long" + case "double" | "doubleorNone" => "Double" case "string" => "String" - case "boolean" | "booleanorNone" => "java.lang.Boolean" + case "boolean" | "booleanorNone" => "Boolean" case "tupleof" | "tupleof" | "tupleof<>" | "ptr" | "" => "Any" case default => throw new IllegalArgumentException( s"Invalid type for args: $default\nString argType: $argType\nargName: $argName") @@ -53,8 +52,8 @@ private[mxnet] object CToScalaUtils { * @param argType Raw arguement Type description * @return (Scala_Type, isOptional) */ 
- def argumentCleaner(argName: String, argType : String, - returnType : String) : (String, Boolean) = { + def argumentCleaner(argName: String, + argType : String, returnType : String) : (String, Boolean) = { val spaceRemoved = argType.replaceAll("\\s+", "") var commaRemoved : Array[String] = new Array[String](0) // Deal with the case e.g: stype : {'csr', 'default', 'row_sparse'} diff --git a/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala b/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala index 4404b0885d57..c3a7c58c1afc 100644 --- a/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala +++ b/scala-package/macros/src/test/scala/org/apache/mxnet/MacrosSuite.scala @@ -36,7 +36,7 @@ class MacrosSuite extends FunSuite with BeforeAndAfterAll { ) val output = List( ("org.apache.mxnet.Symbol", true), - ("java.lang.Integer", false), + ("Int", false), ("org.apache.mxnet.Shape", true), ("String", true), ("Any", false) diff --git a/scala-package/mxnet-demo/scala-demo/Makefile b/scala-package/mxnet-demo/Makefile similarity index 98% rename from scala-package/mxnet-demo/scala-demo/Makefile rename to scala-package/mxnet-demo/Makefile index 458077d13904..227697ba2e8a 100644 --- a/scala-package/mxnet-demo/scala-demo/Makefile +++ b/scala-package/mxnet-demo/Makefile @@ -17,7 +17,7 @@ SCALA_VERSION_PROFILE := 2.11 SCALA_VERSION := 2.11.8 -MXNET_VERSION := 1.3.0 +MXNET_VERSION := 1.2.0 ifeq ($(OS),Windows_NT) UNAME_S := Windows diff --git a/scala-package/mxnet-demo/scala-demo/README.md b/scala-package/mxnet-demo/README.md similarity index 88% rename from scala-package/mxnet-demo/scala-demo/README.md rename to scala-package/mxnet-demo/README.md index 300fc7b2e108..e30a61a2fc13 100644 --- a/scala-package/mxnet-demo/scala-demo/README.md +++ b/scala-package/mxnet-demo/README.md @@ -4,7 +4,7 @@ This is an project created to use Maven-published Scala package with two Scala e User are required to use `mvn package` to build the package, which are shown below: ```Bash -export SCALA_VERSION_PROFILE=2.11 SCALA_VERSION=2.11.8 MXNET_VERSION=1.3.0 +export SCALA_VERSION_PROFILE=2.11 SCALA_VERSION=2.11.8 MXNET_VERSION=1.2.0 export SCALA_PKG_PROFILE= mvn package -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ @@ -12,9 +12,7 @@ mvn package -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ -Dscala.version=$(SCALA_VERSION) ``` These environment variable (`SCALA_PKG_PROFILE`, `SCALA_VERSION_PROFILE`, `MXNET_VERSION`, `SCALA_VERSION`) -should be set before executing the line above. - -To obtain the most recent MXNet version, please click [here](https://mvnrepository.com/search?q=org.apache.mxnet) +should be set before executing the line above. You can also use the `Makefile` as an alternative to do the same thing. Simply do the following: ```Bash @@ -27,7 +25,7 @@ This will load the default parameter for all the environment variable. ### Hello World The Scala file is being executed using Java. You can execute the helloWorld example as follows: ```Bash -java -cp $CLASSPATH sample.HelloWorld +java -Xmx8G -cp $CLASSPATH sample.HelloWorld ``` However, you have to define the Classpath before you run the demo code. 
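As a rough illustration of what such a demo entry point looks like, here is a hypothetical minimal `sample.HelloWorld` in Scala; the real `HelloWorld.scala` is only renamed by this patch and its body is not reproduced here, so treat the NDArray calls below as an assumed sketch against the published Scala API.

```scala
// Hypothetical minimal demo class, assuming the published org.apache.mxnet Scala API.
package sample

import org.apache.mxnet.{NDArray, Shape}

object HelloWorld {
  def main(args: Array[String]): Unit = {
    println("Hello World!")
    // Create a small 2x3 array of ones on the default (CPU) context and print its shape.
    val nd = NDArray.ones(Shape(2, 3))
    println(nd.shape)
  }
}
```

Once compiled into the demo jar, a class like this is what the `java -Xmx8G -cp $CLASSPATH sample.HelloWorld` invocation above (or `bin/demo.sh`) would run.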
More information can be found in the `demo.sh` And you can run the bash script as follows: ```Bash @@ -43,7 +41,7 @@ You can review the complete example [here](https://github.com/apache/incubator-m you can run using the command shown below: ```Bash -java -cp $CLASSPATH sample.ImageClassificationExample +java -Xmx8G -cp $CLASSPATH sample.ImageClassificationExample ``` or script as follows: ```Bash @@ -61,7 +59,7 @@ make scalaclean ``` ## Q & A -If you are facing opencv issue on Ubuntu, please try as follows to install opencv 3.4 (required by 1.2.0 package and above) +If you are facing opencv issue on Ubuntu, please try as follows to install opencv 3.4 (required by 1.2.0 package) ```Bash sudo add-apt-repository ppa:timsc/opencv-3.4 sudo apt-get update diff --git a/scala-package/mxnet-demo/scala-demo/bin/demo.sh b/scala-package/mxnet-demo/bin/demo.sh similarity index 100% rename from scala-package/mxnet-demo/scala-demo/bin/demo.sh rename to scala-package/mxnet-demo/bin/demo.sh diff --git a/scala-package/mxnet-demo/scala-demo/bin/run_im.sh b/scala-package/mxnet-demo/bin/run_im.sh similarity index 100% rename from scala-package/mxnet-demo/scala-demo/bin/run_im.sh rename to scala-package/mxnet-demo/bin/run_im.sh diff --git a/scala-package/mxnet-demo/java-demo/Makefile b/scala-package/mxnet-demo/java-demo/Makefile deleted file mode 100644 index 340a50f75965..000000000000 --- a/scala-package/mxnet-demo/java-demo/Makefile +++ /dev/null @@ -1,54 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -SCALA_VERSION_PROFILE := 2.11 -SCALA_VERSION := 2.11.8 -MXNET_VERSION := 1.3.1-SNAPSHOT - -ifeq ($(OS),Windows_NT) - UNAME_S := Windows -else - UNAME_S := $(shell uname -s) -endif - -ifeq ($(UNAME_S), Windows) - # TODO: currently scala package does not support windows - SCALA_PKG_PROFILE := windows -else - ifeq ($(UNAME_S), Darwin) - SCALA_PKG_PROFILE := osx-x86_64-cpu - else - SCALA_PKG_PROFILE := linux-x86_64 - ifeq ($(USE_CUDA), 1) - SCALA_PKG_PROFILE := $(SCALA_PKG_PROFILE)-gpu - else - SCALA_PKG_PROFILE := $(SCALA_PKG_PROFILE)-cpu - endif - endif -endif - -javademo: - (mvn package -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ - -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ - -Dmxnet.version=$(MXNET_VERSION) \ - -Dscala.version=$(SCALA_VERSION)) - -javaclean: - (mvn clean -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ - -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ - -Dmxnet.version=$(MXNET_VERSION) \ - -Dscala.version=$(SCALA_VERSION)) \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/README.md b/scala-package/mxnet-demo/java-demo/README.md deleted file mode 100644 index ffe614a29287..000000000000 --- a/scala-package/mxnet-demo/java-demo/README.md +++ /dev/null @@ -1,76 +0,0 @@ -# MXNet Java Sample Project -This is an project created to use Maven-published Scala/Java package with two Java examples. -## Setup -Please copy the downloaded MXNet Java package jar file under the `java-demo` folder. - -User are required to use `mvn package` to build the package, - which are shown below: -```Bash -export SCALA_VERSION_PROFILE=2.11 SCALA_VERSION=2.11.8 MXNET_VERSION=1.3.1-SNAPSHOT -export SCALA_PKG_PROFILE= -mvn package -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ - -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ - -Dmxnet.version=$(MXNET_VERSION) \ - -Dscala.version=$(SCALA_VERSION) -``` -These environment variable (`SCALA_PKG_PROFILE`, `SCALA_VERSION_PROFILE`, `MXNET_VERSION`, `SCALA_VERSION`) -should be set before executing the line above. - -You can also use the `Makefile` as an alternative to do the same thing. Simply do the following: -```Bash -make javademo -``` -This will load the default parameter for all the environment variable. - If you want to run with GPU on Linux, just simply add `USE_CUDA=1` when you run the make file - -## Run -### Hello World -The Scala file is being executed using Java. You can execute the helloWorld example as follows: -```Bash -java -cp $CLASSPATH sample.HelloWorld -``` -However, you have to define the Classpath before you run the demo code. 
More information can be found in the `demo.sh` And you can run the bash script as follows: -```Bash -bash bin/java_sample.sh -``` -It will load the library automatically and run the example -### Object Detection using Inference API -We also provide an example to do object detection, which downloads a ImageNet trained resnet50 model and runs inference on an image to return the classification result as -```Bash -Class: car -Probabilties: 0.99847263 -Coord:312.21335, 72.02908, 456.01443, 150.66176 -Class: bicycle -Probabilties: 0.9047381 -Coord:155.9581, 149.96365, 383.83694, 418.94516 -Class: dog -Probabilties: 0.82268167 -Coord:83.82356, 179.14001, 206.63783, 476.78754 -``` - -you can run using the command shown below: -```Bash -java -cp $CLASSPATH sample.ObjectDetection -``` -or script as follows: -```Bash -bash bin/run_od.sh -``` - -If you want to test run on GPU, you can set a environment variable as follows: -```Bash -export SCALA_TEST_ON_GPU=1 -``` -## Clean up -Clean up for Maven package is simple, you can run the pre-configed `Makefile` as: -```Bash -make javaclean -``` - -## Q & A -If you are facing opencv issue on Ubuntu, please try as follows to install opencv 3.4 (required by 1.2.0 package and above) -```Bash -sudo add-apt-repository ppa:timsc/opencv-3.4 -sudo apt-get update -sudo apt install libopencv-imgcodecs3.4 -``` \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/bin/java_sample.sh b/scala-package/mxnet-demo/java-demo/bin/java_sample.sh deleted file mode 100644 index 50e7fb9eb97d..000000000000 --- a/scala-package/mxnet-demo/java-demo/bin/java_sample.sh +++ /dev/null @@ -1,20 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -#!/bin/bash -CURR_DIR=$(cd $(dirname $0)/../; pwd) -CLASSPATH=$CLASSPATH:$CURR_DIR/target/*:$CLASSPATH:$CURR_DIR/* -java -Xmx8G -cp $CLASSPATH sample.HelloWorld \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/bin/run_od.sh b/scala-package/mxnet-demo/java-demo/bin/run_od.sh deleted file mode 100644 index 5cbc53fbcefe..000000000000 --- a/scala-package/mxnet-demo/java-demo/bin/run_od.sh +++ /dev/null @@ -1,21 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -#!/bin/bash -CURR_DIR=$(cd $(dirname $0)/../; pwd) - -CLASSPATH=$CLASSPATH:$CURR_DIR/target/*:$CLASSPATH:$CURR_DIR/* -java -Xmx8G -cp $CLASSPATH sample.ObjectDetection \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/pom.xml b/scala-package/mxnet-demo/java-demo/pom.xml deleted file mode 100644 index 5014d2e09f55..000000000000 --- a/scala-package/mxnet-demo/java-demo/pom.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - 4.0.0 - Demo - mxnet-java-demo - 1.0-SNAPSHOT - MXNet Java Demo - - - - org.apache.mxnet - mxnet-full_${mxnet.scalaprofile}-${mxnet.profile} - ${mxnet.version} - system - ${project.basedir}/mxnet-full_${mxnet.scalaprofile}-${mxnet.profile}-${mxnet.version}.jar - - - commons-io - commons-io - 2.4 - - - \ No newline at end of file diff --git a/scala-package/mxnet-demo/java-demo/src/main/java/sample/HelloWorld.java b/scala-package/mxnet-demo/java-demo/src/main/java/sample/HelloWorld.java deleted file mode 100644 index 60619dc8a806..000000000000 --- a/scala-package/mxnet-demo/java-demo/src/main/java/sample/HelloWorld.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package sample; - -import org.apache.mxnet.javaapi.*; -import java.util.Arrays; - -public class HelloWorld { - public static void main(String[] args) { - System.out.println("Hello World!"); - NDArray nd = new NDArray(new float[]{2.0f, 3.0f}, new Shape(new int[]{1, 2}), Context.cpu()); - System.out.println(nd.shape()); - } -} diff --git a/scala-package/mxnet-demo/java-demo/src/main/java/sample/ObjectDetection.java b/scala-package/mxnet-demo/java-demo/src/main/java/sample/ObjectDetection.java deleted file mode 100644 index bf9a93ae8217..000000000000 --- a/scala-package/mxnet-demo/java-demo/src/main/java/sample/ObjectDetection.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package sample; -import org.apache.mxnet.infer.javaapi.ObjectDetectorOutput; -import org.apache.mxnet.javaapi.*; -import org.apache.mxnet.infer.javaapi.ObjectDetector; -import org.apache.commons.io.FileUtils; -import java.io.File; -import java.net.URL; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public class ObjectDetection { - private static String modelPath; - private static String imagePath; - - private static void downloadUrl(String url, String filePath) { - File tmpFile = new File(filePath); - if (!tmpFile.exists()) { - try { - FileUtils.copyURLToFile(new URL(url), tmpFile); - } catch (Exception exception) { - System.err.println(exception); - } - } - } - - public static void downloadModelImage() { - String tempDirPath = System.getProperty("java.io.tmpdir"); - System.out.println("tempDirPath: %s".format(tempDirPath)); - imagePath = tempDirPath + "/inputImages/resnetssd/dog-ssd.jpg"; - String imgURL = "https://s3.amazonaws.com/model-server/inputs/dog-ssd.jpg"; - downloadUrl(imgURL, imagePath); - modelPath = tempDirPath + "resnetssd/resnet50_ssd_model"; - System.out.println("Download model files, this can take a while..."); - String modelURL = "https://s3.amazonaws.com/model-server/models/resnet50_ssd/"; - downloadUrl(modelURL + "resnet50_ssd_model-symbol.json", - tempDirPath + "/resnetssd/resnet50_ssd_model-symbol.json"); - downloadUrl(modelURL + "resnet50_ssd_model-0000.params", - tempDirPath + "/resnetssd/resnet50_ssd_model-0000.params"); - downloadUrl(modelURL + "synset.txt", - tempDirPath + "/resnetssd/synset.txt"); - } - - static List> - runObjectDetectionSingle(String modelPathPrefix, String inputImagePath, List context) { - Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); - List inputDescriptors = new ArrayList(); - inputDescriptors.add(new DataDesc("data", inputShape, DType.Float32(), "NCHW")); - ObjectDetector objDet = new ObjectDetector(modelPathPrefix, inputDescriptors, context, 0); - return objDet.imageObjectDetect(ObjectDetector.loadImageFromFile(inputImagePath), 3); - } - - public static void main(String[] args) { - List context = new ArrayList(); - if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && - Integer.valueOf(System.getenv("SCALA_TEST_ON_GPU")) == 1) { - context.add(Context.gpu()); - } else { - context.add(Context.cpu()); - } - downloadModelImage(); - Shape inputShape = new Shape(new int[] {1, 3, 512, 512}); - Shape outputShape = new Shape(new int[] {1, 6132, 6}); - int width = inputShape.get(2); - int height = inputShape.get(3); - List> output - = runObjectDetectionSingle(modelPath, imagePath, context); - String outputStr = "\n"; - for (List ele : output) { - for (ObjectDetectorOutput i : ele) { - outputStr += "Class: " + i.getClassName() + "\n"; - outputStr += "Probabilties: " + i.getProbability() + "\n"; - - List coord = Arrays.asList(i.getXMin() * width, - i.getXMax() * height, i.getYMin() * width, i.getYMax() * height); - StringBuilder sb = new StringBuilder(); - for (float c: coord) { - sb.append(", ").append(c); - } - outputStr += "Coord:" + sb.substring(2)+ "\n"; - } - } - 
System.out.println(outputStr); - } -} \ No newline at end of file diff --git a/scala-package/mxnet-demo/scala-demo/pom.xml b/scala-package/mxnet-demo/pom.xml similarity index 100% rename from scala-package/mxnet-demo/scala-demo/pom.xml rename to scala-package/mxnet-demo/pom.xml diff --git a/scala-package/mxnet-demo/scala-demo/src/main/scala/sample/HelloWorld.scala b/scala-package/mxnet-demo/src/main/scala/sample/HelloWorld.scala similarity index 100% rename from scala-package/mxnet-demo/scala-demo/src/main/scala/sample/HelloWorld.scala rename to scala-package/mxnet-demo/src/main/scala/sample/HelloWorld.scala diff --git a/scala-package/mxnet-demo/scala-demo/src/main/scala/sample/ImageClassificationExample.scala b/scala-package/mxnet-demo/src/main/scala/sample/ImageClassificationExample.scala similarity index 100% rename from scala-package/mxnet-demo/scala-demo/src/main/scala/sample/ImageClassificationExample.scala rename to scala-package/mxnet-demo/src/main/scala/sample/ImageClassificationExample.scala diff --git a/scala-package/pom.xml b/scala-package/pom.xml index 3240c144e822..be28f0f09fe2 100644 --- a/scala-package/pom.xml +++ b/scala-package/pom.xml @@ -190,8 +190,8 @@ maven-compiler-plugin 3.3 - 1.7 - 1.7 + 1.6 + 1.6 UTF-8 diff --git a/tests/tutorials/test_sanity_tutorials.py b/tests/tutorials/test_sanity_tutorials.py index 9e5c38abc976..0ebeb59bf40d 100644 --- a/tests/tutorials/test_sanity_tutorials.py +++ b/tests/tutorials/test_sanity_tutorials.py @@ -50,15 +50,12 @@ 'scala/mnist.md', 'scala/index.md', 'scala/mxnet_scala_on_intellij.md', - 'scala/mxnet_java_install_and_run_examples.md', 'sparse/index.md', 'speech_recognition/index.md', 'unsupervised_learning/index.md', 'vision/index.md', 'tensorrt/index.md', - 'tensorrt/inference_with_trt.md', - 'java/mxnet_java_on_intellij.md', - 'java/ssd_inference.md'] + 'tensorrt/inference_with_trt.md'] whitelist_set = set(whitelist) def test_tutorial_downloadable(): From e25e18f86be04b258973ecf3dcf72d052e7d33e4 Mon Sep 17 00:00:00 2001 From: Xinyu Chen Date: Wed, 12 Dec 2018 03:57:09 +0800 Subject: [PATCH 25/38] Add Intel MKL blas to Jenkins (#13607) * add mkl blas to Jenkins * add mkl install script * fix bug in mkl script * remove python2 ut and add cpu-mkl node --- ci/docker/Dockerfile.build.ubuntu_cpu | 3 ++ ci/docker/install/ubuntu_mkl.sh | 31 ++++++++++++++ ci/docker/runtime_functions.sh | 28 ++++++++++++ ci/jenkins/Jenkins_steps.groovy | 62 +++++++++++++++++++++++++++ ci/jenkins/Jenkinsfile_unix_cpu | 6 ++- 5 files changed, 129 insertions(+), 1 deletion(-) create mode 100755 ci/docker/install/ubuntu_mkl.sh diff --git a/ci/docker/Dockerfile.build.ubuntu_cpu b/ci/docker/Dockerfile.build.ubuntu_cpu index 7c7e2240ee61..2df9f5887f54 100644 --- a/ci/docker/Dockerfile.build.ubuntu_cpu +++ b/ci/docker/Dockerfile.build.ubuntu_cpu @@ -54,6 +54,9 @@ RUN /work/ubuntu_clang.sh COPY install/ubuntu_gcc8.sh /work/ RUN /work/ubuntu_gcc8.sh +COPY install/ubuntu_mkl.sh /work/ +RUN /work/ubuntu_mkl.sh + COPY install/ubuntu_mklml.sh /work/ RUN /work/ubuntu_mklml.sh diff --git a/ci/docker/install/ubuntu_mkl.sh b/ci/docker/install/ubuntu_mkl.sh new file mode 100755 index 000000000000..36fc7b07ffdc --- /dev/null +++ b/ci/docker/install/ubuntu_mkl.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# build and install are separated so changes to build don't invalidate +# the whole docker cache for the image + +set -ex + +apt-get update || true +# Install Intel Math Kernel Library (latest major release) +# https://software.intel.com/en-us/articles/installing-intel-free-libs-and-python-apt-repo +wget -O - wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB | apt-key add - && \ + sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && \ + apt-get update && \ + apt-get install -y intel-mkl-2019.1-053 diff --git a/ci/docker/runtime_functions.sh b/ci/docker/runtime_functions.sh index 1fc10bf0e085..6dd5bb6f239d 100755 --- a/ci/docker/runtime_functions.sh +++ b/ci/docker/runtime_functions.sh @@ -326,6 +326,20 @@ build_ubuntu_cpu_openblas() { -j$(nproc) } +build_ubuntu_cpu_mkl() { + set -ex + export CC="ccache gcc" + export CXX="ccache g++" + make \ + DEV=1 \ + ENABLE_TESTCOVERAGE=1 \ + USE_CPP_PACKAGE=1 \ + USE_BLAS=mkl \ + USE_INTEL_PATH=/opt/intel \ + USE_DIST_KVSTORE=1 \ + -j$(nproc) +} + build_ubuntu_cpu_cmake_debug() { set -ex pushd . @@ -480,6 +494,20 @@ build_ubuntu_cpu_mkldnn() { -j$(nproc) } +build_ubuntu_cpu_mkldnn_mkl() { + set -ex + + build_ccache_wrappers + + make \ + DEV=1 \ + ENABLE_TESTCOVERAGE=1 \ + USE_CPP_PACKAGE=1 \ + USE_BLAS=mkl \ + USE_MKLDNN=1 \ + -j$(nproc) +} + build_ubuntu_gpu() { build_ubuntu_gpu_cuda91_cudnn7 } diff --git a/ci/jenkins/Jenkins_steps.groovy b/ci/jenkins/Jenkins_steps.groovy index f48a26737308..74bde1eee211 100644 --- a/ci/jenkins/Jenkins_steps.groovy +++ b/ci/jenkins/Jenkins_steps.groovy @@ -121,6 +121,20 @@ def compile_unix_openblas_debug_cpu() { }] } +def compile_unix_mkl_cpu() { + return ['CPU: MKL': { + node(NODE_LINUX_CPU) { + ws('workspace/build-cpu-mkl') { + timeout(time: max_time, unit: 'MINUTES') { + utils.init_git() + utils.docker_run('ubuntu_cpu', 'build_ubuntu_cpu_mkl', false) + utils.pack_lib('cpu_mkl', mx_dist_lib, true) + } + } + } + }] +} + def compile_unix_mkldnn_cpu() { return ['CPU: MKLDNN': { node(NODE_LINUX_CPU) { @@ -135,6 +149,20 @@ def compile_unix_mkldnn_cpu() { }] } +def compile_unix_mkldnn_mkl_cpu() { + return ['CPU: MKLDNN_MKL': { + node(NODE_LINUX_CPU) { + ws('workspace/build-mkldnn-cpu') { + timeout(time: max_time, unit: 'MINUTES') { + utils.init_git() + utils.docker_run('ubuntu_cpu', 'build_ubuntu_cpu_mkldnn_mkl', false) + utils.pack_lib('mkldnn_mkl_cpu', mx_mkldnn_lib, true) + } + } + } + }] +} + def compile_unix_mkldnn_gpu() { return ['GPU: MKLDNN': { node(NODE_LINUX_CPU) { @@ -580,6 +608,23 @@ def test_unix_python3_cpu() { }] } +def test_unix_python3_mkl_cpu() { + return ['Python3: MKL-CPU': { + node(NODE_LINUX_CPU) { + ws('workspace/ut-python3-cpu') { + try { + utils.unpack_and_init('cpu_mkl', mx_lib, true) + python3_ut('ubuntu_cpu') + utils.publish_test_coverage() + } finally { + utils.collect_test_results_unix('nosetests_unittest.xml', 
'nosetests_python3_cpu_unittest.xml') + utils.collect_test_results_unix('nosetests_quantization.xml', 'nosetests_python3_cpu_quantization.xml') + } + } + } + }] +} + def test_unix_python3_gpu() { return ['Python3: GPU': { node(NODE_LINUX_GPU) { @@ -665,6 +710,23 @@ def test_unix_python3_mkldnn_cpu() { }] } +def test_unix_python3_mkldnn_mkl_cpu() { + return ['Python3: MKLDNN-MKL-CPU': { + node(NODE_LINUX_CPU) { + ws('workspace/ut-python3-mkldnn-mkl-cpu') { + try { + utils.unpack_and_init('mkldnn_mkl_cpu', mx_mkldnn_lib, true) + python3_ut_mkldnn('ubuntu_cpu') + utils.publish_test_coverage() + } finally { + utils.collect_test_results_unix('nosetests_unittest.xml', 'nosetests_python3_mkldnn_cpu_unittest.xml') + utils.collect_test_results_unix('nosetests_mkl.xml', 'nosetests_python3_mkldnn_cpu_mkl.xml') + } + } + } + }] +} + def test_unix_python3_mkldnn_gpu() { return ['Python3: MKLDNN-GPU': { node(NODE_LINUX_GPU) { diff --git a/ci/jenkins/Jenkinsfile_unix_cpu b/ci/jenkins/Jenkinsfile_unix_cpu index 9c9a41503772..e581bcf65dc5 100644 --- a/ci/jenkins/Jenkinsfile_unix_cpu +++ b/ci/jenkins/Jenkinsfile_unix_cpu @@ -36,15 +36,19 @@ core_logic: { utils.parallel_stage('Build', [ custom_steps.compile_unix_cpu_openblas(), custom_steps.compile_unix_openblas_debug_cpu(), - custom_steps.compile_unix_mkldnn_cpu() + custom_steps.compile_unix_mkl_cpu(), + custom_steps.compile_unix_mkldnn_cpu(), + custom_steps.compile_unix_mkldnn_mkl_cpu() ]) utils.parallel_stage('Tests', [ custom_steps.test_unix_python2_cpu(), custom_steps.test_unix_python3_cpu(), custom_steps.test_unix_python3_debug_cpu(), + custom_steps.test_unix_python3_mkl_cpu(), custom_steps.test_unix_python2_mkldnn_cpu(), custom_steps.test_unix_python3_mkldnn_cpu(), + custom_steps.test_unix_python3_mkldnn_mkl_cpu(), custom_steps.test_unix_scala_cpu(), custom_steps.test_unix_clojure_cpu(), custom_steps.test_unix_r_cpu(), From 449e17dbf2ec671037d4b127a28897b157f80bf3 Mon Sep 17 00:00:00 2001 From: Nicolas Modrzyk Date: Wed, 12 Dec 2018 07:17:37 +0900 Subject: [PATCH 26/38] #13385 [Clojure] - Turn examples into integration tests (#13554) --- .../cnn_text_classification/classifier.clj | 14 +- .../classifier_test.clj | 44 +++ .../clojure-package/examples/gan/project.clj | 3 +- .../examples/gan/src/gan/gan_mnist.clj | 6 +- .../examples/gan/src/gan/viz.clj | 4 +- .../examples/gan/test/gan/gan_test.clj | 25 ++ .../src/imclassification/train_mnist.clj | 20 +- .../imclassification/train_mnist_test.clj | 39 +++ .../test/test-symbol.json.ref | 105 +++++++ .../examples/module/test/mnist_mlp_test.clj | 29 ++ .../multi-label/test/multi_label_test.clj | 26 ++ .../neural-style/src/neural_style/core.clj | 22 +- .../test/neural_style/vgg_19_test.clj | 53 ++++ .../examples/profiler/src/profiler/core.clj | 6 +- .../examples/profiler/test/core_test.clj | 31 ++ .../test/profile-matmul-20iter.json.ref | 271 ++++++++++++++++++ .../examples/rnn/src/rnn/test_char_rnn.clj | 4 + .../examples/rnn/src/rnn/train_char_rnn.clj | 4 + .../examples/rnn/test/rnn/core_test.clj | 26 ++ .../examples/tutorial/.gitignore | 1 + .../examples/tutorial/project.clj | 2 + .../examples/tutorial/src/tutorial/module.clj | 35 ++- .../tutorial/src/tutorial/ndarray.clj | 8 +- .../examples/tutorial/src/tutorial/symbol.clj | 10 +- .../tutorial/test/tutorial/core_test.clj | 27 ++ .../test/visualization/core_test.clj | 28 ++ contrib/clojure-package/integration-tests.sh | 28 ++ .../apache_rat_license_check/rat-excludes | 4 +- 28 files changed, 841 insertions(+), 34 deletions(-) create mode 100644 
contrib/clojure-package/examples/cnn-text-classification/test/cnn_text_classification/classifier_test.clj create mode 100644 contrib/clojure-package/examples/gan/test/gan/gan_test.clj create mode 100644 contrib/clojure-package/examples/imclassification/test/imclassification/train_mnist_test.clj create mode 100644 contrib/clojure-package/examples/imclassification/test/test-symbol.json.ref create mode 100644 contrib/clojure-package/examples/module/test/mnist_mlp_test.clj create mode 100644 contrib/clojure-package/examples/multi-label/test/multi_label_test.clj create mode 100644 contrib/clojure-package/examples/neural-style/test/neural_style/vgg_19_test.clj create mode 100644 contrib/clojure-package/examples/profiler/test/core_test.clj create mode 100644 contrib/clojure-package/examples/profiler/test/profile-matmul-20iter.json.ref create mode 100644 contrib/clojure-package/examples/rnn/test/rnn/core_test.clj create mode 100644 contrib/clojure-package/examples/tutorial/test/tutorial/core_test.clj create mode 100644 contrib/clojure-package/examples/visualization/test/visualization/core_test.clj create mode 100755 contrib/clojure-package/integration-tests.sh diff --git a/contrib/clojure-package/examples/cnn-text-classification/src/cnn_text_classification/classifier.clj b/contrib/clojure-package/examples/cnn-text-classification/src/cnn_text_classification/classifier.clj index 29ff36fe1ec0..94fd4f518c60 100644 --- a/contrib/clojure-package/examples/cnn-text-classification/src/cnn_text_classification/classifier.clj +++ b/contrib/clojure-package/examples/cnn-text-classification/src/cnn_text_classification/classifier.clj @@ -16,7 +16,9 @@ ;; (ns cnn-text-classification.classifier - (:require [cnn-text-classification.data-helper :as data-helper] + (:require [clojure.java.io :as io] + [clojure.java.shell :refer [sh]] + [cnn-text-classification.data-helper :as data-helper] [org.apache.clojure-mxnet.eval-metric :as eval-metric] [org.apache.clojure-mxnet.io :as mx-io] [org.apache.clojure-mxnet.module :as m] @@ -26,12 +28,18 @@ [org.apache.clojure-mxnet.context :as context]) (:gen-class)) +(def data-dir "data/") (def mr-dataset-path "data/mr-data") ;; the MR polarity dataset path (def glove-file-path "data/glove/glove.6B.50d.txt") (def num-filter 100) (def num-label 2) (def dropout 0.5) + + +(when-not (.exists (io/file (str data-dir))) + (do (println "Retrieving data for cnn text classification...") (sh "./get_data.sh"))) + (defn shuffle-data [test-num {:keys [data label sentence-count sentence-size embedding-size]}] (println "Shuffling the data and splitting into training and test sets") (println {:sentence-count sentence-count @@ -103,10 +111,10 @@ ;;; omit max-examples if you want to run all the examples in the movie review dataset ;; to limit mem consumption set to something like 1000 and adjust test size to 100 (println "Running with context devices of" devs) - (train-convnet {:devs [(context/cpu)] :embedding-size 50 :batch-size 10 :test-size 100 :num-epoch 10 :max-examples 1000}) + (train-convnet {:devs devs :embedding-size 50 :batch-size 10 :test-size 100 :num-epoch 10 :max-examples 1000}) ;; runs all the examples #_(train-convnet {:embedding-size 50 :batch-size 100 :test-size 1000 :num-epoch 10}))) (comment - (train-convnet {:devs [(context/cpu)] :embedding-size 50 :batch-size 10 :test-size 100 :num-epoch 10 :max-examples 1000})) + (train-convnet {:devs devs :embedding-size 50 :batch-size 10 :test-size 100 :num-epoch 10 :max-examples 1000})) diff --git 
a/contrib/clojure-package/examples/cnn-text-classification/test/cnn_text_classification/classifier_test.clj b/contrib/clojure-package/examples/cnn-text-classification/test/cnn_text_classification/classifier_test.clj new file mode 100644 index 000000000000..918a46f474d8 --- /dev/null +++ b/contrib/clojure-package/examples/cnn-text-classification/test/cnn_text_classification/classifier_test.clj @@ -0,0 +1,44 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns cnn-text-classification.classifier-test + (:require + [clojure.test :refer :all] + [org.apache.clojure-mxnet.module :as module] + [org.apache.clojure-mxnet.ndarray :as ndarray] + [org.apache.clojure-mxnet.util :as util] + [org.apache.clojure-mxnet.context :as context] + [cnn-text-classification.classifier :as classifier])) + +; +; The one and unique classifier test +; +(deftest classifier-test + (let [train + (classifier/train-convnet + {:devs [(context/default-context)] + :embedding-size 50 + :batch-size 10 + :test-size 100 + :num-epoch 1 + :max-examples 1000})] + (is (= ["data"] (util/scala-vector->vec (module/data-names train)))) + (is (= 20 (count (ndarray/->vec (-> train module/outputs first first))))))) + ;(prn (util/scala-vector->vec (data-shapes train))) + ;(prn (util/scala-vector->vec (label-shapes train))) + ;(prn (output-names train)) + ;(prn (output-shapes train)) \ No newline at end of file diff --git a/contrib/clojure-package/examples/gan/project.clj b/contrib/clojure-package/examples/gan/project.clj index b8f6903cabba..a326f7a5605f 100644 --- a/contrib/clojure-package/examples/gan/project.clj +++ b/contrib/clojure-package/examples/gan/project.clj @@ -20,5 +20,6 @@ :plugins [[lein-cljfmt "0.5.7"]] :dependencies [[org.clojure/clojure "1.9.0"] [org.apache.mxnet.contrib.clojure/clojure-mxnet "1.5.0-SNAPSHOT"] - [nu.pattern/opencv "2.4.9-7"]] + [org.openpnp/opencv "3.4.2-1"] + ] :main gan.gan-mnist) diff --git a/contrib/clojure-package/examples/gan/src/gan/gan_mnist.clj b/contrib/clojure-package/examples/gan/src/gan/gan_mnist.clj index e2e3364535ec..944791bce604 100644 --- a/contrib/clojure-package/examples/gan/src/gan/gan_mnist.clj +++ b/contrib/clojure-package/examples/gan/src/gan/gan_mnist.clj @@ -157,7 +157,9 @@ (save-img-diff i n calc-diff)))) -(defn train [devs] +(defn train + ([devs] (train devs num-epoch)) + ([devs num-epoch] (let [mod-d (-> (m/module (discriminator) {:contexts devs :data-names ["data"] :label-names ["label"]}) (m/bind {:data-shapes (mx-io/provide-data-desc mnist-iter) :label-shapes (mx-io/provide-label-desc mnist-iter) @@ -203,7 +205,7 @@ (save-img-gout i n (ndarray/copy (ffirst out-g))) (save-img-data i n batch) (calc-diff i n (ffirst diff-d))) - (inc n))))))) + (inc n)))))))) (defn -main [& args] (let [[dev dev-num] args diff --git 
a/contrib/clojure-package/examples/gan/src/gan/viz.clj b/contrib/clojure-package/examples/gan/src/gan/viz.clj index 8b57b9432a7e..67f78806de66 100644 --- a/contrib/clojure-package/examples/gan/src/gan/viz.clj +++ b/contrib/clojure-package/examples/gan/src/gan/viz.clj @@ -22,7 +22,7 @@ (:import (nu.pattern OpenCV) (org.opencv.core Core CvType Mat Size) (org.opencv.imgproc Imgproc) - (org.opencv.highgui Highgui))) + (org.opencv.imgcodecs Imgcodecs))) ;;; Viz stuff (OpenCV/loadShared) @@ -83,5 +83,5 @@ _ (Core/vconcat (java.util.ArrayList. line-mats) result)] (do (Imgproc/resize result resized-img (new Size (* (.width result) 1.5) (* (.height result) 1.5))) - (Highgui/imwrite (str output-path title ".jpg") resized-img) + (Imgcodecs/imwrite (str output-path title ".jpg") resized-img) (Thread/sleep 1000)))) diff --git a/contrib/clojure-package/examples/gan/test/gan/gan_test.clj b/contrib/clojure-package/examples/gan/test/gan/gan_test.clj new file mode 100644 index 000000000000..71b9126cae25 --- /dev/null +++ b/contrib/clojure-package/examples/gan/test/gan/gan_test.clj @@ -0,0 +1,25 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. 
+;; + +(ns gan.gan_test + (:require + [gan.gan-mnist :refer :all] + [org.apache.clojure-mxnet.context :as context] + [clojure.test :refer :all])) + +(deftest check-pdf + (train [(context/cpu)] 1)) \ No newline at end of file diff --git a/contrib/clojure-package/examples/imclassification/src/imclassification/train_mnist.clj b/contrib/clojure-package/examples/imclassification/src/imclassification/train_mnist.clj index a43dc3b69bd9..e61e9ebf6fbb 100644 --- a/contrib/clojure-package/examples/imclassification/src/imclassification/train_mnist.clj +++ b/contrib/clojure-package/examples/imclassification/src/imclassification/train_mnist.clj @@ -32,7 +32,7 @@ (def batch-size 10) ;; the batch size (def optimizer (optimizer/sgd {:learning-rate 0.01 :momentum 0.0})) (def eval-metric (eval-metric/accuracy)) -(def num-epoch 5) ;; the number of training epochs +(def num-epoch 1) ;; the number of training epochs (def kvstore "local") ;; the kvstore type ;;; Note to run distributed you might need to complile the engine with an option set (def role "worker") ;; scheduler/server/worker @@ -82,7 +82,9 @@ (sym/fully-connected "fc3" {:data data :num-hidden 10}) (sym/softmax-output "softmax" {:data data}))) -(defn start [devs] +(defn start + ([devs] (start devs num-epoch)) + ([devs _num-epoch] (when scheduler-host (println "Initing PS enviornments with " envs) (kvstore-server/init envs)) @@ -94,14 +96,18 @@ (do (println "Starting Training of MNIST ....") (println "Running with context devices of" devs) - (let [mod (m/module (get-symbol) {:contexts devs})] - (m/fit mod {:train-data train-data + (let [_mod (m/module (get-symbol) {:contexts devs})] + (m/fit _mod {:train-data train-data :eval-data test-data - :num-epoch num-epoch + :num-epoch _num-epoch :fit-params (m/fit-params {:kvstore kvstore :optimizer optimizer - :eval-metric eval-metric})})) - (println "Finish fit")))) + :eval-metric eval-metric})}) + (println "Finish fit") + _mod + ) + + )))) (defn -main [& args] (let [[dev dev-num] args diff --git a/contrib/clojure-package/examples/imclassification/test/imclassification/train_mnist_test.clj b/contrib/clojure-package/examples/imclassification/test/imclassification/train_mnist_test.clj new file mode 100644 index 000000000000..2ebefc2fc664 --- /dev/null +++ b/contrib/clojure-package/examples/imclassification/test/imclassification/train_mnist_test.clj @@ -0,0 +1,39 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. 
+;; + +(ns imclassification.train-mnist-test + (:require + [clojure.test :refer :all] + [clojure.java.io :as io] + [clojure.string :as s] + [org.apache.clojure-mxnet.context :as context] + [org.apache.clojure-mxnet.module :as module] + [imclassification.train-mnist :as mnist])) + +(defn- file-to-filtered-seq [file] + (->> + file + (io/file) + (io/reader) + (line-seq) + (filter #(not (s/includes? % "mxnet_version"))))) + +(deftest mnist-two-epochs-test + (module/save-checkpoint (mnist/start [(context/cpu)] 2) {:prefix "target/test" :epoch 2}) + (is (= + (file-to-filtered-seq "test/test-symbol.json.ref") + (file-to-filtered-seq "target/test-symbol.json")))) \ No newline at end of file diff --git a/contrib/clojure-package/examples/imclassification/test/test-symbol.json.ref b/contrib/clojure-package/examples/imclassification/test/test-symbol.json.ref new file mode 100644 index 000000000000..ba1d2fad3a8a --- /dev/null +++ b/contrib/clojure-package/examples/imclassification/test/test-symbol.json.ref @@ -0,0 +1,105 @@ +{ + "nodes": [ + { + "op": "null", + "name": "data", + "inputs": [] + }, + { + "op": "null", + "name": "fc1_weight", + "attrs": {"num_hidden": "128"}, + "inputs": [] + }, + { + "op": "null", + "name": "fc1_bias", + "attrs": {"num_hidden": "128"}, + "inputs": [] + }, + { + "op": "FullyConnected", + "name": "fc1", + "attrs": {"num_hidden": "128"}, + "inputs": [[0, 0, 0], [1, 0, 0], [2, 0, 0]] + }, + { + "op": "Activation", + "name": "relu1", + "attrs": {"act_type": "relu"}, + "inputs": [[3, 0, 0]] + }, + { + "op": "null", + "name": "fc2_weight", + "attrs": {"num_hidden": "64"}, + "inputs": [] + }, + { + "op": "null", + "name": "fc2_bias", + "attrs": {"num_hidden": "64"}, + "inputs": [] + }, + { + "op": "FullyConnected", + "name": "fc2", + "attrs": {"num_hidden": "64"}, + "inputs": [[4, 0, 0], [5, 0, 0], [6, 0, 0]] + }, + { + "op": "Activation", + "name": "relu2", + "attrs": {"act_type": "relu"}, + "inputs": [[7, 0, 0]] + }, + { + "op": "null", + "name": "fc3_weight", + "attrs": {"num_hidden": "10"}, + "inputs": [] + }, + { + "op": "null", + "name": "fc3_bias", + "attrs": {"num_hidden": "10"}, + "inputs": [] + }, + { + "op": "FullyConnected", + "name": "fc3", + "attrs": {"num_hidden": "10"}, + "inputs": [[8, 0, 0], [9, 0, 0], [10, 0, 0]] + }, + { + "op": "null", + "name": "softmax_label", + "inputs": [] + }, + { + "op": "SoftmaxOutput", + "name": "softmax", + "inputs": [[11, 0, 0], [12, 0, 0]] + } + ], + "arg_nodes": [0, 1, 2, 5, 6, 9, 10, 12], + "node_row_ptr": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14 + ], + "heads": [[13, 0, 0]], + "attrs": {"mxnet_version": ["int", 10400]} +} \ No newline at end of file diff --git a/contrib/clojure-package/examples/module/test/mnist_mlp_test.clj b/contrib/clojure-package/examples/module/test/mnist_mlp_test.clj new file mode 100644 index 000000000000..5fbcdd3c0b39 --- /dev/null +++ b/contrib/clojure-package/examples/module/test/mnist_mlp_test.clj @@ -0,0 +1,29 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. 
You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; +(ns mnist-mlp-test + (:require + [mnist-mlp :refer :all] + [org.apache.clojure-mxnet.context :as context] + [clojure.test :refer :all])) + +(deftest run-those-tests + (let [devs [(context/cpu)]] + (run-intermediate-level-api :devs devs) + (run-intermediate-level-api :devs devs :load-model-epoch (dec num-epoch)) + (run-high-level-api devs) + (run-prediction-iterator-api devs) + (run-predication-and-calc-accuracy-manually devs))) \ No newline at end of file diff --git a/contrib/clojure-package/examples/multi-label/test/multi_label_test.clj b/contrib/clojure-package/examples/multi-label/test/multi_label_test.clj new file mode 100644 index 000000000000..446a84626e72 --- /dev/null +++ b/contrib/clojure-package/examples/multi-label/test/multi_label_test.clj @@ -0,0 +1,26 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns multi_label_test + (:require + [multi-label.core :as label] + [clojure.java.io :as io] + [org.apache.clojure-mxnet.context :as context] + [clojure.test :refer :all])) + +(deftest run-multi-label + (label/train [(context/cpu)])) \ No newline at end of file diff --git a/contrib/clojure-package/examples/neural-style/src/neural_style/core.clj b/contrib/clojure-package/examples/neural-style/src/neural_style/core.clj index fcf402f3466d..ac1f537f1c26 100644 --- a/contrib/clojure-package/examples/neural-style/src/neural_style/core.clj +++ b/contrib/clojure-package/examples/neural-style/src/neural_style/core.clj @@ -24,6 +24,8 @@ [org.apache.clojure-mxnet.random :as random] [org.apache.clojure-mxnet.shape :as mx-shape] [org.apache.clojure-mxnet.symbol :as sym] + [clojure.java.io :as io] + [clojure.java.shell :refer [sh]] [mikera.image.core :as img] [mikera.image.filters :as img-filter] [think.image.pixel :as pixel] @@ -31,6 +33,9 @@ (:gen-class));; An Implementation of the paper A Neural Algorithm of Artistic Style ;;by Leon A. Gatys, Alexander S. 
Ecker, and Matthias Bethge +(when-not (.exists (io/file "input")) + (do (println "Retrieving data...") (sh "./download.sh"))) + (def content-image "input/IMG_4343.jpg") (def style-image "input/starry_night.jpg") (def model-path "model/vgg19.params") @@ -39,7 +44,7 @@ (def content-weight 5) ;; the weight for the content image (def blur-radius 1) ;; the blur filter radius (def output-dir "output") -(def lr 10) ;; the learning rate +(def lr 10.0) ;; the learning rate (def tv-weight 0.01) ;; the magnitude on the tv loss (def num-epochs 1000) (def num-channels 3) @@ -157,9 +162,10 @@ out (ndarray/* out tv-weight)] (sym/bind out ctx {"img" img "kernel" kernel})))) -(defn train [devs] - - (let [dev (first devs) +(defn train + ([devs] (train devs 20)) + ([devs n-epochs] + (let [dev (first devs) content-np (preprocess-content-image content-image max-long-edge) content-np-shape (mx-shape/->vec (ndarray/shape content-np)) style-np (preprocess-style-image style-image content-np-shape) @@ -212,7 +218,7 @@ tv-grad-executor (get-tv-grad-executor img dev tv-weight) eps 0.0 e 0] - (doseq [i (range 20)] + (doseq [i (range n-epochs)] (ndarray/set (:data model-executor) img) (-> (:executor model-executor) (executor/forward) @@ -237,8 +243,10 @@ (println "Epoch " i "relative change " eps) (when (zero? (mod i 2)) (save-image (ndarray/copy img) (str output-dir "/out_" i ".png") blur-radius true))) - - (ndarray/set old-img img)))) + (ndarray/set old-img img)) + ; (save-image (ndarray/copy img) (str output-dir "/final.png") 0 false) + ; (postprocess-image img) + ))) (defn -main [& args] ;;; Note this only works on cpu right now diff --git a/contrib/clojure-package/examples/neural-style/test/neural_style/vgg_19_test.clj b/contrib/clojure-package/examples/neural-style/test/neural_style/vgg_19_test.clj new file mode 100644 index 000000000000..a7c978607e4f --- /dev/null +++ b/contrib/clojure-package/examples/neural-style/test/neural_style/vgg_19_test.clj @@ -0,0 +1,53 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns neural-style.vgg-19-test + (:require + [clojure.test :refer :all] + [mikera.image.core :as img] + [clojure.java.io :as io] + [org.apache.clojure-mxnet.ndarray :as ndarray] + [org.apache.clojure-mxnet.context :as context] + [neural-style.core :as neural])) + +(defn pic-to-ndarray-vec[path] + (-> path + img/load-image + neural/image->ndarray + ndarray/->vec)) + +(defn last-modified-check[x] + (let [t (- (System/currentTimeMillis) (.lastModified x)) ] + (if (> 10000 t) ; 10 seconds + x + (throw (Exception. 
(str "Generated File Too Old: (" t " ms) [" x "]")))))) + +(defn latest-pic-to-ndarray-vec[folder] + (->> folder + io/as-file + (.listFiles) + (sort-by #(.lastModified %)) + last + (last-modified-check) + (.getPath) + pic-to-ndarray-vec)) + +(deftest vgg-19-test + (neural/train [(context/cpu)] 3) + (is (not (nil? (latest-pic-to-ndarray-vec "output"))))) +; generated file different depending on the platform :/ +; (pic-to-ndarray-vec "test/ref_out_2.png")))) \ No newline at end of file diff --git a/contrib/clojure-package/examples/profiler/src/profiler/core.clj b/contrib/clojure-package/examples/profiler/src/profiler/core.clj index e366c578c551..67ba0feb8a9b 100644 --- a/contrib/clojure-package/examples/profiler/src/profiler/core.clj +++ b/contrib/clojure-package/examples/profiler/src/profiler/core.clj @@ -27,9 +27,9 @@ (def profiler-mode "symbolic") ;; can be symbolic, imperative, api, mem (def output-path ".") ;; the profile file output directory (def profiler-name "profile-matmul-20iter.json") -(def iter-num 100) -(def begin-profiling-iter 50) -(def end-profiling-iter 70) +(def iter-num 5) +(def begin-profiling-iter 0) +(def end-profiling-iter 1) (def gpu? false) (defn run [] diff --git a/contrib/clojure-package/examples/profiler/test/core_test.clj b/contrib/clojure-package/examples/profiler/test/core_test.clj new file mode 100644 index 000000000000..1173f0755bbd --- /dev/null +++ b/contrib/clojure-package/examples/profiler/test/core_test.clj @@ -0,0 +1,31 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. 
+;; + +(ns core_test + (:require + [profiler.core :as profiler] + [clojure.java.io :as io] + [clojure.test :refer :all])) + +(defn count-lines[file] + (count (line-seq (io/reader (io/as-file file))))) + +(deftest run-profiler + (profiler/run) + (let [new-file (clojure.java.io/as-file profiler/profiler-name)] + (is (.exists new-file)) + (is (> 10000 (- (System/currentTimeMillis) (.lastModified new-file)))))) \ No newline at end of file diff --git a/contrib/clojure-package/examples/profiler/test/profile-matmul-20iter.json.ref b/contrib/clojure-package/examples/profiler/test/profile-matmul-20iter.json.ref new file mode 100644 index 000000000000..d6baa42114cd --- /dev/null +++ b/contrib/clojure-package/examples/profiler/test/profile-matmul-20iter.json.ref @@ -0,0 +1,271 @@ +{ + "traceEvents": [ + { + "ph": "M", + "args": { + "name": "cpu/0" + }, + "pid": 0, + "name": "process_name" + }, + { + "ph": "M", + "args": { + "name": "cpu/1" + }, + "pid": 1, + "name": "process_name" + }, + { + "ph": "M", + "args": { + "name": "cpu/2" + }, + "pid": 2, + "name": "process_name" + }, + { + "ph": "M", + "args": { + "name": "cpu/3" + }, + "pid": 3, + "name": "process_name" + }, + { + "ph": "M", + "args": { + "name": "cpu pinned/" + }, + "pid": 4, + "name": "process_name" + }, + { + "ph": "M", + "args": { + "name": "cpu shared/" + }, + "pid": 5, + "name": "process_name" + }, { + "ph": "M", + "args": { + "name": "MXNET_C_API" + }, + "pid": 13841910479334118176, + "name": "process_name" + }, + + { + "name": "MXNet C API Calls", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51195258331, + "args": { "MXNet C API Calls": 1 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "MXNet C API Concurrency", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51195258338, + "args": { "MXNet C API Concurrency": 1 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "MXExecutorForward", + "cat": "MXNET_C_API", + "ph": "b", + "ts": 51195258348, + "id": 6902988396839073221, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "MXExecutorForward", + "cat": "MXNET_C_API", + "ph": "e", + "ts": 51195258357, + "id": 6902988396839073221, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "MXNet C API Concurrency", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51195258358, + "args": { "MXNet C API Concurrency": 0 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, { + "ph": "M", + "args": { + "name": "Device Storage" + }, + "pid": 13545698322897290393, + "name": "process_name" + }, + + { + "name": "Memory: cpu/0", + "cat": "Device Storage", + "ph": "C", + "ts": 51195543378, + "args": { "Memory: cpu/0": 8 }, + "pid": 13545698322897290393, + "tid": 5603937861270119161 + } +, + { + "name": "MXNet C API Calls", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51195258559, + "args": { "MXNet C API Calls": 2 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "Memory: cpu/0", + "cat": "Device Storage", + "ph": "C", + "ts": 51195857697, + "args": { "Memory: cpu/0": 67108872 }, + "pid": 13545698322897290393, + "tid": 5603937861270119161 + } +, + { + "name": "MXNet C API Concurrency", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51195258560, + "args": { "MXNet C API Concurrency": 1 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + + { + "name": "[dot]", + "cat": "operator", + "ph": "B", + "ts": 51195857671, + "pid": 0, + "tid": 5603937861270119161 + } +, + { + 
"name": "[dot]", + "cat": "operator", + "ph": "E", + "ts": 51196931353, + "pid": 0, + "tid": 5603937861270119161 + } +, + + { + "name": "WaitForVar", + "cat": "operator", + "ph": "B", + "ts": 51196931369, + "pid": 0, + "tid": 5603937861270119161 + } +, + { + "name": "WaitForVar", + "cat": "operator", + "ph": "E", + "ts": 51196931376, + "pid": 0, + "tid": 5603937861270119161 + } +, { + "ph": "M", + "args": { + "name": "operator" + }, + "pid": 10847949044720084585, + "name": "process_name" + }, + + { + "name": "[dot]", + "cat": "operator", + "ph": "b", + "ts": 51195857671, + "id": 5603937861270119161, + "pid": 10847949044720084585, + "tid": 5603937861270119161 + } +, + { + "name": "[dot]", + "cat": "operator", + "ph": "e", + "ts": 51196931350, + "id": 5603937861270119161, + "pid": 10847949044720084585, + "tid": 5603937861270119161 + } +, + { + "name": "MXNDArrayWaitToRead", + "cat": "MXNET_C_API", + "ph": "b", + "ts": 51195258561, + "id": 6902988396839073221, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "MXNDArrayWaitToRead", + "cat": "MXNET_C_API", + "ph": "e", + "ts": 51196931386, + "id": 6902988396839073221, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } +, + { + "name": "WaitForVar", + "cat": "operator", + "ph": "b", + "ts": 51196931369, + "id": 5603937861270119161, + "pid": 10847949044720084585, + "tid": 5603937861270119161 + } +, + { + "name": "WaitForVar", + "cat": "operator", + "ph": "e", + "ts": 51196931376, + "id": 5603937861270119161, + "pid": 10847949044720084585, + "tid": 5603937861270119161 + } +, + { + "name": "MXNet C API Concurrency", + "cat": "MXNET_C_API", + "ph": "C", + "ts": 51196931391, + "args": { "MXNet C API Concurrency": 0 }, + "pid": 13841910479334118176, + "tid": 6902988396839073221 + } diff --git a/contrib/clojure-package/examples/rnn/src/rnn/test_char_rnn.clj b/contrib/clojure-package/examples/rnn/src/rnn/test_char_rnn.clj index d03b1a6b36e4..22a2982f222b 100644 --- a/contrib/clojure-package/examples/rnn/src/rnn/test_char_rnn.clj +++ b/contrib/clojure-package/examples/rnn/src/rnn/test_char_rnn.clj @@ -17,6 +17,7 @@ (ns rnn.test-char-rnn (:require [clojure.string :as string] + [clojure.java.shell :refer [sh]] [rnn.util :as util] [rnn.lstm :as lstm] [org.apache.clojure-mxnet.context :as context] @@ -24,6 +25,9 @@ [org.apache.clojure-mxnet.module :as m] [org.apache.clojure-mxnet.ndarray :as ndarray])) +(when-not (.exists (clojure.java.io/file "data")) + (do (println "Retrieving data...") (sh "./get_data.sh"))) + (def data-path "data/obama.txt") (def model-prefix) (def start-sentence "The joke ") diff --git a/contrib/clojure-package/examples/rnn/src/rnn/train_char_rnn.clj b/contrib/clojure-package/examples/rnn/src/rnn/train_char_rnn.clj index 150cd94e673c..41a764f7af95 100644 --- a/contrib/clojure-package/examples/rnn/src/rnn/train_char_rnn.clj +++ b/contrib/clojure-package/examples/rnn/src/rnn/train_char_rnn.clj @@ -17,6 +17,7 @@ (ns rnn.train-char-rnn (:require [clojure.string :as string] + [clojure.java.shell :refer [sh]] [rnn.util :as util] [rnn.lstm :as lstm] [rnn.test-char-rnn :as test-rnn] @@ -34,6 +35,9 @@ ;;https://github.com/apache/incubator-mxnet/blob/master/example/rnn/old/char-rnn.ipynb +(when-not (.exists (clojure.java.io/file "data")) + (do (println "Retrieving data...") (sh "./get_data.sh"))) + ;; batch size for training (def batch-size 32) ;; we can support various length input diff --git a/contrib/clojure-package/examples/rnn/test/rnn/core_test.clj 
b/contrib/clojure-package/examples/rnn/test/rnn/core_test.clj new file mode 100644 index 000000000000..b198577241c3 --- /dev/null +++ b/contrib/clojure-package/examples/rnn/test/rnn/core_test.clj @@ -0,0 +1,26 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns rnn.core_test + (:require + [rnn.test-char-rnn :as rnn] + [clojure.test :refer :all])) + +(deftest check-trained-network + (is (= + "The joke that we can start by the challenges of the American people. The American people have been talking about how to compete with the streets of San Antonio who the courage to come together as one " + (rnn/rnn-test "data/obama" 75 200 false)))) \ No newline at end of file diff --git a/contrib/clojure-package/examples/tutorial/.gitignore b/contrib/clojure-package/examples/tutorial/.gitignore index c53038ec0e3d..338927e78384 100644 --- a/contrib/clojure-package/examples/tutorial/.gitignore +++ b/contrib/clojure-package/examples/tutorial/.gitignore @@ -9,3 +9,4 @@ pom.xml.asc /.nrepl-port .hgignore .hg/ +filename \ No newline at end of file diff --git a/contrib/clojure-package/examples/tutorial/project.clj b/contrib/clojure-package/examples/tutorial/project.clj index 8a78ec6a6abf..58a10f04f28b 100644 --- a/contrib/clojure-package/examples/tutorial/project.clj +++ b/contrib/clojure-package/examples/tutorial/project.clj @@ -19,6 +19,8 @@ :description "MXNET tutorials" :plugins [[lein-cljfmt "0.5.7"]] :dependencies [[org.clojure/clojure "1.9.0"] + [org.apache.mxnet.contrib.clojure/clojure-mxnet "1.5.0-SNAPSHOT"] + ;; Uncomment the one appropriate for your machine & configuration: #_[org.apache.mxnet.contrib.clojure/clojure-mxnet-linux-cpu "1.4.0"] #_[org.apache.mxnet.contrib.clojure/clojure-mxnet-linux-gpu "1.4.0"] diff --git a/contrib/clojure-package/examples/tutorial/src/tutorial/module.clj b/contrib/clojure-package/examples/tutorial/src/tutorial/module.clj index 4ca50ff5cd44..e19498111022 100644 --- a/contrib/clojure-package/examples/tutorial/src/tutorial/module.clj +++ b/contrib/clojure-package/examples/tutorial/src/tutorial/module.clj @@ -184,7 +184,7 @@ ])) (m/save-checkpoint mod {:prefix save-prefix :epoch epoch-num - :save-opt-states true}))) + :save-opt-states true}))) ;; INFO org.apache.mxnet.module.Module: Saved checkpoint to my-model-0000.params ;; INFO org.apache.mxnet.module.Module: Saved optimizer state to my-model-0000.states @@ -247,7 +247,40 @@ new-mod ;=> #object[org.apache.mxnet.module.Module 0x5304d0f4 "org.apache.mxnet. ;; Create `fit-params` and then use it to set `begin-epoch` so that ;; `fit` knows to resume from a saved epoch. 
+ + +(comment +;; FIXME +; Caused by: java.io.EOFException +; at java.io.DataInputStream.readInt(DataInputStream.java:392) +; at java.io.ObjectInputStream$BlockDataInputStream.readInt(ObjectInputStream.java:3182) +; at java.io.ObjectInputStream.readInt(ObjectInputStream.java:1032) +; at org.apache.mxnet.Optimizer$$anon$1$$anonfun$deserializeState$1.apply$mcVI$sp(Optimizer.scala:84) +; at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160) +; at org.apache.mxnet.Optimizer$$anon$1.deserializeState(Optimizer.scala:83) +; at org.apache.mxnet.module.Module$$anonfun$loadOptimizerStates$3.apply(Module.scala:594) +; at org.apache.mxnet.module.Module$$anonfun$loadOptimizerStates$3.apply(Module.scala:589) +; at scala.Option.foreach(Option.scala:257) +; at org.apache.mxnet.module.Module.loadOptimizerStates(Module.scala:589) +; at org.apache.mxnet.module.Module$$anonfun$initOptimizer$4.apply(Module.scala:407) +; at org.apache.mxnet.module.Module$$anonfun$initOptimizer$4.apply(Module.scala:406) +; at scala.Option.foreach(Option.scala:257) +; at org.apache.mxnet.module.Module.initOptimizer(Module.scala:406) +; at org.apache.mxnet.module.BaseModule.fit(BaseModule.scala:407) +; at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) +; at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) +; at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) +; at java.lang.reflect.Method.invoke(Method.java:498) +; at clojure.lang.Reflector.invokeMatchingMethod(Reflector.java:93) +; at clojure.lang.Reflector.invokeInstanceMethod(Reflector.java:28) +; at org.apache.clojure_mxnet.module$fit.invokeStatic(module.clj:551) +; at org.apache.clojure_mxnet.module$fit.invoke(module.clj:538) +; at tutorial.module$eval1787.invokeStatic(module.clj:250) +; at tutorial.module$eval1787.invoke(module.clj:250) + (m/fit new-mod {:train-data train-data :eval-data test-data :num-epoch 2 :fit-params (m/fit-params {:begin-epoch 1})}) + +) \ No newline at end of file diff --git a/contrib/clojure-package/examples/tutorial/src/tutorial/ndarray.clj b/contrib/clojure-package/examples/tutorial/src/tutorial/ndarray.clj index 8e51de215157..d18bb53daaf1 100644 --- a/contrib/clojure-package/examples/tutorial/src/tutorial/ndarray.clj +++ b/contrib/clojure-package/examples/tutorial/src/tutorial/ndarray.clj @@ -91,8 +91,8 @@ (ndarray/save "filename" {"arr1" arr1 "arr2" arr2}) ;; (you can also do "s3://path" or "hdfs") -(ndarray/save "/Users/daveliepmann/src/coursework/mxnet-clj-tutorials/abc" - {"arr1" arr1 "arr2" arr2}) +;; (ndarray/save "/Users/daveliepmann/src/coursework/mxnet-clj-tutorials/abc" +;; {"arr1" arr1 "arr2" arr2}) ;; To load: (def from-file (ndarray/load "filename")) @@ -114,7 +114,9 @@ from-file ;=>{"arr1" #object[org.apache.mxnet.NDArray 0x6115ba61 "org.apache.mxn (def cpu-a (ndarray/zeros [100 200])) (ndarray/context cpu-a) ;=> #object[org.apache.mxnet.Context 0x3f376123 "cpu(0)"] -(def gpu-b (ndarray/zeros [100 200] {:ctx (context/gpu 0)})) ;; to use with gpu +(comment + (def gpu-b (ndarray/zeros [100 200] {:ctx (context/gpu 0)})) ;; to use with gpu +) ;; Currently, we do not allow operations among arrays from different ;; contexts. 
To manually enable this, use the `copy-to` function to diff --git a/contrib/clojure-package/examples/tutorial/src/tutorial/symbol.clj b/contrib/clojure-package/examples/tutorial/src/tutorial/symbol.clj index ebf4f7e96797..e88260069015 100644 --- a/contrib/clojure-package/examples/tutorial/src/tutorial/symbol.clj +++ b/contrib/clojure-package/examples/tutorial/src/tutorial/symbol.clj @@ -125,7 +125,9 @@ net ;=> #object[org.apache.mxnet.Symbol 0x5c78c8c2 "org.apache.mxnet.Symbol@5c78 (first) (ndarray/->vec));=> [2.0 2.0 2.0 2.0] -;; We can evaluate the same symbol on GPU with different data. -;; (To do this you must have the correct native library jar defined as a dependency.) -(def ex (sym/bind c (context/gpu 0) {"a" (ndarray/ones [2 2]) - "b" (ndarray/ones [2 2])})) +(comment + ;; We can evaluate the same symbol on GPU with different data. + ;; (To do this you must have the correct native library jar defined as a dependency.) + (def ex (sym/bind c (context/gpu 0) {"a" (ndarray/ones [2 2]) + "b" (ndarray/ones [2 2])})) +) diff --git a/contrib/clojure-package/examples/tutorial/test/tutorial/core_test.clj b/contrib/clojure-package/examples/tutorial/test/tutorial/core_test.clj new file mode 100644 index 000000000000..0e5169c5cfaa --- /dev/null +++ b/contrib/clojure-package/examples/tutorial/test/tutorial/core_test.clj @@ -0,0 +1,27 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns tutorial.core_test + (:require [clojure.test :refer :all]) + (:require + [tutorial.introduction] + [tutorial.kvstore] + [tutorial.module] + [tutorial.ndarray] + [tutorial.symbol])) + +(deftest if-this-goes-here-then-tutorials-have-loaded-properly (is true)) \ No newline at end of file diff --git a/contrib/clojure-package/examples/visualization/test/visualization/core_test.clj b/contrib/clojure-package/examples/visualization/test/visualization/core_test.clj new file mode 100644 index 000000000000..1b10695cb34c --- /dev/null +++ b/contrib/clojure-package/examples/visualization/test/visualization/core_test.clj @@ -0,0 +1,28 @@ +;; +;; Licensed to the Apache Software Foundation (ASF) under one or more +;; contributor license agreements. See the NOTICE file distributed with +;; this work for additional information regarding copyright ownership. +;; The ASF licenses this file to You under the Apache License, Version 2.0 +;; (the "License"); you may not use this file except in compliance with +;; the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, software +;; distributed under the License is distributed on an "AS IS" BASIS, +;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+;; See the License for the specific language governing permissions and +;; limitations under the License. +;; + +(ns visualization.core_test + (:require + [visualization.core :as visualization] + [clojure.test :refer :all])) + +(deftest check-pdf + (visualization/test-viz) + (let [new-pdf (clojure.java.io/as-file "testviz.pdf")] + (is (.exists new-pdf)) + (is (> 10000 (- (System/currentTimeMillis) (.lastModified new-pdf)))))) + \ No newline at end of file diff --git a/contrib/clojure-package/integration-tests.sh b/contrib/clojure-package/integration-tests.sh new file mode 100755 index 000000000000..3297fdc2c329 --- /dev/null +++ b/contrib/clojure-package/integration-tests.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +set -evx + +MXNET_HOME=${PWD} +EXAMPLES_HOME=${MXNET_HOME}/contrib/clojure-package/examples +#cd ${MXNET_HOME}/contrib/clojure-package +#lein test +#lein cloverage --codecov +for i in `find ${EXAMPLES_HOME} -name test` ; do +cd ${i} && lein test +done diff --git a/tests/nightly/apache_rat_license_check/rat-excludes b/tests/nightly/apache_rat_license_check/rat-excludes index 0d95792efc15..a488eb84d069 100755 --- a/tests/nightly/apache_rat_license_check/rat-excludes +++ b/tests/nightly/apache_rat_license_check/rat-excludes @@ -58,4 +58,6 @@ moderngpu/* deformable_im2col.cuh deformable_im2col.h REQUIRE -include/* \ No newline at end of file +include/* +*/test/test-symbol.json.ref +*/profiler/test/profile-matmul-20iter.json.ref \ No newline at end of file From 1f8bb26b63f030ce9d64d3a45aae0cc216572de0 Mon Sep 17 00:00:00 2001 From: Lanking Date: Tue, 11 Dec 2018 15:11:27 -0800 Subject: [PATCH 27/38] fix the Float not showing correctly problem (#13617) Merge this PR for 1.4.x --- .../scala/org/apache/mxnet/infer/javaapi/Predictor.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala index 8c48742e6f0d..0466693be9bc 100644 --- a/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala +++ b/scala-package/infer/src/main/scala/org/apache/mxnet/infer/javaapi/Predictor.scala @@ -80,10 +80,11 @@ class Predictor private[mxnet] (val predictor: org.apache.mxnet.infer.Predictor) An extra List is needed for when the model has more than one input. 
* @return Indexed sequence array of outputs */ - def predict(input: java.util.List[java.util.List[Float]]): - java.util.List[java.util.List[Float]] = { + def predict(input: java.util.List[java.util.List[java.lang.Float]]): + java.util.List[java.util.List[java.lang.Float]] = { val in = JavaConverters.asScalaIteratorConverter(input.iterator).asScala.toIndexedSeq - (predictor.predict(in map {a => a.asScala.toArray}) map {b => b.toList.asJava}).asJava + (predictor.predict(in map {a => a.asScala.map(Float2float).toArray}) + map {b => b.map(float2Float).toList.asJava}).asJava } From a4c97eca9f4dc88d9a29d33728c45ea8158a0f9e Mon Sep 17 00:00:00 2001 From: Zach Kimberg Date: Tue, 11 Dec 2018 15:19:06 -0800 Subject: [PATCH 28/38] [MXNET-1155] Add scala packageTest utility (#13046) * [MXNET-1155] Add scala packageTest utility * Clean up utility * Safe change directory in Makefile for scala * mvn install file instructions with details --- Makefile | 35 +++- scala-package/.gitignore | 1 + scala-package/examples/pom.xml | 14 ++ scala-package/packageTest/Makefile | 87 +++++++++ scala-package/packageTest/README.md | 72 ++++++++ scala-package/packageTest/core/pom.xml | 39 ++++ scala-package/packageTest/core/scripts | 1 + scala-package/packageTest/examples/pom.xml | 48 +++++ scala-package/packageTest/examples/scripts | 1 + scala-package/packageTest/infer/pom.xml | 38 ++++ scala-package/packageTest/pom.xml | 196 +++++++++++++++++++++ 11 files changed, 523 insertions(+), 9 deletions(-) create mode 100644 scala-package/packageTest/Makefile create mode 100644 scala-package/packageTest/README.md create mode 100644 scala-package/packageTest/core/pom.xml create mode 120000 scala-package/packageTest/core/scripts create mode 100644 scala-package/packageTest/examples/pom.xml create mode 120000 scala-package/packageTest/examples/scripts create mode 100644 scala-package/packageTest/infer/pom.xml create mode 100644 scala-package/packageTest/pom.xml diff --git a/Makefile b/Makefile index 16ea59f3d585..822704e26752 100644 --- a/Makefile +++ b/Makefile @@ -600,11 +600,19 @@ rpkgtest: Rscript -e 'res<-covr:::package_coverage("R-package");fileConn<-file(paste("r-package_coverage_",toString(runif(1)),".json"));writeLines(covr:::to_codecov(res), fileConn);close(fileConn)' scalaclean: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn clean -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE)) +scalatestcompile: + (cd $(ROOTDIR)/scala-package && \ + mvn test-compile -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -Dcxx="$(CXX)" \ + -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ + -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ + -Dcurrent_libdir="$(ROOTDIR)/lib" \ + -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a") + scalapkg: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn package -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -Dcxx="$(CXX)" \ -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ @@ -612,49 +620,58 @@ scalapkg: -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a") scalaunittest: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn integration-test -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE),unittest -Dcxx="$(CXX)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a" $(SCALA_TEST_ARGS)) scalaintegrationtest: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn integration-test -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE),integrationtest -Dcxx="$(CXX)" \ 
-Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a" $(SCALA_TEST_ARGS)) scalainstall: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn install -P$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) -DskipTests=true -Dcxx="$(CXX)" \ -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a") scalarelease-dryrun: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn release:clean release:prepare -DdryRun=true -DautoVersionSubmodules=true \ -Papache-release,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \ -Darguments=""-Dbuild\.platform=\""$(SCALA_PKG_PROFILE)\""\ -DskipTests=true\ -Dcflags=\""$(CFLAGS)\""\ -Dcxx=\""$(CXX)\""\ -Dldflags=\""$(LDFLAGS)\""\ -Dlddeps=\""$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a\"""") scalarelease-prepare: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn release:clean release:prepare -DautoVersionSubmodules=true \ -Papache-release,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \ -Darguments=""-Dbuild\.platform=\""$(SCALA_PKG_PROFILE)\""\ -DskipTests=true\ -Dcflags=\""$(CFLAGS)\""\ -Dcxx=\""$(CXX)\""\ -Dldflags=\""$(LDFLAGS)\""\ -Dlddeps=\""$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a\"""") scalarelease-perform: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn release:perform -DautoVersionSubmodules=true \ -Papache-release,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \ -Darguments=""-Dbuild\.platform=\""$(SCALA_PKG_PROFILE)\""\ -DskipTests=true\ -Dcflags=\""$(CFLAGS)\""\ -Dcxx=\""$(CXX)\""\ -Dldflags=\""$(LDFLAGS)\""\ -Dlddeps=\""$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a\"""") scaladeploy: - (cd $(ROOTDIR)/scala-package; \ + (cd $(ROOTDIR)/scala-package && \ mvn deploy -Papache-release,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \-DskipTests=true -Dcxx="$(CXX)" \ -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a") +scaladeploylocal: + (cd $(ROOTDIR)/scala-package && \ + mvn deploy -Papache-release,deployLocal,$(SCALA_PKG_PROFILE),$(SCALA_VERSION_PROFILE) \-DskipTests=true -Dcxx="$(CXX)" \ + -DaltDeploymentRepository=snapshot-repo::default::file:local-snapshot \ + -Dgpg.skip \ + -Dbuild.platform="$(SCALA_PKG_PROFILE)" \ + -Dcflags="$(CFLAGS)" -Dldflags="$(LDFLAGS)" \ + -Dlddeps="$(LIB_DEP) $(ROOTDIR)/lib/libmxnet.a") + jnilint: 3rdparty/dmlc-core/scripts/lint.py mxnet-jnicpp cpp scala-package/native/src diff --git a/scala-package/.gitignore b/scala-package/.gitignore index 6aa4da6b1cfc..8bc87f53e802 100644 --- a/scala-package/.gitignore +++ b/scala-package/.gitignore @@ -6,3 +6,4 @@ core/src/main/scala/org/apache/mxnet/SymbolAPIBase.scala core/src/main/scala/org/apache/mxnet/SymbolBase.scala examples/scripts/infer/images/ examples/scripts/infer/models/ +local-snapshot \ No newline at end of file diff --git a/scala-package/examples/pom.xml b/scala-package/examples/pom.xml index 8d3d156a0b18..bc53339eb388 100644 --- a/scala-package/examples/pom.xml +++ b/scala-package/examples/pom.xml @@ -42,6 +42,20 @@ linux-x86_64-gpu + + deployLocal + + + + org.apache.maven.plugins + maven-deploy-plugin + + false + + + + + release diff --git a/scala-package/packageTest/Makefile b/scala-package/packageTest/Makefile new file mode 100644 index 000000000000..6073ff8a722f --- /dev/null +++ b/scala-package/packageTest/Makefile @@ -0,0 +1,87 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more 
contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +SCALA_VERSION_PROFILE := 2.11 +SCALA_VERSION := 2.11.8 +MXNET_VERSION := "[1.3.0-SNAPSHOT,)" + +MXNET_REPO = https://repository.apache.org/content/repositories/snapshots + +ifeq ($(OS),Windows_NT) + UNAME_S := Windows +else + UNAME_S := $(shell uname -s) +endif + +ifeq ($(UNAME_S), Windows) + # TODO: currently scala package does not support windows + SCALA_PKG_PROFILE := windows +else + ifeq ($(UNAME_S), Darwin) + SCALA_PKG_PROFILE := osx-x86_64-cpu + else + SCALA_PKG_PROFILE := linux-x86_64 + ifeq ($(USE_CUDA), 1) + SCALA_PKG_PROFILE := $(SCALA_PKG_PROFILE)-gpu + else + SCALA_PKG_PROFILE := $(SCALA_PKG_PROFILE)-cpu + endif + endif +endif + +PROFILES := -Ptest +ifeq ($(UNIT), 1) + PROFILES := "$(PROFILES),unittest" +endif +ifeq ($(INTEGRATION), 1) + PROFILES := "$(PROFILES),integrationtest" +endif + +ifneq ($(UNIT), 1) + ifneq ($(INTEGRATION), 1) + PROFILES := "$(PROFILES),unittest,integrationtest" + endif +endif + + +clean: + (mvn clean -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION)) + +testinstall: + (mvn integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + $(PROFILES) \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION)) + +testlocal: + (mvn integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + $(PROFILES),fromLocal \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION)) + +testsnapshot: + (mvn integration-test -Dmxnet.profile=$(SCALA_PKG_PROFILE) \ + $(PROFILES),fromSnapshots \ + -Dmxnet.scalaprofile=$(SCALA_VERSION_PROFILE) \ + -Dmxnet.repo=$(MXNET_REPO) \ + -Dmxnet.version=$(MXNET_VERSION) \ + -Dscala.version=$(SCALA_VERSION)) diff --git a/scala-package/packageTest/README.md b/scala-package/packageTest/README.md new file mode 100644 index 000000000000..3f1eeb842f07 --- /dev/null +++ b/scala-package/packageTest/README.md @@ -0,0 +1,72 @@ +# MXNet Scala Package Test + +This is a project created to run the test suite on a fully packaged mxnet jar. The test suite is found locally but mxnet is from the target jarfile. + +## General Setup + +To set up the packageTest, you must first build your tests. To build the tests, follow these steps from the mxnet main directory: + +1. Build MXNet and the scala package from source following the directions [here](https://mxnet.incubator.apache.org/install/scala_setup.html#source) +2. Build the tests by running `make scalatestcompile`. +3. 
Follow setup instructions below for your testing goal + +## Running + +There are three different modes of operation for testing based on the location of the jar and where it is coming from: + +### Test Installed Jars + +If you have a jar file, you can install it to your maven cache repository (`~/.m2/repository`). This might be useful if you acquire the .jar file from elsewhere. To install, it is easiest to use `mvn install:install-file -Dfile=<path-to-jar> -DpomFile=<path-to-pom-file>`. If the pom file is not available, you can also run `mvn install:install-file -Dfile=<path-to-jar> -DgroupId=<group-id> -DartifactId=<artifact-id> -Dversion=<version> -Dpackaging=<packaging>`. With the full mxnet jar, this might look like `mvn install:install-file -Dfile=<path-to-jar> -DgroupId=org.apache.mxnet -DartifactId=mxnet-full_2.11-linux-x86_64-cpu -Dversion=1.3.0 -Dpackaging=jar`. + +You can also run `make scalainstall` to install from a local build. + +After installing, run `make testinstall` in the package test directory to run the tests. Note that unless you also install an additional mxnetexamples jar, you can only run the unit tests. + +### Test Local Deployment + +To test the jars that would be produced by a deployment, you can run `make scaladeploylocal` from the main mxnet directory. This produces a local snapshot located at `scala-package/local-snapshot`. To test this local snapshot, run `make testlocal`. + +### Remote Repository Snapshot + +This mode is to test a jar located in a remote repository. The default repository is the Apache snapshot repository located at `https://repository.apache.org/content/repositories/snapshots`. Note that the actual jar in a repository should be located at `$repoUrl/org/apache/mxnet/mxnet-full_$scalaVersion-$osMode/$version/*.jar`. + +Test the snapshot repo using `make testsnapshot` or a different repo using `make testsnapshot MXNET_REPO=$NEW_REPO_URL`. + +### Options + +You are able to run unit tests, integration tests, or both using this utility. To run the unit tests, add the flag `UNIT=1` to make (e.g. `make testsnapshot UNIT=1`). Use `INTEGRATION=1` for integration tests. The default behavior is to run both the unit and integration tests. However, the integration tests require that the mxnet examples be installed in addition to the full mxnet package (see test mode instructions above). + +As an additional option, you can specify the mxnet version with `MXNET_VERSION=1.3.1-SNAPSHOT`. + +## Cleaning Up + +You can clean temporary files and target artifacts by running `make scalaclean`. + +## Troubleshooting + +### Missing Examples + +If the tests fail with the following error +``` +[ERROR] Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0:test (test) on project mxnet-scala-packagetest-examples_2.11: There are test failures -> [Help 1] +[ERROR] +[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch. +[ERROR] Re-run Maven using the -X switch to enable full debug logging. +[ERROR] +[ERROR] For more information about the errors and possible solutions, please read the following articles: +[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException +[ERROR] +[ERROR] After correcting the problems, you can resume the build with the command +[ERROR] mvn -rf :mxnet-scala-packagetest-examples_2.11 +Makefile:57: recipe for target 'scalaintegrationtest' failed +make: *** [scalaintegrationtest] Error 1 +``` + +and the stack trace begins with the following, + +``` +*** RUN ABORTED *** + java.lang.NoClassDefFoundError: org/apache/mxnetexamples/Util$ +``` + +you are missing the mxnetexamples package. 
See your test mode installation section for details. diff --git a/scala-package/packageTest/core/pom.xml b/scala-package/packageTest/core/pom.xml new file mode 100644 index 000000000000..bdcd7662f082 --- /dev/null +++ b/scala-package/packageTest/core/pom.xml @@ -0,0 +1,39 @@ + + + 4.0.0 + + PackageTest + mxnet-scala-packagetest_2.11 + 1.0-SNAPSHOT + ../pom.xml + + + mxnet-scala-packagetest-core_2.11 + MXNet Scala Package Test + pom + + + + unittest + + false + + + + + + + + org.scalatest + scalatest-maven-plugin + 1.0 + + ${project.build.outputDirectory},${project.build.testOutputDirectory},../../core/target/test-classes + + + + + + diff --git a/scala-package/packageTest/core/scripts b/scala-package/packageTest/core/scripts new file mode 120000 index 000000000000..f806668aa847 --- /dev/null +++ b/scala-package/packageTest/core/scripts @@ -0,0 +1 @@ +../../core/scripts \ No newline at end of file diff --git a/scala-package/packageTest/examples/pom.xml b/scala-package/packageTest/examples/pom.xml new file mode 100644 index 000000000000..e11be657e225 --- /dev/null +++ b/scala-package/packageTest/examples/pom.xml @@ -0,0 +1,48 @@ + + + 4.0.0 + + PackageTest + mxnet-scala-packagetest_2.11 + 1.0-SNAPSHOT + ../pom.xml + + + mxnet-scala-packagetest-examples_2.11 + MXNet Scala Package Test + pom + + + + integrationtest + + false + + + + + + + + org.scalatest + scalatest-maven-plugin + 1.0 + + ${project.build.outputDirectory},${project.build.testOutputDirectory},../../examples/target/test-classes + + + + + + + + org.apache.mxnet + mxnet-examples_${mxnet.scalaprofile} + ${mxnet.version} + test + + + + diff --git a/scala-package/packageTest/examples/scripts b/scala-package/packageTest/examples/scripts new file mode 120000 index 000000000000..2bba4eeece74 --- /dev/null +++ b/scala-package/packageTest/examples/scripts @@ -0,0 +1 @@ +../../examples/scripts \ No newline at end of file diff --git a/scala-package/packageTest/infer/pom.xml b/scala-package/packageTest/infer/pom.xml new file mode 100644 index 000000000000..7c5a096d6e14 --- /dev/null +++ b/scala-package/packageTest/infer/pom.xml @@ -0,0 +1,38 @@ + + + 4.0.0 + + PackageTest + mxnet-scala-packagetest_2.11 + 1.0-SNAPSHOT + ../pom.xml + + + mxnet-scala-packagetest-infer_2.11 + MXNet Scala Package Test + pom + + + + unittest + + false + + + + + + + + org.scalatest + scalatest-maven-plugin + 1.0 + + ${project.build.outputDirectory},${project.build.testOutputDirectory},../../infer/target/test-classes + + + + + diff --git a/scala-package/packageTest/pom.xml b/scala-package/packageTest/pom.xml new file mode 100644 index 000000000000..9c5c11cf2779 --- /dev/null +++ b/scala-package/packageTest/pom.xml @@ -0,0 +1,196 @@ + + + 4.0.0 + PackageTest + mxnet-scala-packagetest_2.11 + 1.0-SNAPSHOT + MXNet Scala Package Test + pom + + + core + infer + + + + + test + + + integrationtest + + examples + + + + fromSnapshots + + + apache-snapshots + ${mxnet.repo} + default + + true + + + + + + fromLocal + + + local-snapshot + file://${basedir}/../local-snapshot + + true + + + + + + + + true + + + + + org.apache.mxnet + mxnet-full_${mxnet.scalaprofile}-${mxnet.profile} + ${mxnet.version} + + + org.scala-lang + scala-library + ${scala.version} + + + commons-io + commons-io + 2.4 + + + org.scalatest + scalatest_${mxnet.scalaprofile} + 3.0.4 + test + + + org.scalacheck + scalacheck_${mxnet.scalaprofile} + 1.13.5 + test + + + org.mockito + mockito-all + 1.10.19 + test + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.3 + + 1.6 + 1.6 + UTF-8 + + + + 
maven-resources-plugin + 2.7 + + + org.apache.maven.plugins + maven-dependency-plugin + 2.9 + + + copy-dependencies + package + + copy-dependencies + + + ${project.build.outputDirectory}/lib + runtime + test,provided + false + false + true + + + + + + org.apache.maven.plugins + maven-jar-plugin + 2.5 + + + package + + jar + + + + **/* + + + + + + + net.alchim31.maven + scala-maven-plugin + 3.2.2 + + + compile + + compile + + compile + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.19 + + true + + + + org.scalatest + scalatest-maven-plugin + 1.0 + + ${skipTests} + ${project.build.directory}/surefire-reports + . + F + WDF TestSuite.txt + + + + test + integration-test + + test + + + + + + + + From b242b0c1fb71da43fba8d6208ee8ca282e735474 Mon Sep 17 00:00:00 2001 From: Frank Liu Date: Tue, 11 Dec 2018 15:21:05 -0800 Subject: [PATCH 29/38] [MXNET-1224]: improve scala maven jni build and packing. (#13493) Major JNI feature changes. Please find more info here: https://cwiki.apache.org/confluence/display/MXNET/Scala+maven+build+improvement --- .../assembly/linux-x86_64-cpu/pom.xml | 4 ++ .../src/main/assembly/assembly.xml | 6 ++ .../assembly/linux-x86_64-gpu/pom.xml | 4 ++ .../src/main/assembly/assembly.xml | 6 ++ .../osx-x86_64-cpu/main/assembly/assembly.xml | 30 --------- scala-package/assembly/osx-x86_64-cpu/pom.xml | 4 ++ .../src/main/assembly/assembly.xml | 6 ++ scala-package/core/pom.xml | 8 +++ .../mxnet/util/NativeLibraryLoader.scala | 55 ++++++++++------ scala-package/examples/pom.xml | 4 ++ scala-package/infer/pom.xml | 4 ++ .../init-native/linux-x86_64/pom.xml | 42 ++++++++++--- scala-package/init-native/osx-x86_64/pom.xml | 49 +++++++++++++-- scala-package/native/README.md | 63 +++++++++++++++++++ scala-package/native/linux-x86_64-cpu/pom.xml | 25 ++++---- scala-package/native/linux-x86_64-gpu/pom.xml | 25 ++++---- scala-package/native/osx-x86_64-cpu/pom.xml | 50 ++++++++++++--- scala-package/pom.xml | 2 + 18 files changed, 291 insertions(+), 96 deletions(-) delete mode 100644 scala-package/assembly/osx-x86_64-cpu/main/assembly/assembly.xml create mode 100644 scala-package/native/README.md diff --git a/scala-package/assembly/linux-x86_64-cpu/pom.xml b/scala-package/assembly/linux-x86_64-cpu/pom.xml index abefead175c7..1658f36e6bbd 100644 --- a/scala-package/assembly/linux-x86_64-cpu/pom.xml +++ b/scala-package/assembly/linux-x86_64-cpu/pom.xml @@ -14,6 +14,10 @@ MXNet Scala Package - Full Linux-x86_64 CPU-only jar + + ${project.parent.parent.basedir}/.. + + org.apache.mxnet diff --git a/scala-package/assembly/linux-x86_64-cpu/src/main/assembly/assembly.xml b/scala-package/assembly/linux-x86_64-cpu/src/main/assembly/assembly.xml index a574f8af25d9..f4c2017c8241 100644 --- a/scala-package/assembly/linux-x86_64-cpu/src/main/assembly/assembly.xml +++ b/scala-package/assembly/linux-x86_64-cpu/src/main/assembly/assembly.xml @@ -25,4 +25,10 @@ + + + ${MXNET_DIR}/lib/libmxnet.so + lib/native + + diff --git a/scala-package/assembly/linux-x86_64-gpu/pom.xml b/scala-package/assembly/linux-x86_64-gpu/pom.xml index 96ffa38c6af2..c80515e7b107 100644 --- a/scala-package/assembly/linux-x86_64-gpu/pom.xml +++ b/scala-package/assembly/linux-x86_64-gpu/pom.xml @@ -14,6 +14,10 @@ MXNet Scala Package - Full Linux-x86_64 GPU jar + + ${project.parent.parent.basedir}/.. 
+ + org.apache.mxnet diff --git a/scala-package/assembly/linux-x86_64-gpu/src/main/assembly/assembly.xml b/scala-package/assembly/linux-x86_64-gpu/src/main/assembly/assembly.xml index 3a064bf9f2ce..2aca64bdf1a9 100644 --- a/scala-package/assembly/linux-x86_64-gpu/src/main/assembly/assembly.xml +++ b/scala-package/assembly/linux-x86_64-gpu/src/main/assembly/assembly.xml @@ -25,4 +25,10 @@ + + + ${MXNET_DIR}/lib/libmxnet.so + lib/native + + diff --git a/scala-package/assembly/osx-x86_64-cpu/main/assembly/assembly.xml b/scala-package/assembly/osx-x86_64-cpu/main/assembly/assembly.xml deleted file mode 100644 index fecafecad31e..000000000000 --- a/scala-package/assembly/osx-x86_64-cpu/main/assembly/assembly.xml +++ /dev/null @@ -1,30 +0,0 @@ - - full - - jar - - false - - - - *:*:jar - - / - true - true - runtime - - - lib/native - ${artifact.artifactId}${dashClassifier?}.${artifact.extension} - false - false - false - - *:*:dll:* - *:*:so:* - *:*:jnilib:* - - - - diff --git a/scala-package/assembly/osx-x86_64-cpu/pom.xml b/scala-package/assembly/osx-x86_64-cpu/pom.xml index 5c5733a9a4ce..62979a140fdc 100644 --- a/scala-package/assembly/osx-x86_64-cpu/pom.xml +++ b/scala-package/assembly/osx-x86_64-cpu/pom.xml @@ -14,6 +14,10 @@ MXNet Scala Package - Full OSX-x86_64 CPU-only jar + + ${project.parent.parent.basedir}/.. + + org.apache.mxnet diff --git a/scala-package/assembly/osx-x86_64-cpu/src/main/assembly/assembly.xml b/scala-package/assembly/osx-x86_64-cpu/src/main/assembly/assembly.xml index bdbd09f170c0..e9bc3728fcd0 100644 --- a/scala-package/assembly/osx-x86_64-cpu/src/main/assembly/assembly.xml +++ b/scala-package/assembly/osx-x86_64-cpu/src/main/assembly/assembly.xml @@ -25,4 +25,10 @@ + + + ${MXNET_DIR}/lib/libmxnet.so + lib/native + + diff --git a/scala-package/core/pom.xml b/scala-package/core/pom.xml index 484fbbd96790..976383f2e7d5 100644 --- a/scala-package/core/pom.xml +++ b/scala-package/core/pom.xml @@ -12,6 +12,7 @@ true + ${project.parent.basedir}/.. 
mxnet-core_2.11 @@ -77,6 +78,9 @@ -Djava.library.path=${project.parent.basedir}/native/${platform}/target \ -Dlog4j.configuration=file://${project.basedir}/src/test/resources/log4j.properties + + ${MXNET_DIR}/lib + @@ -88,6 +92,10 @@ -Djava.library.path=${project.parent.basedir}/native/${platform}/target ${skipTests} + always + + ${MXNET_DIR}/lib + diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/util/NativeLibraryLoader.scala b/scala-package/core/src/main/scala/org/apache/mxnet/util/NativeLibraryLoader.scala index e94d320391fa..2ce893b478ed 100644 --- a/scala-package/core/src/main/scala/org/apache/mxnet/util/NativeLibraryLoader.scala +++ b/scala-package/core/src/main/scala/org/apache/mxnet/util/NativeLibraryLoader.scala @@ -85,12 +85,10 @@ private[mxnet] object NativeLibraryLoader { } logger.debug(s"Attempting to load $loadLibname") val libFileInJar = libPathInJar + loadLibname - val is: InputStream = getClass.getResourceAsStream(libFileInJar) - if (is == null) { - throw new UnsatisfiedLinkError(s"Couldn't find the resource $loadLibname") - } - logger.info(s"Loading $loadLibname from $libPathInJar copying to $libname") - loadLibraryFromStream(libname, is) + saveLibraryToTemp("libmxnet.so", "/lib/native/libmxnet.so") + val tempfile: File = saveLibraryToTemp(libname, libFileInJar) + + loadLibraryFromFile(libname, tempfile) } /** @@ -109,7 +107,7 @@ private[mxnet] object NativeLibraryLoader { @throws(classOf[IOException]) private def createTempFile(name: String): File = { - new File(_tempDir + File.separator + name) + new File(_tempDir, name) } /** @@ -117,11 +115,34 @@ private[mxnet] object NativeLibraryLoader { * and loads from there. * * @param libname name of the library (just used in constructing the library name) - * @param is InputStream pointing to the library + * @param tempfile File pointing to the library */ - private def loadLibraryFromStream(libname: String, is: InputStream) { + private def loadLibraryFromFile(libname: String, tempfile: File) { + try { + logger.debug("Loading library from {}", tempfile.getPath) + System.load(tempfile.getPath) + } catch { + case ule: UnsatisfiedLinkError => + logger.error("Couldn't load copied link file: {}", ule.toString) + throw ule + } + } + + /** + * Load a system library from a stream. Copies the library to a temp file + * and loads from there. 
+ * + * @param libname name of the library (just used in constructing the library name) + * @param resource String resource path in the jar file + */ + private def saveLibraryToTemp(libname: String, resource: String): File = { try { - val tempfile: File = createTempFile(libname) + val is: InputStream = getClass.getResourceAsStream(resource) + if (is == null) { + throw new UnsatisfiedLinkError(s"Couldn't find the resource $resource") + } + + val tempfile: File = new File(_tempDir, libname) val os: OutputStream = new FileOutputStream(tempfile) logger.debug("tempfile.getPath() = {}", tempfile.getPath) val savedTime: Long = System.currentTimeMillis @@ -131,20 +152,14 @@ private[mxnet] object NativeLibraryLoader { os.write(buf, 0, len) len = is.read(buf) } - os.flush() - val lock: InputStream = new FileInputStream(tempfile) os.close() + is.close() val seconds: Double = (System.currentTimeMillis - savedTime).toDouble / 1e3 - logger.debug(s"Copying took $seconds seconds.") - logger.debug("Loading library from {}", tempfile.getPath) - System.load(tempfile.getPath) - lock.close() + logger.debug(s"Copying $libname took $seconds seconds.") + tempfile } catch { case io: IOException => - logger.error("Could not create the temp file: {}", io.toString) - case ule: UnsatisfiedLinkError => - logger.error("Couldn't load copied link file: {}", ule.toString) - throw ule + throw new UnsatisfiedLinkError(s"Could not create temp file for $libname") } } } diff --git a/scala-package/examples/pom.xml b/scala-package/examples/pom.xml index bc53339eb388..3ebb39b9a67e 100644 --- a/scala-package/examples/pom.xml +++ b/scala-package/examples/pom.xml @@ -15,6 +15,7 @@ true + ${project.parent.basedir}/.. @@ -151,6 +152,9 @@ -Djava.library.path=${project.parent.basedir}/native/${platform}/target \ -Dlog4j.configuration=file://${project.basedir}/src/test/resources/log4j.properties + + ${MXNET_DIR}/lib + diff --git a/scala-package/infer/pom.xml b/scala-package/infer/pom.xml index ac76cdd19f3b..fb5cf370a009 100644 --- a/scala-package/infer/pom.xml +++ b/scala-package/infer/pom.xml @@ -15,6 +15,7 @@ true + ${project.parent.basedir}/.. @@ -77,6 +78,9 @@ -Djava.library.path=${project.parent.basedir}/native/${platform}/target \ -Dlog4j.configuration=file://${project.basedir}/src/test/resources/log4j.properties + + ${MXNET_DIR}/lib + diff --git a/scala-package/init-native/linux-x86_64/pom.xml b/scala-package/init-native/linux-x86_64/pom.xml index b71d7cf71528..242f2f3d5626 100644 --- a/scala-package/init-native/linux-x86_64/pom.xml +++ b/scala-package/init-native/linux-x86_64/pom.xml @@ -16,6 +16,10 @@ so + + ${project.parent.parent.basedir}/.. 
+ + org.apache.mxnet @@ -62,22 +66,24 @@ -std=c++0x - -I${project.basedir}/../../../include - ${all_includes} - ${cflags} + -I${MXNET_DIR}/include + -I${MXNET_DIR}/3rdparty/dmlc-core/include + -I${MXNET_DIR}/3rdparty/mshadow + -I${MXNET_DIR}/3rdparty/dlpack/include + -I${MXNET_DIR}/3rdparty/tvm/nnvm/include + -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CUDA=0 + -O3 -DNDEBUG=1 -fPIC -msse3 -mf16c + -Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs -shared - ${all_ldpaths} -Wl,--whole-archive - ${lddeps} - -Wl,--no-whole-archive -pthread -lm -fopenmp -lrt - ${ldflags} - -fopenmp + -Wl,-rpath=${dollar}ORIGIN -lmxnet -L${MXNET_DIR}/lib @@ -86,7 +92,6 @@ javah generate-sources - linux default ${project.build.directory}/custom-javah ${basedir} @@ -101,6 +106,25 @@ + + + org.codehaus.mojo + exec-maven-plugin + 1.6.0 + + + link-native-lib + generate-resources + + exec + + + ln + -sf ${MXNET_DIR}/lib/libmxnet.so ${project.build.directory}/libmxnet.so + + + + diff --git a/scala-package/init-native/osx-x86_64/pom.xml b/scala-package/init-native/osx-x86_64/pom.xml index b4a0b1d6584a..12f4d800eba4 100644 --- a/scala-package/init-native/osx-x86_64/pom.xml +++ b/scala-package/init-native/osx-x86_64/pom.xml @@ -16,6 +16,10 @@ jnilib + + ${project.parent.parent.basedir}/.. + + org.apache.mxnet @@ -62,8 +66,14 @@ -std=c++0x - -I${project.basedir}/../../../include - ${cflags} + -I${MXNET_DIR}/include + -I${MXNET_DIR}/3rdparty/dmlc-core/include + -I${MXNET_DIR}/3rdparty/mshadow + -I${MXNET_DIR}/3rdparty/dlpack/include + -I${MXNET_DIR}/3rdparty/tvm/nnvm/include + -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CUDA=0 + -g -O0 -fPIC -msse3 -mf16c + -Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs -shared @@ -72,11 +82,9 @@ -framework JavaVM -Wl,-exported_symbol,_Java_* -Wl,-x - ${lddeps} - -force_load ${project.basedir}/../../../lib/libmxnet.a - ${ldflags} + -lmxnet -L${MXNET_DIR}/lib @@ -85,7 +93,6 @@ javah generate-sources - darwin default ${project.build.directory}/custom-javah ${basedir} @@ -100,6 +107,36 @@ + + + org.codehaus.mojo + exec-maven-plugin + 1.6.0 + + + post-native-build + package + + exec + + + install_name_tool + -change lib/libmxnet.so @loader_path/libmxnet.so ${project.build.directory}/${artifactId}.jnilib + + + + link-native-lib + generate-resources + + exec + + + ln + -sf ${MXNET_DIR}/lib/libmxnet.so ${project.build.directory}/libmxnet.so + + + + diff --git a/scala-package/native/README.md b/scala-package/native/README.md new file mode 100644 index 000000000000..cb6dd3890dd2 --- /dev/null +++ b/scala-package/native/README.md @@ -0,0 +1,63 @@ +# MXNet Scala JNI + +MXNet Scala JNI is a thin wrapper layer over the underlying libmxnet.so. + +## javah +JNI native code requires a header file that matches the java/scala interface; +this file is usually generated with javah. + +In our case, jni_helper_func.h is generated and will be used to compile native code. + + +## Linker options + +Scala JNI (libmxnet-scala.so/libmxnet-scala.jnilib) is dynamically linked to libmxnet.so. +MXNet Scala will first try to load libmxnet.so from the system LD_LIBRARY_PATH. +If that fails, it will try to resolve libmxnet.so in the same location as the libmxnet-scala.so file. + +### Linux +``` +-Wl,-rpath=$ORIGIN -lmxnet +``` +The above option tells the system to look for libmxnet.so in the same location as the JNI library. 
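+One way to sanity-check this linker setup (a suggested verification step, not part of the build itself; it assumes a Linux build has produced `libmxnet-scala.so` with `libmxnet.so` copied next to it, e.g. under `native/linux-x86_64-cpu/target`) is to inspect the JNI library with standard ELF tools: +```bash +# The dynamic section is expected to contain an RPATH/RUNPATH entry with $ORIGIN, +# i.e. the directory of the JNI library itself. +readelf -d libmxnet-scala.so | grep -E 'RPATH|RUNPATH' +# libmxnet.so should then resolve from that same directory (or from LD_LIBRARY_PATH if set). +ldd libmxnet-scala.so | grep libmxnet.so +```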
+ + +### Mac OSX +On Mac, we have to run the install_name_tool command to change the library loading path: +```bash +install_name_tool -change lib/libmxnet.so @loader_path/libmxnet.so libmxnet-scala.jnilib +``` + +Other linker options: +* -shared : link as a shared library +* -Wl,-install_name,libmxnet-scala.jnilib : avoid using the build machine's absolute path +* -framework JavaVM : standard JNI option for Mac +* -Wl,-exported_symbol,_Java_* : standard JNI option for Mac +* -Wl,-x : Do not put non-global symbols in the output file's symbol table. + + +## Compiler flags + +Scala JNI code technically doesn't depend on any of the MXNet make flags; +however, the c_api.h header pulls in many other dependencies' header files, +which requires us to add DMSHADOW_USE_MKL and DMSHADOW_USE_CUDA to compile the JNI code. +These flags are not actually used by JNI and won't impact Scala's behavior. + + +### Linux + +``` +-DMSHADOW_USE_MKL=0 +-DMSHADOW_USE_CUDA=0 +-O3 -DNDEBUG=1 -fPIC -msse3 -mf16c +-Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs +``` + +### Mac OSX + +``` +-DMSHADOW_USE_MKL=0 +-DMSHADOW_USE_CUDA=0 +-g -O0 -fPIC -msse3 -mf16c +-Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs +``` diff --git a/scala-package/native/linux-x86_64-cpu/pom.xml b/scala-package/native/linux-x86_64-cpu/pom.xml index 2415cf7d26db..7cfd01a4ef79 100644 --- a/scala-package/native/linux-x86_64-cpu/pom.xml +++ b/scala-package/native/linux-x86_64-cpu/pom.xml @@ -16,6 +16,10 @@ so + + ${project.parent.parent.basedir}/.. + + org.apache.mxnet @@ -62,22 +66,20 @@ -std=c++0x - -I${project.basedir}/../../../include - ${all_includes} - ${cflags} + -I${MXNET_DIR}/include + -I${MXNET_DIR}/3rdparty/dmlc-core/include + -I${MXNET_DIR}/3rdparty/mshadow + -I${MXNET_DIR}/3rdparty/dlpack/include + -I${MXNET_DIR}/3rdparty/tvm/nnvm/include + -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CUDA=0 + -O3 -DNDEBUG=1 -fPIC -msse3 -mf16c + -Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs -shared - - ${all_ldpaths} - -Wl,--whole-archive - ${lddeps} - -Wl,--no-whole-archive - - ${ldflags} - -fopenmp + -Wl,-rpath=${dollar}ORIGIN -lmxnet -L${MXNET_DIR}/lib @@ -86,7 +88,6 @@ javah generate-sources - linux default ${project.build.directory}/custom-javah ${basedir} diff --git a/scala-package/native/linux-x86_64-gpu/pom.xml b/scala-package/native/linux-x86_64-gpu/pom.xml index 0186217234bc..668f330b5ff9 100644 --- a/scala-package/native/linux-x86_64-gpu/pom.xml +++ b/scala-package/native/linux-x86_64-gpu/pom.xml @@ -16,6 +16,10 @@ so + + ${project.parent.parent.basedir}/.. 
+ + org.apache.mxnet @@ -62,22 +66,20 @@ -std=c++0x - -I${project.basedir}/../../../include - ${all_includes} - ${cflags} + -I${MXNET_DIR}/include + -I${MXNET_DIR}/3rdparty/dmlc-core/include + -I${MXNET_DIR}/3rdparty/mshadow + -I${MXNET_DIR}/3rdparty/dlpack/include + -I${MXNET_DIR}/3rdparty/tvm/nnvm/include + -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CUDA=0 + -O3 -DNDEBUG=1 -fPIC -msse3 -mf16c + -Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs -shared - - ${all_ldpaths} - -Wl,--whole-archive - ${lddeps} - -Wl,--no-whole-archive - - ${ldflags} - -fopenmp + -Wl,-rpath=${dollar}ORIGIN -lmxnet -L${MXNET_DIR}/lib @@ -86,7 +88,6 @@ javah generate-sources - linux default ${project.build.directory}/custom-javah ${basedir} diff --git a/scala-package/native/osx-x86_64-cpu/pom.xml b/scala-package/native/osx-x86_64-cpu/pom.xml index 0ab7ca1dd0f0..425ca96815de 100644 --- a/scala-package/native/osx-x86_64-cpu/pom.xml +++ b/scala-package/native/osx-x86_64-cpu/pom.xml @@ -16,6 +16,10 @@ jnilib + + ${project.parent.parent.basedir}/.. + + org.apache.mxnet @@ -62,8 +66,14 @@ -std=c++0x - -I../../../include - ${cflags} + -I${MXNET_DIR}/include + -I${MXNET_DIR}/3rdparty/dmlc-core/include + -I${MXNET_DIR}/3rdparty/mshadow + -I${MXNET_DIR}/3rdparty/dlpack/include + -I${MXNET_DIR}/3rdparty/tvm/nnvm/include + -DMSHADOW_USE_MKL=0 -DMSHADOW_USE_CUDA=0 + -g -O0 -fPIC -msse3 -mf16c + -Wall -Wsign-compare -Wno-unused-parameter -Wno-unknown-pragmas -Wno-unused-local-typedefs -shared @@ -72,12 +82,9 @@ -framework JavaVM -Wl,-exported_symbol,_Java_* -Wl,-x - ${lddeps} - -force_load ${project.basedir}/../../../lib/libmxnet.a - -force_load ${project.basedir}/../../../3rdparty/tvm/nnvm/lib/libnnvm.a - ${ldflags} + -Wl,-install_name,libmxnet-scala.jnilib -lmxnet -L${MXNET_DIR}/lib @@ -86,7 +93,6 @@ javah generate-sources - darwin default ${project.build.directory}/custom-javah ${basedir} @@ -101,6 +107,36 @@ + + + org.codehaus.mojo + exec-maven-plugin + 1.6.0 + + + post-native-build + package + + exec + + + install_name_tool + -change lib/libmxnet.so @loader_path/libmxnet.so ${project.build.directory}/${artifactId}.jnilib + + + + link-native-lib + generate-resources + + exec + + + ln + -sf ${MXNET_DIR}/lib/libmxnet.so ${project.build.directory}/libmxnet.so + + + + diff --git a/scala-package/pom.xml b/scala-package/pom.xml index 151462cbcc68..6eb573bf3e23 100644 --- a/scala-package/pom.xml +++ b/scala-package/pom.xml @@ -39,6 +39,8 @@ 2.11.8 2.11 + g++ + $ pom From 97e0c972178177011ee928407719b2e002fa116f Mon Sep 17 00:00:00 2001 From: Zach Kimberg Date: Tue, 11 Dec 2018 15:23:13 -0800 Subject: [PATCH 30/38] [MXNET-1225] Always use config.mk in make install instructions (#13364) * Always use config.mk in make install instructions * Specify Cuda 0 for ubuntu with mkldnn * Scala install doc avoid build_from_source Minor doc fixes * Fix build_from_source CMake usage * CPP Install Instruction with CMake * Use cmake out of source build --- docs/install/build_from_source.md | 41 +++++++++++++++++++++---------- docs/install/c_plus_plus.md | 3 ++- docs/install/java_setup.md | 4 ++- docs/install/osx_setup.md | 9 ++++++- docs/install/scala_setup.md | 4 ++- docs/install/ubuntu_setup.md | 21 +++++++++++++--- 6 files changed, 61 insertions(+), 21 deletions(-) diff --git a/docs/install/build_from_source.md b/docs/install/build_from_source.md index e41b1d0f1804..e807fb44b599 100644 --- a/docs/install/build_from_source.md +++ b/docs/install/build_from_source.md @@ -2,6 +2,7 @@ This document explains how to 
build MXNet from source code. +**For Java/Scala/Clojure, please follow [this guide instead](./scala_setup.md)** ## Overview @@ -27,7 +28,6 @@ MXNet's newest and most popular API is Gluon. Gluon is built into the Python bin - [Python (includes Gluon)](../api/python/index.html) - [C++](../api/c++/index.html) - [Clojure](../api/clojure/index.html) - - Java (coming soon) - [Julia](../api/julia/index.html) - [Perl](../api/perl/index.html) - [R](../api/r/index.html) @@ -35,6 +35,7 @@ MXNet's newest and most popular API is Gluon. Gluon is built into the Python bin - [Java](../api/java/index.html)


+ ## Build Instructions by Operating System Detailed instructions are provided per operating system. Each of these guides also covers how to install the specific [Language Bindings](#installing-mxnet-language-bindings) you require. @@ -160,7 +161,7 @@ More information on turning these features on or off are found in the following ## Build Configurations There is a configuration file for make, -[`make/config.mk`](https://github.com/apache/incubator-mxnet/blob/master/make/config.mk), that contains all the compilation options. You can edit it and then run `make` or `cmake`. `cmake` is recommended for building MXNet (and is required to build with MKLDNN), however you may use `make` instead. +[`make/config.mk`](https://github.com/apache/incubator-mxnet/blob/master/make/config.mk), that contains all the compilation options. You can edit it and then run `make` or `cmake`. `cmake` is recommended for building MXNet (and is required to build with MKLDNN), however you may use `make` instead. For building with Java/Scala/Clojure, only `make` is supported.
@@ -203,18 +204,18 @@ It is recommended to set environment variable NCCL_LAUNCH_MODE to PARALLEL when ### Build MXNet with C++ -* To enable C++ package, just add `USE_CPP_PACKAGE=1` when you run `make` or `cmake`. +* To enable C++ package, just add `USE_CPP_PACKAGE=1` when you run `make` or `cmake` (see examples).
### Usage Examples -* `-j` runs multiple jobs against multi-core CPUs. - For example, you can specify using all cores on Linux as follows: ```bash -cmake -j$(nproc) +mkdir build && cd build +cmake -GNinja . +ninja -v ``` @@ -222,28 +223,36 @@ cmake -j$(nproc) * Build MXNet with `cmake` and install with MKL DNN, GPU, and OpenCV support: ```bash -cmake -j USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 USE_MKLDNN=1 +mkdir build && cd build +cmake -DUSE_CUDA=1 -DUSE_CUDA_PATH=/usr/local/cuda -DUSE_CUDNN=1 -DUSE_MKLDNN=1 -GNinja . +ninja -v ``` #### Recommended for Systems with NVIDIA GPUs * Build with both OpenBLAS, GPU, and OpenCV support: ```bash -cmake -j BLAS=open USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 +mkdir build && cd build +cmake -DBLAS=open -DUSE_CUDA=1 -DUSE_CUDA_PATH=/usr/local/cuda -DUSE_CUDNN=1 -GNinja . +ninja -v ``` #### Recommended for Systems with Intel CPUs * Build MXNet with `cmake` and install with MKL DNN, and OpenCV support: ```bash -cmake -j USE_CUDA=0 USE_MKLDNN=1 +mkdir build && cd build +cmake -DUSE_CUDA=0 -DUSE_MKLDNN=1 -GNinja . +ninja -v ``` #### Recommended for Systems with non-Intel CPUs * Build MXNet with `cmake` and install with OpenBLAS and OpenCV support: ```bash -cmake -j USE_CUDA=0 BLAS=open +mkdir build && cd build +cmake -DUSE_CUDA=0 -DBLAS=open -GNinja . +ninja -v ``` #### Other Examples @@ -251,20 +260,26 @@ cmake -j USE_CUDA=0 BLAS=open * Build without using OpenCV: ```bash -cmake USE_OPENCV=0 +mkdir build && cd build +cmake -DUSE_OPENCV=0 -GNinja . +ninja -v ``` * Build on **macOS** with the default BLAS library (Apple Accelerate) and Clang installed with `xcode` (OPENMP is disabled because it is not supported by the Apple version of Clang): ```bash -cmake -j BLAS=apple USE_OPENCV=0 USE_OPENMP=0 +mkdir build && cd build +cmake -DBLAS=apple -DUSE_OPENCV=0 -DUSE_OPENMP=0 -GNinja . +ninja -v ``` * To use OpenMP on **macOS** you need to install the Clang compiler, `llvm` (the one provided by Apple does not support OpenMP): ```bash brew install llvm -cmake -j BLAS=apple USE_OPENMP=1 +mkdir build && cd build +cmake -DBLAS=apple -DUSE_OPENMP=1 -GNinja . +ninja -v ```
diff --git a/docs/install/c_plus_plus.md b/docs/install/c_plus_plus.md index 6078877c27c8..6ad67e2803db 100644 --- a/docs/install/c_plus_plus.md +++ b/docs/install/c_plus_plus.md @@ -6,7 +6,8 @@ To enable C++ package, just add `USE_CPP_PACKAGE=1` in the [build from source](b For example to build MXNet with GPU support and the C++ package, OpenCV, and OpenBLAS, from the project root you would run: ```bash -make -j USE_CPP_PACKAGE=1 USE_OPENCV=1 USE_BLAS=openblas USE_CUDA=1 +cmake -DUSE_CUDA=1 -DUSE_CUDA_PATH=/usr/local/cuda -DUSE_CUDNN=1 -DUSE_MKLDNN=1 -DUSE_CPP_PACKAGE=1 -GNinja . +ninja -v ``` You may also want to add the MXNet shared library to your `LD_LIBRARY_PATH`: diff --git a/docs/install/java_setup.md b/docs/install/java_setup.md index 34b0967c421e..0075e9205880 100644 --- a/docs/install/java_setup.md +++ b/docs/install/java_setup.md @@ -89,11 +89,13 @@ The official Java Packages will be released with the release of MXNet 1.4 and wi The previously mentioned setup with Maven is recommended. Otherwise, the following instructions for macOS and Ubuntu are provided for reference only: +**If you have already built mxnet from source using `cmake`, run `make clean` and then follow the appropriate guide below*** + | OS | Step 1 | Step 2 | |---|---|---| |macOS | [Shared Library for macOS](../install/osx_setup.html#build-the-shared-library) | [Scala Package for macOS](http://mxnet.incubator.apache.org/install/osx_setup.html#install-the-mxnet-package-for-scala) | | Ubuntu | [Shared Library for Ubuntu](../install/ubuntu_setup.html#installing-mxnet-on-ubuntu) | [Scala Package for Ubuntu](http://mxnet.incubator.apache.org/install/ubuntu_setup.html#install-the-mxnet-package-for-scala) | -| Windows | [Shared Library for Windows](../install/windows_setup.html#build-the-shared-library) | Call for Contribution | +| Windows | | Call for Contribution | #### Build Java from an Existing MXNet Installation diff --git a/docs/install/osx_setup.md b/docs/install/osx_setup.md index 4e9293efce93..a2b59fe03618 100644 --- a/docs/install/osx_setup.md +++ b/docs/install/osx_setup.md @@ -96,7 +96,14 @@ The file called ```osx.mk``` has the configuration required for building MXNet o To build with MKLDNN ```bash -LIBRARY_PATH=$(brew --prefix llvm)/lib/ make -j $(sysctl -n hw.ncpu) CC=$(brew --prefix llvm)/bin/clang++ CXX=$(brew --prefix llvm)/bin/clang++ USE_OPENCV=1 USE_OPENMP=1 USE_MKLDNN=1 USE_BLAS=apple USE_PROFILER=1 +echo "CC=$(brew --prefix llvm)/bin/clang++" >> ./config.mk +echo "CXX=$(brew --prefix llvm)/bin/clang++" >> ./config.mk +echo "USE_OPENCV=1" >> ./config.mk +echo "USE_OPENMP=1" >> ./config.mk +echo "USE_MKLDNN=1" >> ./config.mk +echo "USE_BLAS=apple" >> ./config.mk +echo "USE_PROFILER=1" >> ./config.mk +LIBRARY_PATH=$(brew --prefix llvm)/lib/ make -j $(sysctl -n hw.ncpu) ``` If building with ```GPU``` support, add the following configuration to config.mk and build: diff --git a/docs/install/scala_setup.md b/docs/install/scala_setup.md index 0dadd8bca400..98e752b21dd5 100644 --- a/docs/install/scala_setup.md +++ b/docs/install/scala_setup.md @@ -79,11 +79,13 @@ https://mvnrepository.com/artifact/org.apache.mxnet The previously mentioned setup with Maven is recommended. 
Otherwise, the following instructions for macOS, Ubuntu, and Windows are provided for reference only: +**If you have already built mxnet from source using `cmake`, run `make clean` and then follow the appropriate guide below*** + | OS | Step 1 | Step 2 | |---|---|---| |macOS | [Shared Library for macOS](http://mxnet.incubator.apache.org/install/osx_setup.html#build-the-shared-library) | [Scala Package for macOS](http://mxnet.incubator.apache.org/install/osx_setup.html#install-the-mxnet-package-for-scala) | | Ubuntu | [Shared Library for Ubuntu](http://mxnet.incubator.apache.org/install/ubuntu_setup.html#installing-mxnet-on-ubuntu) | [Scala Package for Ubuntu](http://mxnet.incubator.apache.org/install/ubuntu_setup.html#install-the-mxnet-package-for-scala) | -| Windows | [Shared Library for Windows](http://mxnet.incubator.apache.org/install/windows_setup.html#build-the-shared-library) | Call for Contribution | +| Windows | | Call for Contribution | #### Build Scala from an Existing MXNet Installation diff --git a/docs/install/ubuntu_setup.md b/docs/install/ubuntu_setup.md index bd1b441d5556..bf964182b50a 100644 --- a/docs/install/ubuntu_setup.md +++ b/docs/install/ubuntu_setup.md @@ -153,7 +153,9 @@ If building on CPU and using OpenBLAS: ```bash git clone --recursive https://github.com/apache/incubator-mxnet.git cd incubator-mxnet - make -j $(nproc) USE_OPENCV=1 USE_BLAS=openblas + echo "USE_OPENCV = 1" >> ./config.mk + echo "USE_BLAS = openblas" >> ./config.mk + make -j $(nproc) ``` If building on CPU and using MKL and MKL-DNN (make sure MKL is installed according to [Math Library Selection](build_from_source.html#math-library-selection) and [MKL-DNN README](https://github.com/apache/incubator-mxnet/blob/master/MKLDNN_README.md)): @@ -161,7 +163,11 @@ If building on CPU and using MKL and MKL-DNN (make sure MKL is installed accordi ```bash git clone --recursive https://github.com/apache/incubator-mxnet.git cd incubator-mxnet - make -j $(nproc) USE_OPENCV=1 USE_BLAS=mkl USE_MKLDNN=1 + echo "USE_OPENCV = 1" >> ./config.mk + echo "USE_BLAS = openblas" >> ./config.mk + echo "USE_CUDA = 0" >> ./config.mk + echo "USE_MKLDNN = 1" >> ./config.mk + make -j $(nproc) ``` If building on GPU and you want OpenCV and OpenBLAS (make sure you have installed the [CUDA dependencies first](#cuda-dependencies)): @@ -169,7 +175,12 @@ If building on GPU and you want OpenCV and OpenBLAS (make sure you have installe ```bash git clone --recursive https://github.com/apache/incubator-mxnet.git cd incubator-mxnet - make -j $(nproc) USE_OPENCV=1 USE_BLAS=openblas USE_CUDA=1 USE_CUDA_PATH=/usr/local/cuda USE_CUDNN=1 + echo "USE_OPENCV = 1" >> ./config.mk + echo "USE_BLAS = openblas" >> ./config.mk + echo "USE_CUDA = 1" >> ./config.mk + echo "USE_CUDA_PATH = /usr/local/cuda" >> ./config.mk + echo "USE_CUDNN = 1" >> ./config.mk + make -j $(nproc) ``` *Note* - USE_OPENCV and USE_BLAS are make file flags to set compilation options to use OpenCV and BLAS library. You can explore and use more compilation options in `make/config.mk` and also review common [usage examples](build_from_source.html#usage-examples). 
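The Ubuntu instructions above repeat the same pattern: append each build flag to `./config.mk`, then run `make`. Below is a small sketch of that pattern using a hypothetical `set_mxnet_flags` helper (it is not part of the MXNet tree), which can be handy when switching between flag sets; the flag values mirror the CPU/OpenBLAS example above.

```bash
# Hypothetical helper: append "KEY = VALUE" pairs to ./config.mk, then build.
# Run from the incubator-mxnet source root.
set_mxnet_flags() {
    for kv in "$@"; do
        echo "${kv%%=*} = ${kv#*=}" >> ./config.mk
    done
}

set_mxnet_flags USE_OPENCV=1 USE_BLAS=openblas
grep -E "USE_OPENCV|USE_BLAS" ./config.mk   # sanity-check what make will pick up
make -j "$(nproc)"
```

Because the flags are appended, stale assignments accumulate across runs; make honors the last assignment, but restoring `./config.mk` from the `make/config.mk` template is the cleaner reset.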
@@ -339,7 +350,9 @@ $ sudo apt-get install -y libopencv-dev ```bash $ git clone --recursive https://github.com/apache/incubator-mxnet $ cd incubator-mxnet -$ make -j $(nproc) USE_OPENCV=1 USE_BLAS=openblas +$ echo "USE_OPENCV = 1" >> ./config.mk +$ echo "USE_BLAS = openblas" >> ./config.mk +$ make -j $(nproc) ``` *Note* - USE_OPENCV and USE_BLAS are make file flags to set compilation options to use OpenCV and BLAS library. You can explore and use more compilation options in `make/config.mk`. From 9ce7eabcbc9575128240f71f79f9f7cce1a19aa7 Mon Sep 17 00:00:00 2001 From: Anirudh Subramanian Date: Tue, 11 Dec 2018 17:22:02 -0800 Subject: [PATCH 31/38] Fix warning in waitall doc (#13618) --- python/mxnet/ndarray/ndarray.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index 4e6d0cdc929f..9a62620da85c 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -157,11 +157,13 @@ def waitall(): """Wait for all async operations to finish in MXNet. This function is used for benchmarking only. + .. warning:: - If your code has exceptions, `waitall` can cause silent failures. - For this reason you should avoid `waitall` in your code. - Use it only if you are confident that your code is error free. - Then make sure you call `wait_to_read` on all outputs after `waitall`. + + If your code has exceptions, `waitall` can cause silent failures. + For this reason you should avoid `waitall` in your code. + Use it only if you are confident that your code is error free. + Then make sure you call `wait_to_read` on all outputs after `waitall`. """ check_call(_LIB.MXNDArrayWaitAll()) From 002e0bb08d03813a992d8a485e563f4c15f0f991 Mon Sep 17 00:00:00 2001 From: zhaoyao73 Date: Wed, 12 Dec 2018 09:24:49 -0500 Subject: [PATCH 32/38] Optimize C++ API (#13496) * Optimize C++ API Pass parameter with reference instead of value. Add const as well as it is not changed. * fix docs/architecture/overview.md Fix BinaryShapeFunction typedef Add a right brace for SmoothL1Shape_ --- cpp-package/example/utils.h | 2 +- cpp-package/include/mxnet-cpp/operator.h | 4 ++-- cpp-package/include/mxnet-cpp/operator.hpp | 4 ++-- docs/architecture/overview.md | 5 +++-- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/cpp-package/example/utils.h b/cpp-package/example/utils.h index 98b6472685b3..2ed5c4c11f02 100644 --- a/cpp-package/example/utils.h +++ b/cpp-package/example/utils.h @@ -42,7 +42,7 @@ bool check_datafiles(const std::vector &data_files) { return true; } -bool setDataIter(MXDataIter *iter , std::string useType, +bool setDataIter(MXDataIter *iter , const std::string &useType, const std::vector &data_files, int batch_size) { if (!check_datafiles(data_files)) return false; diff --git a/cpp-package/include/mxnet-cpp/operator.h b/cpp-package/include/mxnet-cpp/operator.h index 4d4bedac8fec..9f289f0e248b 100644 --- a/cpp-package/include/mxnet-cpp/operator.h +++ b/cpp-package/include/mxnet-cpp/operator.h @@ -86,7 +86,7 @@ class Operator { * \param symbol the input symbol * \return reference of self */ - Operator &SetInput(const std::string &name, Symbol symbol); + Operator &SetInput(const std::string &name, const Symbol &symbol); /*! 
* \brief add an input symbol * \param symbol the input symbol @@ -133,7 +133,7 @@ class Operator { * \param ndarray the input ndarray * \return reference of self */ - Operator &SetInput(const std::string &name, NDArray ndarray); + Operator &SetInput(const std::string &name, const NDArray &ndarray); /*! * \brief add an input ndarray * \param ndarray the input ndarray diff --git a/cpp-package/include/mxnet-cpp/operator.hpp b/cpp-package/include/mxnet-cpp/operator.hpp index f4ce43d58d2d..edc396f1477c 100644 --- a/cpp-package/include/mxnet-cpp/operator.hpp +++ b/cpp-package/include/mxnet-cpp/operator.hpp @@ -158,7 +158,7 @@ inline void Operator::Invoke(NDArray &output) { Invoke(outputs); } -inline Operator &Operator::SetInput(const std::string &name, Symbol symbol) { +inline Operator &Operator::SetInput(const std::string &name, const Symbol &symbol) { if (symbol.GetHandle()) { input_keys_.push_back(name.c_str()); input_symbols_.push_back(symbol.GetHandle()); @@ -166,7 +166,7 @@ inline Operator &Operator::SetInput(const std::string &name, Symbol symbol) { return *this; } -inline Operator &Operator::SetInput(const std::string &name, NDArray ndarray) { +inline Operator &Operator::SetInput(const std::string &name, const NDArray &ndarray) { input_keys_.push_back(name.c_str()); input_ndarrays_.push_back(ndarray.GetHandle()); return *this; diff --git a/docs/architecture/overview.md b/docs/architecture/overview.md index a7632d4a61e8..6a37f8830479 100644 --- a/docs/architecture/overview.md +++ b/docs/architecture/overview.md @@ -567,8 +567,8 @@ let's check input data shape consistency and provide output shape. ```cpp typedef TShape (*UnaryShapeFunction)(const TShape& src, const EnvArguments& env); - typedef TShape (*BinaryShapeFunction)(const TShape& const TShape& rhs,lhs, - + typedef TShape (*BinaryShapeFunction)(const TShape& lhs, + const TShape& rhs, const EnvArguments& env); ``` You can use `mshadow::TShape` to check input data shape and designate output data shape. @@ -597,6 +597,7 @@ Written explicitly, it is: inline TShape SmoothL1Shape_(const TShape& src, const EnvArguments& env) { return TShape(src); + } ``` ### Define Functions From e36f888990ec3975b3c9319087c41b1167da526e Mon Sep 17 00:00:00 2001 From: ciyong Date: Thu, 13 Dec 2018 05:45:35 +0800 Subject: [PATCH 33/38] fix quantize pass error when the quantization supported Op are excluded in the model (#13596) --- .../quantization/quantize_graph_pass.cc | 5 +- .../python/quantization/test_quantization.py | 87 +++++++++++-------- 2 files changed, 53 insertions(+), 39 deletions(-) diff --git a/src/operator/quantization/quantize_graph_pass.cc b/src/operator/quantization/quantize_graph_pass.cc index b1e3bb67ad79..fcd0fb4218be 100644 --- a/src/operator/quantization/quantize_graph_pass.cc +++ b/src/operator/quantization/quantize_graph_pass.cc @@ -222,7 +222,7 @@ Graph QuantizeGraph(Graph &&src) { // skip non-quantized input continue; } - if (quantized_op_map.count(e.node->op())) { + if (NeedQuantize(e.node, excluded_nodes)) { // here we calculate the output number (exclude min/max, in order to // calculate min/max index from mirror node) based on assumption that // there is only 1min and 1max output from mirror node (which is @@ -314,7 +314,8 @@ Graph QuantizeGraph(Graph &&src) { std::vector outputs; for (const auto& e : src.outputs) { - if (quantized_op_map.count(e.node->op())) { + if (NeedQuantize(e.node, excluded_nodes)) { + // Only insert dequantize for those Ops supports quantize and not excluded. 
NodePtr mirror_node = mirror_map.at(e.node.get()); NodeEntry mirror_entry = NodeEntry{mirror_node, e.index, e.version}; size_t num_inputs = e.node->num_inputs(); diff --git a/tests/python/quantization/test_quantization.py b/tests/python/quantization/test_quantization.py index ca8070cfc224..518b69626246 100644 --- a/tests/python/quantization/test_quantization.py +++ b/tests/python/quantization/test_quantization.py @@ -406,12 +406,16 @@ def get_fp32_sym(): def get_fp32_residual(): data = mx.sym.Variable('data') - conv = mx.sym.Convolution(data=data, num_filter=4, kernel=(1,1), pad=(0,0), - no_bias=True, name='conv') - bn = mx.sym.BatchNorm(data=conv, fix_gamma=False, eps=2e-5, momentum=0.9, name='bn') - act = mx.sym.Activation(data=bn + data, act_type='relu', name='relu') - pool = mx.sym.Pooling(act, kernel=(4, 4), pool_type='avg', name='pool') - fc = mx.sym.FullyConnected(pool, num_hidden=10, flatten=True, name='fc') + conv0 = mx.sym.Convolution(data=data, num_filter=4, kernel=(1,1), pad=(0,0), + no_bias=True, name='conv0') + bn = mx.sym.BatchNorm(data=conv0, fix_gamma=False, eps=2e-5, momentum=0.9, name='bn') + act0 = mx.sym.Activation(data=bn + data, act_type='relu', name='relu0') + pool0 = mx.sym.Pooling(act0, kernel=(4, 4), pool_type='avg', name='pool0') + conv1 = mx.sym.Convolution(data=pool0, num_filter=4, kernel=(1,1), pad=(0,0), + no_bias=False, name='conv1') + act1 = mx.sym.Activation(data=conv1, act_type='relu', name='relu1') + pool1 = mx.sym.Pooling(act1, kernel=(4, 4), pool_type='avg', name='pool1') + fc = mx.sym.FullyConnected(pool1, num_hidden=10, flatten=True, name='fc') sym = mx.sym.SoftmaxOutput(fc, grad_scale=1, ignore_label=-1, multi_output=False, out_grad=False, preserve_shape=False, use_ignore=False, name='softmax') return sym @@ -574,38 +578,47 @@ def check_qsym_forward(qsym, qarg_params, qaux_params, data_shape, label_shape): mod.init_params() arg_params, aux_params = mod.get_params() - excluded_sym_names = [] + excluded_names = [] if mx.current_context() == mx.cpu(): - excluded_sym_names += ['fc'] - excluded_sym_names += ['concat'] - qsym, qarg_params, qaux_params = mx.contrib.quant.quantize_model(sym=s, - arg_params=arg_params, - aux_params=aux_params, - excluded_sym_names=excluded_sym_names, - ctx=mx.current_context(), - quantized_dtype=qdtype, - calib_mode='none') - check_params(arg_params, qarg_params, qsym) - check_params(aux_params, qaux_params) - check_qsym_forward(qsym, qarg_params, qaux_params, dshape, lshape) - - calib_data = mx.nd.random.uniform(shape=dshape) - calib_data = NDArrayIter(data=calib_data, batch_size=batch_size) - calib_data = DummyIter(calib_data) - qsym, qarg_params, qaux_params = mx.contrib.quant.quantize_model(sym=s, - arg_params=arg_params, - aux_params=aux_params, - excluded_sym_names=excluded_sym_names, - ctx=mx.current_context(), - quantized_dtype=qdtype, - calib_mode='naive', - calib_data=calib_data, - num_calib_examples=20) - check_params(arg_params, qarg_params, qsym) - check_params(aux_params, qaux_params) - check_qsym_calibrated(qsym) - check_qsym_qdtype(qsym, qdtype) - check_qsym_forward(qsym, qarg_params, qaux_params, dshape, lshape) + excluded_names += ['fc'] + excluded_names += ['concat'] + + optional_names = ['pool0'] + for skip_optional_names in [False, True]: + exclude_sym_names = [] + if skip_optional_names: + excluded_sym_names = excluded_names + else: + excluded_sym_names = excluded_names + optional_names + + qsym, qarg_params, qaux_params = mx.contrib.quant.quantize_model(sym=s, + arg_params=arg_params, + 
aux_params=aux_params, + excluded_sym_names=excluded_sym_names, + ctx=mx.current_context(), + quantized_dtype=qdtype, + calib_mode='none') + check_params(arg_params, qarg_params, qsym) + check_params(aux_params, qaux_params) + check_qsym_forward(qsym, qarg_params, qaux_params, dshape, lshape) + + calib_data = mx.nd.random.uniform(shape=dshape) + calib_data = NDArrayIter(data=calib_data, batch_size=batch_size) + calib_data = DummyIter(calib_data) + qsym, qarg_params, qaux_params = mx.contrib.quant.quantize_model(sym=s, + arg_params=arg_params, + aux_params=aux_params, + excluded_sym_names=excluded_sym_names, + ctx=mx.current_context(), + quantized_dtype=qdtype, + calib_mode='naive', + calib_data=calib_data, + num_calib_examples=20) + check_params(arg_params, qarg_params, qsym) + check_params(aux_params, qaux_params) + check_qsym_calibrated(qsym) + check_qsym_qdtype(qsym, qdtype) + check_qsym_forward(qsym, qarg_params, qaux_params, dshape, lshape) for qdtype in ['int8', 'uint8']: check_quantize_model(qdtype) From 439f167db225af5ca9e27f53e4b6bfc3abd03acb Mon Sep 17 00:00:00 2001 From: Sheng Zha Date: Wed, 12 Dec 2018 17:14:13 -0800 Subject: [PATCH 34/38] Scripts for building dependency libraries of MXNet (#13282) * openblas script * ps-lite dependencies * USE_S3 dependencies * image libraries * license --- tools/dependencies/README.md | 14 +++ tools/dependencies/cityhash.sh | 32 +++++ tools/dependencies/curl.sh | 64 ++++++++++ tools/dependencies/eigen.sh | 34 +++++ tools/dependencies/libpng.sh | 40 ++++++ tools/dependencies/libtiff.sh | 32 +++++ tools/dependencies/libturbojpeg.sh | 47 +++++++ tools/dependencies/libz.sh | 36 ++++++ tools/dependencies/lz4.sh | 31 +++++ tools/dependencies/openblas.sh | 35 ++++++ tools/dependencies/opencv.sh | 191 +++++++++++++++++++++++++++++ tools/dependencies/openssl.sh | 38 ++++++ tools/dependencies/protobuf.sh | 41 +++++++ tools/dependencies/zmq.sh | 38 ++++++ 14 files changed, 673 insertions(+) create mode 100644 tools/dependencies/README.md create mode 100755 tools/dependencies/cityhash.sh create mode 100755 tools/dependencies/curl.sh create mode 100755 tools/dependencies/eigen.sh create mode 100755 tools/dependencies/libpng.sh create mode 100755 tools/dependencies/libtiff.sh create mode 100755 tools/dependencies/libturbojpeg.sh create mode 100755 tools/dependencies/libz.sh create mode 100755 tools/dependencies/lz4.sh create mode 100755 tools/dependencies/openblas.sh create mode 100755 tools/dependencies/opencv.sh create mode 100755 tools/dependencies/openssl.sh create mode 100755 tools/dependencies/protobuf.sh create mode 100755 tools/dependencies/zmq.sh diff --git a/tools/dependencies/README.md b/tools/dependencies/README.md new file mode 100644 index 000000000000..cfe3d6c75dc9 --- /dev/null +++ b/tools/dependencies/README.md @@ -0,0 +1,14 @@ +# Overview + +This folder contains scripts for building the dependencies from source. The static libraries from +the build artifacts can be used to create self-contained shared object for mxnet through static +linking. + +# Settings + +The scripts use the following environment variables for setting behavior: + +`DEPS_PATH`: the location in which the libraries are downloaded, built, and installed. +`PLATFORM`: name of the OS in lower case. Supported options are 'linux' and 'darwin'. 
+ +It also expects the following build tools in path: make, cmake, tar, unzip, autoconf, nasm diff --git a/tools/dependencies/cityhash.sh b/tools/dependencies/cityhash.sh new file mode 100755 index 000000000000..81cc9cbaad3a --- /dev/null +++ b/tools/dependencies/cityhash.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of cityhash that can be used as dependency of mxnet. +CITYHASH_VERSION=1.1.1 +if [[ ! -f $DEPS_PATH/lib/libcityhash.a ]]; then + # Download and build cityhash + >&2 echo "Building cityhash..." + git clone https://github.com/google/cityhash $DEPS_PATH/cityhash-$CITYHASH_VERSION + cd $DEPS_PATH/cityhash-$CITYHASH_VERSION + git reset --hard 8af9b8c2b889d80c22d6bc26ba0df1afb79a30db + ./configure -prefix=$DEPS_PATH --enable-sse4.2 + make CXXFLAGS="-g -O3 -msse4.2" + make install + cd - +fi diff --git a/tools/dependencies/curl.sh b/tools/dependencies/curl.sh new file mode 100755 index 000000000000..9633edb78538 --- /dev/null +++ b/tools/dependencies/curl.sh @@ -0,0 +1,64 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of libcurl that can be used as dependency of mxnet. +LIBCURL_VERSION=7.61.0 +if [[ ! -f $DEPS_PATH/lib/libcurl.a ]]; then + # download and build libcurl + >&2 echo "Building libcurl..." 
+ curl -s -L https://curl.haxx.se/download/curl-$LIBCURL_VERSION.zip -o $DEPS_PATH/libcurl.zip + unzip -q $DEPS_PATH/libcurl.zip -d $DEPS_PATH + cd $DEPS_PATH/curl-$LIBCURL_VERSION + if [[ $PLATFORM == 'linux' ]]; then + CONFIG_FLAG="" + elif [[ $PLATFORM == 'darwin' ]]; then + CONFIG_FLAG="--with-darwinssl" + fi + ./configure $CONFIG_FLAG \ + --with-zlib \ + --with-nghttps2 \ + --without-zsh-functions-dir \ + --without-librtmp \ + --without-libssh2 \ + --disable-debug \ + --disable-curldebug \ + --enable-symbol-hiding=yes \ + --enable-optimize=yes \ + --enable-shared=no \ + --enable-http=yes \ + --enable-ipv6=yes \ + --disable-ftp \ + --disable-ldap \ + --disable-ldaps \ + --disable-rtsp \ + --disable-proxy \ + --disable-dict \ + --disable-telnet \ + --disable-tftp \ + --disable-pop3 \ + --disable-imap \ + --disable-smb \ + --disable-smtp \ + --disable-gopher \ + --disable-manual \ + --prefix=$DEPS_PATH + make + make install + cd - +fi diff --git a/tools/dependencies/eigen.sh b/tools/dependencies/eigen.sh new file mode 100755 index 000000000000..ac2f75a03a52 --- /dev/null +++ b/tools/dependencies/eigen.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script imports the headers from eigen3 that can be used to in opencv. +EIGEN_VERSION=3.3.4 +if [[ ! -d $DEPS_PATH/include/eigen3 ]]; then + # download eigen + >&2 echo "Loading eigen..." + curl -s -L https://github.com/eigenteam/eigen-git-mirror/archive/$EIGEN_VERSION.zip -o $DEPS_PATH/eigen.zip + unzip -q $DEPS_PATH/eigen.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/eigen-git-mirror-$EIGEN_VERSION/build + cd $DEPS_PATH/eigen-git-mirror-$EIGEN_VERSION/build + cmake \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH .. + make install + cd - +fi diff --git a/tools/dependencies/libpng.sh b/tools/dependencies/libpng.sh new file mode 100755 index 000000000000..d1523c654478 --- /dev/null +++ b/tools/dependencies/libpng.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +# This script builds the static library of libpng that can be used as dependency of mxnet/opencv. +PNG_VERSION=1.6.34 +if [[ ! -f $DEPS_PATH/lib/libpng.a ]]; then + # download and build libpng + >&2 echo "Building libpng..." + curl -s -L https://github.com/glennrp/libpng/archive/v$PNG_VERSION.zip -o $DEPS_PATH/libpng.zip + unzip -q $DEPS_PATH/libpng.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/libpng-$PNG_VERSION/build + cd $DEPS_PATH/libpng-$PNG_VERSION/build + cmake \ + -D PNG_SHARED=OFF \ + -D PNG_STATIC=ON \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH \ + -D CMAKE_C_FLAGS=-fPIC .. + make + make install + mkdir -p $DEPS_PATH/include/libpng + ln -s $DEPS_PATH/include/png.h $DEPS_PATH/include/libpng/png.h + cd - +fi diff --git a/tools/dependencies/libtiff.sh b/tools/dependencies/libtiff.sh new file mode 100755 index 000000000000..14dcb2d7bde0 --- /dev/null +++ b/tools/dependencies/libtiff.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of libtiff that can be used as dependency of mxnet/opencv. +TIFF_VERSION="4-0-9" +if [[ ! -f $DEPS_PATH/lib/libtiff.a ]]; then + # download and build libtiff + >&2 echo "Building libtiff..." + curl -s -L https://gitlab.com/libtiff/libtiff/-/archive/Release-v$TIFF_VERSION/libtiff-Release-v$TIFF_VERSION.zip -o $DEPS_PATH/libtiff.zip + unzip -q $DEPS_PATH/libtiff.zip -d $DEPS_PATH + cd $DEPS_PATH/libtiff-Release-v$TIFF_VERSION + ./configure --quiet --disable-shared --disable-jpeg --disable-zlib --disable-jbig --disable-lzma --prefix=$DEPS_PATH + make + make install + cd - +fi diff --git a/tools/dependencies/libturbojpeg.sh b/tools/dependencies/libturbojpeg.sh new file mode 100755 index 000000000000..4991906f8878 --- /dev/null +++ b/tools/dependencies/libturbojpeg.sh @@ -0,0 +1,47 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of libturbojpeg that can be used as dependency of +# mxnet/opencv. 
+TURBO_JPEG_VERSION=1.5.90 +if [[ $PLATFORM == 'darwin' ]]; then + JPEG_NASM_OPTION="-D CMAKE_ASM_NASM_COMPILER=/usr/local/bin/nasm" +fi + +if [[ ! -f $DEPS_PATH/lib/libjpeg.a ]] || [[ ! -f $DEPS_PATH/lib/libturbojpeg.a ]]; then + # download and build libjpeg + >&2 echo "Building libjpeg-turbo..." + curl -s -L https://github.com/libjpeg-turbo/libjpeg-turbo/archive/$TURBO_JPEG_VERSION.zip -o $DEPS_PATH/libjpeg.zip + unzip -q $DEPS_PATH/libjpeg.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/libjpeg-turbo-$TURBO_JPEG_VERSION/build + cd $DEPS_PATH/libjpeg-turbo-$TURBO_JPEG_VERSION/build + cmake \ + -G"Unix Makefiles" \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH \ + -D CMAKE_C_FLAGS=-fPIC \ + -D WITH_JAVA=FALSE \ + -D WITH_JPEG7=TRUE \ + -D WITH_JPEG8=TRUE \ + $JPEG_NASM_OPTION \ + -D ENABLE_SHARED=FALSE .. + make + make install + cd - +fi diff --git a/tools/dependencies/libz.sh b/tools/dependencies/libz.sh new file mode 100755 index 000000000000..927f1de82e72 --- /dev/null +++ b/tools/dependencies/libz.sh @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of libz that can be used as dependency of mxnet. +ZLIB_VERSION=1.2.6 +if [[ ! -f $DEPS_PATH/lib/libz.a ]]; then + # Download and build zlib + >&2 echo "Building zlib..." + curl -s -L https://github.com/LuaDist/zlib/archive/$ZLIB_VERSION.zip -o $DEPS_PATH/zlib.zip + unzip -q $DEPS_PATH/zlib.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/zlib-$ZLIB_VERSION/build + cd $DEPS_PATH/zlib-$ZLIB_VERSION/build + cmake \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH \ + -D BUILD_SHARED_LIBS=OFF .. + make + make install + cd - +fi diff --git a/tools/dependencies/lz4.sh b/tools/dependencies/lz4.sh new file mode 100755 index 000000000000..a4269bf29bb9 --- /dev/null +++ b/tools/dependencies/lz4.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of lz4 that can be used as dependency of mxnet. 
+LZ4_VERSION=r130 +if [[ ! -f $DEPS_PATH/lib/liblz4.a ]]; then + # Download and build lz4 + >&2 echo "Building lz4..." + curl -s -L https://github.com/lz4/lz4/archive/$LZ4_VERSION.zip -o $DEPS_PATH/lz4.zip + unzip -q $DEPS_PATH/lz4.zip -d $DEPS_PATH + cd $DEPS_PATH/lz4-$LZ4_VERSION + make + make PREFIX=$DEPS_PATH install + cd - +fi diff --git a/tools/dependencies/openblas.sh b/tools/dependencies/openblas.sh new file mode 100755 index 000000000000..9463e3325e0d --- /dev/null +++ b/tools/dependencies/openblas.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of openblas that can be used as dependency of mxnet. +OPENBLAS_VERSION=0.3.3 +if [[ ! -e $DEPS_PATH/lib/libopenblas.a ]]; then + # download and build openblas + >&2 echo "Building openblas..." + + curl -s -L https://github.com/xianyi/OpenBLAS/archive/v$OPENBLAS_VERSION.zip -o $DEPS_PATH/openblas.zip + unzip -q $DEPS_PATH/openblas.zip -d $DEPS_PATH + cd $DEPS_PATH/OpenBLAS-$OPENBLAS_VERSION + + make DYNAMIC_ARCH=1 NO_SHARED=1 USE_OPENMP=1 + make PREFIX=$DEPS_PATH install + cd - + ln -s libopenblas.a $DEPS_PATH/lib/libcblas.a + ln -s libopenblas.a $DEPS_PATH/lib/liblapack.a +fi diff --git a/tools/dependencies/opencv.sh b/tools/dependencies/opencv.sh new file mode 100755 index 000000000000..98ff115f1765 --- /dev/null +++ b/tools/dependencies/opencv.sh @@ -0,0 +1,191 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of opencv that can be used as dependency of mxnet. +# It expects openblas, libjpeg, libpng, libtiff, eigen, etc., to be in $DEPS_PATH. 
+OPENCV_VERSION=3.4.2 +if [[ $PLATFORM == 'linux' ]]; then + OPENCV_LAPACK_OPTIONS=" \ + -D OpenBLAS_HOME=$DEPS_PATH \ + -D OpenBLAS_INCLUDE_DIR=$DEPS_PATH/include \ + -D OpenBLAS_LIB=$DEPS_PATH/lib/libopenblas.a \ + -D LAPACK_INCLUDE_DIR=$DEPS_PATH/include \ + -D LAPACK_LINK_LIBRARIES=$DEPS_PATH/lib/ \ + -D LAPACK_LIBRARIES=$DEPS_PATH/lib/libopenblas.a \ + -D LAPACK_CBLAS_H='cblas.h' \ + -D LAPACK_LAPACKE_H='lapacke.h' \ + -D LAPACK_IMPL='OpenBLAS' \ + -D HAVE_LAPACK=1" +fi + +if [[ ! -f $DEPS_PATH/lib/libopencv_core.a ]] || [[ ! -f $DEPS_PATH/lib/libopencv_imgcodecs.a ]] || [[ ! -f $DEPS_PATH/lib/libopencv_imgproc.a ]]; then + # download and build opencv since we need the static library + >&2 echo "Building opencv..." + curl -s -L https://github.com/opencv/opencv/archive/$OPENCV_VERSION.zip -o $DEPS_PATH/opencv.zip + unzip -q $DEPS_PATH/opencv.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/opencv-$OPENCV_VERSION/build + cd $DEPS_PATH/opencv-$OPENCV_VERSION/build + cmake \ + -D OPENCV_ENABLE_NONFREE=OFF \ + -D WITH_1394=OFF \ + -D WITH_ARAVIS=OFF \ + -D WITH_AVFOUNDATION=OFF \ + -D WITH_CAROTENE=OFF \ + -D WITH_CLP=OFF \ + -D WITH_CSTRIPES=OFF \ + -D WITH_CPUFEATURES=OFF \ + -D WITH_CUBLAS=OFF \ + -D WITH_CUDA=OFF \ + -D WITH_CUFFT=OFF \ + -D WITH_DIRECTX=OFF \ + -D WITH_DSHOW=OFF \ + -D WITH_EIGEN=ON \ + -D WITH_FFMPEG=OFF \ + -D WITH_GDAL=OFF \ + -D WITH_GDCM=OFF \ + -D WITH_GIGEAPI=OFF \ + -D WITH_GPHOTO2=OFF \ + -D WITH_GSTREAMER=OFF \ + -D WITH_GSTREAMER_0_10=OFF \ + -D WITH_GTK=OFF \ + -D WITH_GTK_2_X=OFF \ + -D WITH_HALIDE=OFF \ + -D WITH_IMAGEIO=OFF \ + -D WITH_IMGCODEC_HDR=OFF \ + -D WITH_IMGCODEC_PXM=OFF \ + -D WITH_IMGCODEC_SUNRASTER=OFF \ + -D WITH_INF_ENGINE=OFF \ + -D WITH_INTELPERC=OFF \ + -D WITH_IPP=OFF \ + -D WITH_IPP_A=OFF \ + -D WITH_ITT=OFF \ + -D WITH_JASPER=OFF \ + -D WITH_JPEG=ON \ + -D WITH_LAPACK=ON \ + -D WITH_LIBREALSENSE=OFF \ + -D WITH_LIBV4L=OFF \ + -D WITH_MATLAB=OFF \ + -D WITH_MFX=OFF \ + -D WITH_MSMF=OFF \ + -D WITH_NVCUVID=OFF \ + -D WITH_OPENCL=OFF \ + -D WITH_OPENCLAMDBLAS=OFF \ + -D WITH_OPENCLAMDFFT=OFF \ + -D WITH_OPENCL_SVM=OFF \ + -D WITH_OPENEXR=OFF \ + -D WITH_OPENGL=OFF \ + -D WITH_OPENMP=OFF \ + -D WITH_OPENNI=OFF \ + -D WITH_OPENNI2=OFF \ + -D WITH_OPENVX=OFF \ + -D WITH_PNG=ON \ + -D WITH_PROTOBUF=OFF \ + -D WITH_PTHREADS_PF=ON \ + -D WITH_PVAPI=OFF \ + -D WITH_QT=OFF \ + -D WITH_QTKIT=OFF \ + -D WITH_QUICKTIME=OFF \ + -D WITH_TBB=OFF \ + -D WITH_TIFF=ON \ + -D WITH_UNICAP=OFF \ + -D WITH_V4L=OFF \ + -D WITH_VA=OFF \ + -D WITH_VA_INTEL=OFF \ + -D WITH_VFW=OFF \ + -D WITH_VTK=OFF \ + -D WITH_WEBP=OFF \ + -D WITH_WIN32UI=OFF \ + -D WITH_XIMEA=OFF \ + -D WITH_XINE=OFF \ + -D BUILD_ANDROID_EXAMPLES=OFF \ + -D BUILD_ANDROID_PROJECTS=OFF \ + -D BUILD_ANDROID_SERVICE=OFF \ + -D BUILD_CUDA_STUBS=OFF \ + -D BUILD_DOCS=OFF \ + -D BUILD_EXAMPLES=OFF \ + -D BUILD_FAT_JAVA_LIB=OFF \ + -D BUILD_IPP_IW=OFF \ + -D BUILD_ITT_IW=OFF \ + -D BUILD_JAVA=OFF \ + -D BUILD_JASPER=OFF \ + -D BUILD_JPEG=OFF \ + -D BUILD_OPENEXR=OFF \ + -D BUILD_PACKAGE=OFF \ + -D BUILD_PERF_TESTS=OFF \ + -D BUILD_PNG=OFF \ + -D BUILD_SHARED_LIBS=OFF \ + -D BUILD_TBB=OFF \ + -D BUILD_TESTS=OFF \ + -D BUILD_TIFF=OFF \ + -D BUILD_WEBP=OFF \ + -D BUILD_WITH_DEBUG_INFO=OFF \ + -D BUILD_WITH_DYNAMIC_IPP=OFF \ + -D BUILD_WITH_STATIC_CRT=OFF \ + -D BUILD_ZLIB=OFF \ + -D BUILD_opencv_apps=OFF \ + -D BUILD_opencv_aruco=OFF \ + -D BUILD_opencv_calib3d=OFF \ + -D BUILD_opencv_contrib=OFF \ + -D BUILD_opencv_dnn=OFF \ + -D BUILD_opencv_features2d=OFF \ + -D BUILD_opencv_flann=OFF \ + -D 
BUILD_opencv_gpu=OFF \ + -D BUILD_opencv_gpuarithm=OFF \ + -D BUILD_opencv_gpubgsegm=OFF \ + -D BUILD_opencv_gpucodec=OFF \ + -D BUILD_opencv_gpufeatures2d=OFF \ + -D BUILD_opencv_gpufilters=OFF \ + -D BUILD_opencv_gpuimgproc=OFF \ + -D BUILD_opencv_gpulegacy=OFF \ + -D BUILD_opencv_gpuoptflow=OFF \ + -D BUILD_opencv_gpustereo=OFF \ + -D BUILD_opencv_gpuwarping=OFF \ + -D BUILD_opencv_highgui=OFF \ + -D BUILD_opencv_java=OFF \ + -D BUILD_opencv_js=OFF \ + -D BUILD_opencv_ml=OFF \ + -D BUILD_opencv_ml=OFF \ + -D BUILD_opencv_nonfree=OFF \ + -D BUILD_opencv_objdetect=OFF \ + -D BUILD_opencv_photo=OFF \ + -D BUILD_opencv_python=OFF \ + -D BUILD_opencv_python2=OFF \ + -D BUILD_opencv_python3=OFF \ + -D BUILD_opencv_superres=OFF \ + -D BUILD_opencv_video=OFF \ + -D BUILD_opencv_videoio=OFF \ + -D BUILD_opencv_videostab=OFF \ + -D BUILD_opencv_viz=OFF \ + -D BUILD_opencv_world=OFF \ + $OPENCV_LAPACK_OPTIONS \ + -D OPENCV_LIB_INSTALL_PATH=lib \ + -D OPENCV_INCLUDE_INSTALL_PATH=include \ + -D CMAKE_LIBRARY_PATH=$DEPS_PATH/lib \ + -D CMAKE_INCLUDE_PATH=$DEPS_PATH/include \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH .. + if [[ $PLATFORM == 'linux' ]]; then + cp $DEPS_PATH/../patch/opencv_lapack.h ./ + fi + make + make install + cd - + # @szha: compatibility header + cat $DEPS_PATH/include/opencv2/imgcodecs/imgcodecs_c.h >> $DEPS_PATH/include/opencv2/imgcodecs.hpp +fi diff --git a/tools/dependencies/openssl.sh b/tools/dependencies/openssl.sh new file mode 100755 index 000000000000..b7e4317d4a89 --- /dev/null +++ b/tools/dependencies/openssl.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of openssl that can be used as dependency of mxnet. +OPENSSL_VERSION=1.0.2l +if [[ ! -f $DEPS_PATH/lib/libssl.a ]] || [[ ! -f $DEPS_PATH/lib/libcrypto.a ]]; then + # download and build openssl + >&2 echo "Building openssl..." + OPENSSL_VERSION=$(echo $OPENSSL_VERSION | sed 's/\./_/g') + curl -s -L https://github.com/openssl/openssl/archive/OpenSSL_$OPENSSL_VERSION.zip -o $DEPS_PATH/openssl.zip + unzip -q $DEPS_PATH/openssl.zip -d $DEPS_PATH + cd $DEPS_PATH/openssl-OpenSSL_$OPENSSL_VERSION + if [[ $PLATFORM == 'linux' ]]; then + TARGET=linux-x86_64 + elif [[ $PLATFORM == 'darwin' ]]; then + TARGET=darwin64-x86_64-cc + fi + ./Configure no-shared no-zlib --prefix=$DEPS_PATH --openssldir=$DEPS_PATH/ssl $TARGET + make + make install + cd - +fi diff --git a/tools/dependencies/protobuf.sh b/tools/dependencies/protobuf.sh new file mode 100755 index 000000000000..dfa3d71f3750 --- /dev/null +++ b/tools/dependencies/protobuf.sh @@ -0,0 +1,41 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of protobuf along with protoc, that can be used as dependency of mxnet. +PROTOBUF_VERSION=3.5.1 +if [[ $PLATFORM == 'darwin' ]]; then + DY_EXT="dylib" +else + DY_EXT="so" +fi + +LIBPROTOBUF="$DEPS_PATH/lib/libprotobuf.$DY_EXT" +LIBPROTOC="$DEPS_PATH/lib/libprotoc.$DY_EXT" +if [[ ! -e $LIBPROTOBUF ]] || [[ ! -e $LIBPROTOC ]]; then + # Download and build protobuf + >&2 echo "Building protobuf..." + curl -s -L https://github.com/google/protobuf/archive/v$PROTOBUF_VERSION.zip -o $DEPS_PATH/protobuf.zip + unzip -q $DEPS_PATH/protobuf.zip -d $DEPS_PATH + cd $DEPS_PATH/protobuf-$PROTOBUF_VERSION + ./autogen.sh + ./configure -prefix=$DEPS_PATH + make + make install + cd - +fi diff --git a/tools/dependencies/zmq.sh b/tools/dependencies/zmq.sh new file mode 100755 index 000000000000..55e17798c2d3 --- /dev/null +++ b/tools/dependencies/zmq.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This script builds the static library of zeroMQ that can be used as dependency of mxnet. +ZEROMQ_VERSION=4.2.2 +if [[ ! -f $DEPS_PATH/lib/libzmq.a ]]; then + # Download and build zmq + >&2 echo "Building zmq..." + curl -s -L https://github.com/zeromq/libzmq/archive/v$ZEROMQ_VERSION.zip -o $DEPS_PATH/zeromq.zip + unzip -q $DEPS_PATH/zeromq.zip -d $DEPS_PATH + mkdir -p $DEPS_PATH/libzmq-$ZEROMQ_VERSION/build + cd $DEPS_PATH/libzmq-$ZEROMQ_VERSION/build + cmake \ + -D CMAKE_BUILD_TYPE=RELEASE \ + -D CMAKE_INSTALL_PREFIX=$DEPS_PATH \ + -D WITH_LIBSODIUM=OFF \ + -D BUILD_SHARED_LIBS=OFF .. 
+ make + make install + cp $DEPS_PATH/lib/x86_64-linux-gnu/libzmq.a $DEPS_PATH/lib/libzmq.a + cd - +fi From b45e1273ece8eba1a011107ce12032af58efe661 Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Wed, 12 Dec 2018 18:37:46 -0800 Subject: [PATCH 35/38] add batch norm test (#13625) * add batch norm test * fix formatting * use out_arr as input * fix typo * remove const * use ptr * eval ptr --- tests/cpp/operator/mkldnn_operator_test.cc | 151 ++++++++++++++++++++- 1 file changed, 150 insertions(+), 1 deletion(-) diff --git a/tests/cpp/operator/mkldnn_operator_test.cc b/tests/cpp/operator/mkldnn_operator_test.cc index a500d4c2df6d..3bf3228a4b44 100644 --- a/tests/cpp/operator/mkldnn_operator_test.cc +++ b/tests/cpp/operator/mkldnn_operator_test.cc @@ -347,6 +347,31 @@ OpAttrs GetDeconvBackwardOp(int kernel, int num_filters, int dim, int stride, in return attrs; } +OpAttrs GetBNOp() { + OpAttrs attrs; + attrs.attrs.op = Op::Get("BatchNorm"); + attrs.num_inputs = 5; + attrs.num_outputs = 3; + attrs.accept_dims.insert(4); + attrs.requests.insert(OpReqType::kWriteTo); + attrs.attrs.op->attr_parser(&attrs.attrs); + attrs.input_types = ArrayTypes::Normal | + ArrayTypes::MKLDNN; + attrs.output_types = ArrayTypes::Normal | + ArrayTypes::MKLDNN; + return attrs; +} + +OpAttrs GetBNBackwardOp() { + OpAttrs attrs; + attrs.attrs.op = Op::Get("_backward_BatchNorm"); + attrs.num_inputs = 8; + attrs.num_outputs = 3; + attrs.attrs.op->attr_parser(&attrs.attrs); + attrs.requests.insert(OpReqType::kWriteTo); + return attrs; +} + void AssertEqual(const std::vector &in_arrs, const std::vector &out_arrs, float rtol = 1e-5, float atol = 1e-8) { @@ -710,7 +735,7 @@ void TestOpEx(const OpAttrs &forward_attrs, const OpAttrs &backwards_attrs) { // If the array is a view, we shouldn't write data to it. 
if (in_arr.arr.IsView()) - continue; + continue; NDArrayAttrs orig(in_arr.arr.Copy(in_arr.arr.ctx()), "InPlace Copy"); for (int i = 0; i < forward_attrs.num_inputs; i++) @@ -735,6 +760,124 @@ void TestOpEx(const OpAttrs &forward_attrs, const OpAttrs &backwards_attrs) { } } + +void TestOpExBNBackward(const OpAttrs &forward_attrs, + const OpAttrs &backwards_attrs, + const OpReqType &req, + const std::vector &inputs, + const std::vector &outputs, + const NDArrayAttrs &in_arr, + NDArrayAttrs* out_arr) { + std::vector backwards_input(backwards_attrs.num_inputs); + + std::vector backwards_buffer(backwards_attrs.num_outputs); + std::vector backwards_buffer2(backwards_attrs.num_outputs); + + std::vector backwards_outputs(backwards_attrs.num_outputs); + std::vector backwards_ex_outputs(backwards_attrs.num_outputs); + std::vector backwards_req(backwards_attrs.num_outputs); + + if (req == kWriteTo) { + backwards_input[0] = &(out_arr->arr); // output grad + backwards_input[1] = outputs[1]; // mean + backwards_input[2] = outputs[2]; // var + backwards_input[3] = inputs[0]; // data + backwards_input[4] = inputs[1]; // gamma + backwards_input[5] = inputs[2]; // beta + backwards_input[6] = inputs[3]; // moving mean + backwards_input[7] = inputs[4]; // moving var + + for (size_t i = 0; i < backwards_attrs.num_outputs; i++) { + auto tmp_output = in_arr.arr; + backwards_buffer.emplace_back(tmp_output.Copy(Context())); + backwards_buffer2.emplace_back(tmp_output.Copy(Context())); + backwards_outputs[i] = &backwards_buffer.back(); + backwards_ex_outputs[i] = &backwards_buffer2.back(); + Engine::Get()->WaitForAll(); + backwards_req[i] = kWriteTo; + } + + std::cout << "Backwards: "; + PrintVerifyMsg(*out_arr, in_arr); + Imperative::Get()->InvokeOp( + Context(), backwards_attrs.attrs, backwards_input, backwards_outputs, + backwards_req, DispatchMode::kFCompute, mxnet::OpStatePtr()); + Imperative::Get()->InvokeOp( + Context(), backwards_attrs.attrs, backwards_input, backwards_ex_outputs, + backwards_req, DispatchMode::kFComputeEx, mxnet::OpStatePtr()); + Engine::Get()->WaitForAll(); + AssertEqual(backwards_outputs, backwards_ex_outputs); + } +} + +// compares output of fcompute with fcomputex +void TestOpExBN(const OpAttrs &forward_attrs, const OpAttrs &backwards_attrs) { + std::vector inputs(forward_attrs.num_inputs); + std::vector inputs2(forward_attrs.num_inputs); + std::vector inputs_buffer(forward_attrs.num_inputs); + std::vector inputs2_buffer(forward_attrs.num_inputs); + std::vector outputs(forward_attrs.num_outputs); + std::vector ex_outputs(forward_attrs.num_outputs); + std::vector req(forward_attrs.num_outputs); + + TestArrayShapes tas = GetTestArrayShapes(); + std::vector pds = tas.pds; + + std::vector in_arrs = GetTestInputArrays(forward_attrs.input_types, false); + std::vector> out_arrs(forward_attrs.num_outputs); + std::vector> ex_out_arrs(forward_attrs.num_outputs); + + if (forward_attrs.requests.find(OpReqType::kWriteTo) != forward_attrs.requests.end()) { + for (int i1 = 0; i1 < in_arrs.size(); i1++) { + auto in_arr = in_arrs[i1]; + + CHECK_NE(forward_attrs.accept_dims.size(), 0); + if (forward_attrs.accept_dims.find(in_arr.arr.shape().ndim()) == + forward_attrs.accept_dims.end()) + continue; + for (int i = 0; i < forward_attrs.num_outputs; i++) { + out_arrs[i] = + GetTestOutputArrays(in_arr.arr.shape(), pds, {1}, true, forward_attrs.output_types); + ex_out_arrs[i] = + GetTestOutputArrays(in_arr.arr.shape(), pds, {1}, true, forward_attrs.output_types); + } + for (size_t output_i = 0; output_i < 
out_arrs[0].size(); output_i++) { + inputs_buffer.clear(); + inputs2_buffer.clear(); + + for (int i = 0; i < forward_attrs.num_inputs; i++) { + inputs_buffer.emplace_back(in_arr.arr.Copy(Context())); + inputs2_buffer.emplace_back(in_arr.arr.Copy(Context())); + Engine::Get()->WaitForAll(); + inputs[i] = &inputs_buffer.back(); + inputs2[i] = &inputs2_buffer.back(); + } + for (int i = 0; i < forward_attrs.num_outputs; i++) { + req[i] = kWriteTo; + outputs[i] = &out_arrs[i][output_i].arr; + ex_outputs[i] = &ex_out_arrs[i][output_i].arr; + } + Imperative::Get()->set_is_training(true); + + PrintVerifyMsg(in_arr, out_arrs[0][output_i]); + Imperative::Get()->InvokeOp( + Context(), forward_attrs.attrs, inputs, outputs, req, + DispatchMode::kFCompute, mxnet::OpStatePtr()); + Imperative::Get()->InvokeOp( + Context(), forward_attrs.attrs, inputs2, ex_outputs, req, + DispatchMode::kFComputeEx, mxnet::OpStatePtr()); + Engine::Get()->WaitForAll(); + AssertEqual(outputs, ex_outputs); + + if (!backwards_attrs.requests.empty()) { + TestOpExBNBackward(forward_attrs, backwards_attrs, OpReqType::kWriteTo, + inputs, outputs, in_arr, &out_arrs[0][output_i]); + } + } + } + } +} + // Computes second dimension of FC weight matrix based on input shape uint32_t GetFCWeightDim2(const nnvm::TShape arr) { uint32_t dim = 1; @@ -1204,4 +1347,10 @@ TEST(IMPERATIVE, DeconvOp) { } } +TEST(IMPERATIVE, BNOp) { + OpAttrs forward_attrs = GetBNOp(); + OpAttrs backwards_attrs = GetBNBackwardOp(); + TestOpExBN(forward_attrs, backwards_attrs); +} + #endif From 5d2b763a2c21ddcf0853a0dd768f88e9b24e7a25 Mon Sep 17 00:00:00 2001 From: Yuxi Hu Date: Wed, 12 Dec 2018 20:55:24 -0800 Subject: [PATCH 36/38] Set install path for libmxnet.so dynamic lib on Mac OS (#13629) --- Makefile | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Makefile b/Makefile index 822704e26752..f15968bfe526 100644 --- a/Makefile +++ b/Makefile @@ -486,6 +486,11 @@ build/plugin/%.o: plugin/%.cc @mkdir -p $(@D) $(CXX) -std=c++11 -c $(CFLAGS) -MMD -Isrc/operator -c $< -o $@ +# Set install path for libmxnet.so on Mac OS +ifeq ($(UNAME_S), Darwin) + LDFLAGS += -Wl,-install_name,@rpath/libmxnet.so +endif + # NOTE: to statically link libmxnet.a we need the option # --Wl,--whole-archive -lmxnet --Wl,--no-whole-archive lib/libmxnet.a: $(ALLX_DEP) From afb670353c0355c85dfae5573c8461370b03af2e Mon Sep 17 00:00:00 2001 From: Terrorblade Date: Thu, 13 Dec 2018 13:41:44 +0800 Subject: [PATCH 37/38] Fix the bug of BidirectionalCell (#13575) * Fix the bug of BidirectionalCell I did hybridize( ) and pass "valid_length" to the unroll( ) function of BidirectionalCell, then returned AssertionError in line 79. Because symbol.split( ) return a symbol but not a symbol list. Result in the length of inputs dont equal parameter "length" when call unroll( ) to compute r_outputs and r_states. * add a test for BidirectionalCell * Fix the bug of BidirectionalCell I did hybridize( ) and pass "valid_length" to the unroll( ) function of BidirectionalCell, then returned AssertionError in line 79. Because symbol.split( ) return a symbol but not a symbol list. Result in the length of inputs dont equal parameter "length" when call unroll( ) to compute r_outputs and r_states. * fix test_bidirectional_unroll_valid_length( ) Fix the error of parameter. * Fix the bug of BidirectionalCell I did hybridize( ) and pass "valid_length" to the unroll( ) function of BidirectionalCell, then returned AssertionError in line 79. Because symbol.split( ) return a symbol but not a symbol list. 
Result in the length of inputs dont equal parameter "length" when call unroll( ) to compute r_outputs and r_states. * fix test_bidirectional_unroll_valid_length( ) --- CONTRIBUTORS.md | 1 + python/mxnet/gluon/rnn/rnn_cell.py | 37 +++++++++++++------------ tests/python/unittest/test_gluon_rnn.py | 28 +++++++++++++++++++ 3 files changed, 49 insertions(+), 17 deletions(-) diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md index d1dd9b90708a..b9f84d592a70 100644 --- a/CONTRIBUTORS.md +++ b/CONTRIBUTORS.md @@ -192,6 +192,7 @@ List of Contributors * [Rahul Padmanabhan](https://github.com/rahul3) * [Yuxi Hu](https://github.com/yuxihu) * [Harsh Patel](https://github.com/harshp8l) +* [Xiao Wang](https://github.com/BeyonderXX) Label Bot --------- diff --git a/python/mxnet/gluon/rnn/rnn_cell.py b/python/mxnet/gluon/rnn/rnn_cell.py index 98e96fc6da17..6ef3604eb973 100644 --- a/python/mxnet/gluon/rnn/rnn_cell.py +++ b/python/mxnet/gluon/rnn/rnn_cell.py @@ -102,6 +102,23 @@ def _mask_sequence_variable_length(F, data, length, valid_length, time_axis, mer squeeze_axis=True)) return outputs +def _reverse_sequences(sequences, unroll_step, valid_length=None): + if isinstance(sequences[0], symbol.Symbol): + F = symbol + else: + F = ndarray + + if valid_length is None: + reversed_sequences = list(reversed(sequences)) + else: + reversed_sequences = F.SequenceReverse(F.stack(*sequences, axis=0), + sequence_length=valid_length, + use_sequence_length=True) + reversed_sequences = F.split(reversed_sequences, axis=0, num_outputs=unroll_step, squeeze_axis=True) + + return reversed_sequences + + class RecurrentCell(Block): """Abstract base class for RNN cells @@ -1035,14 +1052,7 @@ def unroll(self, length, inputs, begin_state=None, layout='NTC', merge_outputs=N self.reset() inputs, axis, F, batch_size = _format_sequence(length, inputs, layout, False) - if valid_length is None: - reversed_inputs = list(reversed(inputs)) - else: - reversed_inputs = F.SequenceReverse(F.stack(*inputs, axis=0), - sequence_length=valid_length, - use_sequence_length=True) - reversed_inputs = _as_list(F.split(reversed_inputs, axis=0, num_outputs=length, - squeeze_axis=True)) + reversed_inputs = list(_reverse_sequences(inputs, length, valid_length)) begin_state = _get_begin_state(self, F, begin_state, inputs, batch_size) states = begin_state @@ -1056,15 +1066,8 @@ def unroll(self, length, inputs, begin_state=None, layout='NTC', merge_outputs=N begin_state=states[len(l_cell.state_info(batch_size)):], layout=layout, merge_outputs=False, valid_length=valid_length) - if valid_length is None: - reversed_r_outputs = list(reversed(r_outputs)) - else: - reversed_r_outputs = F.SequenceReverse(F.stack(*r_outputs, axis=0), - sequence_length=valid_length, - use_sequence_length=True, - axis=0) - reversed_r_outputs = _as_list(F.split(reversed_r_outputs, axis=0, num_outputs=length, - squeeze_axis=True)) + reversed_r_outputs = _reverse_sequences(r_outputs, length, valid_length) + if merge_outputs is None: merge_outputs = isinstance(l_outputs, tensor_types) l_outputs, _, _, _ = _format_sequence(None, l_outputs, layout, merge_outputs) diff --git a/tests/python/unittest/test_gluon_rnn.py b/tests/python/unittest/test_gluon_rnn.py index eee3adda2c65..edc43d21b36b 100644 --- a/tests/python/unittest/test_gluon_rnn.py +++ b/tests/python/unittest/test_gluon_rnn.py @@ -600,6 +600,34 @@ def test_layer_fill_shape(): assert layer.l0_i2h_weight.shape[1] == 7, layer.l0_i2h_weight.shape[1] +def test_bidirectional_unroll_valid_length(): + # Test BidirectionalCell. 
+ # In 1.3.1 version, after hybridize( ), BidirectionalCell would failed when pass valid_length to unroll( ). + class BiLSTM(gluon.nn.HybridBlock): + def __init__(self, rnn_size, time_step, **kwargs): + super(BiLSTM, self).__init__(**kwargs) + self.time_step = time_step + with self.name_scope(): + self.bi_lstm = gluon.rnn.BidirectionalCell( + gluon.rnn.LSTMCell(rnn_size, prefix='rnn_l0_'), + gluon.rnn.LSTMCell(rnn_size, prefix='rnn_r0_'), + output_prefix='lstm_bi_') + + def hybrid_forward(self, F, inputs, valid_len): + outputs, states = self.bi_lstm.unroll(self.time_step, inputs, valid_length=valid_len, + layout='NTC', merge_outputs=True) + return outputs, states + + rnn_size, time_step = 100, 3 + net = BiLSTM(rnn_size, time_step) + net.initialize() + net.hybridize() + inputs_data = mx.nd.random.uniform(shape=(10, 3, 50)) + valid_len = mx.nd.array([1]*10) + outputs, _ = net(inputs_data, valid_len) + assert outputs.shape == (10, 3, 200) + + if __name__ == '__main__': import nose nose.runmodule() From 5bcf2bd6e8b48fa27bfcfdafd06401ec2d28978b Mon Sep 17 00:00:00 2001 From: Alexander Zai Date: Wed, 12 Dec 2018 23:54:18 -0800 Subject: [PATCH 38/38] Feature/mkldnn static (#13628) * Revert "Revert "Feature/mkldnn static 2 (#13503)" (#13540)" This reverts commit a3eca5f5c96eed0bc29bd4e58e470997091a1fb3. * include headers on mkldnn lib * retrigger * retrigger --- CMakeLists.txt | 1 + Makefile | 7 +++- ci/docker/runtime_functions.sh | 3 -- ci/jenkins/Jenkins_steps.groovy | 8 ++-- mkldnn.mk | 12 ++++-- tests/cpp/unittest.mk | 8 ++-- tests/python/mkl/test_mkldnn.py | 6 +-- tests/python/mkl/test_mkldnn_install.py | 56 ------------------------- 8 files changed, 25 insertions(+), 76 deletions(-) delete mode 100644 tests/python/mkl/test_mkldnn_install.py diff --git a/CMakeLists.txt b/CMakeLists.txt index 3b8bbd2e0272..161705643194 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -227,6 +227,7 @@ if(USE_MKLDNN) include(cmake/DownloadMKLML.cmake) # CPU architecture (e.g., C5) can't run on another architecture (e.g., g3). if(NOT MSVC) + set(MKLDNN_LIBRARY_TYPE "STATIC" CACHE INTERNAL "" FORCE) set(ARCH_OPT_FLAGS "-mtune=generic") else() set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /EHsc") diff --git a/Makefile b/Makefile index f15968bfe526..31722e86c085 100644 --- a/Makefile +++ b/Makefile @@ -132,7 +132,12 @@ ifeq ($(USE_MKLDNN), 1) LDFLAGS += -L$(MKLROOT)/lib endif CFLAGS += -I$(MKLDNNROOT)/include - LDFLAGS += -L$(MKLDNNROOT)/lib -lmkldnn -Wl,-rpath,'$${ORIGIN}' + # MKLDNN but to needs to be dynamically linked for windows as not all VS compilers support static linking + ifneq ($(UNAME_S), Windows) + LIB_DEP += $(MKLDNNROOT)/lib/libmkldnn.a + else + LDFLAGS += -L$(MKLDNNROOT)/lib -lmkldnn -Wl,-rpath,'$${ORIGIN}' + endif endif # setup opencv diff --git a/ci/docker/runtime_functions.sh b/ci/docker/runtime_functions.sh index 6dd5bb6f239d..82e6feb2a728 100755 --- a/ci/docker/runtime_functions.sh +++ b/ci/docker/runtime_functions.sh @@ -657,9 +657,6 @@ build_ubuntu_gpu_cmake_mkldnn() { /work/mxnet ninja -v - # libmkldnn.so.0 is a link file. We need an actual binary file named libmkldnn.so.0. 
-    cp 3rdparty/mkldnn/src/libmkldnn.so.0 3rdparty/mkldnn/src/libmkldnn.so.0.tmp
-    mv 3rdparty/mkldnn/src/libmkldnn.so.0.tmp 3rdparty/mkldnn/src/libmkldnn.so.0
 }

 build_ubuntu_gpu_cmake() {
diff --git a/ci/jenkins/Jenkins_steps.groovy b/ci/jenkins/Jenkins_steps.groovy
index 74bde1eee211..d5cbd97683ed 100644
--- a/ci/jenkins/Jenkins_steps.groovy
+++ b/ci/jenkins/Jenkins_steps.groovy
@@ -23,19 +23,19 @@ utils = load('ci/Jenkinsfile_utils.groovy')

 // mxnet libraries
-mx_lib = 'lib/libmxnet.so, lib/libmxnet.a, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a'
+mx_lib = 'lib/libmxnet.so, lib/libmxnet.a, lib/libiomp5.so, lib/libmklml_intel.so, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a'

 // Python wheels
 mx_pip = 'build/*.whl'

 // for scala build, need to pass extra libs when run with dist_kvstore
-mx_dist_lib = 'lib/libmxnet.so, lib/libmxnet.a, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a, 3rdparty/ps-lite/build/libps.a, deps/lib/libprotobuf-lite.a, deps/lib/libzmq.a'
+mx_dist_lib = 'lib/libmxnet.so, lib/libmxnet.a, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a, 3rdparty/ps-lite/build/libps.a, deps/lib/libprotobuf-lite.a, deps/lib/libzmq.a, lib/libmkldnn.a'
 // mxnet cmake libraries, in cmake builds we do not produce a libnvvm static library by default.
 mx_cmake_lib = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so'
 // mxnet cmake libraries, in cmake builds we do not produce a libnvvm static library by default.
 mx_cmake_lib_debug = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests'
-mx_cmake_mkldnn_lib = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so, build/3rdparty/mkldnn/src/libmkldnn.so.0'
-mx_mkldnn_lib = 'lib/libmxnet.so, lib/libmxnet.a, lib/libiomp5.so, lib/libmkldnn.so.0, lib/libmklml_intel.so, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a'
+mx_cmake_mkldnn_lib = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so'
+mx_mkldnn_lib = 'lib/libmxnet.so, lib/libmxnet.a, lib/libiomp5.so, lib/libmklml_intel.so, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a'
 mx_tensorrt_lib = 'lib/libmxnet.so, lib/libnvonnxparser_runtime.so.0, lib/libnvonnxparser.so.0, lib/libonnx_proto.so, lib/libonnx.so'
 mx_lib_cpp_examples = 'lib/libmxnet.so, lib/libmxnet.a, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a, 3rdparty/ps-lite/build/libps.a, deps/lib/libprotobuf-lite.a, deps/lib/libzmq.a, build/cpp-package/example/*'
 mx_lib_cpp_examples_cpu = 'build/libmxnet.so, build/cpp-package/example/*'

diff --git a/mkldnn.mk b/mkldnn.mk
index d79bbe7d2a0e..5af3e9b1d741 100644
--- a/mkldnn.mk
+++ b/mkldnn.mk
@@ -19,14 +19,20 @@ ifeq ($(USE_MKLDNN), 1)
 	MKLDNN_SUBMODDIR = $(ROOTDIR)/3rdparty/mkldnn
 	MKLDNN_BUILDDIR = $(MKLDNN_SUBMODDIR)/build
 	MXNET_LIBDIR = $(ROOTDIR)/lib
+	MKLDNN_LIBRARY_TYPE=STATIC
 ifeq ($(UNAME_S), Darwin)
 	OMP_LIBFILE = $(MKLDNNROOT)/lib/libiomp5.dylib
 	MKLML_LIBFILE = $(MKLDNNROOT)/lib/libmklml.dylib
-	MKLDNN_LIBFILE = $(MKLDNNROOT)/lib/libmkldnn.0.dylib
+	MKLDNN_LIBFILE = $(MKLDNNROOT)/lib/libmkldnn.a
+else ifeq ($(UNAME_S), Windows)
+	OMP_LIBFILE = $(MKLDNNROOT)/lib/libiomp5.so
+	MKLML_LIBFILE = $(MKLDNNROOT)/lib/libmklml_intel.so
+	MKLDNN_LIBFILE = $(MKLDNNROOT)/lib/libmkldnn.so
+	MKLDNN_LIBRARY_TYPE=SHARED
 else
 	OMP_LIBFILE = $(MKLDNNROOT)/lib/libiomp5.so
 	MKLML_LIBFILE = $(MKLDNNROOT)/lib/libmklml_intel.so
-	MKLDNN_LIBFILE = $(MKLDNNROOT)/lib/libmkldnn.so.0
+	MKLDNN_LIBFILE = $(MKLDNNROOT)/lib/libmkldnn.a
 endif
 endif

@@ -37,7 +43,7 @@ mkldnn_build: $(MKLDNN_LIBFILE)
 $(MKLDNN_LIBFILE):
 	mkdir -p $(MKLDNNROOT)
 	cd $(MKLDNN_SUBMODDIR) && rm -rf external && cd scripts && ./prepare_mkl.sh && cd .. && cp -a external/*/* $(MKLDNNROOT)/.
-	cmake $(MKLDNN_SUBMODDIR) -DCMAKE_INSTALL_PREFIX=$(MKLDNNROOT) -B$(MKLDNN_BUILDDIR) -DARCH_OPT_FLAGS="-mtune=generic" -DWITH_TEST=OFF -DWITH_EXAMPLE=OFF
+	cmake $(MKLDNN_SUBMODDIR) -DCMAKE_INSTALL_PREFIX=$(MKLDNNROOT) -B$(MKLDNN_BUILDDIR) -DARCH_OPT_FLAGS="-mtune=generic" -DWITH_TEST=OFF -DWITH_EXAMPLE=OFF -DMKLDNN_LIBRARY_TYPE=$(MKLDNN_LIBRARY_TYPE)
 	$(MAKE) -C $(MKLDNN_BUILDDIR) VERBOSE=1
 	$(MAKE) -C $(MKLDNN_BUILDDIR) install
 	mkdir -p $(MXNET_LIBDIR)
diff --git a/tests/cpp/unittest.mk b/tests/cpp/unittest.mk
index 746ee2f096f1..665ce6982874 100644
--- a/tests/cpp/unittest.mk
+++ b/tests/cpp/unittest.mk
@@ -41,22 +41,22 @@ gtest-all.o : $(GTEST_SRCS_)
 gtest.a : gtest-all.o
 	$(AR) $(ARFLAGS) $@ $^

-build/tests/cpp/%.o : tests/cpp/%.cc | mkldnn
+build/tests/cpp/%.o : tests/cpp/%.cc
 	@mkdir -p $(@D)
 	$(CXX) -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -MM -MT tests/cpp/$* $< > build/tests/cpp/$*.d
 	$(CXX) -c -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -o build/tests/cpp/$*.o $(filter %.cc %.a, $^)

-build/tests/cpp/operator/%.o : tests/cpp/operator/%.cc | mkldnn
+build/tests/cpp/operator/%.o : tests/cpp/operator/%.cc
 	@mkdir -p $(@D)
 	$(CXX) -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -MM -MT tests/cpp/operator/$* $< > build/tests/cpp/operator/$*.d
 	$(CXX) -c -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -o build/tests/cpp/operator/$*.o $(filter %.cc %.a, $^)

-build/tests/cpp/storage/%.o : tests/cpp/storage/%.cc | mkldnn
+build/tests/cpp/storage/%.o : tests/cpp/storage/%.cc
 	@mkdir -p $(@D)
 	$(CXX) -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -MM -MT tests/cpp/storage/$* $< > build/tests/cpp/storage/$*.d
 	$(CXX) -c -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -o build/tests/cpp/storage/$*.o $(filter %.cc %.a, $^)

-build/tests/cpp/engine/%.o : tests/cpp/engine/%.cc | mkldnn
+build/tests/cpp/engine/%.o : tests/cpp/engine/%.cc
 	@mkdir -p $(@D)
 	$(CXX) -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -MM -MT tests/cpp/engine/$* $< > build/tests/cpp/engine/$*.d
 	$(CXX) -c -std=c++11 $(TEST_CFLAGS) -I$(GTEST_INC) -o build/tests/cpp/engine/$*.o $(filter %.cc %.a, $^)
diff --git a/tests/python/mkl/test_mkldnn.py b/tests/python/mkl/test_mkldnn.py
index c6c0a0832f1f..d9d3abfc3ced 100644
--- a/tests/python/mkl/test_mkldnn.py
+++ b/tests/python/mkl/test_mkldnn.py
@@ -27,7 +27,6 @@
 from mxnet import gluon
 from mxnet.gluon import nn
 from mxnet.test_utils import *
-import test_mkldnn_install as install
 curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
 sys.path.append(os.path.join(curr_path, '../unittest/'))
 from common import with_seed
@@ -441,7 +440,4 @@ def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
     custom = mx.symbol.Custom(name='custom', data=conv, op_type='custom')
     exec1 = custom.bind(mx.cpu(), args={'data': mx.nd.ones([10,3,96,96]), 'conv_weight': mx.nd.ones([8,3,5,5])})
     exec1.forward()[0].wait_to_read()
-
-
-if __name__ == '__main__':
-    install.test_mkldnn_install()
+
diff --git a/tests/python/mkl/test_mkldnn_install.py b/tests/python/mkl/test_mkldnn_install.py
deleted file mode 100644
index c2f26df72f2e..000000000000
--- a/tests/python/mkl/test_mkldnn_install.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-"""
-MKL-DNN related test cases
-"""
-
-import sys
-import os
-import logging
-
-
-def test_mkldnn_install():
-    """
-    This test will verify that MXNet is built/installed correctly when
-    compiled with Intel MKL-DNN library. The method will try to import
-    the mxnet module and see if the mkldnn library is mapped to this
-    process's address space.
-    """
-    logging.basicConfig(level=logging.INFO)
-
-    if not sys.platform.startswith('linux'):
-        logging.info("Bypass mkldnn install test for non-Linux OS")
-        return
-
-    try:
-        #pylint: disable=unused-variable
-        import mxnet as mx
-    except (ImportError, OSError) as e:
-        assert 0, "Import mxnet error: %s. Please double check your build/" \
-            "install steps or environment variable settings" % str(e)
-
-    pid = os.getpid()
-    rc = os.system("cat /proc/" + str(pid) +
-                   "/maps | grep libmkldnn > /dev/null")
-
-    if rc == 0:
-        logging.info("MXNet is built/installed correctly with MKL-DNN")
-    else:
-        assert 0, "MXNet is built/installed incorrectly with MKL-DNN, please " \
-            "double check your build/install steps or environment " \
-            "variable settings"
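
For reference, a minimal sketch of exercising the BidirectionalCell valid_length fix earlier in this series, assuming an MXNet build that already includes that change; names such as bi_cell are illustrative and not part of the patches above. It mirrors test_bidirectional_unroll_valid_length, but calls unroll() imperatively so it can be tried from a Python shell.

# Minimal sketch: unroll a BidirectionalCell with valid_length.
# Assumes an MXNet build containing the _reverse_sequences fix above;
# variable names here are illustrative only.
import mxnet as mx
from mxnet import gluon

rnn_size, time_step = 100, 3
bi_cell = gluon.rnn.BidirectionalCell(
    gluon.rnn.LSTMCell(rnn_size, prefix='rnn_l0_'),
    gluon.rnn.LSTMCell(rnn_size, prefix='rnn_r0_'),
    output_prefix='lstm_bi_')
bi_cell.initialize()

# Batch of 10 padded sequences, 3 time steps, 50 features; only the first
# step of each sequence is valid.
inputs = mx.nd.random.uniform(shape=(10, time_step, 50))
valid_length = mx.nd.array([1] * 10)

# With the shared _reverse_sequences helper, both the reversed inputs and the
# reverse cell's outputs are split into exactly `length` steps, so unroll()
# no longer produces an input list whose length disagrees with `length`
# when valid_length is supplied.
outputs, states = bi_cell.unroll(time_step, inputs, valid_length=valid_length,
                                 layout='NTC', merge_outputs=True)
assert outputs.shape == (10, time_step, 200)  # forward + backward hidden states concatenated

The hybridized path is what the new test above covers; the imperative call shown here goes through the same _reverse_sequences helper.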