nswamy closed pull request #13189: Fix scaladoc build errors
URL: https://github.com/apache/incubator-mxnet/pull/13189
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/docs/mxdoc.py b/docs/mxdoc.py
index 8570caeaeda..8b26c89d2eb 100644
--- a/docs/mxdoc.py
+++ b/docs/mxdoc.py
@@ -110,8 +110,13 @@ def build_scala(app):
 def build_scala_docs(app):
     """build scala doc and then move the outdir"""
     scala_path = app.builder.srcdir + '/../scala-package'
-    # scaldoc fails on some apis, so exit 0 to pass the check
-    _run_cmd('cd ' + scala_path + '; scaladoc `find . -type f -name "*.scala" 
| egrep \"\/core|\/infer\" | egrep -v \"Suite|javaapi\"`; exit 0')
+    scala_doc_sources = 'find . -type f -name "*.scala" | egrep 
\"\.\/core|\.\/infer\" | egrep -v \"Suite\"'
+    scala_doc_classpath = ':'.join([
+        '`find native -name "*.jar" | grep "target/lib/" | tr "\\n" ":" `',
+        '`find macros -name "*-SNAPSHOT.jar" | tr "\\n" ":" `'
+    ])
+    _run_cmd('cd {}; scaladoc `{}` -classpath {} -feature -deprecation'
+             .format(scala_path, scala_doc_sources, scala_doc_classpath))
     dest_path = app.builder.outdir + '/api/scala/docs'
     _run_cmd('rm -rf ' + dest_path)
     _run_cmd('mkdir -p ' + dest_path)
diff --git a/scala-package/core/pom.xml b/scala-package/core/pom.xml
index e93169f08fa..56ff4db1408 100644
--- a/scala-package/core/pom.xml
+++ b/scala-package/core/pom.xml
@@ -93,10 +93,6 @@
         <groupId>org.scalastyle</groupId>
         <artifactId>scalastyle-maven-plugin</artifactId>
       </plugin>
-      <plugin>
-        <groupId>org.scalastyle</groupId>
-        <artifactId>scalastyle-maven-plugin</artifactId>
-      </plugin>
     </plugins>
   </build>
   <dependencies>
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Context.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/Context.scala
index beeb430f62f..ab44f434b16 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Context.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Context.scala
@@ -17,6 +17,8 @@
 
 package org.apache.mxnet
 
+import scala.language.implicitConversions
+
 object Context {
   val devtype2str = Map(1 -> "cpu", 2 -> "gpu", 3 -> "cpu_pinned")
   val devstr2type = Map("cpu" -> 1, "gpu" -> 2, "cpu_pinned" -> 3)
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala
index 19fb6fe5cee..b342a96097f 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Executor.scala
@@ -224,7 +224,6 @@ class Executor private[mxnet](private[mxnet] val handle: 
ExecutorHandle,
   /**
    * Get dictionary representation of argument arrrays.
    * @return The dictionary that maps name of arguments to NDArrays.
-   * @throws IllegalArgumentException if there are duplicated names in the 
arguments.
    */
   def argDict: Map[String, NDArray] = {
     if (_argDict == null) {
@@ -236,7 +235,6 @@ class Executor private[mxnet](private[mxnet] val handle: 
ExecutorHandle,
   /**
    * Get dictionary representation of gradient arrays.
    * @return The dictionary that maps name of arguments to gradient arrays.
-   * @throws IllegalArgumentException if there are duplicated names in the 
grads.
    */
   def gradDict: Map[String, NDArray] = {
     if (_gradDict == null) {
@@ -248,7 +246,6 @@ class Executor private[mxnet](private[mxnet] val handle: 
ExecutorHandle,
   /**
    * Get dictionary representation of auxiliary states arrays.
    * @return The dictionary that maps name of auxiliary states to NDArrays.
-   * @throws IllegalArgumentException if there are duplicated names in the 
auxiliary states.
    */
   def auxDict: Map[String, NDArray] = {
     if (_auxDict == null) {
@@ -265,8 +262,6 @@ class Executor private[mxnet](private[mxnet] val handle: 
ExecutorHandle,
    *        Whether allow extra parameters that are not needed by symbol
    *        If this is True, no error will be thrown when arg_params or 
aux_params
    *        contain extra parameters that is not needed by the executor.
-   * @throws IllegalArgumentException
-   *         If there is additional parameters in the dict but 
allow_extra_params=False
    */
   def copyParamsFrom(argParams: Map[String, NDArray],
                      auxParams: Map[String, NDArray],
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/IO.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/IO.scala
index e8351422c48..b580ad10a04 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/IO.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/IO.scala
@@ -25,6 +25,7 @@ import org.slf4j.LoggerFactory
 import scala.annotation.varargs
 import scala.collection.immutable.ListMap
 import scala.collection.mutable.ListBuffer
+import scala.language.implicitConversions
 /**
  * IO iterators for loading training & validation data
  */
@@ -340,11 +341,11 @@ abstract class DataIter extends Iterator[DataBatch] {
   def getIndex(): IndexedSeq[Long]
 
   // The name and shape of data provided by this iterator
-  @deprecated
+  @deprecated("Use provideDataDesc instead", "1.3.0")
   def provideData: ListMap[String, Shape]
 
   // The name and shape of label provided by this iterator
-  @deprecated
+  @deprecated("Use provideLabelDesc instead", "1.3.0")
   def provideLabel: ListMap[String, Shape]
 
   // Provide type:DataDesc of the data
@@ -404,7 +405,7 @@ object DataDesc {
     }
   }
 
-  @deprecated
+  @deprecated("Please use DataDesc methods instead", "1.3.0")
   implicit def ListMap2Descs(shapes: ListMap[String, Shape]): 
IndexedSeq[DataDesc] = {
     if (shapes != null) {
       shapes.map { case (k, s) => new DataDesc(k, s) }.toIndexedSeq
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala
index 45189a13aef..b2d4349b4f6 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/KVStore.scala
@@ -286,7 +286,7 @@ class KVStore(private[mxnet] val handle: KVStoreHandle) 
extends NativeResource {
       case cachedStates: MXKVStoreCachedStates =>
         val bis = new BufferedInputStream (new FileInputStream (fname) )
         try {
-        val bArray = Stream.continually (bis.read).takeWhile (- 1 !=).map 
(_.toByte).toArray
+        val bArray = Stream.continually (bis.read).takeWhile (_ != -1).map 
(_.toByte).toArray
           cachedStates.deserializeState(bArray)
         } finally {
           bis.close ()
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala
index f9f2dbe42a9..3a0c3c11f16 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala
@@ -25,6 +25,7 @@ import org.slf4j.LoggerFactory
 
 import scala.collection.mutable
 import scala.collection.mutable.{ArrayBuffer, ListBuffer}
+import scala.language.implicitConversions
 import scala.ref.WeakReference
 
 /**
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala
index c3f8aaec6d6..1fb634cebb2 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Optimizer.scala
@@ -144,7 +144,7 @@ abstract class Optimizer extends Serializable {
   def deserializeState(bytes: Array[Byte]): AnyRef
 
   // Set individual learning rate scale for parameters
-  @deprecated("Use setLrMult instead.")
+  @deprecated("Use setLrMult instead.", "0.10.0")
   def setLrScale(lrScale: Map[Int, Float]): Unit = {
     val argsLrScale: Map[Either[Int, String], Float] = lrScale.map { case (k, 
v) => Left(k) -> v }
     setLrMult(argsLrScale)
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala
index 30fe1473a2c..bb363c0c396 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/ResourceScope.scala
@@ -27,7 +27,7 @@ import scala.util.Try
 import scala.util.control.{ControlThrowable, NonFatal}
 
 /**
-  * This class manages automatically releasing of [[NativeResource]]s
+  * This class manages automatically releasing of 
`org.apache.mxnet.NativeResource`s
   */
 class ResourceScope extends AutoCloseable {
 
@@ -43,8 +43,8 @@ class ResourceScope extends AutoCloseable {
   ResourceScope.addToThreadLocal(this)
 
   /**
-    * Releases all the [[NativeResource]] by calling
-    * the associated [[NativeResource.close()]] method
+    * Releases all the `org.apache.mxnet.NativeResource` by calling
+    * the associated`'org.apache.mxnet.NativeResource.close()` method
     */
   override def close(): Unit = {
     ResourceScope.removeFromThreadLocal(this)
diff --git a/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala
index 4472a8426f9..01349a689b6 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Symbol.scala
@@ -22,6 +22,7 @@ import org.apache.mxnet.DType.DType
 import org.slf4j.{Logger, LoggerFactory}
 
 import scala.collection.mutable.{ArrayBuffer, ListBuffer}
+import scala.language.implicitConversions
 
 /**
  * Symbolic configuration API of mxnet. <br />
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/Visualization.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/Visualization.scala
index 2a7b7a8b31b..b990137b5a4 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/Visualization.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/Visualization.scala
@@ -21,6 +21,7 @@ import scala.util.parsing.json._
 import java.io.File
 import java.io.PrintWriter
 import scala.collection.mutable.ArrayBuffer
+import scala.language.postfixOps
 
 object Visualization {
 
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala
index 998017750db..a84bd106b76 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/io/MXDataIter.scala
@@ -158,11 +158,11 @@ private[mxnet] class MXDataIter(private[mxnet] val 
handle: DataIterHandle,
   }
 
   // The name and shape of data provided by this iterator
-  @deprecated
+  @deprecated("Please use provideDataDesc instead", "1.3.0")
   override def provideData: ListMap[String, Shape] = _provideData
 
   // The name and shape of label provided by this iterator
-  @deprecated
+  @deprecated("Please use provideLabelDesc instead", "1.3.0")
   override def provideLabel: ListMap[String, Shape] = _provideLabel
 
   // Provide type:DataDesc of the data
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/io/NDArrayIter.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/io/NDArrayIter.scala
index e6be0ad02f8..0032a54dd80 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/io/NDArrayIter.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/io/NDArrayIter.scala
@@ -237,11 +237,11 @@ class NDArrayIter(data: IndexedSeq[(DataDesc, NDArray)],
 
 
   // The name and shape of data provided by this iterator
-  @deprecated
+  @deprecated("Please use provideDataDesc instead", "1.3.0")
   override def provideData: ListMap[String, Shape] = _provideData
 
   // The name and shape of label provided by this iterator
-  @deprecated
+  @deprecated("Please use provideLabelDesc instead", "1.3.0")
   override def provideLabel: ListMap[String, Shape] = _provideLabel
 
   // Provide type:DataDesc of the data
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/io/PrefetchingIter.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/io/PrefetchingIter.scala
index e59e3706317..d277351b124 100644
--- 
a/scala-package/core/src/main/scala/org/apache/mxnet/io/PrefetchingIter.scala
+++ 
b/scala-package/core/src/main/scala/org/apache/mxnet/io/PrefetchingIter.scala
@@ -178,11 +178,11 @@ class PrefetchingIter(
   override def getPad(): Int = this.currentBatch.pad
 
   // The name and shape of label provided by this iterator
-  @deprecated
+  @deprecated("Please use provideDataDesc instead", "1.3.0")
   override def provideLabel: ListMap[String, Shape] = this._provideLabel
 
   // The name and shape of data provided by this iterator
-  @deprecated
+  @deprecated("Please use provideLabelDesc instead", "1.3.0")
   override def provideData: ListMap[String, Shape] = this._provideData
 
   // Provide type:DataDesc of the data
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/io/ResizeIter.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/io/ResizeIter.scala
index e840af9395f..9bc042a7b98 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/io/ResizeIter.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/io/ResizeIter.scala
@@ -134,13 +134,13 @@ class ResizeIter(
   }
 
   // The name and shape of data provided by this iterator
-  @deprecated
+  @deprecated("Please use provideDataDesc instead", "1.3.0")
   override def provideData: ListMap[String, Shape] = {
     dataIter.provideData
   }
 
   // The name and shape of label provided by this iterator
-  @deprecated
+  @deprecated("Please use provideLabelDesc instead", "1.3.0")
   override def provideLabel: ListMap[String, Shape] = {
     dataIter.provideLabel
   }
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala
index 5f0caedcc40..acae8bf5994 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Context.scala
@@ -17,6 +17,7 @@
 package org.apache.mxnet.javaapi
 
 import collection.JavaConverters._
+import scala.language.implicitConversions
 
 class Context(val context: org.apache.mxnet.Context) {
 
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala
index 47b1c367c1c..888a5d812c7 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/IO.scala
@@ -17,6 +17,8 @@
 
 package org.apache.mxnet.javaapi
 
+import scala.language.implicitConversions
+
 class DataDesc(val dataDesc: org.apache.mxnet.DataDesc) {
 
   def this(name: String, shape: Shape, dType: DType.DType, layout: String) =
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala
index 594e3a60578..5c4464f8421 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/javaapi/Shape.scala
@@ -18,6 +18,7 @@
 package org.apache.mxnet.javaapi
 
 import collection.JavaConverters._
+import scala.language.implicitConversions
 
 /**
   * Shape of [[NDArray]] or other data
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/module/BaseModule.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/module/BaseModule.scala
index 30e57c57fbe..b73f4ad4b11 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/module/BaseModule.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/module/BaseModule.scala
@@ -210,7 +210,7 @@ abstract class BaseModule {
    * @param reset Default is `True`, indicating whether we should reset the 
data iter before start
    *              doing prediction.
    * @return The return value will be a nested list like
-   *         `[[out1_batch1, out2_batch1, ...], [out1_batch2, out2_batch2, 
...]]`
+   *         `[ [out1_batch1, out2_batch1, ...], [out1_batch2, out2_batch2, 
...] ]`
    *         This mode is useful because in some cases (e.g. bucketing),
    *         the module does not necessarily produce the same number of 
outputs.
    */
@@ -501,7 +501,7 @@ abstract class BaseModule {
    * Get outputs of the previous forward computation.
    * @return In the case when data-parallelism is used,
    *         the outputs will be collected from multiple devices.
-   *         The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, 
out2_dev2]]`,
+   *         The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, 
out2_dev2] ]`,
    *         those `NDArray` might live on different devices.
    */
   def getOutputs(): IndexedSeq[IndexedSeq[NDArray]]
@@ -519,7 +519,7 @@ abstract class BaseModule {
    * Get the gradients to the inputs, computed in the previous backward 
computation.
    * @return In the case when data-parallelism is used,
    *         the grads will be collected from multiple devices.
-   *         The results will look like `[[grad1_dev1, grad1_dev2], 
[grad2_dev1, grad2_dev2]]`,
+   *         The results will look like `[ [grad1_dev1, grad1_dev2], 
[grad2_dev1, grad2_dev2] ]`,
    *         those `NDArray` might live on different devices.
    */
   def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]]
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/module/BucketingModule.scala
 
b/scala-package/core/src/main/scala/org/apache/mxnet/module/BucketingModule.scala
index 2262f5c6aa3..1ac798e1b61 100644
--- 
a/scala-package/core/src/main/scala/org/apache/mxnet/module/BucketingModule.scala
+++ 
b/scala-package/core/src/main/scala/org/apache/mxnet/module/BucketingModule.scala
@@ -339,7 +339,7 @@ class BucketingModule(symGen: AnyRef => (Symbol, 
IndexedSeq[String], IndexedSeq[
    * Get outputs of the previous forward computation.
    * @return In the case when data-parallelism is used,
    *         the outputs will be collected from multiple devices.
-   *         The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, 
out2_dev2]]`,
+   *         The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, 
out2_dev2] ]`,
    *         those `NDArray` might live on different devices.
    */
   override def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -363,7 +363,7 @@ class BucketingModule(symGen: AnyRef => (Symbol, 
IndexedSeq[String], IndexedSeq[
    * Get the gradients to the inputs, computed in the previous backward 
computation.
    * @return In the case when data-parallelism is used,
    *         the grads will be collected from multiple devices.
-   *         The results will look like `[[grad1_dev1, grad1_dev2], 
[grad2_dev1, grad2_dev2]]`,
+   *         The results will look like `[ [grad1_dev1, grad1_dev2], 
[grad2_dev1, grad2_dev2] ]`,
    *         those `NDArray` might live on different devices.
    */
   override def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
 
b/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
index 5c567fe5d53..df66ea7721f 100644
--- 
a/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
+++ 
b/scala-package/core/src/main/scala/org/apache/mxnet/module/DataParallelExecutorGroup.scala
@@ -517,7 +517,7 @@ class DataParallelExecutorGroup private[module](
    * Get outputs of the previous forward computation.
    * @return In the case when data-parallelism is used,
    *         the outputs will be collected from multiple devices.
-   *         The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, 
out2_dev2]]`,
+   *         The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, 
out2_dev2] ]`,
    *         those `NDArray` might live on different devices.
    */
   def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -539,7 +539,7 @@ class DataParallelExecutorGroup private[module](
    * Get the gradients to the inputs, computed in the previous backward 
computation.
    * @return In the case when data-parallelism is used,
    *         the grads will be collected from multiple devices.
-   *         The results will look like `[[grad1_dev1, grad1_dev2], 
[grad2_dev1, grad2_dev2]]`,
+   *         The results will look like `[ [grad1_dev1, grad1_dev2], 
[grad2_dev1, grad2_dev2] ]`,
    *         those `NDArray` might live on different devices.
    */
   def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/module/Module.scala 
b/scala-package/core/src/main/scala/org/apache/mxnet/module/Module.scala
index fec1ba0dc35..97df3dcb307 100644
--- a/scala-package/core/src/main/scala/org/apache/mxnet/module/Module.scala
+++ b/scala-package/core/src/main/scala/org/apache/mxnet/module/Module.scala
@@ -486,7 +486,7 @@ class Module(symbolVar: Symbol,
    * Get outputs of the previous forward computation.
    * @return In the case when data-parallelism is used,
    *         the outputs will be collected from multiple devices.
-   *         The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, 
out2_dev2]]`,
+   *         The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, 
out2_dev2] ]`,
    *         those `NDArray` might live on different devices.
    */
   def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -510,7 +510,7 @@ class Module(symbolVar: Symbol,
    * Get the gradients to the inputs, computed in the previous backward 
computation.
    * @return In the case when data-parallelism is used,
    *         the grads will be collected from multiple devices.
-   *         The results will look like `[[grad1_dev1, grad1_dev2], 
[grad2_dev1, grad2_dev2]]`,
+   *         The results will look like `[ [grad1_dev1, grad1_dev2], 
[grad2_dev1, grad2_dev2] ]`,
    *         those `NDArray` might live on different devices.
    */
   def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {
diff --git 
a/scala-package/core/src/main/scala/org/apache/mxnet/module/SequentialModule.scala
 
b/scala-package/core/src/main/scala/org/apache/mxnet/module/SequentialModule.scala
index e75550a1d1f..2e506c08e54 100644
--- 
a/scala-package/core/src/main/scala/org/apache/mxnet/module/SequentialModule.scala
+++ 
b/scala-package/core/src/main/scala/org/apache/mxnet/module/SequentialModule.scala
@@ -346,7 +346,7 @@ class SequentialModule extends BaseModule {
    * Get outputs of the previous forward computation.
    * @return In the case when data-parallelism is used,
    *         the outputs will be collected from multiple devices.
-   *         The results will look like `[[out1_dev1, out1_dev2], [out2_dev1, 
out2_dev2]]`,
+   *         The results will look like `[ [out1_dev1, out1_dev2], [out2_dev1, 
out2_dev2] ]`,
    *         those `NDArray` might live on different devices.
    */
   def getOutputs(): IndexedSeq[IndexedSeq[NDArray]] = {
@@ -370,7 +370,7 @@ class SequentialModule extends BaseModule {
    * Get the gradients to the inputs, computed in the previous backward 
computation.
    * @return In the case when data-parallelism is used,
    *         the grads will be collected from multiple devices.
-   *         The results will look like `[[grad1_dev1, grad1_dev2], 
[grad2_dev1, grad2_dev2]]`,
+   *         The results will look like `[ [grad1_dev1, grad1_dev2], 
[grad2_dev1, grad2_dev2] ]`,
    *         those `NDArray` might live on different devices.
    */
   def getInputGrads(): IndexedSeq[IndexedSeq[NDArray]] = {


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to