This is an automated email from the ASF dual-hosted git repository.

haibin pushed a commit to branch doc-patch
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git

commit ad0db62c5e6488a4a101f4ef64329269ee9dfb2f
Author: Haibin Lin <[email protected]>
AuthorDate: Mon Aug 12 16:36:28 2019 -0700

    Update basic_layers.py
---
 python/mxnet/gluon/nn/basic_layers.py | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/python/mxnet/gluon/nn/basic_layers.py b/python/mxnet/gluon/nn/basic_layers.py
index b1482ce..0c42077 100644
--- a/python/mxnet/gluon/nn/basic_layers.py
+++ b/python/mxnet/gluon/nn/basic_layers.py
@@ -150,8 +150,9 @@ class Dense(HybridBlock):
     created by the layer, and `bias` is a bias vector created by the layer
     (only applicable if `use_bias` is `True`).
 
-    Note: the input must be a tensor with rank 2. Use `flatten` to convert it
-    to rank 2 manually if necessary.
+    .. note::
+        The input must be a tensor with rank 2. Use `flatten` to convert it
+        to rank 2 manually if necessary.
 
     Parameters
     ----------
@@ -374,11 +375,13 @@ class Embedding(HybridBlock):
     r"""Turns non-negative integers (indexes/tokens) into dense vectors
     of fixed size. eg. [4, 20] -> [[0.25, 0.1], [0.6, -0.2]]
 
-    Note: if `sparse_grad` is set to True, the gradient w.r.t weight will be
-    sparse. Only a subset of optimizers support sparse gradients, including SGD, AdaGrad
-    and Adam. By default lazy updates is turned on, which may perform differently
-    from standard updates. For more details, please check the Optimization API at:
-    https://mxnet.incubator.apache.org/api/python/optimization/optimization.html
+    .. note::
+        If `sparse_grad` is set to True, the gradient w.r.t. weight will be
+        sparse. Only a subset of optimizers support sparse gradients, including SGD,
+        AdaGrad and Adam. By default, lazy updates are turned on, which may perform
+        differently from standard updates. For more details, please check the
+        Optimization API at:
+        https://mxnet.incubator.apache.org/api/python/optimization/optimization.html
 
     Parameters
     ----------

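For reference, a minimal sketch of the Dense note above, assuming MXNet 1.x with the Gluon API (the network name, layer sizes, and input shape here are illustrative, not part of the patch): an input with rank greater than 2 is flattened to rank 2 before it reaches the Dense layer.

    import mxnet as mx
    from mxnet.gluon import nn

    net = nn.HybridSequential()
    net.add(nn.Flatten())   # reshapes (batch, 3, 4, 5) -> (batch, 60)
    net.add(nn.Dense(10))   # Dense weight shape is then inferred as (10, 60)
    net.initialize()

    x = mx.nd.random.uniform(shape=(2, 3, 4, 5))
    print(net(x).shape)     # expected: (2, 10)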
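Similarly, a sketch of the Embedding note, under the same assumptions (MXNet 1.x Gluon; the vocabulary size, embedding size, learning rate, and token indices are made up for illustration): `sparse_grad=True` is paired with SGD, one of the optimizers the note lists as supporting sparse gradients.

    import mxnet as mx
    from mxnet import gluon
    from mxnet.gluon import nn

    embed = nn.Embedding(input_dim=10000, output_dim=50, sparse_grad=True)
    embed.initialize()

    # SGD supports sparse (row_sparse) gradients; lazy updates are its default behavior.
    trainer = gluon.Trainer(embed.collect_params(), 'sgd', {'learning_rate': 0.1})

    tokens = mx.nd.array([[4, 20], [7, 2]])
    with mx.autograd.record():
        vectors = embed(tokens)   # shape (2, 2, 50)
        loss = vectors.sum()
    loss.backward()               # gradient w.r.t. the embedding weight is sparse
    trainer.step(batch_size=2)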