This is an automated email from the ASF dual-hosted git repository.

jxie pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git

commit 36f342211b3c83ce4d44befce892178af784466e
Author: ThomasDelteil <thomas.delte...@gmail.com>
AuthorDate: Thu Apr 19 18:08:53 2018 -0700

    add warning on input data for RNN for layout NTC
---
 python/mxnet/gluon/rnn/rnn_layer.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/python/mxnet/gluon/rnn/rnn_layer.py b/python/mxnet/gluon/rnn/rnn_layer.py
index a4150c9..53c9ec0 100644
--- a/python/mxnet/gluon/rnn/rnn_layer.py
+++ b/python/mxnet/gluon/rnn/rnn_layer.py
@@ -286,6 +286,10 @@ class RNN(_RNNLayer):
     Inputs:
        - **data**: input tensor with shape `(sequence_length, batch_size, input_size)`
          when `layout` is "TNC". For other layouts dimensions are permuted accordingly.
+          Be aware that a `transpose` operation on an ndarray results in a new
+          allocation of memory. For optimal performance and when applicable,
+          consider transposing your data to the "TNC" layout before loading it
+          into an ndarray.
         - **states**: initial recurrent state tensor with shape
           `(num_layers, batch_size, num_hidden)`. If `bidirectional` is True,
           shape will instead be `(2*num_layers, batch_size, num_hidden)`. If
@@ -386,6 +390,10 @@ class LSTM(_RNNLayer):
     Inputs:
        - **data**: input tensor with shape `(sequence_length, batch_size, input_size)`
          when `layout` is "TNC". For other layouts dimensions are permuted accordingly.
+          Be aware that a `transpose` operation on an ndarray results in a new
+          allocation of memory. For optimal performance and when applicable,
+          consider transposing your data to the "TNC" layout before loading it
+          into an ndarray.
        - **states**: a list of two initial recurrent state tensors. Each has shape
           `(num_layers, batch_size, num_hidden)`. If `bidirectional` is True,
           shape will instead be `(2*num_layers, batch_size, num_hidden)`. If
@@ -483,6 +491,10 @@ class GRU(_RNNLayer):
     Inputs:
        - **data**: input tensor with shape `(sequence_length, batch_size, input_size)`
          when `layout` is "TNC". For other layouts dimensions are permuted accordingly.
+          Be aware that a `transpose` operation on an ndarray results in a new
+          allocation of memory. For optimal performance and when applicable,
+          consider transposing your data to the "TNC" layout before loading it
+          into an ndarray.
         - **states**: initial recurrent state tensor with shape
           `(num_layers, batch_size, num_hidden)`. If `bidirectional` is True,
           shape will instead be `(2*num_layers, batch_size, num_hidden)`. If

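For reference, the docstring warning added above can be illustrated with a short
sketch (the layer, variable names, and shapes below are illustrative and not part
of the commit): transposing an NTC-ordered ndarray at call time allocates a new
NDArray for the transposed copy on every batch, whereas arranging the data in
"TNC" order before creating the ndarray avoids that per-batch allocation.

import numpy as np
from mxnet import nd
from mxnet.gluon import rnn

seq_len, batch_size, input_size, num_hidden = 30, 32, 100, 64

layer = rnn.LSTM(num_hidden, layout='TNC')
layer.initialize()

# Data arriving in NTC order (batch, sequence, feature), e.g. from a DataLoader.
batch_ntc = nd.random.uniform(shape=(batch_size, seq_len, input_size))

# Option 1: transpose at call time. This works, but allocates a new NDArray
# for the transposed copy on every batch.
out = layer(nd.transpose(batch_ntc, axes=(1, 0, 2)))

# Option 2: lay the data out in TNC order before creating the ndarray, so no
# per-batch transpose (and no extra allocation) is needed.
batch_tnc = nd.array(np.random.uniform(size=(seq_len, batch_size, input_size)))
out = layer(batch_tnc)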