gautham-kollu closed pull request #12512: [MXNET-938] Allow fusing weight ahead
of calling forward()
URL: https://github.com/apache/incubator-mxnet/pull/12512
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
diff --git a/python/mxnet/gluon/rnn/rnn_layer.py b/python/mxnet/gluon/rnn/rnn_layer.py
index d2c6ac9d9f2..49ad874f4e2 100644
--- a/python/mxnet/gluon/rnn/rnn_layer.py
+++ b/python/mxnet/gluon/rnn/rnn_layer.py
@@ -50,6 +50,7 @@ def __init__(self, hidden_size, num_layers, layout,
         self._h2h_weight_initializer = h2h_weight_initializer
         self._i2h_bias_initializer = i2h_bias_initializer
         self._h2h_bias_initializer = h2h_bias_initializer
+        self.fused_params = None
         self._gates = {'rnn_relu': 1, 'rnn_tanh': 1, 'lstm': 4, 'gru': 3}[mode]
@@ -183,6 +184,16 @@ def begin_state(self, batch_size=0, func=ndarray.zeros, **kwargs):
             states.append(func(name='%sh0_%d'%(self.prefix, i), **info))
         return states

+    def fuse_weights(self, F):
+        if F is ndarray:
+            kwargs = {i: j.data() for i, j in self._reg_params.items()}
+            params = (kwargs['{}{}_{}_{}'.format(d, l, g, t)].reshape(-1)
+                      for t in ['weight', 'bias']
+                      for l in range(self._num_layers)
+                      for d in ['l', 'r'][:self._dir]
+                      for g in ['i2h', 'h2h'])
+            self.fused_params = F.concat(*params, dim=0)
+
     def hybrid_forward(self, F, inputs, states=None, **kwargs):
         if F is ndarray:
             batch_size = inputs.shape[self._layout.find('N')]
@@ -209,12 +220,16 @@ def _forward_kernel(self, F, inputs, states, **kwargs):
         """ forward using CUDNN or CPU kernel"""
         if self._layout == 'NTC':
             inputs = F.swapaxes(inputs, dim1=0, dim2=1)
-        params = (kwargs['{}{}_{}_{}'.format(d, l, g, t)].reshape(-1)
-                  for t in ['weight', 'bias']
-                  for l in range(self._num_layers)
-                  for d in ['l', 'r'][:self._dir]
-                  for g in ['i2h', 'h2h'])
-        params = F._internal._rnn_param_concat(*params, dim=0)
+
+        if F is ndarray and self.fused_params is not None:
+            params = self.fused_params
+        else:
+            params = (kwargs['{}{}_{}_{}'.format(d, l, g, t)].reshape(-1)
+                      for t in ['weight', 'bias']
+                      for l in range(self._num_layers)
+                      for d in ['l', 'r'][:self._dir]
+                      for g in ['i2h', 'h2h'])
+            params = F._internal._rnn_param_concat(*params, dim=0)
         rnn = F.RNN(inputs, params, *states, state_size=self._hidden_size,
                     num_layers=self._num_layers, bidirectional=self._dir == 2,
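
For context, the new method can be exercised as below. This is a minimal,
hypothetical usage sketch, not part of the PR: the layer configuration and
shapes are illustrative, and input_size is passed explicitly so that all
parameter shapes are already known when initialize() runs, allowing
fuse_weights() to be called ahead of the first forward():

    import mxnet as mx
    from mxnet import ndarray
    from mxnet.gluon import rnn

    # Illustrative sizes; input_size must be given so that the i2h weight
    # shapes are complete before any data has been seen.
    layer = rnn.LSTM(hidden_size=100, num_layers=2, input_size=10)
    layer.initialize()

    # Concatenate every weight/bias array into one flat NDArray, once.
    layer.fuse_weights(ndarray)

    x = mx.nd.random.uniform(shape=(5, 3, 10))  # (T, N, C), the default 'TNC' layout
    out = layer(x)  # _forward_kernel now reuses layer.fused_params

Note that fused_params is a one-time cache: if the weights change afterwards
(e.g. during training), the cached concatenation is not refreshed, so this
path looks intended for inference. Symbolic forwards (where F is not ndarray)
still go through the _rnn_param_concat branch.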