sxjscience edited a comment on pull request #18690: URL: https://github.com/apache/incubator-mxnet/pull/18690#issuecomment-670991882
@mseth10 Here is an example of layer dropout, as in the paper https://arxiv.org/pdf/1909.11556.pdf. You may use this for profiling.

```python
import mxnet as mx
from mxnet.gluon import nn

mx.npx.set_np()


def np_cond(F, pred, then_func, else_func):
    # Bridge between the numpy-style arrays and the legacy contrib.cond op:
    # convert the inputs to classic NDArrays, run cond, convert the result back.
    out = F.contrib.cond(pred.as_nd_ndarray(),
                         then_func.as_nd_ndarray(),
                         else_func.as_nd_ndarray())
    return out.as_np_ndarray()


class LayerDropMLP(nn.HybridBlock):
    def __init__(self, use_layer_drop, layer_drop_ratio=0.1, units=32, num_layers=10):
        super().__init__()
        self._num_layers = num_layers
        self._use_layer_drop = use_layer_drop
        self._layer_drop_ratio = layer_drop_ratio
        self.layers = nn.HybridSequential()
        for i in range(num_layers):
            layer = nn.HybridSequential()
            layer.add(nn.Dense(units, in_units=units))
            layer.add(nn.Activation('tanh'))
            self.layers.add(layer)

    def hybrid_forward(self, F, x):
        out = x
        for i in range(self._num_layers):
            # Keep layer i with probability (1 - layer_drop_ratio).
            choose_new = F.np.random.uniform(0, 1) > self._layer_drop_ratio
            if F == mx.ndarray:
                # Imperative mode: the predicate value is concrete, so we can
                # branch in plain Python and skip the layer entirely.
                if choose_new.asnumpy():
                    out = self.layers[i](out)
            else:
                # Symbolic mode: the branch has to go through contrib.cond so
                # that the decision is made at graph execution time.
                forward_out = self.layers[i](out)
                out = np_cond(F, choose_new.astype('float32'), forward_out, out)
        return out


units = 32
foo = LayerDropMLP(use_layer_drop=True, units=units, layer_drop_ratio=0.2)
foo.initialize()
foo.hybridize()
out = foo(mx.np.random.normal(0, 1, (32, units), dtype=mx.np.float32))
```
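For the profiling itself, a minimal sketch wrapping one forward pass with the standard `mx.profiler` API might look like this; the config values and output filename are just placeholders, and it assumes `foo` and `units` from the snippet above.

```python
# Hypothetical profiling wrapper around one forward pass of the hybridized
# block above; `foo` and `units` come from the previous snippet.
import mxnet as mx

mx.profiler.set_config(profile_all=True, aggregate_stats=True,
                       filename='layer_drop_profile.json')
mx.profiler.set_state('run')
out = foo(mx.np.random.normal(0, 1, (32, units), dtype=mx.np.float32))
mx.nd.waitall()  # wait for all asynchronous computation to finish
mx.profiler.set_state('stop')
print(mx.profiler.dumps())  # aggregated per-operator statistics
```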
