larroy commented on a change in pull request #14779: [WIP] Fully connected,
higher order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r305553693
##########
File path: tests/python/unittest/test_higher_order_grad.py
##########
@@ -173,6 +182,87 @@ def check_second_order_unary(x, op, grad_grad_op):
     # Validate the gradients.
     assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
+def arange_shape_like(y):
+    shape = y.shape
+    nelems = reduce(mul, shape)
+    x = nd.arange(nelems).reshape(shape)
+    return x
+
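+# Iterator that yields one test array per rank from `startdim` up to `dim`,
+# each with a random shape of that rank and deterministic arange-filled values.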
+class RandomShapes(object):
+    def __init__(self, dim, startdim=1):
+        self.dim = dim
+        self.curdim = startdim
+
+    def __iter__(self):
+        return self
+
+    @staticmethod
+    def random_shape(dimensions):
+        shape = rand_shape_nd(dimensions)
+        # x = nd.random.normal(shape=shape)
+        nelems = reduce(mul, shape)
+        x = nd.arange(nelems).reshape(shape)
+        return x
+
+    def next(self):
+        return self.__next__()
+
+    def __next__(self):
+        if self.curdim > self.dim:
+            raise StopIteration
+        x = RandomShapes.random_shape(self.curdim)
+        self.curdim += 1
+        return x
+
+
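+# flatten2d_right collapses all trailing axes into one, e.g. (2, 3, 4) -> (2, 12),
+# matching the layout produced by Dense(flatten=True); flatten2d_left collapses all
+# leading axes instead, e.g. (2, 3, 4) -> (6, 4), matching Dense(flatten=False).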
+def flatten2d_right(x):
+    s_0 = x.shape[0]
+    s_1 = reduce(mul, x.shape[1:])
+    return x.reshape((s_0, s_1))
+
+
+def flatten2d_left(x):
+    s_0 = reduce(mul, x.shape[:-1])
+    s_1 = x.shape[-1]
+    return x.reshape((s_0, s_1))
+
+
+@with_seed()
+def test_dense_backward_flatten():
+    for x in RandomShapes(4, 2):
+        hidden = random.randrange(1, 4)
+        net = gluon.nn.Sequential()
+        with net.name_scope():
+            net.add(gluon.nn.Dense(hidden, flatten=True))
+        net.initialize(mxnet.initializer.Constant(.5))
+        x.attach_grad()
+        with ag.record():
Review comment:
How? It's not unary... could you propose an example?
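
For illustration, a minimal sketch of the unary/non-unary distinction the comment is pointing at, assuming the standard MXNet imperative API; the shapes, values and variable names below are made up and not part of the PR:

    from mxnet import autograd, nd

    # A unary op such as sin takes a single input array, so the existing
    # check_second_order_unary(x, op, grad_grad_op) harness applies directly.
    x = nd.array([[1.0, 2.0], [3.0, 4.0]])
    x.attach_grad()
    with autograd.record():
        y = nd.sin(x)

    # FullyConnected is not unary: its output depends on data, weight and bias,
    # so a second-order check has to pick which argument(s) it differentiates
    # twice with respect to and cannot reuse the unary harness unchanged.
    weight = nd.ones((3, 2)) * 0.5
    bias = nd.zeros((3,))
    weight.attach_grad()
    with autograd.record():
        z = nd.FullyConnected(data=x, weight=weight, bias=bias, num_hidden=3)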