[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-08-15 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r314185191
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -210,6 +217,168 @@ def check_second_order_unary(x, op, grad_grad_op, rtol=None, atol=None):
                         x.grad.asnumpy(), rtol=rtol, atol=atol)
 
 
+def arange_shape_like(y):
+    shape = y.shape
+    nelems = reduce(mul, shape)
+    x = nd.arange(nelems).reshape(shape)
+    return x
+
+
+class NDArrayGenerator(object):
+    def __init__(self, dim, startdim=1):
+        self.dim = dim
+        self.curdim = startdim
+
+    def __iter__(self):
+        return self
+
+    @staticmethod
+    def gen(dimensions):
+        shape = rand_shape_nd(dimensions, 4)
+        nelems = reduce(mul, shape)
+        x = nd.arange(nelems).reshape(shape)
+        return x
+
+    def next(self):
+        return self.__next__()
+
+    def __next__(self):
+        if self.curdim > self.dim:
+            raise StopIteration
+        x = NDArrayGenerator.gen(self.curdim)
+        self.curdim += 1
+        return x
+
+
+def flatten2d_right(x):
+    s_0 = x.shape[0]
+    s_1 = reduce(mul, x.shape[1:])
+    return x.reshape((s_0, s_1))
+
+
+def flatten2d_left(x):
+    s_0 = reduce(mul, x.shape[:-1])
+    s_1 = x.shape[-1]
+    return x.reshape((s_0, s_1))
+
+
+@with_seed()
+def test_dense_backward_flatten():
+    print("2nd order gradient for Fully Connected, flatten=True")
+    for x in NDArrayGenerator(4,2):
+        hidden = random.randrange(1, 4)
+        net = gluon.nn.Sequential()
+        with net.name_scope():
+            net.add(gluon.nn.Dense(hidden, flatten=True))
+        net.initialize(mxnet.initializer.Constant(.5))
+        x.attach_grad()
+        with autograd.record():
+            y = net.forward(x)
+            o_y = arange_shape_like(y)  # head gradient of y
+            params = [p.data() for p in net.collect_params().values()]
+            w = params[0]
+            b = params[1]
+            print("Checking y ({}) = x({}) * w^T({}) + b({})".format(y.shape, x.shape, w.shape, b.shape))
+            x_grad = autograd.grad(heads=y, variables=x, head_grads=o_y,
+                                   create_graph=True, retain_graph=True)[0]
+            o_x_grad = arange_shape_like(x_grad)
+            w_grad_grad = autograd.grad(heads=x_grad, variables=w,
+                                        head_grads=o_x_grad, create_graph=False)[0]
+            w_grad = autograd.grad(heads=y, variables=w, head_grads=o_y,
+                                   create_graph=True, retain_graph=True)[0]
+            o_w_grad = arange_shape_like(w_grad)
+            x_grad_grad = autograd.grad(heads=w_grad, variables=x,
+                                        head_grads=o_w_grad, create_graph=False)[0]
+        # Expected results
+        w_grad_e = nd.dot(o_y, x, transpose_a=True)
+        w_grad_grad_e = nd.dot(o_y, o_x_grad, transpose_a=True)
+        x_grad_e = nd.dot(o_y, w)
+        x_grad_grad_e = nd.dot(o_y, o_w_grad)
+        ok_(w_grad.shape == w.shape)
+        ok_(w_grad_grad.shape == w.shape)
+        ok_(x_grad.shape == x.shape)
+        ok_(x_grad_grad.shape == x.shape)
+        #print("x_grad:\n{}".format(x_grad));
+        #print("x_grad_e:\n{}".format(x_grad_e));
+        #print("x_grad_grad:\n{}".format(x_grad_grad));
+        #print("x_grad_grad_e:\n{}".format(x_grad_grad_e));
+        #print("w_grad:\n{}".format(w_grad));
+        #print("w_grad_e:\n{}".format(w_grad_e));
+        #print("w_grad_grad:\n{}".format(w_grad_grad));
+        #print("w_grad_grad_e:\n{}".format(w_grad_grad_e));
+        w_grad_check = same(flatten2d_right(w_grad), flatten2d_right(w_grad_e))
+        w_grad_grad_check = same(flatten2d_right(w_grad_grad), flatten2d_right(w_grad_grad_e))
+        x_grad_check = same(flatten2d_right(x_grad), flatten2d_right(x_grad_e))
+        x_grad_grad_check = same(flatten2d_right(x_grad_grad), flatten2d_right(x_grad_grad_e))
+        ok_(x_grad_check)
+        ok_(w_grad_check)
+        ok_(x_grad_grad_check)
+        ok_(w_grad_grad_check)
+
+@with_seed()
+def test_dense_backward_no_flatten():
+    print("2nd order gradient for Fully Connected, flatten=False")
+    for x in NDArrayGenerator(5,3):
+        hidden = random.randrange(1, 4)
+        net = gluon.nn.Sequential()
+        with net.name_scope():
+            net.add(gluon.nn.Dense(hidden, flatten=False))
+        net.initialize(mxnet.initializer.Constant(.5))
+        x.attach_grad()
+        with autograd.record():
+            y = net.forward(x)
+            o_y = arange_shape_like(y)  # head gradient of y
+            params = [p.data() for p in net.collect_params().values()]
+            w = params[0]
+            b = params[1]
+            print("Checking y ({}) = x({}) * w^T({}) + b({})".format(y.shape, x.shape, w.shape, b.shape))
+            x_grad = 

[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-08-15 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r314184675
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -210,6 +217,168 @@ def check_second_order_unary(x, op, grad_grad_op, 
rtol=None, atol=None):
 x.grad.asnumpy(), rtol=rtol, atol=atol)
 
 
+def arange_shape_like(y):
+shape = y.shape
+nelems = reduce(mul, shape)
+x = nd.arange(nelems).reshape(shape)
+return x
+
+
+class NDArrayGenerator(object):
+def __init__(self, dim, startdim=1):
+self.dim = dim
+self.curdim = startdim
+
+def __iter__(self):
+return self
+
+@staticmethod
+def gen(dimensions):
+shape = rand_shape_nd(dimensions, 4)
+nelems = reduce(mul, shape)
+x = nd.arange(nelems).reshape(shape)
+return x
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+x = NDArrayGenerator.gen(self.curdim)
+self.curdim += 1
+return x
+
+
+def flatten2d_right(x):
+s_0 = x.shape[0]
+s_1 = reduce(mul, x.shape[1:])
+return x.reshape((s_0, s_1))
+
+
+def flatten2d_left(x):
+s_0 = reduce(mul, x.shape[:-1])
+s_1 = x.shape[-1]
+return x.reshape((s_0, s_1))
+
+
+@with_seed()
+def test_dense_backward_flatten():
+print("2nd order gradient for Fully Connected, flatten=True")
+for x in NDArrayGenerator(4,2):
+hidden = random.randrange(1, 4)
+net = gluon.nn.Sequential()
+with net.name_scope():
+net.add(gluon.nn.Dense(hidden, flatten=True))
+net.initialize(mxnet.initializer.Constant(.5))
+x.attach_grad()
+with autograd.record():
+y = net.forward(x)
+o_y = arange_shape_like(y)  # head gradient of y
+params = [p.data() for p in net.collect_params().values()]
+w = params[0]
+b = params[1]
+print("Checking y ({}) = x({}) * w^T({}) + b({})".format(y.shape, 
x.shape, w.shape, b.shape))
 
 Review comment:
   > Output is ignored by nosetests unless specifically enabled. It's ok to leave print in unit tests. Check nosetest for more detail.
   
   Reference? For most mxnet Python tests, not adding print statements has been the general practice.
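   
   (For reference: nose captures stdout by default and only replays it for failing tests,
   so prints are invisible on passing runs unless capture is disabled. A minimal
   illustration using standard nose options, not something added by this PR:)
   
   import nose
   nose.run(argv=["nosetests", "--nocapture",
                  "tests/python/unittest/test_higher_order_grad.py"])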




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-08-07 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r311760576
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -210,6 +217,168 @@ def check_second_order_unary(x, op, grad_grad_op, 
rtol=None, atol=None):
 x.grad.asnumpy(), rtol=rtol, atol=atol)
 
 
+def arange_shape_like(y):
+shape = y.shape
+nelems = reduce(mul, shape)
+x = nd.arange(nelems).reshape(shape)
+return x
+
+
+class NDArrayGenerator(object):
+def __init__(self, dim, startdim=1):
+self.dim = dim
+self.curdim = startdim
+
+def __iter__(self):
+return self
+
+@staticmethod
+def gen(dimensions):
+shape = rand_shape_nd(dimensions, 4)
+nelems = reduce(mul, shape)
+x = nd.arange(nelems).reshape(shape)
+return x
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+x = NDArrayGenerator.gen(self.curdim)
+self.curdim += 1
+return x
+
+
+def flatten2d_right(x):
 
 Review comment:
   Can we use existing test util instead of creating new ones?
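   
   (For reference, the same flattening these helpers perform can also be written with
   reshape's special values 0 and -1; the snippet below is illustrative only, not code
   from the PR:)
   
   from mxnet import nd
   x = nd.arange(24).reshape((2, 3, 4))
   assert x.reshape((0, -1)).shape == (2, 12)            # like flatten2d_right
   assert x.reshape((-1, x.shape[-1])).shape == (6, 4)   # like flatten2d_left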
   
   




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-08-07 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r311760456
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -210,6 +217,168 @@ def check_second_order_unary(x, op, grad_grad_op, 
rtol=None, atol=None):
 x.grad.asnumpy(), rtol=rtol, atol=atol)
 
 
+def arange_shape_like(y):
+    shape = y.shape
+    nelems = reduce(mul, shape)
+    x = nd.arange(nelems).reshape(shape)
+    return x
+
+
+class NDArrayGenerator(object):
 
 Review comment:
   Can we use existing test util instead of creating new ones?




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-08-07 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r311760034
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -210,6 +217,168 @@ def check_second_order_unary(x, op, grad_grad_op, 
rtol=None, atol=None):
 x.grad.asnumpy(), rtol=rtol, atol=atol)
 
 
+def arange_shape_like(y):
+shape = y.shape
+nelems = reduce(mul, shape)
+x = nd.arange(nelems).reshape(shape)
+return x
+
+
+class NDArrayGenerator(object):
+def __init__(self, dim, startdim=1):
+self.dim = dim
+self.curdim = startdim
+
+def __iter__(self):
+return self
+
+@staticmethod
+def gen(dimensions):
+shape = rand_shape_nd(dimensions, 4)
+nelems = reduce(mul, shape)
+x = nd.arange(nelems).reshape(shape)
+return x
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+x = NDArrayGenerator.gen(self.curdim)
+self.curdim += 1
+return x
+
+
+def flatten2d_right(x):
+s_0 = x.shape[0]
+s_1 = reduce(mul, x.shape[1:])
+return x.reshape((s_0, s_1))
+
+
+def flatten2d_left(x):
+s_0 = reduce(mul, x.shape[:-1])
+s_1 = x.shape[-1]
+return x.reshape((s_0, s_1))
+
+
+@with_seed()
+def test_dense_backward_flatten():
 
 Review comment:
   Can we create a generic method to test gluon blocks, such as `test_gluon_block(data, weight, block, expected)`, just like we are doing with `check_second_order_unary`? It will make it easier to verify that the test is correct.
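   
   (A rough sketch of what such a helper could look like, reusing the utilities defined
   in this diff (arange_shape_like, flatten2d_right, same, ok_); the name and signature
   are illustrative, not part of the PR:)
   
   def check_dense_second_order(x, net, expected_w_grad_grad):
       x.attach_grad()
       with autograd.record():
           y = net(x)
           o_y = arange_shape_like(y)
           w = list(net.collect_params().values())[0].data()
           x_grad = autograd.grad(heads=y, variables=x, head_grads=o_y,
                                  create_graph=True, retain_graph=True)[0]
           w_grad_grad = autograd.grad(heads=x_grad, variables=w,
                                       head_grads=arange_shape_like(x_grad),
                                       create_graph=False)[0]
       ok_(same(flatten2d_right(w_grad_grad), flatten2d_right(expected_w_grad_grad)))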




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-08-07 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r311758931
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -210,6 +217,168 @@ def check_second_order_unary(x, op, grad_grad_op, 
rtol=None, atol=None):
 x.grad.asnumpy(), rtol=rtol, atol=atol)
 
 
+def arange_shape_like(y):
+shape = y.shape
+nelems = reduce(mul, shape)
+x = nd.arange(nelems).reshape(shape)
+return x
+
+
+class NDArrayGenerator(object):
+def __init__(self, dim, startdim=1):
+self.dim = dim
+self.curdim = startdim
+
+def __iter__(self):
+return self
+
+@staticmethod
+def gen(dimensions):
+shape = rand_shape_nd(dimensions, 4)
+nelems = reduce(mul, shape)
+x = nd.arange(nelems).reshape(shape)
+return x
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+x = NDArrayGenerator.gen(self.curdim)
+self.curdim += 1
+return x
+
+
+def flatten2d_right(x):
+s_0 = x.shape[0]
+s_1 = reduce(mul, x.shape[1:])
+return x.reshape((s_0, s_1))
+
+
+def flatten2d_left(x):
+s_0 = reduce(mul, x.shape[:-1])
+s_1 = x.shape[-1]
+return x.reshape((s_0, s_1))
+
+
+@with_seed()
+def test_dense_backward_flatten():
+print("2nd order gradient for Fully Connected, flatten=True")
+for x in NDArrayGenerator(4,2):
+hidden = random.randrange(1, 4)
+net = gluon.nn.Sequential()
+with net.name_scope():
+net.add(gluon.nn.Dense(hidden, flatten=True))
+net.initialize(mxnet.initializer.Constant(.5))
+x.attach_grad()
+with autograd.record():
+y = net.forward(x)
+o_y = arange_shape_like(y)  # head gradient of y
+params = [p.data() for p in net.collect_params().values()]
+w = params[0]
+b = params[1]
+print("Checking y ({}) = x({}) * w^T({}) + b({})".format(y.shape, 
x.shape, w.shape, b.shape))
 
 Review comment:
   remove print




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-08-07 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r311757428
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -210,6 +217,168 @@ def check_second_order_unary(x, op, grad_grad_op, 
rtol=None, atol=None):
 x.grad.asnumpy(), rtol=rtol, atol=atol)
 
 
+def arange_shape_like(y):
+shape = y.shape
+nelems = reduce(mul, shape)
+x = nd.arange(nelems).reshape(shape)
+return x
+
+
+class NDArrayGenerator(object):
+def __init__(self, dim, startdim=1):
+self.dim = dim
+self.curdim = startdim
+
+def __iter__(self):
+return self
+
+@staticmethod
+def gen(dimensions):
+shape = rand_shape_nd(dimensions, 4)
+nelems = reduce(mul, shape)
+x = nd.arange(nelems).reshape(shape)
+return x
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+x = NDArrayGenerator.gen(self.curdim)
+self.curdim += 1
+return x
+
+
+def flatten2d_right(x):
+s_0 = x.shape[0]
+s_1 = reduce(mul, x.shape[1:])
+return x.reshape((s_0, s_1))
+
+
+def flatten2d_left(x):
+s_0 = reduce(mul, x.shape[:-1])
+s_1 = x.shape[-1]
+return x.reshape((s_0, s_1))
+
+
+@with_seed()
+def test_dense_backward_flatten():
+print("2nd order gradient for Fully Connected, flatten=True")
+for x in NDArrayGenerator(4,2):
+hidden = random.randrange(1, 4)
+net = gluon.nn.Sequential()
+with net.name_scope():
+net.add(gluon.nn.Dense(hidden, flatten=True))
+net.initialize(mxnet.initializer.Constant(.5))
+x.attach_grad()
+with autograd.record():
+y = net.forward(x)
+o_y = arange_shape_like(y)  # head gradient of y
+params = [p.data() for p in net.collect_params().values()]
+w = params[0]
+b = params[1]
+print("Checking y ({}) = x({}) * w^T({}) + b({})".format(y.shape, 
x.shape, w.shape, b.shape))
+x_grad = autograd.grad(heads=y, variables=x, head_grads=o_y,
+   create_graph=True, retain_graph=True)[0]
+o_x_grad = arange_shape_like(x_grad)
+w_grad_grad = autograd.grad(heads=x_grad, variables=w,
+head_grads=o_x_grad, 
create_graph=False)[0]
+w_grad = autograd.grad(heads=y, variables=w, head_grads=o_y,
+   create_graph=True, retain_graph=True)[0]
+o_w_grad = arange_shape_like(w_grad)
+x_grad_grad = autograd.grad(heads=w_grad, variables=x,
+head_grads=o_w_grad, 
create_graph=False)[0]
+# Expected results
+w_grad_e = nd.dot(o_y, x, transpose_a=True)
+w_grad_grad_e = nd.dot(o_y, o_x_grad, transpose_a=True)
+x_grad_e = nd.dot(o_y, w)
+x_grad_grad_e = nd.dot(o_y, o_w_grad)
+ok_(w_grad.shape == w.shape)
+ok_(w_grad_grad.shape == w.shape)
+ok_(x_grad.shape == x.shape)
+ok_(x_grad_grad.shape == x.shape)
+#print("x_grad:\n{}".format(x_grad));
+#print("x_grad_e:\n{}".format(x_grad_e));
+#print("x_grad_grad:\n{}".format(x_grad_grad));
+#print("x_grad_grad_e:\n{}".format(x_grad_grad_e));
+#print("w_grad:\n{}".format(w_grad));
+#print("w_grad_e:\n{}".format(w_grad_e));
+#print("w_grad_grad:\n{}".format(w_grad_grad));
+#print("w_grad_grad_e:\n{}".format(w_grad_grad_e));
+w_grad_check = same(flatten2d_right(w_grad), flatten2d_right(w_grad_e))
+w_grad_grad_check = same(flatten2d_right(w_grad_grad), 
flatten2d_right(w_grad_grad_e))
+x_grad_check = same(flatten2d_right(x_grad), flatten2d_right(x_grad_e))
+x_grad_grad_check = same(flatten2d_right(x_grad_grad), 
flatten2d_right(x_grad_grad_e))
+ok_(x_grad_check)
+ok_(w_grad_check)
+ok_(x_grad_grad_check)
+ok_(w_grad_grad_check)
+
+@with_seed()
+def test_dense_backward_no_flatten():
+print("2nd order gradient for Fully Connected, flatten=False")
+for x in NDArrayGenerator(5,3):
+hidden = random.randrange(1, 4)
+net = gluon.nn.Sequential()
+with net.name_scope():
+net.add(gluon.nn.Dense(hidden, flatten=False))
+net.initialize(mxnet.initializer.Constant(.5))
+x.attach_grad()
+with autograd.record():
+y = net.forward(x)
+o_y = arange_shape_like(y)  # head gradient of y
+params = [p.data() for p in net.collect_params().values()]
+w = params[0]
+b = params[1]
+print("Checking y ({}) = x({}) * w^T({}) + b({})".format(y.shape, 
x.shape, w.shape, b.shape))
+x_grad = 

[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-08-07 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r311757281
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -173,6 +182,87 @@ def check_second_order_unary(x, op, grad_grad_op):
 # Validate the gradients.
 assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
 
+def arange_shape_like(y):
+    shape = y.shape
+    nelems = reduce(mul, shape)
+    x = nd.arange(nelems).reshape(shape)
+    return x
+
+class RandomShapes(object):
+    def __init__(self, dim, startdim=1):
+        self.dim = dim
+        self.curdim = startdim
+
+    def __iter__(self):
+        return self
+
+    @staticmethod
+    def random_shape(dimensions):
+        shape = rand_shape_nd(dimensions)
+        # x = nd.random.normal(shape=shape)
+        nelems = reduce(mul, shape)
+        x = nd.arange(nelems).reshape(shape)
+        return x
+
+    def next(self):
+        return self.__next__()
+
+    def __next__(self):
+        if self.curdim > self.dim:
+            raise StopIteration
+        x = RandomShapes.random_shape(self.curdim)
+        self.curdim += 1
+        return x
+
+
+def flatten2d_right(x):
+    s_0 = x.shape[0]
+    s_1 = reduce(mul, x.shape[1:])
+    return x.reshape((s_0, s_1))
+
+
+def flatten2d_left(x):
+    s_0 = reduce(mul, x.shape[:-1])
+    s_1 = x.shape[-1]
+    return x.reshape((s_0, s_1))
+
+
+@with_seed()
+def test_dense_backward_flatten():
+    for x in RandomShapes(4,2):
+        hidden = random.randrange(1, 4)
+        net = gluon.nn.Sequential()
+        with net.name_scope():
+            net.add(gluon.nn.Dense(hidden, flatten=True))
+        net.initialize(mxnet.initializer.Constant(.5))
 
 Review comment:
   Please change to random if you finished debugging this.
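   
   (For example, with an illustrative random initializer and sigma value:)
   
   net.initialize(mxnet.initializer.Normal(sigma=0.5))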




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-08-07 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r311756950
 
 

 ##
 File path: src/operator/nn/fully_connected.cc
 ##
 @@ -210,11 +222,11 @@ inline static bool FCStorageType(const nnvm::NodeAttrs& attrs,
   return dispatched;
 }
 
-inline static bool BackwardFCStorageType(const nnvm::NodeAttrs& attrs,
-                                         const int dev_mask,
-                                         DispatchMode* dispatch_mode,
-                                         std::vector<int> *in_attrs,
-                                         std::vector<int> *out_attrs) {
+static bool BackwardFCStorageType(const nnvm::NodeAttrs& attrs,
 
 Review comment:
   Please add a reference. According to this article, the compiler can automatically decide whether inlining is needed: https://www.geeksforgeeks.org/inline-functions-cpp/
   
   Moreover, if this does not affect the feature you are developing in this PR, please do not change it.




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-08-07 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r311756546
 
 

 ##
 File path: src/operator/nn/fully_connected.cc
 ##
 @@ -176,11 +176,23 @@ struct FullyConnectedGrad {
   }
 };
 
-inline static bool FCStorageType(const nnvm::NodeAttrs& attrs,
-                                 const int dev_mask,
-                                 DispatchMode* dispatch_mode,
-                                 std::vector<int> *in_attrs,
-                                 std::vector<int> *out_attrs) {
+
+struct FullyConnectedGradGrad {
+  const char *op_name;
+  std::vector<nnvm::NodeEntry> operator()(const nnvm::NodePtr& n,
+                                          const std::vector<nnvm::NodeEntry>& ograds) const {
+    std::vector<nnvm::NodeEntry> heads(ograds.begin(), ograds.end());
+    heads.push_back(n->inputs[0]);  // o_y : head gradient of the output y
+    return MakeGradNode(op_name, n, heads, n->attrs.dict);
+  }
+};
+
+
+static bool FCStorageType(const nnvm::NodeAttrs& attrs,
 
 Review comment:
   Can you provide a reference and document it somewhere? It's not clear why we need to remove `inline` here, especially since it is not related to this feature at all.




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-07-25 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r307593777
 
 

 ##
 File path: tests/python/unittest/test_gluon.py
 ##
 @@ -21,7 +21,7 @@
 import mxnet as mx
 from mxnet import gluon
 from mxnet.gluon import nn
-from mxnet.test_utils import assert_almost_equal
+from mxnet.test_utils import assert_almost_equal, same
 
 Review comment:
   But there is no change in the test in this file.




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-07-25 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r307593378
 
 

 ##
 File path: src/operator/nn/fully_connected-inl.h
 ##
 @@ -249,6 +285,114 @@ void FullyConnectedGradCompute(const nnvm::NodeAttrs& 
attrs,
   }
 }
 
+
+
+///
+// Inputs are:
+// o_x_grad : head gradient for x_grad
+// o_w_grad : head gradient for w_grad
+// o_b_grad : if param.no_bias is false
+// o_y : head gradient of y
+//
+// outputs are:
+// o_y_grad : gradient of o_y
+// x_grad_grad : o_y *  o_w_grad
+// w_grad_grad : o_y.T * o_x_grad
+// b_grad_grad: if param.no_bias is false
+//
+// For implementation details see this PR: https://github.com/apache/incubator-mxnet/pull/14779
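// For reference (not part of the diff): derivation, assuming flatten=True with
// x: (m, k), w: (n, k), o_y: (m, n):
//   y = x * w^T + b
//   x_grad = o_y * w          w_grad = o_y^T * x
// Differentiating x_grad w.r.t. w under head gradient o_x_grad, and w_grad
// w.r.t. x under head gradient o_w_grad, gives the two products implemented
// below and checked by the expected values in the Python test:
//   w_grad_grad = o_y^T * o_x_grad          x_grad_grad = o_y * o_w_grad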
+
+/**
+ * Second order gradient for Fully Connected
+ * x_grad_grad = o_y * o_w_grad
+ * w_grad_grad = o_y.T * o_x_grad
+ *
+ * @tparam xpu
+ * @tparam DType
+ * @param attrs
+ * @param ctx
+ * @param inputs
+ * @param req
+ * @param outputs
+ */
+template<typename xpu, typename DType>
+void FullyConnectedGradGradCompute(const nnvm::NodeAttrs& attrs,
+                                   const OpContext& ctx,
+                                   const std::vector<TBlob>& inputs,
+                                   const std::vector<OpReqType>& req,
+                                   const std::vector<TBlob>& outputs) {
+  using namespace std;
+  using namespace fullc;
+  Stream<xpu> *stream = ctx.get_stream<xpu>();
+  const FullyConnectedParam& param = nnvm::get<FullyConnectedParam>(attrs.parsed);
+  const size_t num_inputs = param.no_bias ? 3U : 4U;
+  // outputs are: o_x_grad, o_w_grad, o_y   || o_x_grad, o_w_grad, o_b_grad, o_y
+  const size_t num_outputs = 3U;
+  CHECK_EQ(inputs.size(), num_inputs);
+  CHECK_EQ(outputs.size(), num_outputs);
+  CHECK_EQ(req.size(), num_outputs);
+
+  // inputs
+  Tensor<xpu, 2, DType> o_x_grad;
+  Tensor<xpu, 2, DType> o_w_grad;
+  Tensor<xpu, 2, DType> o_y;
+  // unused
+  // Tensor<xpu, 2, DType> o_b_grad;
+
+  // outputs
+  Tensor<xpu, 2, DType> o_y_grad;
+  TBlob o_y_grad_blob = outputs[kOyGrad];
+  Tensor<xpu, 2, DType> x_grad_grad;
+  Tensor<xpu, 2, DType> w_grad_grad;
+  Tensor<xpu, 2, DType> b_grad_grad;
+  size_t o_y_idx = std::numeric_limits<size_t>::max();
+  if (param.no_bias)
+    o_y_idx = kOy;
+  else
+    o_y_idx = kOyBias;
+  if (!param.flatten) {
+    o_x_grad = FlattenAs2DHead<xpu, DType>(inputs[kOxGrad], ctx);
+    o_w_grad = inputs[kOwGrad].get<xpu, 2, DType>(stream);
+    o_y = FlattenAs2DHead<xpu, DType>(inputs[o_y_idx], ctx);
+    x_grad_grad = FlattenAs2DHead<xpu, DType>(outputs[kXGradGrad], ctx);
+    w_grad_grad = FlattenAs2DHead<xpu, DType>(outputs[kWGradGrad], ctx);
+  } else {
+    o_x_grad = FlattenAs2DTail<xpu, DType>(inputs[kOxGrad], ctx);
+    o_w_grad = FlattenAs2DTail<xpu, DType>(inputs[kOwGrad], ctx);
+    o_y = inputs[o_y_idx].get<xpu, 2, DType>(stream);
+    x_grad_grad = FlattenAs2DTail<xpu, DType>(outputs[kXGradGrad], ctx);
+    w_grad_grad = FlattenAs2DTail<xpu, DType>(outputs[kWGradGrad], ctx);
+  }
+  linalg_gemm(o_y, o_w_grad, x_grad_grad, false, false, stream);
+  linalg_gemm(o_y, o_x_grad, w_grad_grad, true, false, stream);
+  // 3rd order not supported
+  Fill(stream, o_y_grad_blob, kWriteTo, static_cast<DType>(0));
+  /* TODO(larroy) bias is not supported yet as there's no bias input to backward. Bias grad grad is
 
 Review comment:
   @sxjscience Could you please review if this is correct?




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-07-25 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r307592893
 
 

 ##
 File path: src/operator/nn/fully_connected-inl.h
 ##
 @@ -47,7 +48,24 @@ namespace fullc {
 enum FullyConnectedOpInputs {kData, kWeight, kBias};
 enum FullyConnectedOpResource {kTempSpace};
 enum FullyConnectedOpOutputs {kOut};
-}  // fullc
+enum FullyConnectedGradGradOutputs {
 
 Review comment:
   nit: Can we keep the same code style as the previous enums?
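   
   (Illustrative only -- the single-line style of the surrounding enums would be:)
   
   enum FullyConnectedGradGradOutputs {kOyGrad, kXGradGrad, kWGradGrad, kBGradGrad};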




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-07-25 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r307592850
 
 

 ##
 File path: src/operator/nn/fully_connected-inl.h
 ##
 @@ -47,7 +48,24 @@ namespace fullc {
 enum FullyConnectedOpInputs {kData, kWeight, kBias};
 enum FullyConnectedOpResource {kTempSpace};
 enum FullyConnectedOpOutputs {kOut};
-}  // fullc
+enum FullyConnectedGradGradOutputs {
+  kOyGrad,
+  kXGradGrad,
+  kWGradGrad,
+  kBGradGrad
+};
+enum Inputs {
 
 Review comment:
   A more specific name?
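   
   (For instance -- the name and member list below are only a guess at what a more
   specific name could look like, based on how the values are used later in the diff:)
   
   enum FullyConnectedGradGradInputs {kOxGrad, kOwGrad, kOy, kOyBias};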




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-06-28 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r298748450
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -129,6 +131,44 @@ def check_second_order_unary(x, op, grad_grad_op):
 # Validate the gradients.
 assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
 
+class RandomShapes(object):
+    def __init__(self, dim):
+        self.dim = dim
+        self.curdim = 1
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        return self.__next__()
+
+    def __next__(self):
+        if self.curdim > self.dim:
+            raise StopIteration
+        shape = rand_shape_nd(self.curdim)
+        print(shape)
+        x = nd.random.normal(shape=shape)
+        self.curdim += 1
+        return x
+
+
+@with_seed()
+def test_dense_backward():
 
 Review comment:
   The reason I was suggesting to test the FullyConnected operator itself is that you could then reuse the `check_second_order_unary` utility method, like all the other operators do. Having to write an ad hoc test for each operator is not scalable and is error prone.
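   
   (A sketch of exercising the operator directly, without Gluon; the shapes and the use
   of nd.FullyConnected below are illustrative, not code from the PR:)
   
   x = nd.random.normal(shape=(4, 3))
   w = nd.random.normal(shape=(2, 3))
   b = nd.zeros((2,))
   x.attach_grad()
   w.attach_grad()
   with autograd.record():
       y = nd.FullyConnected(x, w, b, num_hidden=2)
       x_grad = autograd.grad(heads=y, variables=x, head_grads=nd.ones_like(y),
                              create_graph=True, retain_graph=True)[0]
       w_grad_grad = autograd.grad(heads=x_grad, variables=w,
                                   head_grads=nd.ones_like(x_grad),
                                   create_graph=False)[0]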




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-06-28 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r298748009
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -129,6 +135,83 @@ def check_second_order_unary(x, op, grad_grad_op):
 # Validate the gradients.
 assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
 
+class RandomShapes(object):
+    def __init__(self, dim, startdim=1):
+        self.dim = dim
+        self.curdim = startdim
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        return self.__next__()
+
+    def __next__(self):
+        if self.curdim > self.dim:
+            raise StopIteration
+        shape = rand_shape_nd(self.curdim)
+        x = nd.random.normal(shape=shape)
+        self.curdim += 1
+        return x
+
+
+@with_seed()
+def test_dense_backward():
+    for x in RandomShapes(4,2):
+        net = gluon.nn.Sequential()
+        with net.name_scope():
+            net.add(gluon.nn.Dense(1))
+
+        net.initialize(mxnet.initializer.Constant(.5))
+        x.attach_grad()
+        with ag.record():
+            y = net.forward(x)
+            x_grad = ag.grad(heads=y, variables=x, create_graph=True,
+                             retain_graph=True)[0]
+        x_grad.backward()
+        same(x.grad, nd.zeros(4))
+
+        with ag.record():
+            y = net.forward(x)
+            x_grad = ag.grad(heads=y, variables=x, create_graph=True,
+                             retain_graph=True)[0]
+            random_multiplier = nd.random.uniform_like(x_grad)
+            z = (random_multiplier * x_grad).sum()
+        z.backward()
+        same(x.grad, nd.zeros(4))
+
+        with ag.record():
+            y = net.forward(x)
+            x_grad_0 = ag.grad(heads=y, variables=x, create_graph=True,
+                               retain_graph=True)[0]
+        x_grad_grad_0 = x.grad
+
+        w_0 = list(net.collect_params().values())[0].data()
+        h_w = nd.ones_like(w_0) * 0.01
+        net.initialize(mxnet.initializer.Constant(w_0 + h_w), force_reinit=True)
+        w_1 = list(net.collect_params().values())[0].data()
+        with ag.record():
+            y = net.forward(x)
+            x_grad_1 = ag.grad(heads=y, variables=x, create_graph=True,
+                               retain_graph=True)[0]
+        x_grad_1.backward()
+        x_grad_grad_1 = x.grad
+        ok_(not np.array_equal(x_grad_0, x_grad_1))
+        ok_(np.array_equal(x_grad_grad_0, x_grad_grad_1))
+
+        w = list(net.collect_params().values())[0].data()
+        with ag.record():
+            y = net.forward(x)
+            w_grad_0 = ag.grad(heads=y, variables=w, create_graph=True,
+                               retain_graph=True)[0]
+        w_grad_0.backward()
+        w_grad_grad_0 = w.grad
+
+        x = x + nd.ones_like(x) * 0.01
+        with ag.record():
 
 Review comment:
   What are you trying to test here? Maybe break the `with` blocks in this method into multiple methods, so we know the purpose of each test?
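   
   (One possible split of the first block into a test whose purpose is visible from its
   name; the function name is illustrative and reuses the helpers quoted above:)
   
   @with_seed()
   def test_dense_second_order_wrt_x_is_zero():
       for x in RandomShapes(4, 2):
           net = gluon.nn.Sequential()
           with net.name_scope():
               net.add(gluon.nn.Dense(1))
           net.initialize(mxnet.initializer.Constant(.5))
           x.attach_grad()
           with ag.record():
               y = net.forward(x)
               x_grad = ag.grad(heads=y, variables=x, create_graph=True,
                                retain_graph=True)[0]
           x_grad.backward()
           # y is linear in x, so differentiating x_grad again w.r.t. x gives zero
           ok_(same(x.grad, nd.zeros_like(x.grad)))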




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-06-28 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r298747764
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -129,6 +135,83 @@ def check_second_order_unary(x, op, grad_grad_op):
 # Validate the gradients.
 assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
 
+class RandomShapes(object):
+def __init__(self, dim, startdim=1):
+self.dim = dim
+self.curdim = startdim
+
+def __iter__(self):
+return self
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+shape = rand_shape_nd(self.curdim)
+x = nd.random.normal(shape=shape)
+self.curdim += 1
+return x
+
+
+@with_seed()
+def test_dense_backward():
+for x in RandomShapes(4,2):
+net = gluon.nn.Sequential()
+with net.name_scope():
+net.add(gluon.nn.Dense(1))
+
+net.initialize(mxnet.initializer.Constant(.5))
+x.attach_grad()
+with ag.record():
+y = net.forward(x)
+x_grad = ag.grad(heads=y, variables=x, create_graph=True, 
retain_graph=True)[0]
+x_grad.backward()
+same(x.grad, nd.zeros(4))
+
+with ag.record():
+y = net.forward(x)
+x_grad = ag.grad(heads=y, variables=x, create_graph=True, 
retain_graph=True)[0]
+random_multiplier = nd.random.uniform_like(x_grad)
+z = (random_multiplier * x_grad).sum()
+z.backward()
+same(x.grad, nd.zeros(4))
+
+with ag.record():
 
 Review comment:
   What are you trying to test here?




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-06-28 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r298747388
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -18,8 +18,14 @@
 
 import math
 from mxnet import nd, autograd
-from mxnet.test_utils import assert_almost_equal, random_arrays, rand_shape_nd
+from mxnet.test_utils import assert_almost_equal, random_arrays, rand_shape_nd, same
 from common import with_seed
+import mxnet.autograd as ag
+import mxnet.ndarray as nd
+from mxnet import gluon
+import mxnet
+from nose.tools import ok_
 
 Review comment:
   this module should be imported before mxnet module




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-06-28 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r298747266
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -18,8 +18,14 @@
 
 import math
 from mxnet import nd, autograd
-from mxnet.test_utils import assert_almost_equal, random_arrays, rand_shape_nd
+from mxnet.test_utils import assert_almost_equal, random_arrays, rand_shape_nd, same
 from common import with_seed
+import mxnet.autograd as ag
+import mxnet.ndarray as nd
 
 Review comment:
   Many of these imports are duplicated. Could you please keep only the required ones?
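   
   (For reference, a deduplicated version of the imports quoted above could be as small
   as the following; the exact grouping is only a suggestion:)
   
   import math
   from nose.tools import ok_
   import mxnet
   from mxnet import autograd, gluon, nd
   from mxnet.test_utils import assert_almost_equal, random_arrays, rand_shape_nd, same
   from common import with_seed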




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-06-26 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r297877572
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -129,6 +131,44 @@ def check_second_order_unary(x, op, grad_grad_op):
 # Validate the gradients.
 assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
 
+class RandomShapes(object):
+def __init__(self, dim):
+self.dim = dim
+self.curdim = 1
+
+def __iter__(self):
+return self
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+shape = rand_shape_nd(self.curdim)
+print(shape)
+x = nd.random.normal(shape=shape)
+self.curdim += 1
+return x
+
+
+@with_seed()
+def test_dense_backward():
 
 Review comment:
   Since your change in C++ is directly on the FullyConnected operator, wouldn't it make sense to test it directly?




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-06-26 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r297778687
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -129,6 +131,44 @@ def check_second_order_unary(x, op, grad_grad_op):
 # Validate the gradients.
 assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
 
+class RandomShapes(object):
+def __init__(self, dim):
+self.dim = dim
+self.curdim = 1
+
+def __iter__(self):
+return self
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+shape = rand_shape_nd(self.curdim)
+print(shape)
+x = nd.random.normal(shape=shape)
+self.curdim += 1
+return x
+
+
+@with_seed()
+def test_dense_backward():
+import mxnet.autograd as ag
+import mxnet.ndarray as nd
+for x in RandomShapes(5):
+net = gluon.nn.Sequential()
+with net.name_scope():
+#net.add(gluon.nn.Dense(1, in_units=x.shape[1]))
+net.add(gluon.nn.Dense(1))
+net.initialize(mxnet.initializer.Constant(.5))
+x.attach_grad()
+with ag.record():
+y = net.forward(x)
+x_grad = ag.grad(y, x, create_graph=True, retain_graph=True)[0]
 
 Review comment:
   Please explicitly set the argument keywords, like the other tests do, for clarity.
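   
   (I.e., in the style the later revisions of this test already use:)
   
   x_grad = ag.grad(heads=y, variables=x, create_graph=True, retain_graph=True)[0]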




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-06-26 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r297778313
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -129,6 +131,44 @@ def check_second_order_unary(x, op, grad_grad_op):
 # Validate the gradients.
 assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
 
+class RandomShapes(object):
+def __init__(self, dim):
+self.dim = dim
+self.curdim = 1
+
+def __iter__(self):
+return self
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+shape = rand_shape_nd(self.curdim)
+print(shape)
+x = nd.random.normal(shape=shape)
+self.curdim += 1
+return x
+
+
+@with_seed()
+def test_dense_backward():
 
 Review comment:
   Can we add a test for the FullyConnected operator itself?




[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-06-26 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r297776313
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -129,6 +131,44 @@ def check_second_order_unary(x, op, grad_grad_op):
 # Validate the gradients.
 assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
 
+class RandomShapes(object):
+def __init__(self, dim):
+self.dim = dim
+self.curdim = 1
+
+def __iter__(self):
+return self
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+shape = rand_shape_nd(self.curdim)
+print(shape)
+x = nd.random.normal(shape=shape)
+self.curdim += 1
+return x
+
+
+@with_seed()
+def test_dense_backward():
+import mxnet.autograd as ag
 
 Review comment:
   Move these imports to file head





[GitHub] [incubator-mxnet] apeforest commented on a change in pull request #14779: Fully connected, higher order grad

2019-06-26 Thread GitBox
apeforest commented on a change in pull request #14779: Fully connected, higher 
order grad
URL: https://github.com/apache/incubator-mxnet/pull/14779#discussion_r297775674
 
 

 ##
 File path: tests/python/unittest/test_higher_order_grad.py
 ##
 @@ -129,6 +131,44 @@ def check_second_order_unary(x, op, grad_grad_op):
 # Validate the gradients.
 assert_almost_equal(expected_grad_grad, x.grad.asnumpy())
 
+class RandomShapes(object):
+def __init__(self, dim):
+self.dim = dim
+self.curdim = 1
+
+def __iter__(self):
+return self
+
+def next(self):
+return self.__next__()
+
+def __next__(self):
+if self.curdim > self.dim:
+raise StopIteration
+shape = rand_shape_nd(self.curdim)
+print(shape)
+x = nd.random.normal(shape=shape)
+self.curdim += 1
+return x
+
+
+@with_seed()
+def test_dense_backward():
+import mxnet.autograd as ag
+import mxnet.ndarray as nd
+for x in RandomShapes(5):
+net = gluon.nn.Sequential()
+with net.name_scope():
+#net.add(gluon.nn.Dense(1, in_units=x.shape[1]))
 
 Review comment:
   Please remove the commented-out line.

