SINGA-371 Implement functional operations in C++ for autograd - add test case for conv2d operation.
Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/5c8504a9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/5c8504a9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/5c8504a9

Branch: refs/heads/master
Commit: 5c8504a94c66af3459a515f654fa01f5099dc790
Parents: 78e1fc2
Author: xuewanqi <[email protected]>
Authored: Thu Jun 21 15:36:49 2018 +0000
Committer: xuewanqi <[email protected]>
Committed: Fri Jun 22 02:37:17 2018 +0000

----------------------------------------------------------------------
 python/singa/autograd.py      |  2 +-
 test/python/test_operation.py | 48 ++++++++++++++++++++++++++++++++++++++
 2 files changed, 49 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/5c8504a9/python/singa/autograd.py
----------------------------------------------------------------------
diff --git a/python/singa/autograd.py b/python/singa/autograd.py
index 7ba68f5..4f45bf1 100644
--- a/python/singa/autograd.py
+++ b/python/singa/autograd.py
@@ -672,7 +672,7 @@ class Conv2d_GPU(Operation):
         return singa.CudnnConvForward(xs[0], xs[1], xs[2], self.convhandle, self.cudnnconvhandle)
 
     def backward(self, dy):
-        assert training is True and hasattr(self, 'x'), 'Please set \'trainging\' as True before do BP. '
+        assert training is True and hasattr(self, 'x'), 'Please set \'training\' as True before do BP. '
 
         # todo check device?
         dy.ToDevice(self.dev)

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/5c8504a9/test/python/test_operation.py
----------------------------------------------------------------------
diff --git a/test/python/test_operation.py b/test/python/test_operation.py
new file mode 100644
index 0000000..295b2d2
--- /dev/null
+++ b/test/python/test_operation.py
@@ -0,0 +1,48 @@
+import unittest
+from builtins import str
+
+from singa import tensor
+from singa import singa_wrap as singa
+from singa import device
+from singa import autograd
+
+autograd.training = True
+
+CTensor = singa.Tensor
+
+dev = device.create_cuda_gpu()
+
+gpu_input_tensor = tensor.Tensor(shape=(2, 3, 3, 3), device=dev)
+gpu_input_tensor.gaussian(0.0, 1.0)
+
+dy = CTensor([2, 1, 2, 2])
+singa.Gaussian(0.0, 1.0, dy)
+dy.ToDevice(dev)
+
+conv = autograd.Conv2d_GPU(3, 1, 2)  # (in_channels, out_channels, kernel_size)
+
+
+def _tuple_to_string(t):
+    lt = [str(x) for x in t]
+    return '(' + ', '.join(lt) + ')'
+
+
+class TestPythonOperation(unittest.TestCase):
+
+    def check_shape(self, actual, expect):
+        self.assertEqual(actual, expect, 'shape mismatch, actual shape is %s'
+                         ' exepcted is %s' % (_tuple_to_string(actual),
+                                              _tuple_to_string(expect))
+                         )
+
+    def test(self):
+        y = conv(gpu_input_tensor)  # PyTensor
+        dx, dW, db = conv.backward(dy)  # CTensor
+
+        self.check_shape(y.shape, (2, 1, 2, 2))
+        self.check_shape(dx.shape(), (2, 3, 3, 3))
+        self.check_shape(dW.shape(), (1, 3, 2, 2))
+        self.check_shape(db.shape(), (1,))
+
+if __name__ == '__main__':
+    unittest.main()
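
For reference, the shapes asserted by the new test follow from the standard convolution output-size formula. The sketch below is not part of the commit; it is a minimal plain-Python reproduction of that arithmetic, assuming Conv2d_GPU(3, 1, 2) uses stride 1, no padding, and no dilation (the helper name conv2d_out_dim is made up for illustration).

    def conv2d_out_dim(in_dim, kernel, stride=1, padding=0, dilation=1):
        # Standard convolution output-size formula.
        return (in_dim + 2 * padding - dilation * (kernel - 1) - 1) // stride + 1


    batch, in_channels, height, width = 2, 3, 3, 3   # shape of gpu_input_tensor
    out_channels, kernel_size = 1, 2                 # Conv2d_GPU(3, 1, 2)

    out_h = conv2d_out_dim(height, kernel_size)      # (3 - 2) // 1 + 1 = 2
    out_w = conv2d_out_dim(width, kernel_size)       # 2

    assert (batch, out_channels, out_h, out_w) == (2, 1, 2, 2)                     # y.shape
    assert (batch, in_channels, height, width) == (2, 3, 3, 3)                     # dx.shape()
    assert (out_channels, in_channels, kernel_size, kernel_size) == (1, 3, 2, 2)   # dW.shape()
    assert (out_channels,) == (1,)                                                 # db.shape()

Note that the test itself calls device.create_cuda_gpu(), so running test/python/test_operation.py requires a CUDA-enabled SINGA build.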
