Repository: incubator-singa

Updated Branches:
  refs/heads/master fefcab993 -> 6c39e4594
SINGA-315 - Reduce memory footprint by Python generator for parameter gradient

Fixed a bug in the net::train() function, which missed the gradient
argument when calling backward().


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/4e7db054
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/4e7db054
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/4e7db054

Branch: refs/heads/master
Commit: 4e7db0545c68d52ed51bc14f8a8448e3617ba86f
Parents: 5aecb47
Author: Wei Wang <[email protected]>
Authored: Thu Jun 1 11:04:16 2017 +0800
Committer: Wei Wang <[email protected]>
Committed: Thu Jun 1 11:04:16 2017 +0800

----------------------------------------------------------------------
 python/singa/net.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/4e7db054/python/singa/net.py
----------------------------------------------------------------------
diff --git a/python/singa/net.py b/python/singa/net.py
index e4c1de9..26b7463 100644
--- a/python/singa/net.py
+++ b/python/singa/net.py
@@ -181,13 +181,14 @@ class FeedForwardNet(object):
         '''
         out = self.forward(kTrain, x)
         l = self.loss.forward(kTrain, out, y)
+        g = self.loss.backward()
         m = None
         if self.metric is not None:
             m = self.metric.evaluate(out, y)
         grads = []  # store all gradient tensors; memory inefficient
-        for _, _, grad, _ in self.backward():
-            grads.append(grad)
-        return grads[::-1], l.l1(), m
+        for _, _, grad, _ in self.backward(g):
+            grads.extend(grad)
+        return grads[::-1], (l.l1(), m)
 
     def evaluate(self, x, y):
         '''Evaluate the loss and metric of the given data.
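For context, the sketch below illustrates the call pattern the patch
establishes: loss.backward() produces the gradient of the loss with
respect to the network output, and train() must pass that seed gradient
into net.backward() for backpropagation to have anything to propagate.
ToyLoss and ToyNet are hypothetical stand-ins for illustration only,
not SINGA's actual Loss and FeedForwardNet APIs.

# Hypothetical stand-ins (ToyLoss, ToyNet); not SINGA's real API.

class ToyLoss(object):
    '''Mimics a loss layer: forward caches state, backward returns the
    gradient of the loss w.r.t. the network output.'''

    def forward(self, out, y):
        self.diff = [o - t for o, t in zip(out, y)]
        return sum(d * d for d in self.diff) / len(self.diff)

    def backward(self):
        # This seed gradient is what train() previously failed to pass on.
        return [2.0 * d / len(self.diff) for d in self.diff]


class ToyNet(object):
    '''Mimics a backward() generator that consumes the seed gradient and
    yields (name, layer, param_grads, extra) tuples, one per layer.'''

    def backward(self, g):
        # Omitting g here was the bug this commit fixes; without the seed
        # gradient, backprop has nothing to propagate through the layers.
        yield 'dense1', None, [0.5 * gi for gi in g], None


net, loss = ToyNet(), ToyLoss()
out, y = [0.2, 0.9], [0.0, 1.0]
l = loss.forward(out, y)
g = loss.backward()                  # seed gradient, as in the patch
grads = []
for _, _, grad, _ in net.backward(g):
    grads.extend(grad)               # each yield carries a list of param grads
print(l, grads[::-1])

Note that grads.extend(grad) mirrors the patched code, which treats each
yielded grad as a collection of per-parameter gradients rather than a
single tensor.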
