SINGA-350 Error from python3 test

Add encode() back in optimizer.py so that the optimizer type strings and parameter names are passed as bytes to the underlying singa wrapper under Python 3.
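
For context, a minimal standalone sketch (plain Python, not SINGA code) of the behaviour this patch works around: on Python 3 a literal such as 'SGD' is unicode text (str), and the wrapped calls patched below appear to expect bytes, so .encode() has to be called explicitly; on Python 2 string literals were already byte strings, which is why the calls previously worked without it. The create_optimizer helper here is purely hypothetical and only stands in for calls such as singa.CreateOptimizer and opt.Apply.

    # Standalone illustration of the str/bytes distinction; does not import SINGA.
    name = 'SGD'
    print(type(name))           # <class 'str'> on Python 3 (unicode text)
    print(type(name.encode()))  # <class 'bytes'> (UTF-8 encoded by default)

    # Hypothetical stand-in for a binding that only accepts bytes:
    def create_optimizer(kind):
        assert isinstance(kind, bytes), 'expects bytes, not str'
        return kind.upper()

    create_optimizer('SGD'.encode())   # ok
    # create_optimizer('SGD')          # would fail the assertion on Python 3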


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/2d255613
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/2d255613
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/2d255613

Branch: refs/heads/master
Commit: 2d2556135bc04282f382ad3fc5071fc51a6aad28
Parents: 72b1a69
Author: Wang Wei <[email protected]>
Authored: Wed May 2 21:39:28 2018 +0800
Committer: Wang Wei <[email protected]>
Committed: Wed May 2 21:39:28 2018 +0800

----------------------------------------------------------------------
 python/singa/optimizer.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/2d255613/python/singa/optimizer.py
----------------------------------------------------------------------
diff --git a/python/singa/optimizer.py b/python/singa/optimizer.py
index 5cb02e5..975641a 100644
--- a/python/singa/optimizer.py
+++ b/python/singa/optimizer.py
@@ -206,7 +206,7 @@ class SGD(Optimizer):
         if self.momentum is not None:
             conf.momentum = self.momentum
         conf.type = 'sgd'
-        self.opt = singa.CreateOptimizer('SGD')
+        self.opt = singa.CreateOptimizer('SGD'.encode())
         self.opt.Setup(conf.SerializeToString())
 
     def apply_with_lr(self, epoch, lr, grad, value, name, step=-1):
@@ -216,7 +216,7 @@ class SGD(Optimizer):
             epoch, value, grad, name, step)
         if name is not None and name in self.learning_rate_multiplier:
             lr = lr * self.learning_rate_multiplier[name]
-        self.opt.Apply(epoch, lr, name, grad.data,
+        self.opt.Apply(epoch, lr, name.encode(), grad.data,
                        value.data)
         return value
 
@@ -235,7 +235,7 @@ class Nesterov(Optimizer):
         if self.momentum is not None:
             conf.momentum = momentum
         conf.type = 'nesterov'
-        self.opt = singa.CreateOptimizer('Nesterov')
+        self.opt = singa.CreateOptimizer('Nesterov'.encode())
         self.opt.Setup(conf.SerializeToString())
 
     def apply_with_lr(self, epoch, lr, grad, value, name, step=-1):
@@ -246,7 +246,7 @@ class Nesterov(Optimizer):
             epoch, value, grad, name, step)
         if name is not None and name in self.learning_rate_multiplier:
             lr = lr * self.learning_rate_multiplier[name]
-        self.opt.Apply(epoch, lr, name, grad.data,
+        self.opt.Apply(epoch, lr, name.encode(), grad.data,
                        value.data)
         return value
 
@@ -268,7 +268,7 @@ class RMSProp(Optimizer):
         conf = model_pb2.OptimizerConf()
         conf.rho = rho
         conf.delta = epsilon
-        self.opt = singa.CreateOptimizer('RMSProp')
+        self.opt = singa.CreateOptimizer('RMSProp'.encode())
         self.opt.Setup(conf.SerializeToString())
 
     def apply_with_lr(self, epoch, lr, grad, value, name, step=-1):
@@ -279,7 +279,7 @@ class RMSProp(Optimizer):
             epoch, value, grad, name, step)
         if name is not None and name in self.learning_rate_multiplier:
             lr = lr * self.learning_rate_multiplier[name]
-        self.opt.Apply(step, lr,  name, grad.data,
+        self.opt.Apply(step, lr,  name.encode(), grad.data,
                        value.data)
         return value
 
@@ -300,7 +300,7 @@ class AdaGrad(Optimizer):
         conf = model_pb2.OptimizerConf()
         conf.delta = epsilon
         conf.type = 'adagrad'
-        self.opt = singa.CreateOptimizer('AdaGrad')
+        self.opt = singa.CreateOptimizer('AdaGrad'.encode())
         self.opt.Setup(conf.SerializeToString())
 
     def apply_with_lr(self, epoch, lr, grad, value, name, step=-1):
@@ -311,7 +311,7 @@ class AdaGrad(Optimizer):
             epoch, value, grad, name, step)
         if name is not None and name in self.learning_rate_multiplier:
             lr = lr * self.learning_rate_multiplier[name]
-        self.opt.Apply(epoch, lr,  name, grad.data,
+        self.opt.Apply(epoch, lr,  name.encode(), grad.data,
                        value.data)
         return value
 
