Author: bugman
Date: Tue Nov 11 10:12:48 2014
New Revision: 26508
URL: http://svn.gna.org/viewcvs/relax?rev=26508&view=rev
Log:
Python 3 fixes for all print statements in the extern.numdifftools package.
The print statements have been manually converted into print() functions.
Modified:
trunk/extern/numdifftools/speed_comparison/run_benchmarks.py
trunk/extern/numdifftools/speed_comparison/use_adolc.py
trunk/extern/numdifftools/speed_comparison/use_algopy.py
trunk/extern/numdifftools/speed_comparison/use_funcdesigner.py
trunk/extern/numdifftools/speed_comparison/use_theano.py
trunk/extern/numdifftools/test/testnumdifftools.py
Modified: trunk/extern/numdifftools/speed_comparison/run_benchmarks.py
URL:
http://svn.gna.org/viewcvs/relax/trunk/extern/numdifftools/speed_comparison/run_benchmarks.py?rev=26508&r1=26507&r2=26508&view=diff
==============================================================================
--- trunk/extern/numdifftools/speed_comparison/run_benchmarks.py
(original)
+++ trunk/extern/numdifftools/speed_comparison/run_benchmarks.py Tue Nov
11 10:12:48 2014
@@ -20,7 +20,7 @@
results_gradient_list = []
for N in gradient_N_list:
- print 'N=', N
+ print('N=', N)
results_gradient = np.zeros((4, 3))
# algopy, UTPS variant
f = benchmark1.F(N)
@@ -55,17 +55,17 @@
results_gradient_list.append(results_gradient)
results_gradients = np.array(results_gradient_list)+1e-18
-print 'results_gradients=\n', results_gradients
+print('results_gradients=\n', results_gradients)
# HESSIAN COMPUTATION
# -------------------
-print 'starting hessian computation '
+print('starting hessian computation ')
results_hessian_list = []
hessian_N_list = [1, 2, 4, 8, 16, 32, 64]
# hessian_N_list = [2]
for N in hessian_N_list:
- print 'N=', N
+ print('N=', N)
results_hessian = np.zeros((4, 3))
f = benchmark1.F(N)
@@ -98,13 +98,13 @@
results_hessians = np.array(results_hessian_list)+1e-18
-print hessian_N_list
-print 'results_hessians=\n', results_hessians
+print(hessian_N_list)
+print('results_hessians=\n', results_hessians)
# PLOT RESULTS
-print results_gradients.shape
+print(results_gradients.shape)
import matplotlib.pyplot as pyplot
#import prettyplotting
Modified: trunk/extern/numdifftools/speed_comparison/use_adolc.py
URL:
http://svn.gna.org/viewcvs/relax/trunk/extern/numdifftools/speed_comparison/use_adolc.py?rev=26508&r1=26507&r2=26508&view=diff
==============================================================================
--- trunk/extern/numdifftools/speed_comparison/use_adolc.py (original)
+++ trunk/extern/numdifftools/speed_comparison/use_adolc.py Tue Nov 11
10:12:48 2014
@@ -14,7 +14,7 @@
cg = algopy.CGraph()
x = np.array([algopy.Function(x[i]) for i in range(len(x))])
y = f(x)
- # print 'y=',y
+ # print('y=',y)
cg.trace_off()
cg.independentFunctionList = x
cg.dependentFunctionList = [y]
Modified: trunk/extern/numdifftools/speed_comparison/use_algopy.py
URL:
http://svn.gna.org/viewcvs/relax/trunk/extern/numdifftools/speed_comparison/use_algopy.py?rev=26508&r1=26507&r2=26508&view=diff
==============================================================================
--- trunk/extern/numdifftools/speed_comparison/use_algopy.py (original)
+++ trunk/extern/numdifftools/speed_comparison/use_algopy.py Tue Nov 11
10:12:48 2014
@@ -9,7 +9,7 @@
cg = algopy.CGraph()
x = np.array([algopy.Function(x[i]) for i in range(len(x))])
y = f(x)
- # print 'y=',y
+ # print('y=',y)
cg.trace_off()
cg.independentFunctionList = x
cg.dependentFunctionList = [y]
Modified: trunk/extern/numdifftools/speed_comparison/use_funcdesigner.py
URL:
http://svn.gna.org/viewcvs/relax/trunk/extern/numdifftools/speed_comparison/use_funcdesigner.py?rev=26508&r1=26507&r2=26508&view=diff
==============================================================================
--- trunk/extern/numdifftools/speed_comparison/use_funcdesigner.py
(original)
+++ trunk/extern/numdifftools/speed_comparison/use_funcdesigner.py Tue Nov
11 10:12:48 2014
@@ -10,7 +10,7 @@
sx = FuncDesigner.oovar('x', size = len(x))
sy = 0.5*FuncDesigner.dot(sx*sx, FuncDesigner.dot(f.A, sx))
- print 'sy=', sy
+ print('sy=', sy)
# self.sA = sA
self.sx = sx
@@ -22,8 +22,8 @@
def gradient(self, x):
point = {self.sx:x}
- # print point
- # print self.sy
+ # print(point)
+ # print(self.sy)
retval = self.sy.D(point)[self.sx]
return retval
Modified: trunk/extern/numdifftools/speed_comparison/use_theano.py
URL:
http://svn.gna.org/viewcvs/relax/trunk/extern/numdifftools/speed_comparison/use_theano.py?rev=26508&r1=26507&r2=26508&view=diff
==============================================================================
--- trunk/extern/numdifftools/speed_comparison/use_theano.py (original)
+++ trunk/extern/numdifftools/speed_comparison/use_theano.py Tue Nov 11
10:12:48 2014
@@ -62,6 +62,6 @@
# end_time = time.time()
-# print end_time - start_time
+# print(end_time - start_time)
-# print g - numpy.dot(A,x)
+# print(g - numpy.dot(A,x))
Modified: trunk/extern/numdifftools/test/testnumdifftools.py
URL:
http://svn.gna.org/viewcvs/relax/trunk/extern/numdifftools/test/testnumdifftools.py?rev=26508&r1=26507&r2=26508&view=diff
==============================================================================
--- trunk/extern/numdifftools/test/testnumdifftools.py (original)
+++ trunk/extern/numdifftools/test/testnumdifftools.py Tue Nov 11 10:12:48 2014
@@ -38,9 +38,9 @@
x = np.linspace(0, 2. * np.pi, 13)
y = dsin(x)
small = np.abs(y - np.cos(x)) < dsin.error_estimate * 10
- # print np.abs(y - np.cos(x))
- # print dsin.error_estimate
- # print small
+ # print(np.abs(y - np.cos(x)))
+ # print(dsin.error_estimate)
+ # print(small)
self.assertTrue(np.all(small))
def test_high_order_derivative_sin(self):
_______________________________________________
relax (http://www.nmr-relax.com)
This is the relax-commits mailing list:
[email protected]
To unsubscribe from this list, get a password
reminder, or change your subscription options,
visit the list information page at
https://mail.gna.org/listinfo/relax-commits