szha closed pull request #11648: Modernize Python 2 code to get ready for 
Python 3
URL: https://github.com/apache/incubator-mxnet/pull/11648
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/benchmark/python/sparse/sparse_op.py 
b/benchmark/python/sparse/sparse_op.py
index 95ea7d54e2b..ffa6de6d762 100644
--- a/benchmark/python/sparse/sparse_op.py
+++ b/benchmark/python/sparse/sparse_op.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -96,9 +97,9 @@ def get_iter(path, data_shape, batch_size):
         os.system("head -n 2000 %r > %r" % (path, mini_path))
         assert os.path.exists(mini_path)
 
-    print "Running Benchmarking on %r data" % data_dict['data_mini']
+    print("Running Benchmarking on %r data" % data_dict['data_mini'])
     for batch_size in data_dict['batch_size']:  # iterator through different 
batch size of choice
-        print "batch_size is %d" % batch_size
+        print("batch_size is %d" % batch_size)
         # model
         data_shape = (k, )
         train_iter = get_iter(mini_path, data_shape, batch_size)
diff --git a/cpp-package/scripts/lint.py b/cpp-package/scripts/lint.py
index f9f284ffc00..f6e549878a4 100644
--- a/cpp-package/scripts/lint.py
+++ b/cpp-package/scripts/lint.py
@@ -21,6 +21,7 @@
 """Lint helper to generate lint summary of source.
 Copyright by Contributors
 """
+from __future__ import print_function
 import codecs
 import sys
 import re
@@ -91,7 +92,7 @@ def process_python(self, path):
         (pylint_stdout, pylint_stderr) = epylint.py_run(
             ' '.join([str(path)] + self.pylint_opts), return_std=True)
         emap = {}
-        print pylint_stderr.read()
+        print(pylint_stderr.read())
         for line in pylint_stdout:
             sys.stderr.write(line)
             key = line.split(':')[-1].split('(')[0].strip()
diff --git a/example/kaggle-ndsb1/train_dsb.py 
b/example/kaggle-ndsb1/train_dsb.py
index 5cec0f6d4fd..dc2084a668d 100644
--- a/example/kaggle-ndsb1/train_dsb.py
+++ b/example/kaggle-ndsb1/train_dsb.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -98,4 +99,4 @@ def get_iterator(args, kv):
 # train
 tic=time.time()
 train_model.fit(args, net, get_iterator)
-print "time elapsed to train model", time.time()-tic
+print("time elapsed to train model", time.time()-tic)
diff --git a/example/python-howto/multiple_outputs.py 
b/example/python-howto/multiple_outputs.py
index 43b4538d1d7..7c1ddd22055 100644
--- a/example/python-howto/multiple_outputs.py
+++ b/example/python-howto/multiple_outputs.py
@@ -19,6 +19,7 @@
 
 This example shows how to create a multiple output configuration.
 """
+from __future__ import print_function
 import mxnet as mx
 
 net = mx.symbol.Variable('data')
@@ -28,7 +29,7 @@
 out = mx.symbol.SoftmaxOutput(data=net, name='softmax')
 # group fc1 and out together
 group = mx.symbol.Group([fc1, out])
-print group.list_outputs()
+print(group.list_outputs())
 
 # You can go ahead and bind on the group
 # executor = group.simple_bind(data=data_shape)
diff --git a/example/speech_recognition/singleton.py 
b/example/speech_recognition/singleton.py
index aa9531b9443..1d68edfb3ca 100644
--- a/example/speech_recognition/singleton.py
+++ b/example/speech_recognition/singleton.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -30,7 +31,7 @@ def getInstance(self):
             return self._instance
 
     def __new__(class_, *args, **kwargs):
-        print "__new__"
+        print("__new__")
         class_.instances[class_] = super(Singleton, class_).__new__(class_, 
*args, **kwargs)
         return class_.instances[class_]
 
diff --git a/tests/nightly/TestDoc/doc_spell_checker.py 
b/tests/nightly/TestDoc/doc_spell_checker.py
index a33807e3d57..88baab85f8d 100644
--- a/tests/nightly/TestDoc/doc_spell_checker.py
+++ b/tests/nightly/TestDoc/doc_spell_checker.py
@@ -21,6 +21,7 @@
     An exclude list is provided to avoid checking specific word,
     such as NDArray.
 """
+from __future__ import print_function
 
 import os
 import sys
@@ -171,8 +172,8 @@ def check_grammar(self, file_name):
             spell_check_res = DOC_PARSER.get_res()[0]
             grammar_check_res = DOC_PARSER.get_res()[1]
             if len(spell_check_res) > 0:
-                print "%s has typo:" % os.path.join(root, read_file)
-                print "%s\n" % spell_check_res
+                print("%s has typo:" % os.path.join(root, read_file))
+                print("%s\n" % spell_check_res)
                 ALL_CLEAR = False
     if ALL_CLEAR:
-        print "No typo is found."
+        print("No typo is found.")
diff --git a/tests/nightly/compilation_warnings/process_output.py 
b/tests/nightly/compilation_warnings/process_output.py
index d7ed3297e20..3d2dd91c5ba 100755
--- a/tests/nightly/compilation_warnings/process_output.py
+++ b/tests/nightly/compilation_warnings/process_output.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -40,17 +41,17 @@ def generate_stats(warnings):
 
 def print_summary(time, warnings):
     sorted_warnings, total_count = generate_stats(warnings)
-    print "START - Compilation warnings count"
-    print total_count, 'warnings'
-    print "END - Compilation warnings count"
-    print 'START - Compilation warnings summary'
-    print 'Time taken to compile:', time, 's'
-    print 'Total number of warnings:', total_count, '\n'
+    print("START - Compilation warnings count")
+    print(total_count, 'warnings')
+    print("END - Compilation warnings count")
+    print('START - Compilation warnings summary')
+    print('Time taken to compile:', time, 's')
+    print('Total number of warnings:', total_count, '\n')
     if total_count>0:
-        print 'Below is the list of unique warnings and the number of 
occurrences of that warning'
+        print('Below is the list of unique warnings and the number of 
occurrences of that warning')
         for warning, count in sorted_warnings:
-            print count, ': ', warning
-    print 'END - Compilation warnings summary'
+            print(count, ': ', warning)
+    print('END - Compilation warnings summary')
 
 c_output = open(sys.argv[1],'r')
 time, warnings = process_output(c_output.read())
diff --git a/tests/nightly/multi_lenet.py b/tests/nightly/multi_lenet.py
index 687588bacbe..9475c72266a 100644
--- a/tests/nightly/multi_lenet.py
+++ b/tests/nightly/multi_lenet.py
@@ -29,6 +29,7 @@
 # are performed, which can be controlled by either increasing the batch size or
 # decreasing the number of epochs
 
+from __future__ import print_function
 import os, sys
 curr_path = os.path.abspath(os.path.dirname(__file__))
 sys.path.append(os.path.join(curr_path, "../../example/image-classification"))
@@ -89,7 +90,7 @@ def get_XY(data_iter):
 def test_data(data_iter):
     # test whether we will get the identical data each time
     X, Y = get_XY(data_iter)
-    print X.shape, Y.shape
+    print(X.shape, Y.shape)
     for i in range(4):
         A, B = get_XY(data_iter)
         assert(A.shape == X.shape)
diff --git a/tools/accnn/utils.py b/tools/accnn/utils.py
index 2795f8558f7..b19521f9e86 100644
--- a/tools/accnn/utils.py
+++ b/tools/accnn/utils.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -62,7 +63,7 @@ def sym_factory(node, data):
     for k, v in node['param'].items():
       try:
         params[k] = ast.literal_eval(v)
-      except ValueError, e:
+      except ValueError as e:
         params[k] = v
   return getattr(mx.symbol, node['op'])(data=data, name=name, **params)
 
@@ -83,8 +84,8 @@ def replace_conv_layer(layer_name, old_model, sym_handle, 
arg_handle):
                                   if not 
input_node['name'].startswith(node['name'])]
       try:
         data=sym_dict[datas[0]]
-      except Exception, e:
-        print 'can not find symbol %s'%(datas[0])
+      except Exception as e:
+        print('can not find symbol %s'%(datas[0]))
         raise e
       if node['name'] == layer_name:
         sym = sym_handle(data, node)
@@ -101,7 +102,7 @@ def replace_conv_layer(layer_name, old_model, sym_handle, 
arg_handle):
     arg_shape_dic = dict(zip(arg_names, arg_shapes))
     try:
       arg_handle(arg_shape_dic, arg_params)
-    except Exception, e:
+    except Exception as e:
       raise Exception('Exception in arg_handle')
 
   new_model = mx.model.FeedForward(
diff --git a/tools/coreml/converter/_mxnet_converter.py 
b/tools/coreml/converter/_mxnet_converter.py
index a9ea0f4d7ad..2d91eb86a96 100644
--- a/tools/coreml/converter/_mxnet_converter.py
+++ b/tools/coreml/converter/_mxnet_converter.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -68,9 +69,9 @@ def check_error(model, path, shapes, output = 
'softmax_output', verbose = True):
     error = _np.linalg.norm(e_out - mx_out)
 
     if verbose:
-        print "First few predictions from CoreML : %s" % e_out[0:10]
-        print "First few predictions from MXNet  : %s" % e_out[0:10]
-        print "L2 Error on random data %s" % error
+        print("First few predictions from CoreML : %s" % e_out[0:10])
+        print("First few predictions from MXNet  : %s" % e_out[0:10])
+        print("L2 Error on random data %s" % error)
     return error
 
 def _set_input_output_layers(builder, input_names, output_names):
@@ -228,4 +229,4 @@ def remove_batch(dim):
         builder.set_class_labels(class_labels = labels)
 
     # Return the model
-    return _coremltools.models.MLModel(builder.spec)
\ No newline at end of file
+    return _coremltools.models.MLModel(builder.spec)
diff --git a/tools/coreml/test/test_mxnet_image.py 
b/tools/coreml/test/test_mxnet_image.py
index 2bbf7b1e264..e373caeba93 100644
--- a/tools/coreml/test/test_mxnet_image.py
+++ b/tools/coreml/test/test_mxnet_image.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -107,27 +108,27 @@ def _test_image_prediction(self, model_name, epoch, 
label_name):
                 num_batch += 1
             if (num_batch == 5): break # we only use a subset of the batches.
 
-        print "MXNet acc %s" % np.mean(mxnet_acc)
-        print "Coreml acc %s" % np.mean(coreml_acc)
-        print "MXNet top 5 acc %s" % np.mean(mxnet_top_5_acc)
-        print "Coreml top 5 acc %s" % np.mean(coreml_top_5_acc)
+        print("MXNet acc %s" % np.mean(mxnet_acc))
+        print("Coreml acc %s" % np.mean(coreml_acc))
+        print("MXNet top 5 acc %s" % np.mean(mxnet_top_5_acc))
+        print("Coreml top 5 acc %s" % np.mean(coreml_top_5_acc))
         self.assertAlmostEqual(np.mean(mxnet_acc), np.mean(coreml_acc), 
delta=1e-4)
         self.assertAlmostEqual(np.mean(mxnet_top_5_acc), 
np.mean(coreml_top_5_acc), delta=1e-4)
 
     def test_squeezenet(self):
-        print "Testing Image Classification with Squeezenet"
+        print("Testing Image Classification with Squeezenet")
         self._test_image_prediction(model_name='squeezenet_v1.1', epoch=0, 
label_name='prob_label')
 
     def test_inception_with_batch_normalization(self):
-        print "Testing Image Classification with Inception/BatchNorm"
+        print("Testing Image Classification with Inception/BatchNorm")
         self._test_image_prediction(model_name='Inception-BN', epoch=126, 
label_name='softmax_label')
 
     def test_resnet18(self):
-        print "Testing Image Classification with ResNet18"
+        print("Testing Image Classification with ResNet18")
         self._test_image_prediction(model_name='resnet-18', epoch=0, 
label_name='softmax_label')
 
     def test_vgg16(self):
-        print "Testing Image Classification with vgg16"
+        print("Testing Image Classification with vgg16")
         self._test_image_prediction(model_name='vgg16', epoch=0, 
label_name='prob_label')
 
 
diff --git a/tools/coreml/test/test_mxnet_models.py 
b/tools/coreml/test/test_mxnet_models.py
index 1732fb833c5..36ac0ece364 100644
--- a/tools/coreml/test/test_mxnet_models.py
+++ b/tools/coreml/test/test_mxnet_models.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -116,7 +117,7 @@ def _test_model(self, model_name, epoch_num, 
input_shape=(1, 3, 224, 224), files
             self.assertEqual(len(mxnet_pred), len(coreml_pred))
             div.append(_kl_divergence(mxnet_pred, coreml_pred))
 
-        print "Average KL divergence is % s" % np.mean(div)
+        print("Average KL divergence is % s" % np.mean(div))
         self.assertTrue(np.mean(div) < 1e-4)
 
     def test_pred_inception_bn(self):
diff --git a/tools/ipynb2md.py b/tools/ipynb2md.py
index 227174c25ee..3708e81b51a 100755
--- a/tools/ipynb2md.py
+++ b/tools/ipynb2md.py
@@ -24,6 +24,7 @@
 
 It is heavily adapted from 
https://gist.github.com/decabyte/0ed87372774cf5d34d7e
 """
+from __future__ import print_function
 
 import sys
 import io
@@ -61,7 +62,7 @@ def main():
     old_ipynb = args.notebook[0]
     new_ipynb = 'tmp.ipynb'
     md_file = args.output
-    print md_file
+    print(md_file)
     if not md_file:
         md_file = os.path.splitext(old_ipynb)[0] + '.md'
 
diff --git a/tools/kill-mxnet.py b/tools/kill-mxnet.py
index 2a4a4303400..321b2b82af2 100644
--- a/tools/kill-mxnet.py
+++ b/tools/kill-mxnet.py
@@ -18,11 +18,12 @@
 # under the License.
 
 
+from __future__ import print_function
 import os, sys
 import subprocess
 
 if len(sys.argv) != 4:
-  print "usage: %s <hostfile> <user> <prog>" % sys.argv[0]
+  print("usage: %s <hostfile> <user> <prog>" % sys.argv[0])
   sys.exit(1)
 
 host_file = sys.argv[1]
@@ -36,19 +37,19 @@
     "awk '{if($1==\"" + user + "\")print $2;}' | "
     "xargs kill -9"
     )
-print kill_cmd
+print(kill_cmd)
 
 # Kill program on remote machines
 with open(host_file, "r") as f:
   for host in f:
     if ':' in host:
       host = host[:host.index(':')]
-    print host
+    print(host)
     subprocess.Popen(["ssh", "-oStrictHostKeyChecking=no", "%s" % host, 
kill_cmd],
             shell=False,
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE)
-    print "Done killing"
+    print("Done killing")
 
 # Kill program on local machine
 os.system(kill_cmd)


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

Reply via email to