ueshin commented on a change in pull request #28957:
URL: https://github.com/apache/spark/pull/28957#discussion_r448615071



##########
File path: python/pyspark/mllib/clustering.py
##########
@@ -17,20 +17,12 @@
 
 import sys
 import array as pyarray
-import warnings
-
-if sys.version > '3':
-    xrange = range
-    basestring = str
-
 from math import exp, log
+from collections import namedtuple
 
 from numpy import array, random, tile
 
-from collections import namedtuple
-
-from pyspark import SparkContext, since
-from pyspark.rdd import RDD, ignore_unicode_prefix
+from pyspark import SparkContext, since, RDD

Review comment:
       Why was this changed to import `RDD` from `pyspark` instead of from `pyspark.rdd`?

##########
File path: python/pyspark/sql/tests/test_context.py
##########
@@ -19,11 +19,7 @@
 import sys
 import tempfile
 import unittest
-try:
-    from importlib import reload  # Python 3.4+ only.
-except ImportError:
-    # Otherwise, we will stick to Python 2's built-in reload.
-    pass
+from importlib import reload  # Python 3.4+ only.

Review comment:
       Can we remove the comment `# Python 3.4+ only.` now?

##########
File path: python/pyspark/tests/test_readwrite.py
##########
@@ -38,7 +38,6 @@ def tearDownClass(cls):
         ReusedPySparkTestCase.tearDownClass()
         shutil.rmtree(cls.tempdir.name)
 
-    @unittest.skipIf(sys.version >= "3", "serialize array of byte")
     def test_sequencefiles(self):

Review comment:
       Should we remove this test?

##########
File path: python/pyspark/mllib/stat/_statistics.py
##########
@@ -16,10 +16,8 @@
 #
 
 import sys
-if sys.version >= '3':
-    basestring = str
 
-from pyspark.rdd import RDD, ignore_unicode_prefix
+from pyspark import RDD

Review comment:
       ditto.

##########
File path: python/pyspark/sql/tests/test_types.py
##########
@@ -540,22 +540,6 @@ def test_infer_long_type(self):
         self.assertEqual(_infer_type(2**61), LongType())
         self.assertEqual(_infer_type(2**71), LongType())
 
-    @unittest.skipIf(sys.version < "3", "only Python 3 infers bytes as binary 
type")
-    def test_infer_binary_type(self):

Review comment:
       Should we keep this test?

##########
File path: python/pyspark/sql/tests/test_types.py
##########
@@ -972,19 +945,6 @@ def __init__(self, **kwargs):
             with self.assertRaises(exp, msg=msg):
                 _make_type_verifier(data_type, nullable=False)(obj)
 
-    @unittest.skipIf(sys.version_info[:2] < (3, 6), "Create Row without 
sorting fields")
-    def test_row_without_field_sorting(self):

Review comment:
       ditto.

##########
File path: python/pyspark/mllib/feature.py
##########
@@ -18,21 +18,14 @@
 """
 Python package for feature in MLlib.
 """
-from __future__ import absolute_import
-
 import sys
 import warnings
-if sys.version >= '3':
-    basestring = str
-    unicode = str
-
 from py4j.protocol import Py4JJavaError
 
-from pyspark import since
-from pyspark.rdd import RDD, ignore_unicode_prefix
+from pyspark import since, RDD

Review comment:
       ditto.

##########
File path: python/pyspark/tests/test_readwrite.py
##########
@@ -249,7 +248,6 @@ def setUp(self):
     def tearDown(self):
         shutil.rmtree(self.tempdir.name, ignore_errors=True)
 
-    @unittest.skipIf(sys.version >= "3", "serialize array of byte")
     def test_sequencefiles(self):

Review comment:
       ditto.

##########
File path: python/pyspark/tests/test_readwrite.py
##########
@@ -361,7 +359,6 @@ def test_newhadoop(self):
             conf=input_conf).collect())
         self.assertEqual(new_dataset, data)
 
-    @unittest.skipIf(sys.version >= "3", "serialize of array")

Review comment:
       ditto.




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to