Github user HyukjinKwon commented on a diff in the pull request:
https://github.com/apache/spark/pull/21211#discussion_r185397426
--- Diff: python/pyspark/util.py ---
@@ -62,24 +62,28 @@ def _get_argspec(f):
return argspec
-def majorMinorVersion(version):
+class VersionUtils(object):
"""
- Get major and minor version numbers for given Spark version string.
-
- >>> version = "2.4.0"
- >>> majorMinorVersion(version)
- (2, 4)
+ Provides utility method to determine Spark versions with given input
string.
+ """
+ @staticmethod
+ def majorMinorVersion(version):
+ """
+ Get major and minor version numbers for given Spark version string.
- >>> version = "abc"
- >>> majorMinorVersion(version) is None
- True
+ >>> version = "2.4.0"
+ >>> majorMinorVersion(version)
+ (2, 4)
+ >>> version = "2.3.0-SNAPSHOT"
+ >>> majorMinorVersion(version)
+ (2, 3)
- """
- m = re.search('^(\d+)\.(\d+)(\..*)?$', version)
- if m is None:
- return None
- else:
- return (int(m.group(1)), int(m.group(2)))
+ """
+ m = re.search('^(\d+)\.(\d+)(\..*)?$', version)
+ if m is None:
--- End diff --
I'd do `if m is not None` to match the order with the Scala side.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]