Github user HyukjinKwon commented on a diff in the pull request:
https://github.com/apache/spark/pull/21211#discussion_r185397304
--- Diff: python/pyspark/util.py ---
@@ -62,24 +62,28 @@ def _get_argspec(f):
return argspec
-def majorMinorVersion(version):
+class VersionUtils(object):
"""
- Get major and minor version numbers for given Spark version string.
-
- >>> version = "2.4.0"
- >>> majorMinorVersion(version)
- (2, 4)
+ Provides utility method to determine Spark versions with given input
string.
+ """
+ @staticmethod
+ def majorMinorVersion(version):
+ """
+ Get major and minor version numbers for given Spark version string.
- >>> version = "abc"
- >>> majorMinorVersion(version) is None
- True
+ >>> version = "2.4.0"
+ >>> majorMinorVersion(version)
+ (2, 4)
+ >>> version = "2.3.0-SNAPSHOT"
+ >>> majorMinorVersion(version)
+ (2, 3)
- """
- m = re.search('^(\d+)\.(\d+)(\..*)?$', version)
- if m is None:
- return None
- else:
- return (int(m.group(1)), int(m.group(2)))
+ """
+ m = re.search('^(\d+)\.(\d+)(\..*)?$', version)
+ if m is None:
+ raise ValueError("invalid version string: " + version)
--- End diff --
Shall we match the message with the Scala side?
```
throw new IllegalArgumentException(s"Spark tried to parse
'$sparkVersion' as a Spark" +
s" version string, but it could not find the major and minor
version numbers.")
```
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]