zero323 commented on a change in pull request #34411:
URL: https://github.com/apache/spark/pull/34411#discussion_r741864096
##########
File path: python/pyspark/conf.py
##########
@@ -178,49 +193,49 @@ def setAll(self, pairs):
            self.set(k, v)
        return self

-    def get(self, key, defaultValue=None):
+    def get(self, key: str, defaultValue: Optional[str] = None) -> Optional[str]:
         """Get the configured value for some key, or return a default otherwise."""
-        if defaultValue is None:  # Py4J doesn't call the right get() if we pass None
+        if defaultValue is None:  # Py4J doesn't call the right get() if we pass None
             if self._jconf is not None:
                 if not self._jconf.contains(key):
                     return None
                 return self._jconf.get(key)
             else:
-                if key not in self._conf:
+                if key not in cast(Dict[str, str], self._conf):
                     return None
-                return self._conf[key]
+                return cast(Dict[str, str], self._conf)[key]
Review comment:
Unrelated note ‒ shouldn't we use `get` here?
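For illustration only ‒ not something in the PR as posted ‒ the non-JVM branch could rely on `dict.get`, roughly like this standalone sketch (the `lookup` helper and its `conf` parameter are hypothetical stand-ins for `self._conf`):
```python
from typing import Dict, Optional


def lookup(conf: Dict[str, str], key: str) -> Optional[str]:
    # dict.get already returns None when the key is missing, so the explicit
    # `not in` check followed by indexing collapses into a single call.
    return conf.get(key)
```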
##########
File path: python/pyspark/conf.py
##########
@@ -124,48 +130,57 @@ def __init__(self, loadDefaults=True, _jvm=None, _jconf=None):
                 self._jconf = None
                 self._conf = {}

-    def set(self, key, value):
+    def set(self, key: str, value: str) -> "SparkConf":
         """Set a configuration property."""
         # Try to set self._jconf first if JVM is created, set self._conf if JVM is not created yet.
         if self._jconf is not None:
             self._jconf.set(key, str(value))
         else:
-            self._conf[key] = str(value)
+            cast(Dict[str, str], self._conf)[key] = str(value)
Review comment:
I'd probably go with `assert` here
```patch
diff --git a/python/pyspark/conf.py b/python/pyspark/conf.py
index 09c8e63d09..a8538b06e4 100644
--- a/python/pyspark/conf.py
+++ b/python/pyspark/conf.py
@@ -136,7 +136,8 @@ class SparkConf(object):
         if self._jconf is not None:
             self._jconf.set(key, str(value))
         else:
-            cast(Dict[str, str], self._conf)[key] = str(value)
+            assert self._conf is not None
+            self._conf[key] = str(value)
         return self

     def setIfMissing(self, key: str, value: str) -> "SparkConf":
```
but I guess it is fine for now.
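Roughly why `assert` reads better than `cast` here (a toy sketch, not the actual `SparkConf` class): `assert x is not None` both narrows the `Optional` type for mypy and checks the invariant at runtime, whereas `cast` is a purely static annotation with no runtime effect.
```python
from typing import Dict, Optional


class LocalConf:
    """Toy stand-in for the non-JVM path of SparkConf (illustration only)."""

    def __init__(self) -> None:
        self._conf: Optional[Dict[str, str]] = {}

    def set(self, key: str, value: str) -> "LocalConf":
        # After the assert, mypy narrows Optional[Dict[str, str]] to
        # Dict[str, str], so no cast() is needed; unlike cast, the assert
        # also fails loudly at runtime if the invariant is ever broken.
        assert self._conf is not None
        self._conf[key] = str(value)
        return self
```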
##########
File path: python/pyspark/conf.py
##########
@@ -178,49 +193,49 @@ def setAll(self, pairs):
            self.set(k, v)
        return self

-    def get(self, key, defaultValue=None):
+    def get(self, key: str, defaultValue: Optional[str] = None) -> Optional[str]:
         """Get the configured value for some key, or return a default otherwise."""
-        if defaultValue is None:  # Py4J doesn't call the right get() if we pass None
+        if defaultValue is None:  # Py4J doesn't call the right get() if we pass None
             if self._jconf is not None:
                 if not self._jconf.contains(key):
                     return None
                 return self._jconf.get(key)
             else:
-                if key not in self._conf:
+                if key not in cast(Dict[str, str], self._conf):
                     return None
-                return self._conf[key]
+                return cast(Dict[str, str], self._conf)[key]
Review comment:
Also, same as above ‒ single `assert` might be a better option.
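For illustration, a single assert applied to this branch might look roughly like the hypothetical helper below (not the actual method body; `get_local` and `conf` stand in for the `self._conf` branch):
```python
from typing import Dict, Optional


def get_local(conf: Optional[Dict[str, str]], key: str) -> Optional[str]:
    # A single assert narrows Optional[Dict[str, str]] for mypy, so both
    # cast() calls in the branch above collapse into plain dict access.
    assert conf is not None
    if key not in conf:
        return None
    return conf[key]
```
Combined with the `dict.get` note above, the whole branch would reduce to `return conf.get(key)`.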
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at: [email protected]