Github user zjffdu commented on a diff in the pull request:

    https://github.com/apache/spark/pull/14959#discussion_r82512714
  
    --- Diff: python/pyspark/conf.py ---
    @@ -149,35 +161,53 @@ def setAll(self, pairs):
             :param pairs: list of key-value pairs to set
             """
             for (k, v) in pairs:
    -            self._jconf.set(k, v)
    +            self.set(k, v)
             return self
     
         def get(self, key, defaultValue=None):
             """Get the configured value for some key, or return a default 
otherwise."""
             if defaultValue is None:   # Py4J doesn't call the right get() if 
we pass None
    -            if not self._jconf.contains(key):
    -                return None
    -            return self._jconf.get(key)
    +            if self._jconf:
    +                if not self._jconf.contains(key):
    +                    return None
    +                return self._jconf.get(key)
    +            else:
    +                if key not in self._conf:
    +                    return None
    +                return self._conf[key]
             else:
    -            return self._jconf.get(key, defaultValue)
    +            if self._jconf:
    +                return self._jconf.get(key, defaultValue)
    +            else:
    +                return self._conf.get(key, defaultValue)
     
         def getAll(self):
             """Get all values as a list of key-value pairs."""
             pairs = []
    -        for elem in self._jconf.getAll():
    -            pairs.append((elem._1(), elem._2()))
    +        if self._jconf:
    +            for elem in self._jconf.getAll():
    +                pairs.append((elem._1(), elem._2()))
    +        else:
    +            for k, v in self._conf.items():
    +                pairs.append((k, v))
             return pairs
     
         def contains(self, key):
             """Does this configuration contain a given key?"""
    -        return self._jconf.contains(key)
    +        if self._jconf:
    +            return self._jconf.contains(key)
    +        else:
    +            return key in self._conf
     
         def toDebugString(self):
             """
             Returns a printable version of the configuration, as a list of
             key=value pairs, one per line.
             """
    -        return self._jconf.toDebugString()
    +        if self._jconf:
    +            return self._jconf.toDebugString()
    +        else:
    +            return '\n'.join('%s=%s' % (k, v) for k, v in 
self._conf.items())
    --- End diff --
    
    They may be different, because `_jconf` has the extra configuration from the JVM 
side (like spark-defaults.conf), while `self._conf` only has the configuration set on the 
Python side.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to