Github user HyukjinKwon commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20404#discussion_r164269656
  
    --- Diff: python/pyspark/sql/session.py ---
    @@ -225,6 +225,7 @@ def __init__(self, sparkContext, jsparkSession=None):
             if SparkSession._instantiatedSession is None \
                     or SparkSession._instantiatedSession._sc._jsc is None:
                 SparkSession._instantiatedSession = self
    +            self._jvm.org.apache.spark.sql.SparkSession.setDefaultSession(self._jsparkSession)
    --- End diff ---
    
    I think this would overwrite an existing default session in the JVM. Can we try something like this?
    
    ```diff
    diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
    index 6c84023c43f..0bdfc88153f 100644
    --- a/python/pyspark/sql/session.py
    +++ b/python/pyspark/sql/session.py
    @@ -213,7 +213,10 @@ class SparkSession(object):
             self._jsc = self._sc._jsc
             self._jvm = self._sc._jvm
             if jsparkSession is None:
    -            jsparkSession = self._jvm.SparkSession(self._jsc.sc())
    +            if self._jvm.SparkSession.getDefaultSession().isDefined():
    +                jsparkSession = self._jvm.SparkSession.getDefaultSession().get()
    +            else:
    +                jsparkSession = self._jvm.SparkSession(self._jsc.sc())
             self._jsparkSession = jsparkSession
             self._jwrapped = self._jsparkSession.sqlContext()
             self._wrapped = SQLContext(self._sc, self, self._jwrapped)
    @@ -225,6 +228,8 @@ class SparkSession(object):
             if SparkSession._instantiatedSession is None \
                     or SparkSession._instantiatedSession._sc._jsc is None:
                 SparkSession._instantiatedSession = self
    +            if self._jvm.SparkSession.getDefaultSession().isEmpty():
    +                self._jvm.SparkSession.setDefaultSession(self._jsparkSession)
    
         def _repr_html_(self):
             return """
    @@ -759,6 +764,7 @@ class SparkSession(object):
             """Stop the underlying :class:`SparkContext`.
             """
             self._sc.stop()
    +        self._jvm.org.apache.spark.sql.SparkSession.clearDefaultSession()
             SparkSession._instantiatedSession = None
    ```
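
    To make the intent concrete, here is a rough sketch of the behaviour the diff above is aiming for, assuming it is applied. The master/app name are placeholders, and the `equals` comparison on the private `_jsparkSession` is only illustrative:

    ```python
    from pyspark import SparkContext
    from pyspark.sql import SparkSession

    sc = SparkContext("local[2]", "default-session-demo")

    # First Python-side session: no JVM default session exists yet, so a new
    # Java SparkSession is created and registered as the JVM default.
    spark1 = SparkSession(sc)

    # Second Python-side session: with the change above it should wrap the
    # existing JVM default instead of creating a new Java session and
    # silently overwriting the default.
    spark2 = SparkSession(sc)
    assert spark1._jsparkSession.equals(spark2._jsparkSession)

    # stop() now clears the JVM default as well as the Python-side singleton,
    # so a later SparkSession starts from a clean slate.
    spark2.stop()
    ```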

