cloud-fan commented on a change in pull request #30045:
URL: https://github.com/apache/spark/pull/30045#discussion_r510014126



##########
File path: sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSharedStateSuite.scala
##########
@@ -20,35 +20,45 @@ package org.apache.spark.sql.hive
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
 
 import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
-import org.apache.spark.sql.internal.SharedState
+import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.internal.StaticSQLConf._
 import org.apache.spark.util.Utils
 
 class HiveSharedStateSuite extends SparkFunSuite {
 
+  override def beforeEach(): Unit = {
+    SparkSession.clearActiveSessionInternal()
+    SparkSession.clearDefaultSession()
+    super.beforeEach()
+  }
+
   test("initial configs should be passed to SharedState but not SparkContext") 
{
     val conf = new SparkConf().setMaster("local").setAppName("SharedState Test")
     val sc = SparkContext.getOrCreate(conf)
+    val wareHouseDir = Utils.createTempDir().toString
     val invalidPath = "invalid/path"
     val metastorePath = Utils.createTempDir()
     val tmpDb = "tmp_db"
 
     // The initial configs used to generate SharedState, none of these should affect the global

Review comment:
      This comment is wrong now: the warehouse conf is an exception, since it does affect the global state rather than staying session-local like the other initial configs.
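
      For context, a rough, self-contained sketch of the behavior in question. This is not the test code: the object name `WarehouseConfSketch`, the temp-dir setup, and the printlns are illustrative only, and only standard Spark APIs are used.

```scala
// Illustrative sketch only (not the test code): pass the warehouse dir as an
// initial option of the first session and inspect where it ends up.
import java.nio.file.Files

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

object WarehouseConfSketch {
  // "spark.sql.warehouse.dir" is StaticSQLConf.WAREHOUSE_PATH.key in the test.
  private val warehouseKey = "spark.sql.warehouse.dir"

  def main(args: Array[String]): Unit = {
    val sc = SparkContext.getOrCreate(
      new SparkConf().setMaster("local").setAppName("warehouse-conf-sketch"))

    // The warehouse dir is given only as an initial option of the session,
    // just like the initial configs built up in the test above.
    val warehouseDir = Files.createTempDirectory("spark-warehouse-").toString
    val spark = SparkSession.builder()
      .config(warehouseKey, warehouseDir)
      .getOrCreate()

    // The session picks the warehouse dir up from its initial options; whether
    // and how it also surfaces on the SparkContext side is what the suite
    // asserts, and it is why the warehouse conf is an exception to the
    // "none of these" comment.
    println(s"session warehouse dir:  ${spark.conf.get(warehouseKey)}")
    println(s"SparkContext conf:      ${sc.getConf.getOption(warehouseKey)}")

    spark.stop()
  }
}
```

      The point is that the warehouse dir is the one initial config whose effect is not purely session-local, so the comment needs a carve-out for it.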



