Repository: spark
Updated Branches:
  refs/heads/master b8733e0ad -> 0d00c768a


[SPARK-20667][SQL][TESTS] Clean up the cataloged metadata after completing the package of sql/core and sql/hive

## What changes were proposed in this pull request?

Currently, we do not drop all the cataloged objects after each test package 
finishes. As a result, we sometimes hit strange test failures because a previous 
test suite did not drop its cataloged/temporary objects (tables, functions, 
databases). As a first step, this patch cleans up the environment when the 
`sql/core` and `sql/hive` packages complete.
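
To make the failure mode concrete, below is a minimal sketch (the package, 
object, and view names are illustrative, not from this patch) of how temporary 
state can leak between suites that share one session, and how `reset()` clears 
it. Since `SparkSession.sessionState` is `private[sql]`, code like this has to 
live under the `org.apache.spark.sql` package:

    package org.apache.spark.sql.sketch  // hypothetical subpackage, placed under
                                         // org.apache.spark.sql so the private[sql]
                                         // sessionState member is accessible

    import org.apache.spark.sql.SparkSession

    object CatalogLeakSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().master("local[1]").appName("sketch").getOrCreate()
        import spark.implicits._

        // "Suite A" registers a temporary view but never drops it.
        Seq((1, "a")).toDF("id", "name").createOrReplaceTempView("people")

        // "Suite B", sharing the same session, unexpectedly still sees it.
        assert(spark.catalog.tableExists("people"))

        // Resetting the catalog between packages removes the leaked view.
        spark.sessionState.catalog.reset()
        assert(!spark.catalog.tableExists("people"))

        spark.stop()
      }
    }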

## How was this patch tested?
N/A

Author: Xiao Li <gatorsm...@gmail.com>

Closes #17908 from gatorsmile/reset.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0d00c768
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0d00c768
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0d00c768

Branch: refs/heads/master
Commit: 0d00c768a860fc03402c8f0c9081b8147c29133e
Parents: b8733e0
Author: Xiao Li <gatorsm...@gmail.com>
Authored: Tue May 9 20:10:50 2017 +0800
Committer: Wenchen Fan <wenc...@databricks.com>
Committed: Tue May 9 20:10:50 2017 +0800

----------------------------------------------------------------------
 .../apache/spark/sql/catalyst/catalog/SessionCatalog.scala    | 3 ++-
 .../scala/org/apache/spark/sql/test/SharedSQLContext.scala    | 1 +
 .../main/scala/org/apache/spark/sql/hive/test/TestHive.scala  | 7 +------
 3 files changed, 4 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/0d00c768/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 6c6d600..18e5146 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -1251,9 +1251,10 @@ class SessionCatalog(
         dropTempFunction(func.funcName, ignoreIfNotExists = false)
       }
     }
-    tempTables.clear()
+    clearTempTables()
     globalTempViewManager.clear()
     functionRegistry.clear()
+    tableRelationCache.invalidateAll()
     // restore built-in functions
     FunctionRegistry.builtin.listFunction().foreach { f =>
       val expressionInfo = FunctionRegistry.builtin.lookupFunction(f)
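
The added `tableRelationCache.invalidateAll()` matters because `reset()` drops 
tables behind the cache's back; without it, a cached relation could outlive its 
table. A rough sketch of the scenario (the table name is illustrative; this 
assumes a session `spark` and, as above, code compiled under 
`org.apache.spark.sql`):

    spark.sql("CREATE TABLE t (id INT) USING parquet")
    spark.table("t").count()            // resolving "t" can populate tableRelationCache

    spark.sessionState.catalog.reset()  // drops "t" and, with this patch, also
                                        // invalidates all cached table relations

    // A later lookup now re-resolves against the catalog instead of hitting a
    // stale cached plan for the dropped table:
    assert(!spark.catalog.tableExists("t"))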

http://git-wip-us.apache.org/repos/asf/spark/blob/0d00c768/sql/core/src/test/scala/org/apache/spark/sql/test/SharedSQLContext.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SharedSQLContext.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SharedSQLContext.scala
index 81c69a3..7cea4c0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SharedSQLContext.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SharedSQLContext.scala
@@ -74,6 +74,7 @@ trait SharedSQLContext extends SQLTestUtils with BeforeAndAfterEach with Eventua
   protected override def afterAll(): Unit = {
     super.afterAll()
     if (_spark != null) {
+      _spark.sessionState.catalog.reset()
       _spark.stop()
       _spark = null
     }
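
For context, every suite mixing in `SharedSQLContext` now gets this cleanup for 
free. A sketch of a beneficiary suite (the suite and view names are 
illustrative):

    import org.apache.spark.sql.{QueryTest, Row}
    import org.apache.spark.sql.test.SharedSQLContext

    class ExampleCleanupSuite extends QueryTest with SharedSQLContext {
      import testImplicits._

      test("temp view is visible within this suite") {
        Seq(1, 2, 3).toDF("id").createOrReplaceTempView("tmp_ids")
        checkAnswer(sql("SELECT count(*) FROM tmp_ids"), Row(3L))
      }

      // No afterAll override needed: even if tmp_ids is never dropped here, the
      // shared trait resets the catalog before stopping the session.
    }

Note the ordering in the hunk above: the catalog is reset while the session is 
still alive, and only then is `_spark.stop()` called.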

http://git-wip-us.apache.org/repos/asf/spark/blob/0d00c768/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index d9bb1f8..ee9ac21 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -488,14 +488,9 @@ private[hive] class TestHiveSparkSession(
 
       sharedState.cacheManager.clearCache()
       loadedTables.clear()
-      sessionState.catalog.clearTempTables()
-      sessionState.catalog.tableRelationCache.invalidateAll()
-
+      sessionState.catalog.reset()
       metadataHive.reset()
 
-      FunctionRegistry.getFunctionNames.asScala.filterNot(originalUDFs.contains(_)).
-        foreach { udfName => FunctionRegistry.unregisterTemporaryUDF(udfName) }
-
       // HDFS root scratch dir requires the write all (733) permission. For each connecting user,
       // an HDFS scratch dir: ${hive.exec.scratchdir}/<username> is created, with
       // ${hive.scratch.dir.permission}. To resolve the permission issue, the simplest way is to
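
In effect, TestHive's Spark-side cleanup collapses from three bespoke steps into 
the single `reset()` call; restating the hunk side by side:

    // Before: piecemeal cleanup.
    sessionState.catalog.clearTempTables()
    sessionState.catalog.tableRelationCache.invalidateAll()
    FunctionRegistry.getFunctionNames.asScala.filterNot(originalUDFs.contains(_)).
      foreach { udfName => FunctionRegistry.unregisterTemporaryUDF(udfName) }

    // After: one call clears temp tables, the relation cache, and the Spark
    // function registry (restoring built-ins), and drops cataloged objects.
    sessionState.catalog.reset()
    metadataHive.reset()  // Hive-side state is still reset separately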

