Repository: spark
Updated Branches:
  refs/heads/branch-1.3 223dd3f5d -> 1160cc9e1


Revert "[SPARK-6618][SQL] HiveMetastoreCatalog.lookupRelation should use 
fine-grained lock"

This reverts commit fd600cec0c8cf9e14c3d5d5f63b1de94413ffba8.
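For context, the reverted change had narrowed the locking in lookupRelation to the two metastore client calls (client.getTable and HiveShim.getAllPartitionsOf); this revert restores a single synchronized block around the whole method body, as shown in the diff below. A minimal sketch of the two locking styles, using hypothetical stand-in types rather than the real HiveMetastoreCatalog and Hive client:

    // Hypothetical stand-ins; not the actual Spark or Hive classes.
    trait MetastoreClient {
      def getTable(db: String, table: String): String
      def getAllPartitionsOf(table: String): Seq[String]
    }

    class CoarseGrainedCatalog(client: MetastoreClient) {
      // Style restored by this revert: the entire lookup runs under one lock,
      // so concurrent callers never reach the (non-thread-safe) client at once.
      def lookupRelation(db: String, table: String): Seq[String] = synchronized {
        val t = client.getTable(db, table)
        client.getAllPartitionsOf(t)
      }
    }

    class FineGrainedCatalog(client: MetastoreClient) {
      // Style being reverted: only the individual client calls are locked,
      // which allows more concurrency but leaves the rest of the method
      // outside the lock.
      def lookupRelation(db: String, table: String): Seq[String] = {
        val t = synchronized { client.getTable(db, table) }
        synchronized { client.getAllPartitionsOf(t) }
      }
    }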


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/1160cc9e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/1160cc9e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/1160cc9e

Branch: refs/heads/branch-1.3
Commit: 1160cc9e1883fb4704c46e7983799671313f9f39
Parents: 223dd3f
Author: Cheng Lian <l...@databricks.com>
Authored: Thu Apr 2 12:59:38 2015 +0800
Committer: Cheng Lian <l...@databricks.com>
Committed: Thu Apr 2 12:59:38 2015 +0800

----------------------------------------------------------------------
 .../apache/spark/sql/hive/HiveMetastoreCatalog.scala    | 12 +++---------
 .../apache/spark/sql/hive/execution/SQLQuerySuite.scala | 11 -----------
 2 files changed, 3 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/1160cc9e/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 2b5d031..f0076ce 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -173,16 +173,12 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
 
   def lookupRelation(
       tableIdentifier: Seq[String],
-      alias: Option[String]): LogicalPlan = {
+      alias: Option[String]): LogicalPlan = synchronized {
     val tableIdent = processTableIdentifier(tableIdentifier)
     val databaseName = tableIdent.lift(tableIdent.size - 2).getOrElse(
       hive.sessionState.getCurrentDatabase)
     val tblName = tableIdent.last
-    val table = try {
-      synchronized {
-        client.getTable(databaseName, tblName)
-      }
-    } catch {
+    val table = try client.getTable(databaseName, tblName) catch {
       case te: org.apache.hadoop.hive.ql.metadata.InvalidTableException =>
         throw new NoSuchTableException
     }
@@ -204,9 +200,7 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
     } else {
       val partitions: Seq[Partition] =
         if (table.isPartitioned) {
-          synchronized {
-            HiveShim.getAllPartitionsOf(client, table).toSeq
-          }
+          HiveShim.getAllPartitionsOf(client, table).toSeq
         } else {
           Nil
         }

http://git-wip-us.apache.org/repos/asf/spark/blob/1160cc9e/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index efb57f7..ec79144 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -450,15 +450,4 @@ class SQLQuerySuite extends QueryTest {
     dropTempTable("data")
     setConf("spark.sql.hive.convertCTAS", originalConf)
   }
-
-  test("sanity test for SPARK-6618") {
-    (1 to 100).par.map { i =>
-      val tableName = s"SPARK_6618_table_$i"
-      sql(s"CREATE TABLE $tableName (col1 string)")
-      catalog.lookupRelation(Seq(tableName))
-      table(tableName)
-      tables()
-      sql(s"DROP TABLE $tableName")
-    }
-  }
 }

