Repository: spark Updated Branches: refs/heads/master ca6955858 -> 584c7f143
[SPARK-21699][SQL] Remove unused getTableOption in ExternalCatalog ## What changes were proposed in this pull request? This patch removes the unused SessionCatalog.getTableMetadataOption and ExternalCatalog.getTableOption. ## How was this patch tested? Removed the test case. Author: Reynold Xin <r...@databricks.com> Closes #18912 from rxin/remove-getTableOption. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/584c7f14 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/584c7f14 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/584c7f14 Branch: refs/heads/master Commit: 584c7f14370cdfafdc6cd554b2760b7ce7709368 Parents: ca69558 Author: Reynold Xin <r...@databricks.com> Authored: Thu Aug 10 18:56:25 2017 -0700 Committer: Reynold Xin <r...@databricks.com> Committed: Thu Aug 10 18:56:25 2017 -0700 ---------------------------------------------------------------------- .../sql/catalyst/catalog/ExternalCatalog.scala | 2 -- .../sql/catalyst/catalog/InMemoryCatalog.scala | 4 ---- .../sql/catalyst/catalog/SessionCatalog.scala | 17 +++-------------- .../sql/catalyst/catalog/SessionCatalogSuite.scala | 11 ----------- .../spark/sql/hive/HiveExternalCatalog.scala | 4 ---- 5 files changed, 3 insertions(+), 35 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/spark/blob/584c7f14/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala index 68644f4..d4c58db 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala +++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala @@ -167,8 +167,6 @@ abstract class ExternalCatalog def getTable(db: String, table: String): CatalogTable - def getTableOption(db: String, table: String): Option[CatalogTable] - def tableExists(db: String, table: String): Boolean def listTables(db: String): Seq[String] http://git-wip-us.apache.org/repos/asf/spark/blob/584c7f14/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala index 37e9eea..98370c1 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala @@ -326,10 +326,6 @@ class InMemoryCatalog( catalog(db).tables(table).table } - override def getTableOption(db: String, table: String): Option[CatalogTable] = synchronized { - if (!tableExists(db, table)) None else Option(catalog(db).tables(table).table) - } - override def tableExists(db: String, table: String): Boolean = synchronized { requireDbExists(db) catalog(db).tables.contains(table) http://git-wip-us.apache.org/repos/asf/spark/blob/584c7f14/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala index b44d2ee..e3237a8 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala +++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala @@ -387,9 +387,10 @@ class SessionCatalog( /** * Retrieve the metadata of an existing permanent table/view. If no database is specified, - * assume the table/view is in the current database. If the specified table/view is not found - * in the database then a [[NoSuchTableException]] is thrown. + * assume the table/view is in the current database. */ + @throws[NoSuchDatabaseException] + @throws[NoSuchTableException] def getTableMetadata(name: TableIdentifier): CatalogTable = { val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase)) val table = formatTableName(name.table) @@ -399,18 +400,6 @@ class SessionCatalog( } /** - * Retrieve the metadata of an existing metastore table. - * If no database is specified, assume the table is in the current database. - * If the specified table is not found in the database then return None if it doesn't exist. - */ - def getTableMetadataOption(name: TableIdentifier): Option[CatalogTable] = { - val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase)) - val table = formatTableName(name.table) - requireDbExists(db) - externalCatalog.getTableOption(db, table) - } - - /** * Load files stored in given path into an existing metastore table. * If no database is specified, assume the table is in the current database. * If the specified table is not found in the database then a [[NoSuchTableException]] is thrown. 
http://git-wip-us.apache.org/repos/asf/spark/blob/584c7f14/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala index 8f856a0..d2b670d 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala @@ -510,17 +510,6 @@ abstract class SessionCatalogSuite extends AnalysisTest { } } - test("get option of table metadata") { - withBasicCatalog { catalog => - assert(catalog.getTableMetadataOption(TableIdentifier("tbl1", Some("db2"))) - == Option(catalog.externalCatalog.getTable("db2", "tbl1"))) - assert(catalog.getTableMetadataOption(TableIdentifier("unknown_table", Some("db2"))).isEmpty) - intercept[NoSuchDatabaseException] { - catalog.getTableMetadataOption(TableIdentifier("tbl1", Some("unknown_db"))) - } - } - } - test("lookup table relation") { withBasicCatalog { catalog => val tempTable1 = Range(1, 10, 1, 10) http://git-wip-us.apache.org/repos/asf/spark/blob/584c7f14/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala ---------------------------------------------------------------------- diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala index 19e5f78..e9d48f9 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala @@ -669,10 +669,6 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat restoreTableMetadata(getRawTable(db, table)) } - override def 
getTableOption(db: String, table: String): Option[CatalogTable] = withClient { - client.getTableOption(db, table).map(restoreTableMetadata) - } - /** * Restores table metadata from the table properties. This method is kind of a opposite version * of [[createTable]]. --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org