Repository: spark
Updated Branches:
  refs/heads/master 268b71d0d -> 7de30d6e9
[SPARK-16916][SQL] serde/storage properties should not have limitations

## What changes were proposed in this pull request?

`CatalogStorageFormat.properties` is used in two ways:

1. For Hive tables, it stores the serde properties.
2. For data source tables, it stores the data source options, e.g. `path`, `skipHiveMetadata`, etc.

Neither of these has anything to do with the reserved data source table properties, e.g. `spark.sql.sources.provider`, so serde/storage properties should not be subject to the limitations that apply to those keys.

## How was this patch tested?

Existing tests.

Author: Wenchen Fan <[email protected]>

Closes #14506 from cloud-fan/table-prop.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7de30d6e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7de30d6e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7de30d6e

Branch: refs/heads/master
Commit: 7de30d6e9e5d3020d2ba8c2ce08893d9cd822b56
Parents: 268b71d
Author: Wenchen Fan <[email protected]>
Authored: Mon Aug 15 21:43:41 2016 -0700
Committer: Yin Huai <[email protected]>
Committed: Mon Aug 15 21:43:41 2016 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/sql/execution/command/ddl.scala    | 3 ---
 .../scala/org/apache/spark/sql/execution/command/tables.scala | 1 -
 .../org/apache/spark/sql/execution/command/DDLSuite.scala     | 7 -------
 3 files changed, 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/7de30d6e/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index 8fa7615..2eff933 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -306,9 +306,6 @@ case class AlterTableSerDePropertiesCommand(
     "ALTER TABLE attempted to set neither serde class name nor serde properties")
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
-    DDLUtils.verifyTableProperties(
-      serdeProperties.toSeq.flatMap(_.keys.toSeq),
-      "ALTER TABLE SERDEPROPERTIES")
     val catalog = sparkSession.sessionState.catalog
     val table = catalog.getTableMetadata(tableName)
     // For datasource tables, disallow setting serde or specifying partition

http://git-wip-us.apache.org/repos/asf/spark/blob/7de30d6e/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 3b10526..720399e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -120,7 +120,6 @@ case class CreateTableCommand(table: CatalogTable, ifNotExists: Boolean) extends
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     DDLUtils.verifyTableProperties(table.properties.keys.toSeq, "CREATE TABLE")
-    DDLUtils.verifyTableProperties(table.storage.properties.keys.toSeq, "CREATE TABLE")
     sparkSession.sessionState.catalog.createTable(table, ifNotExists)
     Seq.empty[Row]
   }
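
For illustration only (not code from this patch), here is a minimal Scala sketch of the CREATE TABLE side of the change. It assumes a Hive-enabled `SparkSession`; the object name, table names, serde class, and property keys are placeholders.

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical sketch, not part of SPARK-16916: table properties in the
// reserved spark.sql.sources.* namespace are still verified by
// CreateTableCommand, but serde/storage properties no longer are.
object CreateTableStoragePropertiesSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("create-table-storage-properties-sketch")
      .enableHiveSupport() // ROW FORMAT SERDE below requires Hive support
      .getOrCreate()

    // Still rejected: TBLPROPERTIES flow through table.properties, which the
    // remaining verifyTableProperties call continues to check.
    //   spark.sql("CREATE TABLE t1 (a INT) TBLPROPERTIES ('spark.sql.sources.provider' = 'x')")

    // No longer screened: SERDEPROPERTIES flow through table.storage.properties,
    // whose check was removed in tables.scala above.
    spark.sql(
      "CREATE TABLE t2 (a INT) ROW FORMAT SERDE " +
        "'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' " +
        "WITH SERDEPROPERTIES ('spark.sql.sources.me' = 'anything')")

    spark.stop()
  }
}
```

Only the check on `table.storage.properties` is dropped; reserved keys in `table.properties` are still rejected by the `verifyTableProperties` call that remains in the hunk above.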
http://git-wip-us.apache.org/repos/asf/spark/blob/7de30d6e/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index ce1f7c5..0f7fda7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -1273,11 +1273,6 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     intercept[AnalysisException] {
       sql("ALTER TABLE does_not_exist SET SERDEPROPERTIES ('x' = 'y')")
     }
-    // serde properties must not be a datasource property
-    val e = intercept[AnalysisException] {
-      sql(s"ALTER TABLE tab1 SET SERDEPROPERTIES ('${DATASOURCE_PREFIX}foo'='wah')")
-    }
-    assert(e.getMessage.contains(DATASOURCE_PREFIX + "foo"))
   }
 
   private def testSetSerdePartition(isDatasourceTable: Boolean): Unit = {
@@ -1580,8 +1575,6 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
 
   test("create table with datasource properties (not allowed)") {
     assertUnsupported("CREATE TABLE my_tab TBLPROPERTIES ('spark.sql.sources.me'='anything')")
-    assertUnsupported("CREATE TABLE my_tab ROW FORMAT SERDE 'serde' " +
-      "WITH SERDEPROPERTIES ('spark.sql.sources.me'='anything')")
   }
 
   test("Create Hive Table As Select") {
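
For context only, a rough sketch (not code from this patch, and not a test added by it) of the ALTER TABLE behavior that the removed DDLSuite assertion used to pin down. It assumes a Hive-enabled `SparkSession`; the object name, table name, and property key are placeholders.

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical sketch, not part of SPARK-16916: with the verifyTableProperties
// call removed from AlterTableSerDePropertiesCommand, a serde property whose
// key happens to start with "spark.sql.sources." is no longer rejected up
// front; it is handed to the catalog like any other serde property.
object AlterSerdePropertiesSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("alter-serde-properties-sketch")
      .enableHiveSupport() // serde DDL below requires Hive support
      .getOrCreate()

    spark.sql("CREATE TABLE tab1 (a INT, b STRING)") // plain Hive table

    // Before this patch, the prefixed key made the command fail verification
    // with an AnalysisException mentioning the key (the removed test above
    // asserted exactly that); afterwards the statement goes through.
    spark.sql("ALTER TABLE tab1 SET SERDEPROPERTIES ('spark.sql.sources.foo' = 'wah')")

    spark.stop()
  }
}
```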
