This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 6035b681bfc [SPARK-34305][SQL][TESTS] Unify v1 and v2 ALTER TABLE ..
SET SERDE tests
6035b681bfc is described below
commit 6035b681bfc4c79463a0551dcf93bfa57d3c2b9d
Author: panbingkun <[email protected]>
AuthorDate: Sun Jul 31 14:49:44 2022 +0500
[SPARK-34305][SQL][TESTS] Unify v1 and v2 ALTER TABLE .. SET SERDE tests
### What changes were proposed in this pull request?
- Move parser tests from DDLParserSuite to AlterTableSetSerdeParserSuite.
- Port DS v1 tests from DDLSuite and other test suites to
v1.AlterTableSetSerdeSuite.
- Add a test for DSv2 ALTER TABLE .. SET SERDE to
v2.AlterTableSetSerdeSuite.
### Why are the changes needed?
To improve test coverage.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
By running new test suites:
> $ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly
*AlterTableSetSerdeSuite"
> $ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly
*AlterTableSetSerdeParserSuite"
Closes #36996 from panbingkun/SPARK-34305.
Authored-by: panbingkun <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../spark/sql/catalyst/parser/DDLParserSuite.scala | 91 ----------
.../spark/sql/connector/DataSourceV2SQLSuite.scala | 12 --
.../command/AlterTableSetSerdeParserSuite.scala | 143 +++++++++++++++
.../command/AlterTableSetSerdeSuiteBase.scala | 38 ++++
.../sql/execution/command/DDLParserSuite.scala | 7 -
.../spark/sql/execution/command/DDLSuite.scala | 193 +++------------------
.../command/v1/AlterTableSetSerdeSuite.scala | 168 ++++++++++++++++++
.../command/v2/AlterTableSetSerdeSuite.scala | 46 +++++
.../spark/sql/hive/execution/HiveDDLSuite.scala | 8 -
.../command/AlterTableSetSerdeSuite.scala | 117 +++++++++++++
10 files changed, 539 insertions(+), 284 deletions(-)
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 3a5f0bb6297..1eb7b011d97 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -1880,97 +1880,6 @@ class DDLParserSuite extends AnalysisTest {
""".stripMargin)
}
- test("alter table: SerDe properties") {
- val sql1 = "ALTER TABLE table_name SET SERDE 'org.apache.class'"
- val hint = Some("Please use ALTER VIEW instead.")
- val parsed1 = parsePlan(sql1)
- val expected1 = SetTableSerDeProperties(
- UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", hint),
- Some("org.apache.class"),
- None,
- None)
- comparePlans(parsed1, expected1)
-
- val sql2 =
- """
- |ALTER TABLE table_name SET SERDE 'org.apache.class'
- |WITH SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
- """.stripMargin
- val parsed2 = parsePlan(sql2)
- val expected2 = SetTableSerDeProperties(
- UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", hint),
- Some("org.apache.class"),
- Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
- None)
- comparePlans(parsed2, expected2)
-
- val sql3 =
- """
- |ALTER TABLE table_name
- |SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
- """.stripMargin
- val parsed3 = parsePlan(sql3)
- val expected3 = SetTableSerDeProperties(
- UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", hint),
- None,
- Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
- None)
- comparePlans(parsed3, expected3)
-
- val sql4 =
- """
- |ALTER TABLE table_name PARTITION (test=1, dt='2008-08-08',
country='us')
- |SET SERDE 'org.apache.class'
- |WITH SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
- """.stripMargin
- val parsed4 = parsePlan(sql4)
- val expected4 = SetTableSerDeProperties(
- UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", hint),
- Some("org.apache.class"),
- Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
- Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us")))
- comparePlans(parsed4, expected4)
-
- val sql5 =
- """
- |ALTER TABLE table_name PARTITION (test=1, dt='2008-08-08',
country='us')
- |SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
- """.stripMargin
- val parsed5 = parsePlan(sql5)
- val expected5 = SetTableSerDeProperties(
- UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", hint),
- None,
- Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
- Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us")))
- comparePlans(parsed5, expected5)
-
- val sql6 =
- """
- |ALTER TABLE a.b.c SET SERDE 'org.apache.class'
- |WITH SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
- """.stripMargin
- val parsed6 = parsePlan(sql6)
- val expected6 = SetTableSerDeProperties(
- UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", hint),
- Some("org.apache.class"),
- Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
- None)
- comparePlans(parsed6, expected6)
-
- val sql7 =
- """
- |ALTER TABLE a.b.c PARTITION (test=1, dt='2008-08-08', country='us')
- |SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
- """.stripMargin
- val parsed7 = parsePlan(sql7)
- val expected7 = SetTableSerDeProperties(
- UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", hint),
- None,
- Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
- Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us")))
- comparePlans(parsed7, expected7)
- }
-
test("alter view: AS Query") {
val parsed = parsePlan("ALTER VIEW a.b.c AS SELECT 1")
val expected = AlterViewAs(
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index c82d875faa7..df0b9d8b519 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -1844,18 +1844,6 @@ class DataSourceV2SQLSuite
}
}
- test("ALTER TABLE SerDe properties") {
- val t = "testcat.ns1.ns2.tbl"
- withTable(t) {
- spark.sql(s"CREATE TABLE $t (id bigint, data string) USING foo
PARTITIONED BY (id)")
- val e = intercept[AnalysisException] {
- sql(s"ALTER TABLE $t SET SERDEPROPERTIES ('columns'='foo,bar',
'field.delim' = ',')")
- }
- assert(e.message.contains(
- "ALTER TABLE ... SET [SERDE|SERDEPROPERTIES] is not supported for v2
tables"))
- }
- }
-
test("CREATE VIEW") {
val v = "testcat.ns1.ns2.v"
val e = intercept[AnalysisException] {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeParserSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeParserSuite.scala
new file mode 100644
index 00000000000..b5143e8f92d
--- /dev/null
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeParserSuite.scala
@@ -0,0 +1,143 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTable}
+import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
+import org.apache.spark.sql.catalyst.parser.ParseException
+import org.apache.spark.sql.catalyst.plans.logical.SetTableSerDeProperties
+import org.apache.spark.sql.test.SharedSparkSession
+
+class AlterTableSetSerdeParserSuite extends AnalysisTest with
SharedSparkSession {
+
+ private val HINT = Some("Please use ALTER VIEW instead.")
+
+ test("SerDe property values must be set") {
+ val sql = "ALTER TABLE table_name SET SERDE 'serde' " +
+ "WITH SERDEPROPERTIES('key_without_value', 'key_with_value'='x')"
+ val errMsg = intercept[ParseException] {
+ parsePlan(sql)
+ }.getMessage
+ assert(errMsg.contains("Operation not allowed"))
+ assert(errMsg.contains("key_without_value"))
+ }
+
+ test("alter table SerDe properties by 'SET SERDE'") {
+ val sql = "ALTER TABLE table_name SET SERDE 'org.apache.class'"
+ val parsed = parsePlan(sql)
+ val expected = SetTableSerDeProperties(
+ UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", HINT),
+ Some("org.apache.class"),
+ None,
+ None)
+ comparePlans(parsed, expected)
+ }
+
+ test("alter table SerDe properties by 'SET SERDE ... WITH SERDEPROPERTIES'")
{
+ val sql =
+ """
+ |ALTER TABLE table_name SET SERDE 'org.apache.class'
+ |WITH SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
+ """.stripMargin
+ val parsed = parsePlan(sql)
+ val expected = SetTableSerDeProperties(
+ UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", HINT),
+ Some("org.apache.class"),
+ Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
+ None)
+ comparePlans(parsed, expected)
+ }
+
+ test("alter table SerDe properties by 'SET SERDEPROPERTIES'") {
+ val sql =
+ """
+ |ALTER TABLE table_name
+ |SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
+ """.stripMargin
+ val parsed = parsePlan(sql)
+ val expected = SetTableSerDeProperties(
+ UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", HINT),
+ None,
+ Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
+ None)
+ comparePlans(parsed, expected)
+ }
+
+ test("alter partition SerDe properties by 'SET SERDE ... WITH
SERDEPROPERTIES'") {
+ val sql =
+ """
+ |ALTER TABLE table_name PARTITION (test=1, dt='2008-08-08',
country='us')
+ |SET SERDE 'org.apache.class'
+ |WITH SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
+ """.stripMargin
+ val parsed = parsePlan(sql)
+ val expected = SetTableSerDeProperties(
+ UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", HINT),
+ Some("org.apache.class"),
+ Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
+ Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us")))
+ comparePlans(parsed, expected)
+ }
+
+ test("alter partition SerDe properties by 'SET SERDEPROPERTIES'") {
+ val sql =
+ """
+ |ALTER TABLE table_name PARTITION (test=1, dt='2008-08-08',
country='us')
+ |SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
+ """.stripMargin
+ val parsed = parsePlan(sql)
+ val expected = SetTableSerDeProperties(
+ UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", HINT),
+ None,
+ Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
+ Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us")))
+ comparePlans(parsed, expected)
+ }
+
+ test("table with multi-part identifier: " +
+ "alter table SerDe properties by 'SET SERDE ... WITH SERDEPROPERTIES'") {
+ val sql =
+ """
+ |ALTER TABLE a.b.c SET SERDE 'org.apache.class'
+ |WITH SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
+ """.stripMargin
+ val parsed = parsePlan(sql)
+ val expected = SetTableSerDeProperties(
+ UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", HINT),
+ Some("org.apache.class"),
+ Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
+ None)
+ comparePlans(parsed, expected)
+ }
+
+ test("table with multi-part identifier: " +
+ "alter partition SerDe properties by 'SET SERDE ... WITH SERDEPROPERTIES'")
{
+ val sql =
+ """
+ |ALTER TABLE a.b.c PARTITION (test=1, dt='2008-08-08', country='us')
+ |SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')
+ """.stripMargin
+ val parsed = parsePlan(sql)
+ val expected = SetTableSerDeProperties(
+ UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... SET
[SERDE|SERDEPROPERTIES]", HINT),
+ None,
+ Some(Map("columns" -> "foo,bar", "field.delim" -> ",")),
+ Some(Map("test" -> "1", "dt" -> "2008-08-08", "country" -> "us")))
+ comparePlans(parsed, expected)
+ }
+}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeSuiteBase.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeSuiteBase.scala
new file mode 100644
index 00000000000..076c2257095
--- /dev/null
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableSetSerdeSuiteBase.scala
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.QueryTest
+
+/**
+ * This base suite contains unified tests for the `ALTER TABLE .. SET
[SERDE|SERDEPROPERTIES]`
+ * command that check V1 and V2 table catalogs. The tests that cannot run for
all supported
+ * catalogs are located in more specific test suites:
+ *
+ * - V2 table catalog tests:
+ * `org.apache.spark.sql.execution.command.v2.AlterTableSetSerdeSuite`
+ * - V1 table catalog tests:
+ * `org.apache.spark.sql.execution.command.v1.AlterTableSetSerdeSuiteBase`
+ * - V1 In-Memory catalog:
+ * `org.apache.spark.sql.execution.command.v1.AlterTableSetSerdeSuite`
+ * - V1 Hive External catalog:
+ * `org.apache.spark.sql.hive.execution.command.AlterTableSetSerdeSuite`
+ */
+trait AlterTableSetSerdeSuiteBase extends QueryTest with DDLCommandTestUtils {
+ override val command = "ALTER TABLE .. SET [SERDE|SERDEPROPERTIES]"
+}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index 49059fe9ef4..8ef2cf9887c 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -120,13 +120,6 @@ class DDLParserSuite extends AnalysisTest with
SharedSparkSession {
containsThesePhrases = Seq("key_with_value"))
}
- test("alter table - SerDe property values must be set") {
- assertUnsupported(
- sql = "ALTER TABLE my_tab SET SERDE 'serde' " +
- "WITH SERDEPROPERTIES('key_without_value', 'key_with_value'='x')",
- containsThesePhrases = Seq("key_without_value"))
- }
-
test("alter table: exchange partition (not supported)") {
assertUnsupported(
"""
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index cb1ace76ae7..44beb3a1697 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -39,7 +39,6 @@ import org.apache.spark.sql.test.{SharedSparkSession,
SQLTestUtils}
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
-
class InMemoryCatalogedDDLSuite extends DDLSuite with SharedSparkSession {
import testImplicits._
@@ -227,23 +226,21 @@ class InMemoryCatalogedDDLSuite extends DDLSuite with
SharedSparkSession {
}
}
-abstract class DDLSuite extends QueryTest with SQLTestUtils {
-
- protected val reversedProperties = Seq(PROP_OWNER)
+trait DDLSuiteBase extends SQLTestUtils {
protected def isUsingHiveMetastore: Boolean = {
spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "hive"
}
protected def generateTable(
- catalog: SessionCatalog,
- name: TableIdentifier,
- isDataSource: Boolean = true,
- partitionCols: Seq[String] = Seq("a", "b")): CatalogTable
+ catalog: SessionCatalog,
+ name: TableIdentifier,
+ isDataSource: Boolean = true,
+ partitionCols: Seq[String] = Seq("a", "b")): CatalogTable
private val escapedIdentifier = "`(.+)`".r
- private def dataSource: String = {
+ protected def dataSource: String = {
if (isUsingHiveMetastore) {
"HIVE"
} else {
@@ -252,64 +249,69 @@ abstract class DDLSuite extends QueryTest with
SQLTestUtils {
}
protected def normalizeCatalogTable(table: CatalogTable): CatalogTable =
table
- private def normalizeSerdeProp(props: Map[String, String]): Map[String,
String] = {
+ protected def normalizeSerdeProp(props: Map[String, String]): Map[String,
String] = {
props.filterNot(p => Seq("serialization.format", "path").contains(p._1))
}
- private def checkCatalogTables(expected: CatalogTable, actual:
CatalogTable): Unit = {
+ protected def checkCatalogTables(expected: CatalogTable, actual:
CatalogTable): Unit = {
assert(normalizeCatalogTable(actual) == normalizeCatalogTable(expected))
}
/**
* Strip backticks, if any, from the string.
*/
- private def cleanIdentifier(ident: String): String = {
+ protected def cleanIdentifier(ident: String): String = {
ident match {
case escapedIdentifier(i) => i
case plainIdent => plainIdent
}
}
- private def assertUnsupported(query: String): Unit = {
+ protected def assertUnsupported(query: String): Unit = {
val e = intercept[AnalysisException] {
sql(query)
}
assert(e.getMessage.toLowerCase(Locale.ROOT).contains("operation not
allowed"))
}
- private def maybeWrapException[T](expectException: Boolean)(body: => T):
Unit = {
+ protected def maybeWrapException[T](expectException: Boolean)(body: => T):
Unit = {
if (expectException) intercept[AnalysisException] { body } else body
}
- private def createDatabase(catalog: SessionCatalog, name: String): Unit = {
+ protected def createDatabase(catalog: SessionCatalog, name: String): Unit = {
catalog.createDatabase(
CatalogDatabase(
name, "",
CatalogUtils.stringToURI(spark.sessionState.conf.warehousePath), Map()),
ignoreIfExists = false)
}
- private def createTable(
- catalog: SessionCatalog,
- name: TableIdentifier,
- isDataSource: Boolean = true,
- partitionCols: Seq[String] = Seq("a", "b")): Unit = {
+ protected def createTable(
+ catalog: SessionCatalog,
+ name: TableIdentifier,
+ isDataSource: Boolean = true,
+ partitionCols: Seq[String] = Seq("a", "b")): Unit = {
catalog.createTable(
generateTable(catalog, name, isDataSource, partitionCols),
ignoreIfExists = false)
}
- private def createTablePartition(
- catalog: SessionCatalog,
- spec: TablePartitionSpec,
- tableName: TableIdentifier): Unit = {
+ protected def createTablePartition(
+ catalog: SessionCatalog,
+ spec: TablePartitionSpec,
+ tableName: TableIdentifier): Unit = {
val part = CatalogTablePartition(
spec, CatalogStorageFormat(None, None, None, None, false, Map()))
catalog.createPartitions(tableName, Seq(part), ignoreIfExists = false)
}
- private def getDBPath(dbName: String): URI = {
+ protected def getDBPath(dbName: String): URI = {
val warehousePath =
makeQualifiedPath(spark.sessionState.conf.warehousePath)
new Path(CatalogUtils.URIToString(warehousePath), s"$dbName.db").toUri
}
+}
+
+abstract class DDLSuite extends QueryTest with DDLSuiteBase {
+
+ protected val reversedProperties = Seq(PROP_OWNER)
test("alter table: set location (datasource table)") {
testSetLocation(isDatasourceTable = true)
@@ -323,14 +325,6 @@ abstract class DDLSuite extends QueryTest with
SQLTestUtils {
testUnsetProperties(isDatasourceTable = true)
}
- test("alter table: set serde (datasource table)") {
- testSetSerde(isDatasourceTable = true)
- }
-
- test("alter table: set serde partition (datasource table)") {
- testSetSerdePartition(isDatasourceTable = true)
- }
-
test("alter table: change column (datasource table)") {
testChangeColumn(isDatasourceTable = true)
}
@@ -347,7 +341,6 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils
{
}
}
-
test("CTAS a managed table with the existing empty directory") {
withEmptyDirInTablePath("tab1") { tableLoc =>
withTable("tab1") {
@@ -1099,138 +1092,6 @@ abstract class DDLSuite extends QueryTest with
SQLTestUtils {
}
}
- protected def testSetSerde(isDatasourceTable: Boolean): Unit = {
- if (!isUsingHiveMetastore) {
- assert(isDatasourceTable, "InMemoryCatalog only supports data source
tables")
- }
- val catalog = spark.sessionState.catalog
- val tableIdent = TableIdentifier("tab1", Some("dbx"))
- createDatabase(catalog, "dbx")
- createTable(catalog, tableIdent, isDatasourceTable)
- def checkSerdeProps(expectedSerdeProps: Map[String, String]): Unit = {
- val serdeProp = catalog.getTableMetadata(tableIdent).storage.properties
- if (isUsingHiveMetastore) {
- assert(normalizeSerdeProp(serdeProp) == expectedSerdeProps)
- } else {
- assert(serdeProp == expectedSerdeProps)
- }
- }
- if (isUsingHiveMetastore) {
- val expectedSerde = if (isDatasourceTable) {
- "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"
- } else {
- "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
- }
- assert(catalog.getTableMetadata(tableIdent).storage.serde ==
Some(expectedSerde))
- } else {
- assert(catalog.getTableMetadata(tableIdent).storage.serde.isEmpty)
- }
- checkSerdeProps(Map.empty[String, String])
- // set table serde and/or properties (should fail on datasource tables)
- if (isDatasourceTable) {
- val e1 = intercept[AnalysisException] {
- sql("ALTER TABLE dbx.tab1 SET SERDE 'whatever'")
- }
- val e2 = intercept[AnalysisException] {
- sql("ALTER TABLE dbx.tab1 SET SERDE 'org.apache.madoop' " +
- "WITH SERDEPROPERTIES ('k' = 'v', 'kay' = 'vee')")
- }
- assert(e1.getMessage.contains("datasource"))
- assert(e2.getMessage.contains("datasource"))
- } else {
- val newSerde =
"org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"
- sql(s"ALTER TABLE dbx.tab1 SET SERDE '$newSerde'")
- assert(catalog.getTableMetadata(tableIdent).storage.serde ==
Some(newSerde))
- checkSerdeProps(Map.empty[String, String])
- val serde2 =
"org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe"
- sql(s"ALTER TABLE dbx.tab1 SET SERDE '$serde2' " +
- "WITH SERDEPROPERTIES ('k' = 'v', 'kay' = 'vee')")
- assert(catalog.getTableMetadata(tableIdent).storage.serde ==
Some(serde2))
- checkSerdeProps(Map("k" -> "v", "kay" -> "vee"))
- }
- // set serde properties only
- sql("ALTER TABLE dbx.tab1 SET SERDEPROPERTIES ('k' = 'vvv', 'kay' =
'vee')")
- checkSerdeProps(Map("k" -> "vvv", "kay" -> "vee"))
- // set things without explicitly specifying database
- catalog.setCurrentDatabase("dbx")
- sql("ALTER TABLE tab1 SET SERDEPROPERTIES ('kay' = 'veee')")
- checkSerdeProps(Map("k" -> "vvv", "kay" -> "veee"))
- // table to alter does not exist
- intercept[AnalysisException] {
- sql("ALTER TABLE does_not_exist SET SERDEPROPERTIES ('x' = 'y')")
- }
- }
-
- protected def testSetSerdePartition(isDatasourceTable: Boolean): Unit = {
- if (!isUsingHiveMetastore) {
- assert(isDatasourceTable, "InMemoryCatalog only supports data source
tables")
- }
- val catalog = spark.sessionState.catalog
- val tableIdent = TableIdentifier("tab1", Some("dbx"))
- val spec = Map("a" -> "1", "b" -> "2")
- createDatabase(catalog, "dbx")
- createTable(catalog, tableIdent, isDatasourceTable)
- createTablePartition(catalog, spec, tableIdent)
- createTablePartition(catalog, Map("a" -> "1", "b" -> "3"), tableIdent)
- createTablePartition(catalog, Map("a" -> "2", "b" -> "2"), tableIdent)
- createTablePartition(catalog, Map("a" -> "2", "b" -> "3"), tableIdent)
- def checkPartitionSerdeProps(expectedSerdeProps: Map[String, String]):
Unit = {
- val serdeProp = catalog.getPartition(tableIdent, spec).storage.properties
- if (isUsingHiveMetastore) {
- assert(normalizeSerdeProp(serdeProp) == expectedSerdeProps)
- } else {
- assert(serdeProp == expectedSerdeProps)
- }
- }
- if (isUsingHiveMetastore) {
- val expectedSerde = if (isDatasourceTable) {
- "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"
- } else {
- "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
- }
- assert(catalog.getPartition(tableIdent, spec).storage.serde ==
Some(expectedSerde))
- } else {
- assert(catalog.getPartition(tableIdent, spec).storage.serde.isEmpty)
- }
- checkPartitionSerdeProps(Map.empty[String, String])
- // set table serde and/or properties (should fail on datasource tables)
- if (isDatasourceTable) {
- val e1 = intercept[AnalysisException] {
- sql("ALTER TABLE dbx.tab1 PARTITION (a=1, b=2) SET SERDE 'whatever'")
- }
- val e2 = intercept[AnalysisException] {
- sql("ALTER TABLE dbx.tab1 PARTITION (a=1, b=2) SET SERDE
'org.apache.madoop' " +
- "WITH SERDEPROPERTIES ('k' = 'v', 'kay' = 'vee')")
- }
- assert(e1.getMessage.contains("datasource"))
- assert(e2.getMessage.contains("datasource"))
- } else {
- sql("ALTER TABLE dbx.tab1 PARTITION (a=1, b=2) SET SERDE
'org.apache.jadoop'")
- assert(catalog.getPartition(tableIdent, spec).storage.serde ==
Some("org.apache.jadoop"))
- checkPartitionSerdeProps(Map.empty[String, String])
- sql("ALTER TABLE dbx.tab1 PARTITION (a=1, b=2) SET SERDE
'org.apache.madoop' " +
- "WITH SERDEPROPERTIES ('k' = 'v', 'kay' = 'vee')")
- assert(catalog.getPartition(tableIdent, spec).storage.serde ==
Some("org.apache.madoop"))
- checkPartitionSerdeProps(Map("k" -> "v", "kay" -> "vee"))
- }
- // set serde properties only
- maybeWrapException(isDatasourceTable) {
- sql("ALTER TABLE dbx.tab1 PARTITION (a=1, b=2) " +
- "SET SERDEPROPERTIES ('k' = 'vvv', 'kay' = 'vee')")
- checkPartitionSerdeProps(Map("k" -> "vvv", "kay" -> "vee"))
- }
- // set things without explicitly specifying database
- catalog.setCurrentDatabase("dbx")
- maybeWrapException(isDatasourceTable) {
- sql("ALTER TABLE tab1 PARTITION (a=1, b=2) SET SERDEPROPERTIES ('kay' =
'veee')")
- checkPartitionSerdeProps(Map("k" -> "vvv", "kay" -> "veee"))
- }
- // table to alter does not exist
- intercept[AnalysisException] {
- sql("ALTER TABLE does_not_exist PARTITION (a=1, b=2) SET SERDEPROPERTIES
('x' = 'y')")
- }
- }
-
protected def testChangeColumn(isDatasourceTable: Boolean): Unit = {
if (!isUsingHiveMetastore) {
assert(isDatasourceTable, "InMemoryCatalog only supports data source
tables")
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableSetSerdeSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableSetSerdeSuite.scala
new file mode 100644
index 00000000000..b34d7b03d47
--- /dev/null
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableSetSerdeSuite.scala
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v1
+
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.execution.command
+import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
+
+/**
+ * This base suite contains unified tests for the `ALTER TABLE .. SET
[SERDE|SERDEPROPERTIES]`
+ * command that check V1 table catalogs. The tests that cannot run for all V1
catalogs
+ * are located in more specific test suites:
+ *
+ * - V1 In-Memory catalog:
`org.apache.spark.sql.execution.command.v1.AlterTableSetSerdeSuite`
+ * - V1 Hive External catalog:
+ * `org.apache.spark.sql.hive.execution.command.AlterTableSetSerdeSuite`
+ */
+trait AlterTableSetSerdeSuiteBase extends command.AlterTableSetSerdeSuiteBase {
+
+ private[sql] lazy val sessionCatalog = spark.sessionState.catalog
+
+ private def isUsingHiveMetastore: Boolean = {
+ spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "hive"
+ }
+
+ private def normalizeSerdeProp(props: Map[String, String]): Map[String,
String] = {
+ props.filterNot(p => Seq("serialization.format", "path").contains(p._1))
+ }
+
+ private[sql] def checkSerdeProps(tableIdent: TableIdentifier,
+ expectedSerdeProps: Map[String, String]): Unit = {
+ val serdeProp =
sessionCatalog.getTableMetadata(tableIdent).storage.properties
+ if (isUsingHiveMetastore) {
+ assert(normalizeSerdeProp(serdeProp) == expectedSerdeProps)
+ } else {
+ assert(serdeProp == expectedSerdeProps)
+ }
+ }
+
+ private[sql] def checkPartitionSerdeProps(
+ tableIdent: TableIdentifier,
+ spec: Map[String, String],
+ expectedSerdeProps: Map[String, String]): Unit = {
+ val serdeProp = sessionCatalog.getPartition(tableIdent,
spec).storage.properties
+ if (isUsingHiveMetastore) {
+ assert(normalizeSerdeProp(serdeProp) == expectedSerdeProps)
+ } else {
+ assert(serdeProp == expectedSerdeProps)
+ }
+ }
+}
+
+/**
+ * The class contains tests for the `ALTER TABLE .. SET
[SERDE|SERDEPROPERTIES]` command to check
+ * V1 In-Memory table catalog.
+ */
+class AlterTableSetSerdeSuite extends AlterTableSetSerdeSuiteBase with
CommandSuiteBase {
+
+ test("In-Memory catalog - datasource table: alter table set serde") {
+ withNamespaceAndTable("ns", "tbl") { t =>
+ sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int)
$defaultUsing " +
+ s"PARTITIONED BY (a, b)")
+
+ val tableIdent = TableIdentifier("tbl", Some("ns"))
+ assert(sessionCatalog.getTableMetadata(tableIdent).storage.serde.isEmpty)
+ checkSerdeProps(tableIdent, Map.empty[String, String])
+
+ // set table serde and/or properties (should fail on datasource tables)
+ val e1 = intercept[AnalysisException] {
+ sql(s"ALTER TABLE $t SET SERDE 'whatever'")
+ }
+ assert(e1.getMessage == "Operation not allowed: " +
+ "ALTER TABLE SET SERDE is not supported for tables created with the
datasource API")
+ val e2 = intercept[AnalysisException] {
+ sql(s"ALTER TABLE $t SET SERDE 'org.apache.madoop' " +
+ "WITH SERDEPROPERTIES ('k' = 'v', 'kay' = 'vee')")
+ }
+ assert(e2.getMessage == "Operation not allowed: " +
+ "ALTER TABLE SET SERDE is not supported for tables created with the
datasource API")
+
+ // set serde properties only
+ sql(s"ALTER TABLE $t SET SERDEPROPERTIES ('k' = 'vvv', 'kay' = 'vee')")
+ checkSerdeProps(tableIdent, Map("k" -> "vvv", "kay" -> "vee"))
+
+ // set things without explicitly specifying database
+ sessionCatalog.setCurrentDatabase("ns")
+ sql(s"ALTER TABLE tbl SET SERDEPROPERTIES ('kay' = 'veee')")
+ checkSerdeProps(tableIdent, Map("k" -> "vvv", "kay" -> "veee"))
+
+ // table to alter does not exist
+ val e3 = intercept[AnalysisException] {
+ sql("ALTER TABLE does_not_exist SET SERDEPROPERTIES ('x' = 'y')")
+ }
+ assert(e3.getMessage.contains("Table not found: does_not_exist"))
+ }
+ }
+
+ test("In-Memory catalog - datasource table: alter table set serde
partition") {
+ withNamespaceAndTable("ns", "tbl") { t =>
+ sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int)
$defaultUsing " +
+ s"PARTITIONED BY (a, b)")
+ sql(s"INSERT INTO $t PARTITION (a = '1', b = '2') SELECT 1, 'abc'")
+ sql(s"INSERT INTO $t PARTITION (a = '1', b = '3') SELECT 2, 'def'")
+ sql(s"INSERT INTO $t PARTITION (a = '2', b = '2') SELECT 3, 'ghi'")
+ sql(s"INSERT INTO $t PARTITION (a = '2', b = '3') SELECT 4, 'jkl'")
+
+ val tableIdent = TableIdentifier("tbl", Some("ns"))
+ val spec = Map("a" -> "1", "b" -> "2")
+ assert(sessionCatalog.getPartition(tableIdent,
spec).storage.serde.isEmpty)
+ checkPartitionSerdeProps(tableIdent, spec, Map.empty[String, String])
+
+ // set table serde and/or properties (should fail on datasource tables)
+ val e1 = intercept[AnalysisException] {
+ sql(s"ALTER TABLE $t PARTITION (a=1, b=2) SET SERDE 'whatever'")
+ }
+ assert(e1.getMessage == "Operation not allowed: " +
+ "ALTER TABLE SET [SERDE | SERDEPROPERTIES] for a specific partition " +
+ "is not supported for tables created with the datasource API")
+ val e2 = intercept[AnalysisException] {
+ sql(s"ALTER TABLE $t PARTITION (a=1, b=2) SET SERDE
'org.apache.madoop' " +
+ "WITH SERDEPROPERTIES ('k' = 'v', 'kay' = 'vee')")
+ }
+ assert(e2.getMessage == "Operation not allowed: " +
+ "ALTER TABLE SET [SERDE | SERDEPROPERTIES] for a specific partition " +
+ "is not supported for tables created with the datasource API")
+
+ // set serde properties only
+ val e3 = intercept[AnalysisException] {
+ sql(s"ALTER TABLE $t PARTITION (a=1, b=2) " +
+ "SET SERDEPROPERTIES ('k' = 'vvv', 'kay' = 'vee')")
+ }
+ assert(e3.getMessage == "Operation not allowed: " +
+ "ALTER TABLE SET [SERDE | SERDEPROPERTIES] for a specific partition " +
+ "is not supported for tables created with the datasource API")
+
+ // set things without explicitly specifying database
+ sessionCatalog.setCurrentDatabase("ns")
+ val e4 = intercept[AnalysisException] {
+ sql(s"ALTER TABLE tbl PARTITION (a=1, b=2) SET SERDEPROPERTIES ('kay'
= 'veee')")
+ }
+ assert(e4.getMessage == "Operation not allowed: " +
+ "ALTER TABLE SET [SERDE | SERDEPROPERTIES] for a specific partition " +
+ "is not supported for tables created with the datasource API")
+
+ // table to alter does not exist
+ val e5 = intercept[AnalysisException] {
+ sql("ALTER TABLE does_not_exist PARTITION (a=1, b=2) SET
SERDEPROPERTIES ('x' = 'y')")
+ }
+ assert(e5.getMessage.contains("Table not found: does_not_exist"))
+ }
+ }
+}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableSetSerdeSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableSetSerdeSuite.scala
new file mode 100644
index 00000000000..c824f1a6ded
--- /dev/null
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableSetSerdeSuite.scala
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v2
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.connector.catalog.InMemoryCatalog
+import org.apache.spark.sql.execution.command
+
+/**
+ * The class contains tests for the `ALTER TABLE .. SET [SERDE|SERDEPROPERTIES]` command to
+ * check V2 table catalogs.
+ */
+class AlterTableSetSerdeSuite extends command.AlterTableSetSerdeSuiteBase with CommandSuiteBase {
+
+ override def sparkConf: SparkConf = super.sparkConf
+ .set("spark.sql.catalog.testcat", classOf[InMemoryCatalog].getName)
+
+ test("v2 catalog doesn't support ALTER TABLE SerDe properties") {
+ val t = "testcat.ns1.ns2.tbl"
+ withTable(t) {
+ spark.sql(s"CREATE TABLE $t (id bigint, data string) " +
+ s"USING foo PARTITIONED BY (id)")
+ val e = intercept[AnalysisException] {
+ sql(s"ALTER TABLE $t SET SERDEPROPERTIES ('columns'='foo,bar', 'field.delim' = ',')")
+ }
+ assert(e.message.contains(
+ "ALTER TABLE ... SET [SERDE|SERDEPROPERTIES] is not supported for v2 tables"))
+ }
+ }
+}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index f76fb1e2bf1..4b28e16928f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -147,14 +147,6 @@ class HiveCatalogedDDLSuite extends DDLSuite with TestHiveSingleton with BeforeA
testUnsetProperties(isDatasourceTable = false)
}
- test("alter table: set serde") {
- testSetSerde(isDatasourceTable = false)
- }
-
- test("alter table: set serde partition") {
- testSetSerdePartition(isDatasourceTable = false)
- }
-
test("alter table: change column") {
testChangeColumn(isDatasourceTable = false)
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableSetSerdeSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableSetSerdeSuite.scala
new file mode 100644
index 00000000000..48b48dbbaa4
--- /dev/null
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableSetSerdeSuite.scala
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hive.execution.command
+
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.execution.command.v1
+
+/**
+ * The class contains tests for the `ALTER TABLE .. SET [SERDE|SERDEPROPERTIES]` command to check
+ * V1 Hive external table catalog.
+ */
+class AlterTableSetSerdeSuite extends v1.AlterTableSetSerdeSuiteBase with CommandSuiteBase {
+
+ test("Hive external catalog - hiveformat table: alter table set serde") {
+ withNamespaceAndTable("ns", "tbl") { t =>
+ sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int) " +
+ s"PARTITIONED BY (a, b) " +
+ s"ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' " +
+ s"STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' " +
+ s"OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'")
+
+ val tableIdent = TableIdentifier("tbl", Some("ns"))
+ val expectedSerde = "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
+ assert(sessionCatalog.getTableMetadata(tableIdent).storage.serde == Some(expectedSerde))
+ checkSerdeProps(tableIdent, Map.empty[String, String])
+
+ // set table serde and/or properties (should success on hiveformat tables)
+ val newSerde = "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe"
+ sql(s"ALTER TABLE $t SET SERDE '$newSerde'")
+ assert(sessionCatalog.getTableMetadata(tableIdent).storage.serde == Some(newSerde))
+ checkSerdeProps(tableIdent, Map.empty[String, String])
+ val serde2 = "org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe"
+ sql(s"ALTER TABLE $t SET SERDE '$serde2' " +
+ "WITH SERDEPROPERTIES ('k' = 'v', 'kay' = 'vee')")
+ assert(sessionCatalog.getTableMetadata(tableIdent).storage.serde == Some(serde2))
+ checkSerdeProps(tableIdent, Map("k" -> "v", "kay" -> "vee"))
+
+ // set serde properties only
+ sql(s"ALTER TABLE $t SET SERDEPROPERTIES ('k' = 'vvv', 'kay' = 'vee')")
+ checkSerdeProps(tableIdent, Map("k" -> "vvv", "kay" -> "vee"))
+
+ // set things without explicitly specifying database
+ sessionCatalog.setCurrentDatabase("ns")
+ sql("ALTER TABLE tbl SET SERDEPROPERTIES ('kay' = 'veee')")
+ checkSerdeProps(tableIdent, Map("k" -> "vvv", "kay" -> "veee"))
+
+ // table to alter does not exist
+ val e = intercept[AnalysisException] {
+ sql("ALTER TABLE does_not_exist SET SERDEPROPERTIES ('x' = 'y')")
+ }
+ assert(e.getMessage.contains("Table not found: does_not_exist"))
+ }
+ }
+
+ test("Hive external catalog - hiveformat table: alter table set serde partition") {
+ withNamespaceAndTable("ns", "tbl") { t =>
+ sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int) " +
+ s"PARTITIONED BY (a, b) " +
+ s"ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' " +
+ s"STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' " +
+ s"OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'")
+ sql(s"INSERT INTO $t PARTITION (a = '1', b = '2') SELECT 1, 'abc'")
+ sql(s"INSERT INTO $t PARTITION (a = '1', b = '3') SELECT 2, 'def'")
+ sql(s"INSERT INTO $t PARTITION (a = '2', b = '2') SELECT 3, 'ghi'")
+ sql(s"INSERT INTO $t PARTITION (a = '2', b = '3') SELECT 4, 'jkl'")
+
+ val tableIdent = TableIdentifier("tbl", Some("ns"))
+ val spec = Map("a" -> "1", "b" -> "2")
+ val expectedSerde = "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"
+ assert(sessionCatalog.getPartition(tableIdent, spec).storage.serde == Some(expectedSerde))
+ checkPartitionSerdeProps(tableIdent, spec, Map.empty[String, String])
+
+ // set table serde and/or properties (should success on hiveformat tables)
+ sql(s"ALTER TABLE $t PARTITION (a=1, b=2) SET SERDE 'org.apache.jadoop'")
+ assert(sessionCatalog.getPartition(tableIdent, spec).storage.serde ==
+ Some("org.apache.jadoop"))
+ checkPartitionSerdeProps(tableIdent, spec, Map.empty[String, String])
+ sql(s"ALTER TABLE $t PARTITION (a=1, b=2) SET SERDE 'org.apache.madoop' " +
+ "WITH SERDEPROPERTIES ('k' = 'v', 'kay' = 'vee')")
+ assert(sessionCatalog.getPartition(tableIdent, spec).storage.serde ==
+ Some("org.apache.madoop"))
+ checkPartitionSerdeProps(tableIdent, spec, Map("k" -> "v", "kay" -> "vee"))
+
+ // set serde properties only
+ sql(s"ALTER TABLE $t PARTITION (a=1, b=2) " +
+ "SET SERDEPROPERTIES ('k' = 'vvv', 'kay' = 'vee')")
+ checkPartitionSerdeProps(tableIdent, spec, Map("k" -> "vvv", "kay" -> "vee"))
+
+ // set things without explicitly specifying database
+ sessionCatalog.setCurrentDatabase("ns")
+ sql(s"ALTER TABLE tbl PARTITION (a=1, b=2) SET SERDEPROPERTIES ('kay' = 'veee')")
+ checkPartitionSerdeProps(tableIdent, spec, Map("k" -> "vvv", "kay" -> "veee"))
+
+ // table to alter does not exist
+ val e = intercept[AnalysisException] {
+ sql("ALTER TABLE does_not_exist PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')")
+ }
+ assert(e.getMessage.contains("Table not found: does_not_exist"))
+ }
+ }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]