cloud-fan commented on code in PR #46734:
URL: https://github.com/apache/spark/pull/46734#discussion_r1624895418
##########
sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala:
##########
@@ -2437,6 +2437,100 @@ abstract class DDLSuite extends QueryTest with
DDLSuiteBase {
)
}
}
+
+ test("Change column collation") {
+ withTable("t1", "t2", "t3", "t4") {
+ // Plain `StringType`.
+ sql("CREATE TABLE t1(col STRING) USING parquet")
+ sql("INSERT INTO t1 VALUES ('a')")
+ checkAnswer(sql("SELECT COLLATION(col) FROM t1"), Row("UTF8_BINARY"))
+ sql("ALTER TABLE t1 ALTER COLUMN col TYPE STRING COLLATE
UTF8_BINARY_LCASE")
+ checkAnswer(sql("SELECT COLLATION(col) FROM t1"),
Row("UTF8_BINARY_LCASE"))
+
+ // Invalid "ALTER COLUMN" to Integer.
+ val alterInt = "ALTER TABLE t1 ALTER COLUMN col TYPE INTEGER"
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(alterInt)
+ },
+ errorClass = "NOT_SUPPORTED_CHANGE_COLUMN",
+ parameters = Map(
+ "originType" -> "\"STRING COLLATE UTF8_BINARY_LCASE\"",
+ "originName" -> "`col`",
+ "table" -> "`spark_catalog`.`default`.`t1`",
+ "newType" -> "\"INT\"",
+ "newName" -> "`col`"
+ ),
+ context = ExpectedContext(fragment = alterInt, start = 0, stop =
alterInt.length - 1)
+ )
+
+ // `ArrayType` with collation.
+ sql("CREATE TABLE t2(col ARRAY<STRING>) USING parquet")
+ sql("INSERT INTO t2 VALUES (ARRAY('a'))")
+ checkAnswer(sql("SELECT COLLATION(col[0]) FROM t2"), Row("UTF8_BINARY"))
+ sql("ALTER TABLE t2 ALTER COLUMN col TYPE ARRAY<STRING COLLATE
UTF8_BINARY_LCASE>")
+ checkAnswer(sql("SELECT COLLATION(col[0]) FROM t2"),
Row("UTF8_BINARY_LCASE"))
+
+ // `MapType` with collation.
+ sql("CREATE TABLE t3(col MAP<STRING, STRING>) USING parquet")
+ sql("INSERT INTO t3 VALUES (MAP('k', 'v'))")
+ checkAnswer(sql("SELECT COLLATION(col['k']) FROM t3"),
Row("UTF8_BINARY"))
+ sql(
+ """
+ |ALTER TABLE t3 ALTER COLUMN col TYPE
+ |MAP<STRING, STRING COLLATE UTF8_BINARY_LCASE>""".stripMargin)
+ checkAnswer(sql("SELECT COLLATION(col['k']) FROM t3"),
Row("UTF8_BINARY_LCASE"))
+
+ // Invalid change of map key collation.
+ val alterMap =
+ "ALTER TABLE t3 ALTER COLUMN col TYPE " +
+ "MAP<STRING COLLATE UTF8_BINARY_LCASE, STRING>"
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(alterMap)
+ },
+ errorClass = "NOT_SUPPORTED_CHANGE_COLUMN",
+ parameters = Map(
+ "originType" -> "\"MAP<STRING, STRING COLLATE UTF8_BINARY_LCASE>\"",
+ "originName" -> "`col`",
+ "table" -> "`spark_catalog`.`default`.`t3`",
+ "newType" -> "\"MAP<STRING COLLATE UTF8_BINARY_LCASE, STRING>\"",
+ "newName" -> "`col`"
+ ),
+ context = ExpectedContext(fragment = alterMap, start = 0, stop =
alterMap.length - 1)
+ )
+
+ // `StructType` with collation.
+ sql("CREATE TABLE t4(col STRUCT<a:STRING>) USING parquet")
+ sql("INSERT INTO t4 VALUES (NAMED_STRUCT('a', 'value'))")
+ checkAnswer(sql("SELECT COLLATION(col.a) FROM t4"), Row("UTF8_BINARY"))
+ sql("ALTER TABLE t4 ALTER COLUMN col TYPE STRUCT<a:STRING COLLATE
UTF8_BINARY_LCASE>")
+ checkAnswer(sql("SELECT COLLATION(col.a) FROM t4"),
Row("UTF8_BINARY_LCASE"))
+ }
+ }
+
+ test("Invalid collation change on partition and bucket columns") {
+ withTable("t1", "t2") {
+ sql("CREATE TABLE t1(col STRING, i INTEGER) USING parquet PARTITIONED BY
(col)")
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql("ALTER TABLE t1 ALTER COLUMN col TYPE STRING COLLATE
UTF8_BINARY_LCASE")
Review Comment:
Why do we disallow it? I think we always store the string value as-is in
the partition directory name, so collation doesn't matter.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]