tangrizzly commented on code in PR #53941:
URL: https://github.com/apache/spark/pull/53941#discussion_r2730262713
##########
sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala:
##########
@@ -3015,29 +3026,151 @@ class DataSourceV2SQLSuiteV1Filter
condition = "CATALOG_NOT_FOUND",
parameters = Map(
"catalogName" -> "`not_exist_catalog`",
- "config" -> "\"spark.sql.catalog.not_exist_catalog\"")
- )
+ "config" -> "\"spark.sql.catalog.not_exist_catalog\""))
}
test("SPARK-49757: SET CATALOG statement with IDENTIFIER with multipart name
should fail") {
- val catalogManager = spark.sessionState.catalogManager
- assert(catalogManager.currentCatalog.name() == SESSION_CATALOG_NAME)
+ assertCurrentCatalog(SESSION_CATALOG_NAME)
- val sqlText = "SET CATALOG IDENTIFIER(:param)"
checkError(
- exception = intercept[ParseException] {
- spark.sql(sqlText, Map("param" -> "testcat.ns1"))
+ exception = intercept[AnalysisException] {
+ spark.sql("SET CATALOG IDENTIFIER(:param)", Map("param" ->
"testcat.ns1"))
},
condition = "INVALID_SQL_SYNTAX.MULTI_PART_NAME",
parameters = Map(
"name" -> "`testcat`.`ns1`",
"statement" -> "SET CATALOG"
- ),
- context = ExpectedContext(
- fragment = sqlText,
- start = 0,
- stop = 29)
+ ))
+ }
+
+ test("SPARK-55155: SET CATALOG statement with foldable expressions") {
+ assertCurrentCatalog(SESSION_CATALOG_NAME)
+
+ sql("SET CATALOG CAST(\"testcat\" AS STRING)")
+ assertCurrentCatalog("testcat")
+
+ sql("SET CATALOG CONCAT('test', 'cat2')")
+ assertCurrentCatalog("testcat2")
+ }
+
+ test("SPARK-55155: SET CATALOG statement is case-sensitive") {
+ assertCurrentCatalog(SESSION_CATALOG_NAME)
+
+ checkError(
+ exception = intercept[CatalogNotFoundException] {
+ sql("SET CATALOG teStCaT")
+ },
+ condition = "CATALOG_NOT_FOUND",
+ parameters = Map("catalogName" -> "`teStCaT`", "config" -> "\"spark.sql.catalog.teStCaT\""))
+
+ checkError(
+ exception = intercept[CatalogNotFoundException] {
+ sql("SET CATALOG 'teStCaT'")
+ },
+ condition = "CATALOG_NOT_FOUND",
+ parameters = Map("catalogName" -> "`teStCaT`", "config" -> "\"spark.sql.catalog.teStCaT\""))
+
+ checkError(
+ exception = intercept[CatalogNotFoundException] {
+ sql("SET CATALOG IDENTIFIER('teStCaT')")
+ },
+ condition = "CATALOG_NOT_FOUND",
Review Comment:
Done!
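
For anyone skimming the thread, a minimal sketch (not part of this PR) of what the foldable-expression forms in the tests above do from a user session. The catalog plugin class name below is a placeholder; any CatalogPlugin implementation registered under spark.sql.catalog.testcat would behave the same way.

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .master("local[*]")
      // Placeholder class name: register any CatalogPlugin implementation here.
      .config("spark.sql.catalog.testcat", "com.example.MyCatalogPlugin")
      .getOrCreate()

    // Both expressions fold to the literal "testcat" during analysis,
    // so the current catalog switches just as with SET CATALOG testcat.
    spark.sql("SET CATALOG CAST('testcat' AS STRING)")
    spark.sql("SET CATALOG CONCAT('test', 'cat')")
    assert(spark.catalog.currentCatalog() == "testcat")

    // Catalog names are matched case-sensitively, so this would raise CATALOG_NOT_FOUND.
    // spark.sql("SET CATALOG CONCAT('teSt', 'CaT')")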
##########
sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala:
##########
@@ -3015,29 +3026,151 @@ class DataSourceV2SQLSuiteV1Filter
condition = "CATALOG_NOT_FOUND",
parameters = Map(
"catalogName" -> "`not_exist_catalog`",
- "config" -> "\"spark.sql.catalog.not_exist_catalog\"")
- )
+ "config" -> "\"spark.sql.catalog.not_exist_catalog\""))
}
test("SPARK-49757: SET CATALOG statement with IDENTIFIER with multipart name
should fail") {
- val catalogManager = spark.sessionState.catalogManager
- assert(catalogManager.currentCatalog.name() == SESSION_CATALOG_NAME)
+ assertCurrentCatalog(SESSION_CATALOG_NAME)
- val sqlText = "SET CATALOG IDENTIFIER(:param)"
checkError(
- exception = intercept[ParseException] {
- spark.sql(sqlText, Map("param" -> "testcat.ns1"))
+ exception = intercept[AnalysisException] {
+ spark.sql("SET CATALOG IDENTIFIER(:param)", Map("param" ->
"testcat.ns1"))
},
condition = "INVALID_SQL_SYNTAX.MULTI_PART_NAME",
parameters = Map(
"name" -> "`testcat`.`ns1`",
"statement" -> "SET CATALOG"
- ),
- context = ExpectedContext(
- fragment = sqlText,
- start = 0,
- stop = 29)
+ ))
+ }
+
+ test("SPARK-55155: SET CATALOG statement with foldable expressions") {
+ assertCurrentCatalog(SESSION_CATALOG_NAME)
+
+ sql("SET CATALOG CAST(\"testcat\" AS STRING)")
+ assertCurrentCatalog("testcat")
+
+ sql("SET CATALOG CONCAT('test', 'cat2')")
+ assertCurrentCatalog("testcat2")
+ }
+
+ test("SPARK-55155: SET CATALOG statement is case-sensitive") {
+ assertCurrentCatalog(SESSION_CATALOG_NAME)
+
+ checkError(
+ exception = intercept[CatalogNotFoundException] {
+ sql("SET CATALOG teStCaT")
+ },
+ condition = "CATALOG_NOT_FOUND",
+ parameters = Map("catalogName" -> "`teStCaT`", "config" -> "\"spark.sql.catalog.teStCaT\""))
+
+ checkError(
+ exception = intercept[CatalogNotFoundException] {
+ sql("SET CATALOG 'teStCaT'")
+ },
+ condition = "CATALOG_NOT_FOUND",
+ parameters = Map("catalogName" -> "`teStCaT`", "config" -> "\"spark.sql.catalog.teStCaT\""))
+
+ checkError(
+ exception = intercept[CatalogNotFoundException] {
+ sql("SET CATALOG IDENTIFIER('teStCaT')")
+ },
+ condition = "CATALOG_NOT_FOUND",
+ parameters = Map("catalogName" -> "`teStCaT`", "config" -> "\"spark.sql.catalog.teStCaT\""))
+
+ checkError(
+ exception = intercept[CatalogNotFoundException] {
+ sql("SET CATALOG CONCAT('teSt', 'CaT')")
+ },
+ condition = "CATALOG_NOT_FOUND",
+ parameters = Map("catalogName" -> "`teStCaT`", "config" -> "\"spark.sql.catalog.teStCaT\""))
+ }
+
+ test("SPARK-55155: SET CATALOG with session temp variable") {
+ registerCatalog("testcat3", classOf[InMemoryCatalog])
+ registerCatalog("testcat4", classOf[InMemoryCatalog])
+ assertCurrentCatalog(SESSION_CATALOG_NAME)
+
+ // Declare and set the session temp variable
+ sql("DECLARE cat_name STRING DEFAULT 'testcat'")
+ sql("DECLARE cat_name2 STRING")
+ sql("SET VAR cat_name2 = 'testcat2'")
+
+ // Using the session temp variable without IDENTIFIER()
+ sql("SET CATALOG cat_name")
+ assertCurrentCatalog("testcat")
+ sql("SET CATALOG cat_name2")
+ assertCurrentCatalog("testcat2")
+ // Using the session temp variable with IDENTIFIER()
+ sql("SET CATALOG IDENTIFIER(cat_name)")
+ assertCurrentCatalog("testcat")
+ sql("SET CATALOG IDENTIFIER(cat_name2)")
+ assertCurrentCatalog("testcat2")
+
+ // Fallback to literal when name is not a variable
+ sql("SET CATALOG testcat3")
+ assertCurrentCatalog("testcat3")
+ sql("SET CATALOG testcat4")
+ assertCurrentCatalog("testcat4")
+ }
+
+ test("SPARK-55155: SET CATALOG with multipart identifiers should fail") {
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql("SET CATALOG testcat.ns1")
+ },
+ condition = "INVALID_SQL_SYNTAX.MULTI_PART_NAME",
+ parameters = Map(
+ "name" -> "`testcat`.`ns1`",
+ "statement" -> "SET CATALOG"
+ ))
+ }
+
+ test("SPARK-55155: SET CATALOG with non-deterministic expressions should
fail") {
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql("SET CATALOG rand()")
+ },
+ condition = "INVALID_NON_DETERMINISTIC_EXPRESSIONS",
+ parameters = Map("sqlExprs" -> "\"rand()\""),
+ queryContext = Array(ExpectedContext(fragment = "rand()", start = 12, stop = 17)))
+ }
+
+ test("SPARK-55155: SET CATALOG with null values should fail") {
Review Comment:
Done!
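
One more sketch for list readers, hedged the same way, of the resolution order the SPARK-55155 variable tests above exercise: a bare name in SET CATALOG is first looked up as a session variable and only falls back to being treated as a literal catalog name when no such variable exists.

    // Continuing with an active SparkSession `spark` and the catalog
    // registrations sketched in the earlier comment.
    spark.sql("DECLARE cat_name STRING DEFAULT 'testcat'")

    // A bare name is first resolved as a session variable, here to "testcat".
    spark.sql("SET CATALOG cat_name")
    assert(spark.catalog.currentCatalog() == "testcat")

    // IDENTIFIER() makes the variable reference explicit and resolves the same way.
    spark.sql("SET CATALOG IDENTIFIER(cat_name)")

    // No session variable named testcat3 exists, so the name falls back to a
    // catalog literal (assuming spark.sql.catalog.testcat3 is also configured).
    spark.sql("SET CATALOG testcat3")
    assert(spark.catalog.currentCatalog() == "testcat3")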
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]