This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 264e00ee12bb [MINOR][SQL][TEST] Moving tests to related suites
264e00ee12bb is described below
commit 264e00ee12bbbd822e52fa8ce79692c60f531495
Author: Mihailo Milosevic <[email protected]>
AuthorDate: Mon Mar 11 00:25:55 2024 +0500
[MINOR][SQL][TEST] Moving tests to related suites
### What changes were proposed in this pull request?
Tests from `QueryCompilationErrorsSuite` were moved to `DDLSuite` and
`JDBCTableCatalogSuite`.
### Why are the changes needed?
Tests should live in the suites most closely related to the functionality they exercise; moving them there improves test organization and discoverability.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Verified by running the corresponding test suites, which all pass.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #45439 from mihailom-db/SPARK-47326.
Authored-by: Mihailo Milosevic <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../sql/errors/QueryCompilationErrorsSuite.scala | 74 ----------------------
.../spark/sql/execution/command/DDLSuite.scala | 17 +++++
.../v2/jdbc/JDBCTableCatalogSuite.scala | 56 ++++++++++++++++
3 files changed, 73 insertions(+), 74 deletions(-)
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index c9198c86c720..4574d3328d48 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -25,13 +25,11 @@ import org.apache.spark.sql.api.java.{UDF1, UDF2, UDF23Test}
import org.apache.spark.sql.catalyst.expressions.{Coalesce, Literal, UnsafeRow}
import org.apache.spark.sql.catalyst.parser.ParseException
import
org.apache.spark.sql.execution.datasources.parquet.SparkToParquetSchemaConverter
-import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
import org.apache.spark.sql.expressions.SparkUserDefinedFunction
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
-import org.apache.spark.util.Utils
case class StringLongClass(a: String, b: Long)
@@ -817,78 +815,6 @@ class QueryCompilationErrorsSuite
parameters = Map("extraction" -> "\"array(test)\""))
}
- test("CREATE NAMESPACE with LOCATION for JDBC catalog should throw an
error") {
- withTempDir { tempDir =>
- val url =
s"jdbc:h2:${tempDir.getCanonicalPath};user=testUser;password=testPass"
- Utils.classForName("org.h2.Driver")
- withSQLConf(
- "spark.sql.catalog.h2" -> classOf[JDBCTableCatalog].getName,
- "spark.sql.catalog.h2.url" -> url,
- "spark.sql.catalog.h2.driver" -> "org.h2.Driver") {
- checkError(
- exception = intercept[AnalysisException] {
- sql("CREATE NAMESPACE h2.test_namespace LOCATION './samplepath'")
- },
- errorClass = "NOT_SUPPORTED_IN_JDBC_CATALOG.COMMAND",
- sqlState = "0A000",
- parameters = Map("cmd" -> toSQLStmt("CREATE NAMESPACE ... LOCATION
...")))
- }
- }
- }
-
- test("ALTER NAMESPACE with property other than COMMENT " +
- "for JDBC catalog should throw an exception") {
- withTempDir { tempDir =>
- val url =
s"jdbc:h2:${tempDir.getCanonicalPath};user=testUser;password=testPass"
- Utils.classForName("org.h2.Driver")
- withSQLConf(
- "spark.sql.catalog.h2" -> classOf[JDBCTableCatalog].getName,
- "spark.sql.catalog.h2.url" -> url,
- "spark.sql.catalog.h2.driver" -> "org.h2.Driver") {
- val namespace = "h2.test_namespace"
- withNamespace(namespace) {
- sql(s"CREATE NAMESPACE $namespace")
- checkError(
- exception = intercept[AnalysisException] {
- sql(s"ALTER NAMESPACE h2.test_namespace SET LOCATION
'/tmp/loc_test_2'")
- },
- errorClass = "NOT_SUPPORTED_IN_JDBC_CATALOG.COMMAND_WITH_PROPERTY",
- sqlState = "0A000",
- parameters = Map(
- "cmd" -> toSQLStmt("SET NAMESPACE"),
- "property" -> toSQLConf("location")))
-
- checkError(
- exception = intercept[AnalysisException] {
- sql(s"ALTER NAMESPACE h2.test_namespace SET PROPERTIES('a'='b')")
- },
- errorClass = "NOT_SUPPORTED_IN_JDBC_CATALOG.COMMAND_WITH_PROPERTY",
- sqlState = "0A000",
- parameters = Map(
- "cmd" -> toSQLStmt("SET NAMESPACE"),
- "property" -> toSQLConf("a")))
- }
- }
- }
- }
-
- test("ALTER TABLE UNSET nonexistent property should throw an exception") {
- val tableName = "test_table"
- withTable(tableName) {
- sql(s"CREATE TABLE $tableName (a STRING, b INT) USING parquet")
-
- checkError(
- exception = intercept[AnalysisException] {
- sql(s"ALTER TABLE $tableName UNSET TBLPROPERTIES ('test_prop1',
'test_prop2', 'comment')")
- },
- errorClass = "UNSET_NONEXISTENT_PROPERTIES",
- parameters = Map(
- "properties" -> "`test_prop1`, `test_prop2`",
- "table" -> "`spark_catalog`.`default`.`test_table`")
- )
- }
- }
-
test("SPARK-43841: Unresolved attribute in select of full outer join with
USING") {
withTempView("v1", "v2") {
sql("create or replace temp view v1 as values (1, 2) as (c1, c2)")
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index e47858a69077..e8af606d797e 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -335,6 +335,23 @@ abstract class DDLSuite extends QueryTest with
DDLSuiteBase {
testUnsetProperties(isDatasourceTable = true)
}
+ test("ALTER TABLE UNSET nonexistent property should throw an exception") {
+ val tableName = "test_table"
+ withTable(tableName) {
+ sql(s"CREATE TABLE $tableName (a STRING, b INT) USING parquet")
+
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"ALTER TABLE $tableName UNSET TBLPROPERTIES ('test_prop1',
'test_prop2', 'comment')")
+ },
+ errorClass = "UNSET_NONEXISTENT_PROPERTIES",
+ parameters = Map(
+ "properties" -> "`test_prop1`, `test_prop2`",
+ "table" -> "`spark_catalog`.`default`.`test_table`")
+ )
+ }
+ }
+
test("alter table: change column (datasource table)") {
testChangeColumn(isDatasourceTable = true)
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
index 6332c32c733a..fc313de6c8fe 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala
@@ -26,6 +26,7 @@ import org.apache.spark.sql.{AnalysisException, QueryTest,
Row}
import org.apache.spark.sql.catalyst.analysis.{NoSuchNamespaceException,
TableAlreadyExistsException}
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
+import org.apache.spark.sql.errors.DataTypeErrors.{toSQLConf, toSQLStmt}
import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
@@ -388,6 +389,61 @@ class JDBCTableCatalogSuite extends QueryTest with
SharedSparkSession {
}
}
+ test("CREATE NAMESPACE with LOCATION for JDBC catalog should throw an
error") {
+ withTempDir { tempDir =>
+ val url =
s"jdbc:h2:${tempDir.getCanonicalPath};user=testUser;password=testPass"
+ Utils.classForName("org.h2.Driver")
+ withSQLConf(
+ "spark.sql.catalog.h2" -> classOf[JDBCTableCatalog].getName,
+ "spark.sql.catalog.h2.url" -> url,
+ "spark.sql.catalog.h2.driver" -> "org.h2.Driver") {
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql("CREATE NAMESPACE h2.test_namespace LOCATION './samplepath'")
+ },
+ errorClass = "NOT_SUPPORTED_IN_JDBC_CATALOG.COMMAND",
+ sqlState = "0A000",
+ parameters = Map("cmd" -> toSQLStmt("CREATE NAMESPACE ... LOCATION
...")))
+ }
+ }
+ }
+
+ test("ALTER NAMESPACE with property other than COMMENT " +
+ "for JDBC catalog should throw an exception") {
+ withTempDir { tempDir =>
+ val url =
s"jdbc:h2:${tempDir.getCanonicalPath};user=testUser;password=testPass"
+ Utils.classForName("org.h2.Driver")
+ withSQLConf(
+ "spark.sql.catalog.h2" -> classOf[JDBCTableCatalog].getName,
+ "spark.sql.catalog.h2.url" -> url,
+ "spark.sql.catalog.h2.driver" -> "org.h2.Driver") {
+ val namespace = "h2.test_namespace"
+ withNamespace(namespace) {
+ sql(s"CREATE NAMESPACE $namespace")
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"ALTER NAMESPACE h2.test_namespace SET LOCATION
'/tmp/loc_test_2'")
+ },
+ errorClass = "NOT_SUPPORTED_IN_JDBC_CATALOG.COMMAND_WITH_PROPERTY",
+ sqlState = "0A000",
+ parameters = Map(
+ "cmd" -> toSQLStmt("SET NAMESPACE"),
+ "property" -> toSQLConf("location")))
+
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"ALTER NAMESPACE h2.test_namespace SET PROPERTIES('a'='b')")
+ },
+ errorClass = "NOT_SUPPORTED_IN_JDBC_CATALOG.COMMAND_WITH_PROPERTY",
+ sqlState = "0A000",
+ parameters = Map(
+ "cmd" -> toSQLStmt("SET NAMESPACE"),
+ "property" -> toSQLConf("a")))
+ }
+ }
+ }
+ }
+
test("ALTER TABLE ... update column comment not supported") {
val tableName = "h2.test.alt_table"
withTable(tableName) {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]