MaxGekk commented on code in PR #41699:
URL: https://github.com/apache/spark/pull/41699#discussion_r1241625508


##########
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/Hive_2_1_DDLSuite.scala:
##########
@@ -100,13 +100,16 @@ class Hive_2_1_DDLSuite extends SparkFunSuite with 
TestHiveSingleton {
   }
 
   test("SPARK-21617: ALTER TABLE with incompatible schema on Hive-compatible 
table") {
-    val exception = intercept[AnalysisException] {
-      testAlterTable(
-        "t1",
-        "CREATE TABLE t1 (c1 string) USING parquet",
-        StructType(Array(StructField("c2", IntegerType))))
-    }
-    assert(exception.getMessage().contains("types incompatible with the 
existing columns"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        testAlterTable(
+          "t1",
+          "CREATE TABLE t1 (c1 string) USING parquet",
+          StructType(Array(StructField("c2", IntegerType))))
+      },
+      errorClass = null,

Review Comment:
   ditto: we need a JIRA for the case.



##########
sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala:
##########
@@ -2095,20 +2243,27 @@ abstract class DDLSuite extends QueryTest with 
DDLSuiteBase {
   test(s"Add a directory when 
${SQLConf.LEGACY_ADD_SINGLE_FILE_IN_ADD_FILE.key} set to true") {
     withTempDir { testDir =>
       withSQLConf(SQLConf.LEGACY_ADD_SINGLE_FILE_IN_ADD_FILE.key -> "true") {
-        val msg = intercept[SparkException] {
-          spark.sql(s"ADD FILE $testDir")
-        }.getMessage
-        assert(msg.contains("is a directory and recursive is not turned on"))
+        checkError(
+          exception = intercept[SparkException] {
+            sql(s"ADD FILE $testDir")
+          },
+          errorClass = null,

Review Comment:
   We should assign an error class. @panbingkun Could you open a JIRA so we don't 
forget this case, please?



##########
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala:
##########
@@ -3095,9 +3358,13 @@ class HiveDDLSuite
   }
 
   test("SPARK-38216: Fail early if all the columns are partitioned columns") {
-    assertAnalysisError(
-      "CREATE TABLE tab (c1 int) PARTITIONED BY (c1) STORED AS PARQUET",
-      "Cannot use all columns for partition columns")
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("CREATE TABLE tab (c1 int) PARTITIONED BY (c1) STORED AS PARQUET")
+      },
+      errorClass = null,

Review Comment:
   ditto: missing an error class, even a legacy one.



##########
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala:
##########
@@ -2669,14 +2880,17 @@ class HiveDDLSuite
 
     Seq("nested,column", "nested:column", "nested;column").foreach { 
nestedColumnName =>
       withTable("t") {
-        val e = intercept[AnalysisException] {
-          spark.range(1)
-            .select(struct(lit(0).as(nestedColumnName)).as("toplevel"))
-            .write
-            .format("hive")
-            .saveAsTable("t")
-        }.getMessage
-        assert(e.contains(expectedMsg))
+        checkError(
+          exception = intercept[AnalysisException] {
+            spark.range(1)
+              .select(struct(lit(0).as(nestedColumnName)).as("toplevel"))
+              .write
+              .format("hive")
+              .saveAsTable("t")
+          },
+          errorClass = null,

Review Comment:
   ditto: missing error class.



##########
sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala:
##########
@@ -431,9 +427,11 @@ abstract class DDLSuite extends QueryTest with 
DDLSuiteBase {
            |$partitionClause
          """.stripMargin
       if (userSpecifiedSchema.isEmpty && userSpecifiedPartitionCols.nonEmpty) {
-        val e = intercept[AnalysisException](sql(sqlCreateTable)).getMessage
-        assert(e.contains(
-          "not allowed to specify partition columns when the table schema is 
not defined"))
+        checkError(
+          exception = intercept[AnalysisException](sql(sqlCreateTable)),
+          errorClass = null,

Review Comment:
   hmm, it would be nice to investigate the case and assign an error class.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to