This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 2e7633622e1 [SPARK-38980][SQL][TEST] Move error class tests requiring 
ANSI SQL mode to QueryExecutionAnsiErrorsSuite
2e7633622e1 is described below

commit 2e7633622e14392932fd17d41aef907bf3b4fa21
Author: Gengliang Wang <[email protected]>
AuthorDate: Fri Apr 22 01:45:42 2022 +0800

    [SPARK-38980][SQL][TEST] Move error class tests requiring ANSI SQL mode to 
QueryExecutionAnsiErrorsSuite
    
    ### What changes were proposed in this pull request?
    
    Move error class tests requiring ANSI SQL mode to 
QueryExecutionAnsiErrorsSuite
    
    ### Why are the changes needed?
    
    There are some tests that require enabling ANSI SQL mode in 
`QueryExecutionErrorsSuite`. Before the test suite becomes big, I suggest 
putting all those tests into QueryExecutionAnsiErrorsSuite:
    1. it is easier to manage test cases
    2. developers don't need to write `withSQLConf(SQLConf.ANSI_ENABLED.key -> 
"true")`
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    UT
    
    Closes #36299 from gengliangwang/QueryExecutionAnsiErrorsSuite.
    
    Authored-by: Gengliang Wang <[email protected]>
    Signed-off-by: Gengliang Wang <[email protected]>
---
 .../sql/errors/QueryExecutionAnsiErrorsSuite.scala | 80 ++++++++++++++++++++++
 .../sql/errors/QueryExecutionErrorsSuite.scala     | 63 +----------------
 2 files changed, 81 insertions(+), 62 deletions(-)

diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
new file mode 100644
index 00000000000..491a21661d0
--- /dev/null
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.errors
+
+import org.apache.spark.{SparkArithmeticException, SparkConf, 
SparkDateTimeException}
+import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.test.SharedSparkSession
+
+// Test suite for all the execution errors that require enabling ANSI SQL mode.
+class QueryExecutionAnsiErrorsSuite extends QueryTest with SharedSparkSession {
+  override def sparkConf: SparkConf = 
super.sparkConf.set(SQLConf.ANSI_ENABLED.key, "true")
+
+  test("CAST_CAUSES_OVERFLOW: from timestamp to int") {
+    val e = intercept[SparkArithmeticException] {
+      sql("select CAST(TIMESTAMP '9999-12-31T12:13:14.56789Z' AS 
INT)").collect()
+    }
+    assert(e.getErrorClass === "CAST_CAUSES_OVERFLOW")
+    assert(e.getSqlState === "22005")
+    assert(e.getMessage === "Casting 253402258394567890L to INT causes 
overflow. " +
+      "To return NULL instead, use 'try_cast'. " +
+      "If necessary set spark.sql.ansi.enabled to false to bypass this error.")
+  }
+
+  test("DIVIDE_BY_ZERO: can't divide an integer by zero") {
+    val e = intercept[SparkArithmeticException] {
+      sql("select 6/0").collect()
+    }
+    assert(e.getErrorClass === "DIVIDE_BY_ZERO")
+    assert(e.getSqlState === "22012")
+    assert(e.getMessage ===
+      "divide by zero. To return NULL instead, use 'try_divide'. If necessary 
set " +
+        "spark.sql.ansi.enabled to false (except for ANSI interval type) to 
bypass this error." +
+        """
+          |== SQL(line 1, position 7) ==
+          |select 6/0
+          |       ^^^
+          |""".stripMargin)
+  }
+
+  test("INVALID_FRACTION_OF_SECOND: in the function make_timestamp") {
+    val e = intercept[SparkDateTimeException] {
+      sql("select make_timestamp(2012, 11, 30, 9, 19, 60.66666666)").collect()
+    }
+    assert(e.getErrorClass === "INVALID_FRACTION_OF_SECOND")
+    assert(e.getSqlState === "22023")
+    assert(e.getMessage === "The fraction of sec must be zero. Valid range is 
[0, 60]. " +
+      "If necessary set spark.sql.ansi.enabled to false to bypass this error. 
")
+  }
+
+  test("CANNOT_CHANGE_DECIMAL_PRECISION: cast string to decimal") {
+    val e = intercept[SparkArithmeticException] {
+      sql("select CAST('66666666666666.666' AS DECIMAL(8, 1))").collect()
+    }
+    assert(e.getErrorClass === "CANNOT_CHANGE_DECIMAL_PRECISION")
+    assert(e.getSqlState === "22005")
+    assert(e.getMessage ===
+      "Decimal(expanded,66666666666666.666,17,3}) cannot be represented as 
Decimal(8, 1). " +
+        "If necessary set spark.sql.ansi.enabled to false to bypass this 
error." +
+        """
+          |== SQL(line 1, position 7) ==
+          |select CAST('66666666666666.666' AS DECIMAL(8, 1))
+          |       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+          |""".stripMargin)
+  }
+}
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 57a732a7b81..a198e947207 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -21,7 +21,7 @@ import java.util.Locale
 
 import test.org.apache.spark.sql.connector.JavaSimpleWritableDataSource
 
-import org.apache.spark.{SparkArithmeticException, SparkDateTimeException, 
SparkException, SparkIllegalStateException, SparkRuntimeException, 
SparkUnsupportedOperationException, SparkUpgradeException}
+import org.apache.spark.{SparkArithmeticException, SparkException, 
SparkIllegalStateException, SparkRuntimeException, 
SparkUnsupportedOperationException, SparkUpgradeException}
 import org.apache.spark.sql.{DataFrame, QueryTest}
 import org.apache.spark.sql.catalyst.util.BadRecordException
 import org.apache.spark.sql.connector.SimpleWritableDataSource
@@ -358,67 +358,6 @@ class QueryExecutionErrorsSuite extends QueryTest
     }
   }
 
-  test("CAST_CAUSES_OVERFLOW: from timestamp to int") {
-    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
-      val e = intercept[SparkArithmeticException] {
-        sql("select CAST(TIMESTAMP '9999-12-31T12:13:14.56789Z' AS 
INT)").collect()
-      }
-      assert(e.getErrorClass === "CAST_CAUSES_OVERFLOW")
-      assert(e.getSqlState === "22005")
-      assert(e.getMessage === "Casting 253402258394567890L to INT causes 
overflow. " +
-        "To return NULL instead, use 'try_cast'. " +
-        "If necessary set spark.sql.ansi.enabled to false to bypass this 
error.")
-    }
-  }
-
-  test("DIVIDE_BY_ZERO: can't divide an integer by zero") {
-    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
-      val e = intercept[SparkArithmeticException] {
-        sql("select 6/0").collect()
-      }
-      assert(e.getErrorClass === "DIVIDE_BY_ZERO")
-      assert(e.getSqlState === "22012")
-      assert(e.getMessage ===
-        "divide by zero. To return NULL instead, use 'try_divide'. If 
necessary set " +
-          "spark.sql.ansi.enabled to false (except for ANSI interval type) to 
bypass this error." +
-          """
-            |== SQL(line 1, position 7) ==
-            |select 6/0
-            |       ^^^
-            |""".stripMargin)
-    }
-  }
-
-  test("INVALID_FRACTION_OF_SECOND: in the function make_timestamp") {
-    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
-      val e = intercept[SparkDateTimeException] {
-        sql("select make_timestamp(2012, 11, 30, 9, 19, 
60.66666666)").collect()
-      }
-      assert(e.getErrorClass === "INVALID_FRACTION_OF_SECOND")
-      assert(e.getSqlState === "22023")
-      assert(e.getMessage === "The fraction of sec must be zero. Valid range 
is [0, 60]. " +
-        "If necessary set spark.sql.ansi.enabled to false to bypass this 
error. ")
-    }
-  }
-
-  test("CANNOT_CHANGE_DECIMAL_PRECISION: cast string to decimal") {
-    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
-      val e = intercept[SparkArithmeticException] {
-        sql("select CAST('66666666666666.666' AS DECIMAL(8, 1))").collect()
-      }
-      assert(e.getErrorClass === "CANNOT_CHANGE_DECIMAL_PRECISION")
-      assert(e.getSqlState === "22005")
-      assert(e.getMessage ===
-        "Decimal(expanded,66666666666666.666,17,3}) cannot be represented as 
Decimal(8, 1). " +
-        "If necessary set spark.sql.ansi.enabled to false to bypass this 
error." +
-        """
-          |== SQL(line 1, position 7) ==
-          |select CAST('66666666666666.666' AS DECIMAL(8, 1))
-          |       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-          |""".stripMargin)
-    }
-  }
-
   test("FAILED_EXECUTE_UDF: execute user defined function") {
     val e1 = intercept[SparkException] {
       val words = Seq(("Jacek", 5), ("Agata", 5), ("Sweet", 6)).toDF("word", 
"index")


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to