This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b30d1d41414 [SPARK-38700][SQL] Use error classes in the execution 
errors of save mode
b30d1d41414 is described below

commit b30d1d41414e200f1cc7ec9675e5c013bdf5b214
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Sun May 1 10:34:31 2022 +0300

    [SPARK-38700][SQL] Use error classes in the execution errors of save mode
    
    ### What changes were proposed in this pull request?
    Migrate the following errors in QueryExecutionErrors:
    
    * unsupportedSaveModeError -> UNSUPPORTED_SAVE_MODE
    
    ### Why are the changes needed?
    Porting execution errors of unsupported saveMode to the new error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Add a new unit test.
    
    Closes #36350 from panbingkun/SPARK-38700.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   | 11 ++++++++
 .../main/scala/org/apache/spark/ErrorInfo.scala    |  6 ++---
 .../spark/sql/errors/QueryExecutionErrors.scala    |  9 +++++--
 .../InsertIntoHadoopFsRelationCommand.scala        |  2 +-
 .../sql/errors/QueryExecutionErrorsSuite.scala     | 31 ++++++++++++++++++++--
 5 files changed, 51 insertions(+), 8 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index 4908a9b6c2e..aa38f8b9747 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -246,6 +246,17 @@
   "UNSUPPORTED_GROUPING_EXPRESSION" : {
     "message" : [ "grouping()/grouping_id() can only be used with 
GroupingSets/Cube/Rollup" ]
   },
+  "UNSUPPORTED_SAVE_MODE" : {
+    "message" : [ "The save mode <saveMode> is not supported for: " ],
+    "subClass" : {
+      "EXISTENT_PATH" : {
+        "message" : [ "an existent path." ]
+      },
+      "NON_EXISTENT_PATH" : {
+        "message" : [ "a not existent path." ]
+      }
+    }
+  },
   "UNTYPED_SCALA_UDF" : {
     "message" : [ "You're using untyped Scala UDF, which does not have the 
input type information. Spark may blindly pass null to the Scala closure with 
primitive-type argument, and the closure will see the default value of the Java 
type for the null argument, e.g. `udf((x: Int) => x, IntegerType)`, the result 
is 0 for null input. To get rid of this error, you could:\n1. use typed Scala 
UDF APIs(without return type parameter), e.g. `udf((x: Int) => x)`\n2. use Java 
UDF APIs, e.g. `udf(ne [...]
   },
diff --git a/core/src/main/scala/org/apache/spark/ErrorInfo.scala 
b/core/src/main/scala/org/apache/spark/ErrorInfo.scala
index a21f33e8833..0447572bb1c 100644
--- a/core/src/main/scala/org/apache/spark/ErrorInfo.scala
+++ b/core/src/main/scala/org/apache/spark/ErrorInfo.scala
@@ -80,9 +80,9 @@ private[spark] object SparkThrowableHelper {
       val errorSubInfo = subClass.getOrElse(subErrorClass,
         throw new IllegalArgumentException(s"Cannot find sub error class 
'$subErrorClass'"))
       val subMessageParameters = messageParameters.tail
-      "[" + errorClass + "." + subErrorClass + "] " + errorInfo.messageFormat +
-        
String.format(errorSubInfo.messageFormat.replaceAll("<[a-zA-Z0-9_-]+>", "%s"),
-          subMessageParameters: _*)
+      "[" + errorClass + "." + subErrorClass + "] " + 
String.format((errorInfo.messageFormat +
+        errorSubInfo.messageFormat).replaceAll("<[a-zA-Z0-9_-]+>", "%s"),
+        subMessageParameters: _*)
     } else {
       "[" + errorClass + "] " + String.format(
         errorInfo.messageFormat.replaceAll("<[a-zA-Z0-9_-]+>", "%s"),
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 225315d3f02..4b8d76e8e6f 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -592,8 +592,13 @@ object QueryExecutionErrors extends QueryErrorsBase {
        """.stripMargin)
   }
 
-  def unsupportedSaveModeError(saveMode: String, pathExists: Boolean): 
Throwable = {
-    new IllegalStateException(s"unsupported save mode $saveMode ($pathExists)")
+  def saveModeUnsupportedError(saveMode: Any, pathExists: Boolean): Throwable 
= {
+    pathExists match {
+      case true => new SparkIllegalArgumentException(errorClass = 
"UNSUPPORTED_SAVE_MODE",
+        messageParameters = Array("EXISTENT_PATH", toSQLValue(saveMode, 
StringType)))
+      case _ => new SparkIllegalArgumentException(errorClass = 
"UNSUPPORTED_SAVE_MODE",
+        messageParameters = Array("NON_EXISTENT_PATH", toSQLValue(saveMode, 
StringType)))
+    }
   }
 
   def cannotClearOutputDirectoryError(staticPrefixPath: Path): Throwable = {
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelationCommand.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelationCommand.scala
index 74be483cd7c..d773d4bd271 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelationCommand.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelationCommand.scala
@@ -133,7 +133,7 @@ case class InsertIntoHadoopFsRelationCommand(
         case (SaveMode.Ignore, exists) =>
           !exists
         case (s, exists) =>
-          throw QueryExecutionErrors.unsupportedSaveModeError(s.toString, 
exists)
+          throw QueryExecutionErrors.saveModeUnsupportedError(s, exists)
       }
     }
 
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 338da79674e..3857394b8bc 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -21,8 +21,8 @@ import java.util.Locale
 
 import test.org.apache.spark.sql.connector.JavaSimpleWritableDataSource
 
-import org.apache.spark.{SparkArithmeticException, SparkException, 
SparkIllegalStateException, SparkRuntimeException, 
SparkUnsupportedOperationException, SparkUpgradeException}
-import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest}
+import org.apache.spark.{SparkArithmeticException, SparkException, 
SparkIllegalArgumentException, SparkIllegalStateException, 
SparkRuntimeException, SparkUnsupportedOperationException, 
SparkUpgradeException}
+import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, SaveMode}
 import org.apache.spark.sql.catalyst.util.BadRecordException
 import org.apache.spark.sql.connector.SimpleWritableDataSource
 import org.apache.spark.sql.execution.QueryExecutionException
@@ -33,6 +33,7 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy.EXCEPTION
 import org.apache.spark.sql.types.{DecimalType, StructType, TimestampType}
 import org.apache.spark.sql.util.ArrowUtils
+import org.apache.spark.util.Utils
 
 class QueryExecutionErrorsSuite
   extends QueryTest
@@ -430,4 +431,30 @@ class QueryExecutionErrorsSuite
       matchMsg = true
     )
   }
+
+  test("UNSUPPORTED_SAVE_MODE: unsupported null saveMode whether the path 
exists or not") {
+    withTempPath { path =>
+      val e1 = intercept[SparkIllegalArgumentException] {
+        val saveMode: SaveMode = null
+        Seq(1, 2).toDS().write.mode(saveMode).parquet(path.getAbsolutePath)
+      }
+      checkErrorClass(
+        exception = e1,
+        errorClass = "UNSUPPORTED_SAVE_MODE",
+        errorSubClass = Some("NON_EXISTENT_PATH"),
+        msg = "The save mode NULL is not supported for: a not existent path.")
+
+      Utils.createDirectory(path)
+
+      val e2 = intercept[SparkIllegalArgumentException] {
+        val saveMode: SaveMode = null
+        Seq(1, 2).toDS().write.mode(saveMode).parquet(path.getAbsolutePath)
+      }
+      checkErrorClass(
+        exception = e2,
+        errorClass = "UNSUPPORTED_SAVE_MODE",
+        errorSubClass = Some("EXISTENT_PATH"),
+        msg = "The save mode NULL is not supported for: an existent path.")
+    }
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to