This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d4513013ea70 [SPARK-51926][CORE][SQL] Add INVALID_CONF_VALUE.subXXX error class to conf errors
d4513013ea70 is described below

commit d4513013ea704ae888a38db6b1f4159165db0b1a
Author: Kent Yao <y...@apache.org>
AuthorDate: Mon Apr 28 14:02:47 2025 +0800

    [SPARK-51926][CORE][SQL] Add INVALID_CONF_VALUE.subXXX error class to conf errors
    
    ### What changes were proposed in this pull request?
    This PR adds the following INVALID_CONF_VALUE.subXXX error classes to conf errors (a usage sketch follows the list):
    
    - `_.OUT_OF_RANGE_OF_OPTIONS` for `checkValues` and enumerable confs that provide a set of candidates
    - `_.TYPE_MISMATCH` for errors raised during a `String => T` conversion
    - `_.REQUIREMENT` for `checkValue`, which supplies a check condition and a hint
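    
    A minimal sketch of conf definitions that would trigger each subclass
    (the keys and entries below are hypothetical, for illustration only):
    
        import java.util.concurrent.TimeUnit
        import org.apache.spark.internal.config.ConfigBuilder
    
        // checkValues: setting "d" raises INVALID_CONF_VALUE.OUT_OF_RANGE_OF_OPTIONS
        val mode = ConfigBuilder("spark.test.mode")
          .stringConf
          .checkValues(Set("a", "b", "c"))
          .createWithDefault("a")
    
        // String => Long conversion: setting "abc" raises INVALID_CONF_VALUE.TYPE_MISMATCH
        val timeout = ConfigBuilder("spark.test.timeout")
          .timeConf(TimeUnit.SECONDS)
          .createWithDefault(60L)
    
        // checkValue: setting -1 raises INVALID_CONF_VALUE.REQUIREMENT
        val size = ConfigBuilder("spark.test.size")
          .intConf
          .checkValue(_ >= 0, "value must be non-negative")
          .createWithDefault(10)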
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, the user-facing error for invalid config values changes, as shown below.
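    
    For example, setting a boolean conf such as spark.sql.caseSensitive to
    "10" now fails with a SparkIllegalArgumentException whose message is
    assembled from the error-conditions.json templates below, roughly:
    
        [INVALID_CONF_VALUE.TYPE_MISMATCH] The value '10' in the config
        "spark.sql.caseSensitive" is invalid. It should be a/an 'boolean' value.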
    
    ### How was this patch tested?
    New tests; an example follows.
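    
    The updated suites assert on the error condition and message parameters
    rather than on raw message strings, e.g. (excerpted from ConfigEntrySuite):
    
        checkError(
          exception = intercept[SparkIllegalArgumentException] {
            conf.set(time.key, "abc")
            conf.get(time)
          },
          condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
          parameters = Map(
            "confName" -> time.key,
            "confValue" -> "abc",
            "confType" -> s"time in ${TimeUnit.SECONDS}"))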
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #50729 from yaooqinn/SPARK-51926.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Kent Yao <y...@apache.org>
---
 .../src/main/resources/error/error-conditions.json |  15 +++
 .../spark/internal/config/ConfigBuilder.scala      |  93 ++++++++++----
 .../spark/internal/config/ConfigEntrySuite.scala   | 134 +++++++++++++++------
 .../features/BasicExecutorFeatureStepSuite.scala   |  14 ++-
 .../catalyst/optimizer/OptimizerLoggingSuite.scala |  15 ++-
 .../spark/sql/internal/SQLConfEntrySuite.scala     | 107 +++++++++++-----
 .../apache/spark/sql/internal/SQLConfSuite.scala   |  77 ++++++++----
 7 files changed, 330 insertions(+), 125 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 41101dfbdfdf..ed57e271a4c9 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -2374,10 +2374,25 @@
       "The value '<confValue>' in the config \"<confName>\" is invalid."
     ],
     "subClass" : {
+      "OUT_OF_RANGE_OF_OPTIONS" : {
+        "message" : [
+          "It should be one of '<confOptions>'."
+        ]
+      },
+      "REQUIREMENT" : {
+        "message" : [
+          "<confRequirement>"
+        ]
+      },
       "TIME_ZONE" : {
         "message" : [
           "Cannot resolve the given timezone."
         ]
+      },
+      "TYPE_MISMATCH" : {
+        "message" : [
+          "It should be a/an '<confType>' value."
+        ]
       }
     },
     "sqlState" : "22022"
diff --git a/common/utils/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala b/common/utils/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
index 0f8a6b5fe334..c96324608ba5 100644
--- a/common/utils/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
+++ b/common/utils/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
@@ -33,8 +33,7 @@ private object ConfigHelpers {
     try {
       converter(s.trim)
     } catch {
-      case _: NumberFormatException =>
-        throw new IllegalArgumentException(s"$key should be $configType, but 
was $s")
+      case _: NumberFormatException => throw configTypeMismatchError(key, s, 
configType)
     }
   }
 
@@ -42,8 +41,7 @@ private object ConfigHelpers {
     try {
       s.trim.toBoolean
     } catch {
-      case _: IllegalArgumentException =>
-        throw new IllegalArgumentException(s"$key should be boolean, but was 
$s")
+      case _: IllegalArgumentException => throw configTypeMismatchError(key, 
s, "boolean")
     }
   }
 
@@ -52,17 +50,14 @@ private object ConfigHelpers {
       enumClass.withName(s.trim.toUpperCase(Locale.ROOT))
     } catch {
       case _: NoSuchElementException =>
-        throw new IllegalArgumentException(
-          s"$key should be one of ${enumClass.values.mkString(", ")}, but was 
$s")
+        throw configOutOfRangeOfOptionsError(key, s, enumClass.values)
     }
   }
 
   def toEnum[E <: Enum[E]](s: String, enumClass: Class[E], key: String): E = {
     enumClass.getEnumConstants.find(_.name().equalsIgnoreCase(s.trim)) match {
       case Some(enum) => enum
-      case None =>
-        throw new IllegalArgumentException(
-          s"$key should be one of ${enumClass.getEnumConstants.mkString(", 
")}, but was $s")
+      case None => throw configOutOfRangeOfOptionsError(key, s, 
enumClass.getEnumConstants)
     }
   }
 
@@ -74,29 +69,75 @@ private object ConfigHelpers {
     v.map(stringConverter).mkString(",")
   }
 
-  def timeFromString(str: String, unit: TimeUnit): Long = JavaUtils.timeStringAs(str, unit)
+  def timeFromString(str: String, unit: TimeUnit, key: String): Long = {
+    try {
+      JavaUtils.timeStringAs(str, unit)
+    } catch {
+      case _: NumberFormatException => throw configTypeMismatchError(key, str, s"time in $unit")
+    }
+  }
 
   def timeToString(v: Long, unit: TimeUnit): String = s"${TimeUnit.MILLISECONDS.convert(v, unit)}ms"
 
-  def byteFromString(str: String, unit: ByteUnit): Long = {
-    val (input, multiplier) =
-      if (str.length() > 0 && str.charAt(0) == '-') {
-        (str.substring(1), -1)
-      } else {
-        (str, 1)
-      }
-    multiplier * JavaUtils.byteStringAs(input, unit)
+  def byteFromString(str: String, unit: ByteUnit, key: String): Long = {
+    try {
+      val (input, multiplier) =
+        if (str.nonEmpty && str.charAt(0) == '-') {
+          (str.substring(1), -1)
+        } else {
+          (str, 1)
+        }
+      multiplier * JavaUtils.byteStringAs(input, unit)
+    } catch {
+      case _: NumberFormatException | _: IllegalArgumentException =>
+        throw configTypeMismatchError(key, str, s"bytes in $unit")
+    }
   }
 
-  def byteToString(v: Long, unit: ByteUnit): String = s"${unit.convertTo(v, ByteUnit.BYTE)}b"
+  def byteToString(v: Long, unit: ByteUnit, key: String): String = {
+    try {
+      s"${unit.convertTo(v, ByteUnit.BYTE)}b"
+    } catch {
+      case _: IllegalArgumentException =>
+        throw configTypeMismatchError(key, v.toString, s"bytes in $unit")
+    }
+  }
 
   def regexFromString(str: String, key: String): Regex = {
     try str.r catch {
-      case e: PatternSyntaxException =>
-        throw new IllegalArgumentException(s"$key should be a regex, but was 
$str", e)
+      case _: PatternSyntaxException => throw configTypeMismatchError(key, 
str, "regex")
     }
   }
 
+  def configTypeMismatchError(
+      key: String, value: String, configType: String): SparkIllegalArgumentException = {
+    new SparkIllegalArgumentException(
+      errorClass = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      messageParameters = Map(
+        "confName" -> key,
+        "confValue" -> value,
+        "confType" -> configType))
+  }
+
+  def configOutOfRangeOfOptionsError(
+      key: String, value: String, options: Iterable[AnyRef]): SparkIllegalArgumentException = {
+    new SparkIllegalArgumentException(
+      errorClass = "INVALID_CONF_VALUE.OUT_OF_RANGE_OF_OPTIONS",
+      messageParameters = Map(
+        "confName" -> key,
+        "confValue" -> value,
+        "confOptions" -> options.mkString(", ")))
+  }
+
+  def configRequirementError(
+      key: String, value: String, requirement: String): SparkIllegalArgumentException = {
+    new SparkIllegalArgumentException(
+      errorClass = "INVALID_CONF_VALUE.REQUIREMENT",
+      messageParameters = Map(
+        "confName" -> key,
+        "confValue" -> value,
+        "confRequirement" -> requirement))
+  }
 }
 
 /**
@@ -126,7 +167,7 @@ private[spark] class TypedConfigBuilder[T](
   def checkValue(validator: T => Boolean, errorMsg: String): TypedConfigBuilder[T] = {
     transform { v =>
       if (!validator(v)) {
-        throw new IllegalArgumentException(s"'$v' in ${parent.key} is invalid. 
$errorMsg")
+        throw configRequirementError(parent.key, stringConverter(v), errorMsg)
       }
       v
     }
@@ -154,8 +195,8 @@ private[spark] class TypedConfigBuilder[T](
   def checkValues(validValues: Set[T]): TypedConfigBuilder[T] = {
     transform { v =>
       if (!validValues.contains(v)) {
-        throw new IllegalArgumentException(
-          s"The value of ${parent.key} should be one of 
${validValues.mkString(", ")}, but was $v")
+        throw configOutOfRangeOfOptionsError(
+          parent.key, stringConverter(v), validValues.map(stringConverter))
       }
       v
     }
@@ -303,12 +344,12 @@ private[spark] case class ConfigBuilder(key: String) {
 
   def timeConf(unit: TimeUnit): TypedConfigBuilder[Long] = {
     checkPrependConfig
-    new TypedConfigBuilder(this, timeFromString(_, unit), timeToString(_, unit))
+    new TypedConfigBuilder(this, timeFromString(_, unit, key), timeToString(_, unit))
   }
 
   def bytesConf(unit: ByteUnit): TypedConfigBuilder[Long] = {
     checkPrependConfig
-    new TypedConfigBuilder(this, byteFromString(_, unit), byteToString(_, unit))
+    new TypedConfigBuilder(this, byteFromString(_, unit, key), byteToString(_, unit, key))
   }
 
   def fallbackConf[T](fallback: ConfigEntry[T]): ConfigEntry[T] = {
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index 457e92b06280..b02424ff6d5e 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -20,7 +20,7 @@ package org.apache.spark.internal.config
 import java.util.Locale
 import java.util.concurrent.TimeUnit
 
-import org.apache.spark.{SparkConf, SparkFunSuite}
+import org.apache.spark.{SparkConf, SparkFunSuite, SparkIllegalArgumentException}
 import org.apache.spark.network.shuffledb.DBBackend
 import org.apache.spark.network.util.ByteUnit
 import org.apache.spark.util.SparkConfWithEnv
@@ -86,6 +86,18 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conf.get(time) === 3600L)
     conf.set(time.key, "1m")
     assert(conf.get(time) === 60L)
+
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.set(time.key, "abc")
+        conf.get(time)
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> time.key,
+        "confValue" -> "abc",
+        "confType" -> s"time in ${TimeUnit.SECONDS}")
+    )
   }
 
   test("conf entry: bytes") {
@@ -97,6 +109,18 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conf.get(bytes) === 1L)
     conf.set(bytes.key, "2048")
     assert(conf.get(bytes) === 2048)
+
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.set(bytes.key, "abc")
+        conf.get(bytes)
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> bytes.key,
+        "confValue" -> "abc",
+        "confType" -> s"bytes in ${ByteUnit.KiB}")
+    )
   }
 
   test("conf entry: regex") {
@@ -110,8 +134,16 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conf.get(rConf).toString === "[0-9a-f]{4}")
 
     conf.set(rConf.key, "[.")
-    val e = intercept[IllegalArgumentException](conf.get(rConf))
-    assert(e.getMessage.contains("regex should be a regex, but was"))
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.get(rConf)
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> rConf.key,
+        "confValue" -> "[.",
+        "confType" -> "regex")
+    )
   }
 
   test("conf entry: string seq") {
@@ -155,24 +187,35 @@ class ConfigEntrySuite extends SparkFunSuite {
 
     val entry = createEntry(10)
     conf.set(entry, -1)
-    val e1 = intercept[IllegalArgumentException] {
-      conf.get(entry)
-    }
-    assert(e1.getMessage ===
-      s"'-1' in ${testKey("checkValue")} is invalid. value must be 
non-negative")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.get(entry)
+      },
+      condition = "INVALID_CONF_VALUE.REQUIREMENT",
+      parameters = Map(
+        "confName" -> testKey("checkValue"),
+        "confValue" -> "-1",
+        "confRequirement" -> "value must be non-negative")
+    )
 
-    val e2 = intercept[IllegalArgumentException] {
-      createEntry(-1)
-    }
-    assert(e2.getMessage ===
-      s"'-1' in ${testKey("checkValue")} is invalid. value must be 
non-negative")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        createEntry(-1)
+      },
+      condition = "INVALID_CONF_VALUE.REQUIREMENT",
+      parameters = Map(
+        "confName" -> testKey("checkValue"),
+        "confValue" -> "-1",
+        "confRequirement" -> "value must be non-negative")
+    )
   }
 
   test("conf entry: valid values check") {
     val conf = new SparkConf()
+    val configOptions = Set("a", "b", "c")
     val enumConf = ConfigBuilder(testKey("enum"))
       .stringConf
-      .checkValues(Set("a", "b", "c"))
+      .checkValues(configOptions)
       .createWithDefault("a")
     assert(conf.get(enumConf) === "a")
 
@@ -180,21 +223,32 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conf.get(enumConf) === "b")
 
     conf.set(enumConf, "d")
-    val enumError = intercept[IllegalArgumentException] {
-      conf.get(enumConf)
-    }
-    assert(enumError.getMessage ===
-      s"The value of ${enumConf.key} should be one of a, b, c, but was d")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.get(enumConf)
+      },
+      condition = "INVALID_CONF_VALUE.OUT_OF_RANGE_OF_OPTIONS",
+      parameters = Map(
+        "confName" -> enumConf.key,
+        "confValue" -> "d",
+        "confOptions" -> configOptions.mkString(", "))
+    )
   }
 
   test("conf entry: conversion error") {
     val conf = new SparkConf()
     val conversionTest = ConfigBuilder(testKey("conversionTest")).doubleConf.createOptional
     conf.set(conversionTest.key, "abc")
-    val conversionError = intercept[IllegalArgumentException] {
-      conf.get(conversionTest)
-    }
-    assert(conversionError.getMessage === s"${conversionTest.key} should be double, but was abc")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.get(conversionTest)
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> conversionTest.key,
+        "confValue" -> "abc",
+        "confType" -> "double")
+    )
   }
 
   test("variable expansion of spark config entries") {
@@ -403,11 +457,18 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conf.get(enumConf) === MyTestEnum.Y)
     conf.set(enumConf.key, "Z")
     assert(conf.get(enumConf) === MyTestEnum.Z)
-    val e = intercept[IllegalArgumentException] {
-      conf.set(enumConf.key, "A")
-      conf.get(enumConf)
-    }
-    assert(e.getMessage === s"${enumConf.key} should be one of X, Y, Z, but 
was A")
+
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.set(enumConf.key, "A")
+        conf.get(enumConf)
+      },
+      condition = "INVALID_CONF_VALUE.OUT_OF_RANGE_OF_OPTIONS",
+      parameters = Map(
+        "confName" -> enumConf.key,
+        "confValue" -> "A",
+        "confOptions" -> MyTestEnum.values.mkString(", "))
+    )
   }
 
   test("SPARK-51896: Add Java enum support to ConfigBuilder") {
@@ -418,11 +479,16 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conf.get(enumConf) === DBBackend.LEVELDB)
     conf.set(enumConf, DBBackend.ROCKSDB)
     assert(conf.get(enumConf) === DBBackend.ROCKSDB)
-    val e = intercept[IllegalArgumentException] {
-      conf.set(enumConf.key, "ANYDB")
-      conf.get(enumConf)
-    }
-    assert(e.getMessage ===
-      s"${enumConf.key} should be one of ${DBBackend.values.mkString(", ")}, 
but was ANYDB")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.set(enumConf.key, "ANYDB")
+        conf.get(enumConf)
+        },
+      condition = "INVALID_CONF_VALUE.OUT_OF_RANGE_OF_OPTIONS",
+      parameters = Map(
+        "confName" -> enumConf.key,
+        "confValue" -> "ANYDB",
+        "confOptions" -> DBBackend.values.mkString(", "))
+    )
   }
 }
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStepSuite.scala
index 3ed6d50f689d..37d28c203000 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStepSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicExecutorFeatureStepSuite.scala
@@ -22,7 +22,7 @@ import com.google.common.net.InternetDomainName
 import io.fabric8.kubernetes.api.model._
 import org.scalatest.BeforeAndAfter
 
-import org.apache.spark.{SecurityManager, SparkConf, SparkException, SparkFunSuite}
+import org.apache.spark.{SecurityManager, SparkConf, SparkException, SparkFunSuite, SparkIllegalArgumentException}
 import org.apache.spark.deploy.k8s.{KubernetesExecutorConf, KubernetesTestConf, SecretVolumeUtils, SparkPod}
 import org.apache.spark.deploy.k8s.Config._
 import org.apache.spark.deploy.k8s.Constants._
@@ -203,10 +203,14 @@ class BasicExecutorFeatureStepSuite extends SparkFunSuite with BeforeAndAfter {
     withPodNamePrefix {
       Seq("_123", "spark_exec", "spark@", "a" * 238).foreach { invalid =>
         baseConf.set(KUBERNETES_EXECUTOR_POD_NAME_PREFIX, invalid)
-        val e = intercept[IllegalArgumentException](newExecutorConf())
-        assert(e.getMessage === s"'$invalid' in 
spark.kubernetes.executor.podNamePrefix is" +
-          s" invalid. must conform 
https://kubernetes.io/docs/concepts/overview/"; +
-          "working-with-objects/names/#dns-subdomain-names and the value 
length <= 237")
+        checkError(
+          exception = 
intercept[SparkIllegalArgumentException](newExecutorConf()),
+          condition = "INVALID_CONF_VALUE.REQUIREMENT",
+          parameters = Map(
+              "confName" -> KUBERNETES_EXECUTOR_POD_NAME_PREFIX.key,
+              "confValue" -> invalid,
+              "confRequirement" -> ("must conform 
https://kubernetes.io/docs/concepts/overview/"; +
+                "working-with-objects/names/#dns-subdomain-names and the value 
length <= 237")))
       }
     }
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerLoggingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerLoggingSuite.scala
index 083c522287ca..3adaf08c97cc 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerLoggingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerLoggingSuite.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.catalyst.optimizer
 import org.apache.logging.log4j.Level
 import org.slf4j.event.{Level => Slf4jLevel}
 
+import org.apache.spark.SparkIllegalArgumentException
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans._
 import org.apache.spark.sql.catalyst.expressions.InSet
@@ -101,11 +102,15 @@ class OptimizerLoggingSuite extends PlanTest {
       "infoo")
 
     levels.foreach { level =>
-      val error = intercept[IllegalArgumentException] {
-        withSQLConf(SQLConf.PLAN_CHANGE_LOG_LEVEL.key -> level) {}
-      }
-      assert(error.getMessage == s"${SQLConf.PLAN_CHANGE_LOG_LEVEL.key} should 
be one of " +
-        s"${classOf[Slf4jLevel].getEnumConstants.mkString(", ")}, but was 
$level")
+      checkError(
+        exception = intercept[SparkIllegalArgumentException] {
+          withSQLConf(SQLConf.PLAN_CHANGE_LOG_LEVEL.key -> level) {}
+        },
+        condition = "INVALID_CONF_VALUE.OUT_OF_RANGE_OF_OPTIONS",
+        parameters = Map(
+          "confName" -> SQLConf.PLAN_CHANGE_LOG_LEVEL.key,
+          "confValue" -> level,
+          "confOptions" -> classOf[Slf4jLevel].getEnumConstants.mkString(", 
")))
     }
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
index 26011af37bf4..9c34e28e5fe8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.internal
 
-import org.apache.spark.SparkFunSuite
+import org.apache.spark.{SparkFunSuite, SparkIllegalArgumentException}
 import org.apache.spark.sql.internal.SQLConf._
 
 class SQLConfEntrySuite extends SparkFunSuite {
@@ -40,10 +40,16 @@ class SQLConfEntrySuite extends SparkFunSuite {
     conf.setConfString(key, " 20")
     assert(conf.getConf(confEntry, 5) === 20)
 
-    val e = intercept[IllegalArgumentException] {
-      conf.setConfString(key, "abc")
-    }
-    assert(e.getMessage === s"$key should be int, but was abc")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.setConfString(key, "abc")
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> key,
+        "confValue" -> "abc",
+        "confType" -> "int")
+    )
   }
 
   test("longConf") {
@@ -59,10 +65,16 @@ class SQLConfEntrySuite extends SparkFunSuite {
     assert(conf.getConfString(key) === "20")
     assert(conf.getConf(confEntry, 5L) === 20L)
 
-    val e = intercept[IllegalArgumentException] {
-      conf.setConfString(key, "abc")
-    }
-    assert(e.getMessage === s"$key should be long, but was abc")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.setConfString(key, "abc")
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> key,
+        "confValue" -> "abc",
+        "confType" -> "long")
+    )
   }
 
   test("booleanConf") {
@@ -80,10 +92,16 @@ class SQLConfEntrySuite extends SparkFunSuite {
 
     conf.setConfString(key, " true ")
     assert(conf.getConf(confEntry, false))
-    val e = intercept[IllegalArgumentException] {
-      conf.setConfString(key, "abc")
-    }
-    assert(e.getMessage === s"$key should be boolean, but was abc")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.setConfString(key, "abc")
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> key,
+        "confValue" -> "abc",
+        "confType" -> "boolean")
+    )
   }
 
   test("doubleConf") {
@@ -99,10 +117,16 @@ class SQLConfEntrySuite extends SparkFunSuite {
     assert(conf.getConfString(key) === "20.0")
     assert(conf.getConf(confEntry, 5.0) === 20.0)
 
-    val e = intercept[IllegalArgumentException] {
-      conf.setConfString(key, "abc")
-    }
-    assert(e.getMessage === s"$key should be double, but was abc")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.setConfString(key, "abc")
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> key,
+        "confValue" -> "abc",
+        "confType" -> "double")
+    )
   }
 
   test("stringConf") {
@@ -121,9 +145,10 @@ class SQLConfEntrySuite extends SparkFunSuite {
 
   test("enumConf") {
     val key = "spark.sql.SQLConfEntrySuite.enum"
+    val candidates = Set("a", "b", "c")
     val confEntry = buildConf(key)
       .stringConf
-      .checkValues(Set("a", "b", "c"))
+      .checkValues(candidates)
       .createWithDefault("a")
     assert(conf.getConf(confEntry) === "a")
 
@@ -135,10 +160,16 @@ class SQLConfEntrySuite extends SparkFunSuite {
     assert(conf.getConfString(key) === "c")
     assert(conf.getConf(confEntry) === "c")
 
-    val e = intercept[IllegalArgumentException] {
-      conf.setConfString(key, "d")
-    }
-    assert(e.getMessage === s"The value of $key should be one of a, b, c, but 
was d")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.setConfString(key, "abc")
+      },
+      condition = "INVALID_CONF_VALUE.OUT_OF_RANGE_OF_OPTIONS",
+      parameters = Map(
+        "confName" -> key,
+        "confValue" -> "abc",
+        "confOptions" -> candidates.mkString(", "))
+    )
   }
 
   test("stringSeqConf") {
@@ -182,17 +213,27 @@ class SQLConfEntrySuite extends SparkFunSuite {
     assert(conf.getConf(confEntry) === 1000)
 
     conf.setConf(confEntry, -1)
-    val e1 = intercept[IllegalArgumentException] {
-      conf.getConf(confEntry)
-    }
-    assert(e1.getMessage === s"'-1' in ${confEntry.key} is invalid." +
-      s" The maximum size of the cache must not be negative")
-
-    val e2 = intercept[IllegalArgumentException] {
-      conf.setConfString(confEntry.key, "-1")
-    }
-    assert(e2.getMessage === s"'-1' in ${confEntry.key} is invalid." +
-      s" The maximum size of the cache must not be negative")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.getConf(confEntry)
+      },
+      condition = "INVALID_CONF_VALUE.REQUIREMENT",
+      parameters = Map(
+        "confName" -> confEntry.key,
+        "confValue" -> "-1",
+        "confRequirement" -> "The maximum size of the cache must not be 
negative")
+    )
+
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        conf.setConfString(confEntry.key, "-1")
+      },
+      condition = "INVALID_CONF_VALUE.REQUIREMENT",
+      parameters = Map(
+        "confName" -> confEntry.key,
+        "confValue" -> "-1",
+        "confRequirement" -> "The maximum size of the cache must not be 
negative")
+    )
   }
 
   test("clone SQLConf") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index 442dd09ce388..74a39322480d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -23,6 +23,7 @@ import org.apache.hadoop.fs.Path
 import org.apache.logging.log4j.Level
 
 import org.apache.spark.{SPARK_DOC_ROOT, SparkIllegalArgumentException, SparkNoSuchElementException}
+import org.apache.spark.network.util.ByteUnit
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.MIT
@@ -241,11 +242,18 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
   }
 
   test("invalid conf value") {
-    val e = intercept[IllegalArgumentException] {
-      withSQLConf(SQLConf.CASE_SENSITIVE.key -> "10") {
-      }
-    }
-    assert(e.getMessage === s"${SQLConf.CASE_SENSITIVE.key} should be boolean, 
but was 10")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        withSQLConf(SQLConf.CASE_SENSITIVE.key -> "10") {
+          sql(s"SET ${SQLConf.CASE_SENSITIVE.key}")
+        }
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> SQLConf.CASE_SENSITIVE.key,
+        "confValue" -> "10",
+        "confType" -> "boolean")
+    )
   }
 
   test("Test ADVISORY_PARTITION_SIZE_IN_BYTES's method") {
@@ -264,20 +272,41 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
     assert(sqlConf.getConf(SQLConf.ADVISORY_PARTITION_SIZE_IN_BYTES) === 1073741824)
 
     // test negative value
-    intercept[IllegalArgumentException] {
-      spark.conf.set(SQLConf.ADVISORY_PARTITION_SIZE_IN_BYTES.key, "-1")
-    }
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        spark.conf.set(SQLConf.ADVISORY_PARTITION_SIZE_IN_BYTES.key, "-1k")
+      },
+      condition = "INVALID_CONF_VALUE.REQUIREMENT",
+      parameters = Map(
+        "confName" -> "spark.sql.adaptive.shuffle.targetPostShuffleInputSize",
+        "confValue" -> "-1024b",
+        "confRequirement" -> "advisoryPartitionSizeInBytes must be positive")
+    )
 
     // Test overflow exception
-    intercept[IllegalArgumentException] {
-      // This value exceeds Long.MaxValue
-      spark.conf.set(SQLConf.ADVISORY_PARTITION_SIZE_IN_BYTES.key, "90000000000g")
-    }
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        // This value exceeds Long.MaxValue
+        spark.conf.set(SQLConf.ADVISORY_PARTITION_SIZE_IN_BYTES.key, "90000000000g")
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> "spark.sql.adaptive.shuffle.targetPostShuffleInputSize",
+        "confValue" -> "90000000000g",
+        "confType" -> s"bytes in ${ByteUnit.BYTE}")
+    )
 
-    intercept[IllegalArgumentException] {
-      // This value less than Long.MinValue
-      spark.conf.set(SQLConf.ADVISORY_PARTITION_SIZE_IN_BYTES.key, "-90000000000g")
-    }
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        // This value less than Long.MinValue
+        spark.conf.set(SQLConf.ADVISORY_PARTITION_SIZE_IN_BYTES.key, "-90000000000g")
+      },
+      condition = "INVALID_CONF_VALUE.TYPE_MISMATCH",
+      parameters = Map(
+        "confName" -> "spark.sql.adaptive.shuffle.targetPostShuffleInputSize",
+        "confValue" -> "-90000000000g",
+        "confType" -> s"bytes in ${ByteUnit.BYTE}")
+    )
 
     sqlConf.clear()
   }
@@ -519,11 +548,15 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
 
   test("SPARK-51874: Add Enum support to ConfigBuilder") {
     assert(spark.conf.get(SQLConf.LEGACY_TIME_PARSER_POLICY) === LegacyBehaviorPolicy.CORRECTED)
-    val e = intercept[IllegalArgumentException] {
-      spark.conf.set(SQLConf.LEGACY_TIME_PARSER_POLICY.key, "invalid")
-    }
-    assert(e.getMessage ===
-      s"${SQLConf.LEGACY_TIME_PARSER_POLICY.key} should be one of " +
-      s"${LegacyBehaviorPolicy.values.mkString(", ")}, but was invalid")
+
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        spark.conf.set(SQLConf.LEGACY_TIME_PARSER_POLICY.key, "invalid")
+      },
+      condition = "INVALID_CONF_VALUE.OUT_OF_RANGE_OF_OPTIONS",
+      parameters = Map(
+        "confName" -> SQLConf.LEGACY_TIME_PARSER_POLICY.key,
+        "confValue" -> "invalid",
+        "confOptions" -> LegacyBehaviorPolicy.values.mkString(", ")))
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
