This is an automated email from the ASF dual-hosted git repository. maxgekk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 036af4bf2336 [SPARK-45725][SQL][FOLLOWUP] Fix arguments of a removed SQL config 036af4bf2336 is described below commit 036af4bf23361858d4de2429c9312828cd74fdf2 Author: Max Gekk <max.g...@gmail.com> AuthorDate: Mon Dec 18 17:38:29 2023 +0300 [SPARK-45725][SQL][FOLLOWUP] Fix arguments of a removed SQL config ### What changes were proposed in this pull request? In the PR, I propose to fix the order of arguments of the removed SQL config `spark.sql.optimizer.runtimeFilter.semiJoinReduction.enabled` and check the format of Spark version in the constructor of `RemovedConfig`. ### Why are the changes needed? To avoid confusing users and to prevent such kinds of mistakes. ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? By running the existing test suite: ``` $ build/sbt "test:testOnly *SQLConfSuite" ``` without the fix the test suite fails with the internal error: ``` Caused by: org.apache.spark.SparkException: [INTERNAL_ERROR] The removed SQL config spark.sql.optimizer.runtimeFilter.semiJoinReduction.enabled has the wrong Spark version: false SQLSTATE: XX000 at org.apache.spark.SparkException$.internalError(SparkException.scala:92) at org.apache.spark.SparkException$.internalError(SparkException.scala:96) ``` ### Was this patch authored or co-authored using generative AI tooling? No. Closes #44399 from MaxGekk/followup-semiJoinReduction-conf-2. 
Authored-by: Max Gekk <max.g...@gmail.com> Signed-off-by: Max Gekk <max.g...@gmail.com> --- .../main/scala/org/apache/spark/sql/internal/SQLConf.scala | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index 23217f94d64e..448474ae2faa 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -31,7 +31,7 @@ import scala.util.matching.Regex import org.apache.hadoop.fs.Path -import org.apache.spark.{ErrorMessageFormat, SparkConf, SparkContext, TaskContext} +import org.apache.spark.{ErrorMessageFormat, SparkConf, SparkContext, SparkException, TaskContext} import org.apache.spark.internal.Logging import org.apache.spark.internal.config._ import org.apache.spark.io.CompressionCodec @@ -47,7 +47,7 @@ import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors import org.apache.spark.sql.types.{AtomicType, TimestampNTZType, TimestampType} import org.apache.spark.storage.{StorageLevel, StorageLevelMapper} import org.apache.spark.unsafe.array.ByteArrayMethods -import org.apache.spark.util.Utils +import org.apache.spark.util.{Utils, VersionUtils} //////////////////////////////////////////////////////////////////////////////////////////////////// // This file defines the configuration options for Spark SQL. @@ -4675,7 +4675,12 @@ object SQLConf { * users that they set non-default value to an already removed config. * @param comment Additional info regarding to the removed config. 
*/ - case class RemovedConfig(key: String, version: String, defaultValue: String, comment: String) + case class RemovedConfig(key: String, version: String, defaultValue: String, comment: String) { + if (VersionUtils.majorMinorPatchVersion(version).isEmpty) { + throw SparkException.internalError( + s"The removed SQL config $key has the wrong Spark version: $version") + } + } /** * The map contains info about removed SQL configs. Keys are SQL config names, @@ -4713,7 +4718,7 @@ object SQLConf { "for more details."), RemovedConfig("spark.sql.hive.verifyPartitionPath", "4.0.0", "false", s"This config was replaced by '${IGNORE_MISSING_FILES.key}'."), - RemovedConfig("spark.sql.optimizer.runtimeFilter.semiJoinReduction.enabled", "false", "4.0", + RemovedConfig("spark.sql.optimizer.runtimeFilter.semiJoinReduction.enabled", "4.0.0", "false", "This optimizer config is useless as runtime filter cannot be an IN subquery now.") ) --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org