This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-4.0 by this push:
new 741997474ad2 [SPARK-50993][SQL] Move nullDataSourceOption from
QueryCompilationErrors into QueryExecutionErrors
741997474ad2 is described below
commit 741997474ad21e099bd804d56356d93ee2b01087
Author: beliefer <[email protected]>
AuthorDate: Sun Jan 26 17:01:30 2025 +0300
[SPARK-50993][SQL] Move nullDataSourceOption from QueryCompilationErrors
into QueryExecutionErrors
### What changes were proposed in this pull request?
This PR proposes to move `nullDataSourceOption` from
`QueryCompilationErrors` into `QueryExecutionErrors`.
### Why are the changes needed?
Currently, `nullDataSourceOption` is placed into QueryCompilationErrors.
In fact, it's an execution error.
### Does this PR introduce _any_ user-facing change?
'Yes'.
The type of error will be changed.
### How was this patch tested?
GA.
### Was this patch authored or co-authored using generative AI tooling?
'No'.
Closes #49677 from beliefer/SPARK-50993.
Authored-by: beliefer <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
(cherry picked from commit e7821c82e28dd316712e9787af27f31090d2f975)
Signed-off-by: Max Gekk <[email protected]>
---
.../scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala | 7 -------
.../scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala | 7 +++++++
.../apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala | 4 ++--
.../src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala | 4 ++--
4 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 2fda7894c0fb..1256fe3aad6a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -224,13 +224,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
"invalidValue" -> toSQLExpr(invalidValue)))
}
- def nullDataSourceOption(option: String): Throwable = {
- new AnalysisException(
- errorClass = "NULL_DATA_SOURCE_OPTION",
- messageParameters = Map("option" -> option)
- )
- }
-
def unorderablePivotColError(pivotCol: Expression): Throwable = {
new AnalysisException(
errorClass = "INCOMPARABLE_PIVOT_COLUMN",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index f9f9b31a25aa..5755ad38fb29 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -2937,4 +2937,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
)
)
}
+
+ def nullDataSourceOption(option: String): Throwable = {
+ new SparkIllegalArgumentException(
+ errorClass = "NULL_DATA_SOURCE_OPTION",
+ messageParameters = Map("option" -> option)
+ )
+ }
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
index 481cc80fe522..e7a4c9b258c1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
@@ -25,7 +25,7 @@ import org.apache.commons.io.FilenameUtils
import org.apache.spark.SparkFiles
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
-import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
+import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.TimestampNTZType
import org.apache.spark.util.Utils
@@ -56,7 +56,7 @@ class JDBCOptions(
// If an option value is `null`, throw a user-friendly error. Keys here cannot be null, as
// scala's implementation of Maps prohibits null keys.
if (v == null) {
- throw QueryCompilationErrors.nullDataSourceOption(k)
+ throw QueryExecutionErrors.nullDataSourceOption(k)
}
properties.setProperty(k, v)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
index bf9e091c5296..1ba7ec78f8bd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
@@ -24,7 +24,7 @@ import scala.util.control.NonFatal
import test.org.apache.spark.sql.connector.catalog.functions.JavaStrLen.JavaStrLenStaticMagic
-import org.apache.spark.{SparkConf, SparkException}
+import org.apache.spark.{SparkConf, SparkException, SparkIllegalArgumentException}
import org.apache.spark.sql.{AnalysisException, DataFrame, ExplainSuiteHelper, QueryTest, Row}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, IndexAlreadyExistsException, NoSuchIndexException}
@@ -396,7 +396,7 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
.option("pushDownOffset", null)
.table("h2.test.employee")
checkError(
- exception = intercept[AnalysisException] {
+ exception = intercept[SparkIllegalArgumentException] {
df.collect()
},
condition = "NULL_DATA_SOURCE_OPTION",
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]