This is an automated email from the ASF dual-hosted git repository. maxgekk pushed a commit to branch branch-3.3 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.3 by this push: new a4d40e8a355 [SPARK-45100][SQL][3.3] Fix an internal error from `reflect()` on `NULL` class and method a4d40e8a355 is described below commit a4d40e8a355f451c6340dec0c90a332434433a75 Author: Max Gekk <max.g...@gmail.com> AuthorDate: Fri Sep 8 18:59:22 2023 +0300 [SPARK-45100][SQL][3.3] Fix an internal error from `reflect()` on `NULL` class and method ### What changes were proposed in this pull request? In the PR, I propose to check that the `class` and `method` arguments are not NULL in `CallMethodViaReflection`. And if they are, throw an `AnalysisException` with the new error class `DATATYPE_MISMATCH.UNEXPECTED_NULL`. This is a backport of https://github.com/apache/spark/pull/42849. ### Why are the changes needed? To fix the issue demonstrated by the example: ```sql $ spark-sql (default)> select reflect('java.util.UUID', CAST(NULL AS STRING)); [INTERNAL_ERROR] The Spark SQL phase analysis failed with an internal error. You hit a bug in Spark or the Spark plugins you use. Please, report this bug to the corresponding communities or vendors, and provide the full stack trace. ``` ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? By running new test: ``` $ build/sbt "test:testOnly *.MiscFunctionsSuite" ``` ### Was this patch authored or co-authored using generative AI tooling? No. Authored-by: Max Gekk <max.gekk@gmail.com> (cherry picked from commit fd424caf6c46e7030ac2deb2afbe3f4a5fc1095c) Closes #42856 from MaxGekk/fix-internal-error-in-reflect-3.3. 
Authored-by: Max Gekk <max.g...@gmail.com> Signed-off-by: Max Gekk <max.g...@gmail.com> --- .../spark/sql/catalyst/expressions/CallMethodViaReflection.scala | 2 ++ .../src/test/scala/org/apache/spark/sql/MiscFunctionsSuite.scala | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala index 7cb830d1156..9764d9db7f0 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala @@ -65,6 +65,8 @@ case class CallMethodViaReflection(children: Seq[Expression]) } else if (!children.take(2).forall(e => e.dataType == StringType && e.foldable)) { // The first two arguments must be string type. TypeCheckFailure("first two arguments should be string literals") + } else if (children.take(2).exists(_.eval() == null)) { + TypeCheckFailure("first two arguments must be non-NULL") } else if (!classExists) { TypeCheckFailure(s"class $className not found") } else if (children.slice(2, children.length) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/MiscFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/MiscFunctionsSuite.scala index 37ba52023dd..18262ccd407 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/MiscFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/MiscFunctionsSuite.scala @@ -34,6 +34,14 @@ class MiscFunctionsSuite extends QueryTest with SharedSparkSession { s"reflect('$className', 'method1', a, b)", s"java_method('$className', 'method1', a, b)"), Row("m1one", "m1one")) + val e1 = intercept[AnalysisException] { + df.selectExpr("reflect(cast(null as string), 'fromString', a)") + } + assert(e1.getMessage.contains("first two arguments must be non-NULL")) + val e2 = 
intercept[AnalysisException] { + df.selectExpr("reflect('java.util.UUID', cast(null as string), a)") + } + assert(e2.getMessage.contains("first two arguments must be non-NULL")) } test("version") { --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org