This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 1a90512f605 [SPARK-39187][SQL] Remove `SparkIllegalStateException`
1a90512f605 is described below

commit 1a90512f605c490255f7b38215c207e64621475b
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Mon May 16 08:24:12 2022 +0300

    [SPARK-39187][SQL] Remove `SparkIllegalStateException`
    
    ### What changes were proposed in this pull request?
    Remove `SparkIllegalStateException` and replace it with `IllegalStateException` where it was used.
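
    A minimal sketch of the replacement pattern (mirroring the `ArrowUtils` change in the diff below; the `requireTimeZoneId` helper is hypothetical, for illustration only):
    ```scala
    // Hypothetical helper showing the new style: an internal invariant
    // violation now throws the plain JDK exception directly.
    def requireTimeZoneId(timeZoneId: String): String = {
      if (timeZoneId == null) {
        // Before: new SparkIllegalStateException("INTERNAL_ERROR", Array(...))
        throw new IllegalStateException("Missing timezoneId where it is mandatory.")
      }
      timeZoneId
    }
    ```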
    
    ### Why are the changes needed?
    To improve code maintenance and to be consistent with other places where `IllegalStateException` is used for illegal states (for instance, see https://github.com/apache/spark/pull/36524).
    After the PR https://github.com/apache/spark/pull/36500, the exception is substituted by `SparkException` with the `INTERNAL_ERROR` error class.
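
    For reference, the substitution from that PR looks roughly like this (a sketch only, not part of this diff; the exact `SparkException` constructor arguments are an assumption):
    ```scala
    import org.apache.spark.SparkException

    // Assumed constructor shape: SparkException(errorClass, messageParameters, cause).
    def internalError(msg: String): Throwable =
      new SparkException(
        errorClass = "INTERNAL_ERROR",
        messageParameters = Array(msg),
        cause = null)
    ```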
    
    ### Does this PR introduce _any_ user-facing change?
    No. Users shouldn't face this exception in regular cases.
    
    ### How was this patch tested?
    By running the affected test suites:
    ```
    $ build/sbt "sql/test:testOnly *QueryExecutionErrorsSuite*"
    $ build/sbt "test:testOnly *ArrowUtilsSuite"
    ```
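
    Condensed from the suite changes in the diff below, the affected tests now intercept the JDK exception directly (`intercept` comes from ScalaTest's `Assertions`, mixed into `SparkFunSuite`):
    ```scala
    import org.apache.spark.sql.types.{StructType, TimestampType}
    import org.apache.spark.sql.util.ArrowUtils

    // Converting TimestampType to Arrow without a time zone now fails
    // with the JDK exception instead of SparkIllegalStateException.
    val e = intercept[IllegalStateException] {
      ArrowUtils.toArrowSchema(new StructType().add("value", TimestampType), null)
    }
    assert(e.getMessage.contains("timezoneId"))
    ```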
    
    Closes #36550 from MaxGekk/remove-SparkIllegalStateException.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../main/scala/org/apache/spark/SparkException.scala   | 12 ------------
 .../spark/sql/catalyst/analysis/CheckAnalysis.scala    |  6 +++---
 .../apache/spark/sql/errors/QueryExecutionErrors.scala | 16 +++-------------
 .../scala/org/apache/spark/sql/util/ArrowUtils.scala   |  9 +++------
 .../org/apache/spark/sql/util/ArrowUtilsSuite.scala    |  2 +-
 .../spark/sql/errors/QueryExecutionErrorsSuite.scala   | 18 ++++--------------
 6 files changed, 14 insertions(+), 49 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkException.scala b/core/src/main/scala/org/apache/spark/SparkException.scala
index a846e6c46a2..4feea6151b9 100644
--- a/core/src/main/scala/org/apache/spark/SparkException.scala
+++ b/core/src/main/scala/org/apache/spark/SparkException.scala
@@ -151,18 +151,6 @@ private[spark] class SparkFileAlreadyExistsException(
   override def getErrorClass: String = errorClass
 }
 
-/**
- * Illegal state exception thrown from Spark with an error class.
- */
-private[spark] class SparkIllegalStateException(
-    errorClass: String,
-    messageParameters: Array[String])
-  extends IllegalStateException(
-    SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
-
-  override def getErrorClass: String = errorClass
-}
-
 /**
  * File not found exception thrown from Spark with an error class.
  */
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 1e9c431292b..f827e9effe9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.trees.TreeNodeTag
 import org.apache.spark.sql.catalyst.util.{CharVarcharUtils, StringUtils, TypeUtils}
 import org.apache.spark.sql.connector.catalog.{LookupCatalog, SupportsPartitionManagement}
-import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
+import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.util.SchemaUtils
@@ -582,8 +582,8 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog {
                 |in operator ${operator.simpleString(SQLConf.get.maxToStringFields)}
                """.stripMargin)
 
-          case _: UnresolvedHint =>
-            throw QueryExecutionErrors.logicalHintOperatorNotRemovedDuringAnalysisError
+          case _: UnresolvedHint => throw new IllegalStateException(
+            "Logical hint operator should be removed during analysis.")
 
           case f @ Filter(condition, _)
            if PlanHelper.specialExpressionsInUnsupportedOperator(f).nonEmpty =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 7ed4fc3574d..b7239d3ff60 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -34,7 +34,7 @@ import org.apache.hadoop.fs.permission.FsPermission
 import org.codehaus.commons.compiler.CompileException
 import org.codehaus.janino.InternalCompilerException
 
-import org.apache.spark.{Partition, SparkArithmeticException, SparkArrayIndexOutOfBoundsException, SparkClassNotFoundException, SparkConcurrentModificationException, SparkDateTimeException, SparkException, SparkFileAlreadyExistsException, SparkFileNotFoundException, SparkIllegalArgumentException, SparkIllegalStateException, SparkIndexOutOfBoundsException, SparkNoSuchElementException, SparkNoSuchMethodException, SparkNumberFormatException, SparkRuntimeException, SparkSecurityException, Sp [...]
+import org.apache.spark.{Partition, SparkArithmeticException, SparkArrayIndexOutOfBoundsException, SparkClassNotFoundException, SparkConcurrentModificationException, SparkDateTimeException, SparkException, SparkFileAlreadyExistsException, SparkFileNotFoundException, SparkIllegalArgumentException, SparkIndexOutOfBoundsException, SparkNoSuchElementException, SparkNoSuchMethodException, SparkNumberFormatException, SparkRuntimeException, SparkSecurityException, SparkSQLException, SparkSQLFea [...]
 import org.apache.spark.executor.CommitDeniedException
 import org.apache.spark.launcher.SparkLauncher
 import org.apache.spark.memory.SparkOutOfMemoryError
@@ -68,17 +68,6 @@ import org.apache.spark.util.CircularBuffer
  */
 object QueryExecutionErrors extends QueryErrorsBase {
 
-  def internalMissingTimezoneIdError(): Throwable = {
-    new SparkIllegalStateException(errorClass = "INTERNAL_ERROR",
-      messageParameters = Array("Missing timezoneId where it is mandatory."))
-  }
-
-  def logicalHintOperatorNotRemovedDuringAnalysisError(): Throwable = {
-    new SparkIllegalStateException(errorClass = "INTERNAL_ERROR",
-      messageParameters = Array(
-        "Internal error: logical hint operator should have been removed during 
analysis"))
-  }
-
   def cannotEvaluateExpressionError(expression: Expression): Throwable = {
     new SparkUnsupportedOperationException(errorClass = "INTERNAL_ERROR",
       messageParameters = Array(s"Cannot evaluate expression: $expression"))
@@ -135,7 +124,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
   }
 
   def cannotParseDecimalError(): Throwable = {
-    new SparkIllegalStateException(errorClass = "CANNOT_PARSE_DECIMAL",
+    new SparkRuntimeException(
+      errorClass = "CANNOT_PARSE_DECIMAL",
       messageParameters = Array.empty)
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/util/ArrowUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/util/ArrowUtils.scala
index 8fa1879548c..b8f77c3646c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/util/ArrowUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/util/ArrowUtils.scala
@@ -47,12 +47,9 @@ private[sql] object ArrowUtils {
     case BinaryType => ArrowType.Binary.INSTANCE
     case DecimalType.Fixed(precision, scale) => new ArrowType.Decimal(precision, scale)
     case DateType => new ArrowType.Date(DateUnit.DAY)
-    case TimestampType =>
-      if (timeZoneId == null) {
-        throw QueryExecutionErrors.internalMissingTimezoneIdError()
-      } else {
-        new ArrowType.Timestamp(TimeUnit.MICROSECOND, timeZoneId)
-      }
+    case TimestampType if timeZoneId == null =>
+      throw new IllegalStateException("Missing timezoneId where it is mandatory.")
+    case TimestampType => new ArrowType.Timestamp(TimeUnit.MICROSECOND, timeZoneId)
     case TimestampNTZType =>
       new ArrowType.Timestamp(TimeUnit.MICROSECOND, null)
     case NullType => ArrowType.Null.INSTANCE
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/util/ArrowUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/util/ArrowUtilsSuite.scala
index 03178f2b761..6dd02afe19b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/util/ArrowUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/util/ArrowUtilsSuite.scala
@@ -50,7 +50,7 @@ class ArrowUtilsSuite extends SparkFunSuite {
     roundtrip(DateType)
     roundtrip(YearMonthIntervalType())
     roundtrip(DayTimeIntervalType())
-    val tsExMsg = intercept[org.apache.spark.SparkIllegalStateException] {
+    val tsExMsg = intercept[IllegalStateException] {
       roundtrip(TimestampType)
     }
     assert(tsExMsg.getMessage.contains("timezoneId"))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index cf1551298a8..bbd7ec8a051 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -27,7 +27,7 @@ import org.apache.hadoop.fs.permission.FsPermission
 import org.mockito.Mockito.{mock, when}
 import test.org.apache.spark.sql.connector.JavaSimpleWritableDataSource
 
-import org.apache.spark.{SparkArithmeticException, SparkClassNotFoundException, SparkException, SparkIllegalArgumentException, SparkIllegalStateException, SparkRuntimeException, SparkSecurityException, SparkSQLException, SparkUnsupportedOperationException, SparkUpgradeException}
+import org.apache.spark.{SparkArithmeticException, SparkClassNotFoundException, SparkException, SparkIllegalArgumentException, SparkRuntimeException, SparkSecurityException, SparkSQLException, SparkUnsupportedOperationException, SparkUpgradeException}
 import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, SaveMode}
 import org.apache.spark.sql.catalyst.util.BadRecordException
 import org.apache.spark.sql.connector.SimpleWritableDataSource
@@ -39,8 +39,7 @@ import org.apache.spark.sql.functions.{lit, lower, struct, sum, udf}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy.EXCEPTION
 import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcDialects}
-import org.apache.spark.sql.types.{DataType, DecimalType, MetadataBuilder, StructType, TimestampType}
-import org.apache.spark.sql.util.ArrowUtils
+import org.apache.spark.sql.types.{DataType, DecimalType, MetadataBuilder, StructType}
 import org.apache.spark.util.Utils
 
 class QueryExecutionErrorsSuite
@@ -272,15 +271,6 @@ class QueryExecutionErrorsSuite
     }
   }
 
-  test("INTERNAL_ERROR: timeZoneId not specified while converting 
TimestampType to Arrow") {
-    checkErrorClass(
-      exception = intercept[SparkIllegalStateException] {
-        ArrowUtils.toArrowSchema(new StructType().add("value", TimestampType), null)
-      },
-      errorClass = "INTERNAL_ERROR",
-      msg = "Missing timezoneId where it is mandatory.")
-  }
-
   test("UNSUPPORTED_FEATURE - SPARK-36346: can't read Timestamp as 
TimestampNTZ") {
     withTempPath { file =>
       sql("select timestamp_ltz'2019-03-21 
00:02:03'").write.orc(file.getCanonicalPath)
@@ -358,10 +348,10 @@ class QueryExecutionErrorsSuite
     assert(e3.getCause.isInstanceOf[BadRecordException])
 
     val e4 = e3.getCause.asInstanceOf[BadRecordException]
-    assert(e4.getCause.isInstanceOf[SparkIllegalStateException])
+    assert(e4.getCause.isInstanceOf[SparkRuntimeException])
 
     checkErrorClass(
-      exception = e4.getCause.asInstanceOf[SparkIllegalStateException],
+      exception = e4.getCause.asInstanceOf[SparkRuntimeException],
       errorClass = "CANNOT_PARSE_DECIMAL",
       msg = "Cannot parse decimal",
       sqlState = Some("42000"))

