This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new cf852b284d5 [SPARK-44254][SQL] Move QueryExecutionErrors that are used by DataType to sql/api as DataTypeErrors
cf852b284d5 is described below

commit cf852b284d550f9425ae7893796ae0042be6010f
Author: Rui Wang <[email protected]>
AuthorDate: Sun Jul 2 10:18:43 2023 +0300

    [SPARK-44254][SQL] Move QueryExecutionErrors that are used by DataType to sql/api as DataTypeErrors
    
    ### What changes were proposed in this pull request?
    
    Move some QueryExecutionErrors that are used by data types to `sql/api` and name them DataTypeErrors, so that DataType can use them once DataType lives only in the `sql/api` module.
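
    The pattern repeated throughout the diff: the error constructor moves to the
    new `DataTypeErrors` object in `sql/api`, and the existing
    `QueryExecutionErrors` method keeps its signature but delegates to it, so
    catalyst-side callers are unaffected. A minimal sketch of one such pair,
    abridged from the diff below:

    ```scala
    // sql/api: the error itself now lives next to the data types.
    private[sql] object DataTypeErrors {
      def unsupportedOperationExceptionError(): SparkUnsupportedOperationException = {
        new SparkUnsupportedOperationException(
          errorClass = "_LEGACY_ERROR_TEMP_2225",
          messageParameters = Map.empty)
      }
    }

    // sql/catalyst: QueryExecutionErrors keeps the old entry point and delegates.
    def unsupportedOperationExceptionError(): SparkUnsupportedOperationException = {
      DataTypeErrors.unsupportedOperationExceptionError()
    }
    ```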
    
    ### Why are the changes needed?
    
    Towards a simpler DataType interface.
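
    Once these errors live in `sql/api`, type code can raise them without
    importing anything from catalyst. For example (a sketch mirroring the
    `AnyDataType` change in this diff):

    ```scala
    import org.apache.spark.sql.errors.DataTypeErrors

    // AnyDataType matches any concrete type, so defaultConcreteType must never
    // be invoked; it throws instead of returning a type.
    override private[sql] def defaultConcreteType: DataType =
      throw DataTypeErrors.unsupportedOperationExceptionError()
    ```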
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Existing test
    
    Closes #41794 from amaliujia/datatype_more_refactors.
    
    Authored-by: Rui Wang <[email protected]>
    Signed-off-by: Max Gekk <[email protected]>
---
 sql/api/pom.xml                                    |  5 ++
 .../apache/spark/sql/errors/DataTypeErrors.scala   | 95 ++++++++++++++++++++++
 .../spark/sql/errors/QueryExecutionErrors.scala    | 42 ++--------
 .../apache/spark/sql/types/AbstractDataType.scala  |  4 +-
 .../scala/org/apache/spark/sql/types/Decimal.scala |  9 +-
 .../org/apache/spark/sql/types/DecimalType.scala   |  4 +-
 .../org/apache/spark/sql/types/Metadata.scala      | 10 +--
 .../org/apache/spark/sql/types/ObjectType.scala    |  4 +-
 .../apache/spark/sql/types/UDTRegistration.scala   |  6 +-
 9 files changed, 127 insertions(+), 52 deletions(-)

diff --git a/sql/api/pom.xml b/sql/api/pom.xml
index 9b7917e0343..41a5b85d4c6 100644
--- a/sql/api/pom.xml
+++ b/sql/api/pom.xml
@@ -40,6 +40,11 @@
             <artifactId>spark-common-utils_${scala.binary.version}</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-unsafe_${scala.binary.version}</artifactId>
+            <version>${project.version}</version>
+        </dependency>
     </dependencies>
     <build>
         <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
new file mode 100644
index 00000000000..02e8b12c707
--- /dev/null
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/DataTypeErrors.scala
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.errors
+
+import org.apache.spark.{SparkArithmeticException, SparkException, SparkRuntimeException, SparkUnsupportedOperationException}
+import org.apache.spark.unsafe.types.UTF8String
+
+/**
+ * Object for grouping error messages raised by data types. These errors live
+ * in `sql/api` so that the DataType family of classes can throw them without
+ * depending on the catalyst-side QueryExecutionErrors.
+ */
+private[sql] object DataTypeErrors {
+  def unsupportedOperationExceptionError(): SparkUnsupportedOperationException = {
+    new SparkUnsupportedOperationException(
+      errorClass = "_LEGACY_ERROR_TEMP_2225",
+      messageParameters = Map.empty)
+  }
+
+  def decimalPrecisionExceedsMaxPrecisionError(
+      precision: Int, maxPrecision: Int): SparkArithmeticException = {
+    new SparkArithmeticException(
+      errorClass = "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
+      messageParameters = Map(
+        "precision" -> precision.toString,
+        "maxPrecision" -> maxPrecision.toString
+      ),
+      context = Array.empty,
+      summary = "")
+  }
+
+  def unsupportedRoundingMode(roundMode: BigDecimal.RoundingMode.Value): SparkException = {
+    SparkException.internalError(s"Not supported rounding mode: ${roundMode.toString}.")
+  }
+
+  def outOfDecimalTypeRangeError(str: UTF8String): SparkArithmeticException = {
+    new SparkArithmeticException(
+      errorClass = "NUMERIC_OUT_OF_SUPPORTED_RANGE",
+      messageParameters = Map(
+        "value" -> str.toString),
+      context = Array.empty,
+      summary = "")
+  }
+
+  def unsupportedJavaTypeError(clazz: Class[_]): SparkRuntimeException = {
+    new SparkRuntimeException(
+      errorClass = "_LEGACY_ERROR_TEMP_2121",
+      messageParameters = Map("clazz" -> clazz.toString()))
+  }
+
+  def nullLiteralsCannotBeCastedError(name: String): SparkUnsupportedOperationException = {
+    new SparkUnsupportedOperationException(
+      errorClass = "_LEGACY_ERROR_TEMP_2226",
+      messageParameters = Map(
+        "name" -> name))
+  }
+
+  def notUserDefinedTypeError(name: String, userClass: String): Throwable = {
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2227",
+      messageParameters = Map(
+        "name" -> name,
+        "userClass" -> userClass),
+      cause = null)
+  }
+
+  def cannotLoadUserDefinedTypeError(name: String, userClass: String): Throwable = {
+    new SparkException(
+      errorClass = "_LEGACY_ERROR_TEMP_2228",
+      messageParameters = Map(
+        "name" -> name,
+        "userClass" -> userClass),
+      cause = null)
+  }
+
+  def unsupportedArrayTypeError(clazz: Class[_]): SparkRuntimeException = {
+    new SparkRuntimeException(
+      errorClass = "_LEGACY_ERROR_TEMP_2120",
+      messageParameters = Map("clazz" -> clazz.toString()))
+  }
+}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 74c29cabbe1..630cf2fa55a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -515,7 +515,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
   }
 
   def unsupportedRoundingMode(roundMode: BigDecimal.RoundingMode.Value): SparkException = {
-    SparkException.internalError(s"Not supported rounding mode: ${roundMode.toString}.")
+    DataTypeErrors.unsupportedRoundingMode(roundMode)
   }
 
   def resolveCannotHandleNestedSchema(plan: LogicalPlan): SparkRuntimeException = {
@@ -1279,14 +1279,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
 
   def decimalPrecisionExceedsMaxPrecisionError(
       precision: Int, maxPrecision: Int): SparkArithmeticException = {
-    new SparkArithmeticException(
-      errorClass = "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION",
-      messageParameters = Map(
-        "precision" -> precision.toString,
-        "maxPrecision" -> maxPrecision.toString
-      ),
-      context = Array.empty,
-      summary = "")
+    DataTypeErrors.decimalPrecisionExceedsMaxPrecisionError(precision, maxPrecision)
   }
 
   def outOfDecimalTypeRangeError(str: UTF8String): SparkArithmeticException = {
@@ -1299,15 +1292,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
   }
 
   def unsupportedArrayTypeError(clazz: Class[_]): SparkRuntimeException = {
-    new SparkRuntimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2120",
-      messageParameters = Map("clazz" -> clazz.toString()))
+    DataTypeErrors.unsupportedArrayTypeError(clazz)
   }
 
   def unsupportedJavaTypeError(clazz: Class[_]): SparkRuntimeException = {
-    new SparkRuntimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2121",
-      messageParameters = Map("clazz" -> clazz.toString()))
+    DataTypeErrors.unsupportedJavaTypeError(clazz)
   }
 
   def failedParsingStructTypeError(raw: String): SparkRuntimeException = {
@@ -2183,34 +2172,19 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
   }
 
   def unsupportedOperationExceptionError(): SparkUnsupportedOperationException = {
-    new SparkUnsupportedOperationException(
-      errorClass = "_LEGACY_ERROR_TEMP_2225",
-      messageParameters = Map.empty)
+    DataTypeErrors.unsupportedOperationExceptionError()
   }
 
   def nullLiteralsCannotBeCastedError(name: String): SparkUnsupportedOperationException = {
-    new SparkUnsupportedOperationException(
-      errorClass = "_LEGACY_ERROR_TEMP_2226",
-      messageParameters = Map(
-        "name" -> name))
+    DataTypeErrors.nullLiteralsCannotBeCastedError(name)
   }
 
   def notUserDefinedTypeError(name: String, userClass: String): Throwable = {
-    new SparkException(
-      errorClass = "_LEGACY_ERROR_TEMP_2227",
-      messageParameters = Map(
-        "name" -> name,
-        "userClass" -> userClass),
-      cause = null)
+    DataTypeErrors.notUserDefinedTypeError(name, userClass)
   }
 
   def cannotLoadUserDefinedTypeError(name: String, userClass: String): Throwable = {
-    new SparkException(
-      errorClass = "_LEGACY_ERROR_TEMP_2228",
-      messageParameters = Map(
-        "name" -> name,
-        "userClass" -> userClass),
-      cause = null)
+    DataTypeErrors.cannotLoadUserDefinedTypeError(name, userClass)
   }
 
   def notPublicClassError(name: String): SparkUnsupportedOperationException = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
index c1483e719b5..67f634f8379 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/AbstractDataType.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.types
 
 import org.apache.spark.annotation.Stable
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.DataTypeErrors
 
 /**
  * A non-concrete data type, reserved for internal uses.
@@ -107,7 +107,7 @@ protected[sql] object AnyDataType extends AbstractDataType with Serializable {
   // Note that since AnyDataType matches any concrete types, defaultConcreteType should never
   // be invoked.
   override private[sql] def defaultConcreteType: DataType =
-    throw QueryExecutionErrors.unsupportedOperationExceptionError()
+    throw DataTypeErrors.unsupportedOperationExceptionError()
 
   override private[sql] def simpleString: String = "any"
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 2c0b6677541..f1529285294 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -23,6 +23,7 @@ import scala.util.Try
 
 import org.apache.spark.annotation.Unstable
 import org.apache.spark.sql.catalyst.trees.SQLQueryContext
+import org.apache.spark.sql.errors.DataTypeErrors
 import org.apache.spark.sql.errors.QueryExecutionErrors
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.unsafe.types.UTF8String
@@ -120,7 +121,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
     DecimalType.checkNegativeScale(scale)
     this.decimalVal = decimal.setScale(scale, ROUND_HALF_UP)
     if (decimalVal.precision > precision) {
-      throw QueryExecutionErrors.decimalPrecisionExceedsMaxPrecisionError(
+      throw DataTypeErrors.decimalPrecisionExceedsMaxPrecisionError(
         decimalVal.precision, precision)
     }
     this.longVal = 0L
@@ -382,7 +383,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
             case ROUND_FLOOR => if (lv < 0) -1L else 0L
             case ROUND_CEILING => if (lv > 0) 1L else 0L
             case ROUND_HALF_UP | ROUND_HALF_EVEN => 0L
-            case _ => throw QueryExecutionErrors.unsupportedRoundingMode(roundMode)
+            case _ => throw DataTypeErrors.unsupportedRoundingMode(roundMode)
           }
         } else {
           val pow10diff = POW_10(diff)
@@ -408,7 +409,7 @@ final class Decimal extends Ordered[Decimal] with Serializable {
                 lv += (if (droppedDigits < 0) -1L else 1L)
               }
             case _ =>
-              throw QueryExecutionErrors.unsupportedRoundingMode(roundMode)
+              throw DataTypeErrors.unsupportedRoundingMode(roundMode)
           }
         }
       } else if (scale > _scale) {
@@ -622,7 +623,7 @@ object Decimal {
       // For example: Decimal("6.0790316E+25569151")
       if (numDigitsInIntegralPart(bigDecimal) > DecimalType.MAX_PRECISION &&
           !SQLConf.get.allowNegativeScaleOfDecimalEnabled) {
-        throw QueryExecutionErrors.outOfDecimalTypeRangeError(str)
+        throw DataTypeErrors.outOfDecimalTypeRangeError(str)
       } else {
         Decimal(bigDecimal)
       }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
index 9782f140335..49ac217f1bd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala
@@ -22,7 +22,7 @@ import java.util.Locale
 import scala.annotation.tailrec
 
 import org.apache.spark.annotation.Stable
-import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
+import org.apache.spark.sql.errors.{DataTypeErrors, QueryCompilationErrors}
 import org.apache.spark.sql.internal.SQLConf
 
 /**
@@ -48,7 +48,7 @@ case class DecimalType(precision: Int, scale: Int) extends FractionalType {
   }
 
   if (precision > DecimalType.MAX_PRECISION) {
-    throw QueryExecutionErrors.decimalPrecisionExceedsMaxPrecisionError(
+    throw DataTypeErrors.decimalPrecisionExceedsMaxPrecisionError(
       precision, DecimalType.MAX_PRECISION)
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
index 3e05eda3443..4e7ac996d31 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Metadata.scala
@@ -23,7 +23,7 @@ import org.json4s._
 import org.json4s.jackson.JsonMethods._
 
 import org.apache.spark.annotation.Stable
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.DataTypeErrors
 
 
 /**
@@ -163,13 +163,13 @@ object Metadata {
               builder.putMetadataArray(
                 key, value.asInstanceOf[List[JObject]].map(fromJObject).toArray)
             case other =>
-              throw QueryExecutionErrors.unsupportedArrayTypeError(other.getClass)
+              throw DataTypeErrors.unsupportedArrayTypeError(other.getClass)
           }
         }
       case (key, JNull) =>
         builder.putNull(key)
       case (key, other) =>
-        throw QueryExecutionErrors.unsupportedJavaTypeError(other.getClass)
+        throw DataTypeErrors.unsupportedJavaTypeError(other.getClass)
     }
     builder.build()
   }
@@ -196,7 +196,7 @@ object Metadata {
       case x: Metadata =>
         toJsonValue(x.map)
       case other =>
-        throw QueryExecutionErrors.unsupportedJavaTypeError(other.getClass)
+        throw DataTypeErrors.unsupportedJavaTypeError(other.getClass)
     }
   }
 
@@ -223,7 +223,7 @@ object Metadata {
       case null =>
         0
       case other =>
-        throw QueryExecutionErrors.unsupportedJavaTypeError(other.getClass)
+        throw DataTypeErrors.unsupportedJavaTypeError(other.getClass)
     }
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala
index 73a8a65c709..85542167854 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ObjectType.scala
@@ -17,11 +17,11 @@
 
 package org.apache.spark.sql.types
 
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.DataTypeErrors
 
 object ObjectType extends AbstractDataType {
   override private[sql] def defaultConcreteType: DataType =
-    throw QueryExecutionErrors.nullLiteralsCannotBeCastedError(ObjectType.simpleString)
+    throw DataTypeErrors.nullLiteralsCannotBeCastedError(ObjectType.simpleString)
 
   override private[sql] def acceptsType(other: DataType): Boolean = other match {
     case ObjectType(_) => true
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala
index a6cd77b99c9..293687a4d61 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UDTRegistration.scala
@@ -21,7 +21,7 @@ import scala.collection.mutable
 
 import org.apache.spark.annotation.{DeveloperApi, Since}
 import org.apache.spark.internal.Logging
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.DataTypeErrors
 import org.apache.spark.util.Utils
 
 /**
@@ -78,10 +78,10 @@ object UDTRegistration extends Serializable with Logging {
         if (classOf[UserDefinedType[_]].isAssignableFrom(udtClass)) {
           udtClass
         } else {
-          throw QueryExecutionErrors.notUserDefinedTypeError(udtClass.getName, userClass)
+          throw DataTypeErrors.notUserDefinedTypeError(udtClass.getName, userClass)
         }
       } else {
-        throw QueryExecutionErrors.cannotLoadUserDefinedTypeError(udtClassName, userClass)
+        throw DataTypeErrors.cannotLoadUserDefinedTypeError(udtClassName, userClass)
       }
     }
   }

