This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 563d00584 [KYUUBI #6250][FOLLOWUP] Simplify code after dropping Spark 3.1
563d00584 is described below
commit 563d005846b18d97dbf285ce929aa86e92c86d3c
Author: Cheng Pan <[email protected]>
AuthorDate: Tue Apr 9 19:05:55 2024 +0800
[KYUUBI #6250][FOLLOWUP] Simplify code after dropping Spark 3.1
# :mag: Description
As title: now that Spark 3.1 support has been dropped, replace the class-name-based checks for Spark 3.2+ data types with direct pattern matches, and remove the now-unneeded `SparkDataTypeHelper` shim.
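For illustration, a minimal standalone sketch of the two styles (not the exact engine code; the `describe` helper below is hypothetical). Per the removed comments, `DayTimeIntervalType`/`YearMonthIntervalType` are Spark 3.2.0 types, and SPARK-7768 (fixed in 3.2.0) promoted `UserDefinedType` to DeveloperApi, so reflection is no longer needed:

```scala
import org.apache.spark.sql.types._

// Old style: identify Spark 3.2+ types reflectively by simple class name,
// so the code could still compile against Spark 3.1, where the interval
// types did not exist and UserDefinedType was not publicly accessible.
def isDayTimeInterval(dt: DataType): Boolean =
  dt.getClass.getSimpleName == "DayTimeIntervalType"

// New style: with Spark >= 3.2 guaranteed, match the types directly.
def describe(dt: DataType): String = dt match {
  case _: DayTimeIntervalType   => "interval day to second"
  case _: YearMonthIntervalType => "interval year to month"
  case _: UserDefinedType[_]    => "user-defined type"
  case other                    => other.catalogString
}
```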
## Types of changes :bookmark:
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)
## Test Plan 🧪
Pass GHA.
---
# Checklist 📝
- [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
**Be nice. Be informative.**
Closes #6277 from pan3793/6273-followup.
Closes #6250
2b52de6c0 [Cheng Pan] [KYUUBI #6250][FOLLOWUP] Simplify code after dropping Spark 3.1
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
.../kyuubi/engine/spark/schema/SchemaHelper.scala | 29 ++++++----------------
.../spark/sql/kyuubi/SparkDataTypeHelper.scala | 29 ----------------------
2 files changed, 7 insertions(+), 51 deletions(-)
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala
index 464643122..b6334a01e 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala
@@ -21,7 +21,6 @@ import java.util.Collections
import scala.collection.JavaConverters._
-import org.apache.spark.sql.kyuubi.SparkDataTypeHelper
import org.apache.spark.sql.types._
import org.apache.kyuubi.shaded.hive.service.rpc.thrift._
@@ -33,16 +32,6 @@ object SchemaHelper {
*/
final val TIMESTAMP_NTZ = "TimestampNTZType$"
- /**
- * Spark 3.2.0 DataType DayTimeIntervalType's class name.
- */
- final val DAY_TIME_INTERVAL = "DayTimeIntervalType"
-
- /**
- * Spark 3.2.0 DataType YearMonthIntervalType's class name.
- */
- final val YEAR_MONTH_INTERVAL = "YearMonthIntervalType"
-
def toTTypeId(typ: DataType): TTypeId = typ match {
case NullType => TTypeId.NULL_TYPE
case BooleanType => TTypeId.BOOLEAN_TYPE
@@ -59,15 +48,12 @@ object SchemaHelper {
case ntz if ntz.getClass.getSimpleName.equals(TIMESTAMP_NTZ) =>
TTypeId.TIMESTAMP_TYPE
case BinaryType => TTypeId.BINARY_TYPE
case CalendarIntervalType => TTypeId.STRING_TYPE
- case dt if dt.getClass.getSimpleName.equals(DAY_TIME_INTERVAL) =>
- TTypeId.INTERVAL_DAY_TIME_TYPE
- case ym if ym.getClass.getSimpleName.equals(YEAR_MONTH_INTERVAL) =>
- TTypeId.INTERVAL_YEAR_MONTH_TYPE
+ case _: DayTimeIntervalType => TTypeId.INTERVAL_DAY_TIME_TYPE
+ case _: YearMonthIntervalType => TTypeId.INTERVAL_YEAR_MONTH_TYPE
case _: ArrayType => TTypeId.ARRAY_TYPE
case _: MapType => TTypeId.MAP_TYPE
case _: StructType => TTypeId.STRUCT_TYPE
- // SPARK-7768(fixed in 3.2.0) promoted UserDefinedType to DeveloperApi
- case _ if SparkDataTypeHelper.isUserDefinedType(typ) =>
- TTypeId.USER_DEFINED_TYPE
+ case _: UserDefinedType[_] => TTypeId.USER_DEFINED_TYPE
case other =>
throw new IllegalArgumentException(s"Unrecognized type name: ${other.catalogString}")
}
@@ -140,13 +126,12 @@ object SchemaHelper {
* For array, map, string, and binaries, the column size is variable, return null as unknown.
*/
def getColumnSize(sparkType: DataType): Option[Int] = sparkType match {
- case dt
- if Array(TIMESTAMP_NTZ, DAY_TIME_INTERVAL, YEAR_MONTH_INTERVAL)
- .contains(dt.getClass.getSimpleName) => Some(dt.defaultSize)
+ case dt if dt.getClass.getSimpleName == TIMESTAMP_NTZ =>
+ Some(dt.defaultSize)
case dt: DecimalType =>
Some(dt.precision)
- case dt @ (BooleanType | _: NumericType | DateType | TimestampType |
- CalendarIntervalType | NullType) =>
+ case dt @ (BooleanType | _: NumericType | DateType | TimestampType | NullType |
+ CalendarIntervalType | _: DayTimeIntervalType | _: YearMonthIntervalType) =>
Some(dt.defaultSize)
case StructType(fields) =>
val sizeArr = fields.map(f => getColumnSize(f.dataType))
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/kyuubi/SparkDataTypeHelper.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/kyuubi/SparkDataTypeHelper.scala
deleted file mode 100644
index 11f8be076..000000000
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/kyuubi/SparkDataTypeHelper.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.kyuubi
-
-import org.apache.spark.sql.types.{DataType, UserDefinedType}
-
-object SparkDataTypeHelper {
- def isUserDefinedType(typ: DataType): Boolean = {
- typ match {
- case _: UserDefinedType[_] => true
- case _ => false
- }
- }
-}