This is an automated email from the ASF dual-hosted git repository.
yao pushed a commit to branch branch-1.7
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/branch-1.7 by this push:
new 08349aea6 [KYUUBI #5750] [Spark] Fix inaccurate COLUMN_SIZE for decimal fields in Spark engine JDBC metadata
08349aea6 is described below
commit 08349aea64dc64539fb17d113b243c908c759a81
Author: waywtdcc <[email protected]>
AuthorDate: Wed Nov 29 11:49:02 2023 +0800
[KYUUBI #5750] [Spark] Fix inaccurate COLUMN_SIZE for decimal fields in Spark engine JDBC metadata
# :mag: Description
## Issue References 🔗
This pull request fixes #
## Describe Your Solution 🔧
Fix the inaccurate COLUMN_SIZE reported for decimal fields by the Spark engine's JDBC metadata. The current implementation returns the type's default size for decimal columns, which is wrong: for decimal(20,3) the reported COLUMN_SIZE is 16, while the actual precision is 20.
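For illustration only (not part of the patch), a minimal Scala sketch against Spark's `org.apache.spark.sql.types.DecimalType` that reproduces the mismatch described above:

```scala
import org.apache.spark.sql.types.DecimalType

val dt = DecimalType(20, 3)
// Before the fix, COLUMN_SIZE fell through to the type's default size,
// which for high-precision decimals is the byte width of the internal
// representation, not the number of digits.
println(dt.defaultSize) // 16
// After the fix, COLUMN_SIZE reports the declared precision.
println(dt.precision)   // 20
```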
## Types of changes :bookmark:
- [X] Bugfix (non-breaking change which fixes an issue)
## Test Plan 🧪
#### Behavior Without This Pull Request :coffin:
#### Behavior With This Pull Request :tada:
#### Related Unit Tests
---
Closes #5750 from waywtdcc/support_spark_decimal2.
Closes #5750
2d288f5aa [waywtdcc] [Spark] Fix inaccurate COLUMN_SIZE for decimal fields in Spark engine JDBC metadata
4286354c2 [waywtdcc] Support streaming reads of SELECT statement results in the Flink engine
e5b74b0c8 [waywtdcc] Support streaming reads of SELECT statement results in the Flink engine
Authored-by: waywtdcc <[email protected]>
Signed-off-by: Kent Yao <[email protected]>
(cherry picked from commit eb9e88be17c4df6b71e87a00e36294029f61b4af)
Signed-off-by: Kent Yao <[email protected]>
---
.../main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala | 2 ++
.../org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala | 1 +
2 files changed, 3 insertions(+)
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala
index 8db46e2b7..3beab47a5 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala
@@ -140,6 +140,8 @@ object SchemaHelper {
     case dt
         if Array(TIMESTAMP_NTZ, DAY_TIME_INTERVAL, YEAR_MONTH_INTERVAL)
           .contains(dt.getClass.getSimpleName) => Some(dt.defaultSize)
+    case dt: DecimalType =>
+      Some(dt.precision)
     case dt @ (BooleanType | _: NumericType | DateType | TimestampType |
         CalendarIntervalType | NullType) =>
       Some(dt.defaultSize)
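Worth noting about this hunk: Spark's `DecimalType` is itself a `NumericType`, so the new case must sit above the `_: NumericType` branch to be reachable. A minimal sketch of that ordering, using a hypothetical `columnSize` helper rather than the patched method:

```scala
import org.apache.spark.sql.types._

def columnSize(dt: DataType): Option[Int] = dt match {
  case d: DecimalType => Some(d.precision)   // matched first: 20 for decimal(20,3)
  case n: NumericType => Some(n.defaultSize) // would otherwise swallow decimals too
  case _              => None
}
```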
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
index af514ceb3..c92206db1 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
@@ -153,6 +153,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with HiveMetadataTests with
       val colSize = rowSet.getInt(COLUMN_SIZE)
       schema(pos).dataType match {
         case StringType | BinaryType | _: ArrayType | _: MapType =>
           assert(colSize === 0)
+        case d: DecimalType => assert(colSize === d.precision)
         case StructType(fields) if fields.length == 1 => assert(colSize === 0)
         case o => assert(colSize === o.defaultSize)
       }
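For a rough end-to-end check, a hedged sketch of how a JDBC client could observe the corrected value through the standard `DatabaseMetaData` API; the URL, schema, table, and column names are hypothetical placeholders, not part of the patch:

```scala
import java.sql.DriverManager

// Assumes a reachable Kyuubi endpoint and a table `t` with a column `c DECIMAL(20,3)`.
val conn = DriverManager.getConnection("jdbc:hive2://localhost:10009/default")
try {
  val rs = conn.getMetaData.getColumns(null, "default", "t", "c")
  while (rs.next()) {
    // With this fix, COLUMN_SIZE is the decimal precision (20), not the default size (16).
    println(rs.getInt("COLUMN_SIZE"))
  }
} finally conn.close()
```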