This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
new 29c807750 [hotfix] Fix unspecified value parameter when using PaimonMetadataColumn.get method (#4211)
29c807750 is described below
commit 29c807750b0cd78591fe9077efc1f4a4e1fdeb72
Author: Kerwin <[email protected]>
AuthorDate: Thu Sep 19 11:51:04 2024 +0800
[hotfix] Fix unspecified value parameter when using PaimonMetadataColumn.get method (#4211)
---
.../src/main/scala/org/apache/paimon/spark/PaimonBaseScan.scala | 5 ++++-
.../src/main/scala/org/apache/paimon/spark/PaimonStatistics.scala | 2 +-
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonBaseScan.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonBaseScan.scala
index cb43560dc..fb494a492 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonBaseScan.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonBaseScan.scala
@@ -113,12 +113,15 @@ abstract class PaimonBaseScan(
inputPartitions
}
+ final def partitionType: StructType = {
+ SparkTypeUtils.toSparkPartitionType(table)
+ }
+
override def readSchema(): StructType = {
StructType(requiredTableFields ++ metadataFields)
}
override def toBatch: Batch = {
- val partitionType = SparkTypeUtils.toSparkPartitionType(table)
val metadataColumns =
metadataFields.map(field => PaimonMetadataColumn.get(field.name, partitionType))
PaimonBatch(lazyInputPartitions, readBuilder, metadataColumns)
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonStatistics.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonStatistics.scala
index c64ec7310..963d9fadd 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonStatistics.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonStatistics.scala
@@ -75,7 +75,7 @@ case class PaimonStatistics[T <: PaimonBaseScan](scan: T) extends Statistics {
val metadataSchemaSize = scan.metadataFields.map {
field =>
- val dataField = PaimonMetadataColumn.get(field.name).toPaimonDataField
+    val dataField = PaimonMetadataColumn.get(field.name, scan.partitionType).toPaimonDataField
getSizeForField(dataField)
}.sum
val metadataSizeInBytes = paimonStats.mergedRecordCount().getAsLong * metadataSchemaSize