This is an automated email from the ASF dual-hosted git repository.
xushiyan pushed a commit to branch rc3-patched-for-test
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/rc3-patched-for-test by this push:
new b41d66b4a1 use tail to get VectorizedParquetRecordReader ctor
b41d66b4a1 is described below
commit b41d66b4a1c53237c7df700db165ee1378b78bfb
Author: Raymond Xu <[email protected]>
AuthorDate: Thu Apr 21 20:45:22 2022 +0800
use tail to get VectorizedParquetRecordReader ctor
---
.../datasources/parquet/Spark32HoodieParquetFileFormat.scala | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala
index ccd93b6fd3..5d9d2a737d 100644
--- a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala
+++ b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala
@@ -412,7 +412,11 @@ object Spark32HoodieParquetFileFormat {
   }
 
   private def createVectorizedParquetRecordReader(args: Any*): VectorizedParquetRecordReader = {
-    val ctor = classOf[VectorizedParquetRecordReader].getConstructors.head
+    // NOTE: ParquetReadSupport ctor args contain Scala enum, therefore we can't look it
+    //       up by arg types, and have to instead rely on relative order of ctors
+    // NOTE: VectorizedParquetRecordReader has 2 ctors and the one we need is 2nd in the array
+    // This is a hacky workaround for the fixed version of Class.
+    val ctor = classOf[VectorizedParquetRecordReader].getConstructors.tail.head
     ctor.newInstance(args.map(_.asInstanceOf[AnyRef]): _*)
       .asInstanceOf[VectorizedParquetRecordReader]
   }
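
For context, a minimal, self-contained sketch of the reflection issue the NOTE in the patch describes. The Mode, DemoReader, and CtorLookupDemo names below are hypothetical stand-ins, not the actual Hudi or Spark classes: when a ctor parameter is a Scala enum value, the ctor can't be resolved reliably from the runtime classes of the arguments, so the ctor is instead picked by position (as the patch does) or by arity, and invoked with the arguments boxed to AnyRef.

    // Hypothetical stand-ins for illustration only (not Hudi/Spark code).
    object Mode extends Enumeration { val Fast, Safe = Value }

    class DemoReader(val tag: String) {
      // Auxiliary ctor whose parameter is a Scala enum value, mirroring the
      // ParquetReadSupport ctor situation described in the patch above.
      def this(tag: String, mode: Mode.Value) = this(s"$tag/$mode")
    }

    object CtorLookupDemo {
      def main(args: Array[String]): Unit = {
        // Looking the ctor up by the runtime classes of the arguments fails:
        // Mode.Fast.getClass is scala.Enumeration$Val, not the declared
        // parameter type scala.Enumeration$Value, so getConstructor would
        // throw NoSuchMethodException. Hence the fallback to picking a ctor
        // by its relative position in getConstructors.
        val ctors = classOf[DemoReader].getConstructors

        // Slightly more robust variant of "take the 2nd ctor": select by
        // arity instead of relying on the unspecified order of the array.
        val ctor = ctors.find(_.getParameterCount == 2).get

        // Box the arguments to AnyRef and invoke reflectively, as the patch does.
        val reader = ctor
          .newInstance(Seq("parquet", Mode.Fast).map(_.asInstanceOf[AnyRef]): _*)
          .asInstanceOf[DemoReader]
        println(reader.tag) // prints "parquet/Fast"
      }
    }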