This is an automated email from the ASF dual-hosted git repository.

xushiyan pushed a commit to branch rc3-patched-for-test
in repository https://gitbox.apache.org/repos/asf/hudi.git

commit 93f9935fce8616ff890c93c956c59fc753c6ac9d
Author: Alexey Kudinkin <[email protected]>
AuthorDate: Wed Apr 20 15:32:48 2022 -0700

    Fallback to pick ctors positionally as opposed to looking them up
---
 .../datasources/parquet/Spark32HoodieParquetFileFormat.scala | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala
index 351203ca58..ccd93b6fd3 100644
--- a/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala
+++ b/hudi-spark-datasource/hudi-spark3/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/Spark32HoodieParquetFileFormat.scala
@@ -411,8 +411,11 @@ object Spark32HoodieParquetFileFormat {
       .asInstanceOf[ParquetReadSupport]
   }
 
-  private def createVectorizedParquetRecordReader(args: Any*): VectorizedParquetRecordReader =
-    ReflectionUtils.newInstanceUnchecked(classOf[VectorizedParquetRecordReader], args.map(_.asInstanceOf[AnyRef]): _*)
+  private def createVectorizedParquetRecordReader(args: Any*): VectorizedParquetRecordReader = {
+    val ctor = classOf[VectorizedParquetRecordReader].getConstructors.head
+    ctor.newInstance(args.map(_.asInstanceOf[AnyRef]): _*)
+      .asInstanceOf[VectorizedParquetRecordReader]
+  }
 
   def pruneInternalSchema(internalSchemaStr: String, requiredSchema: StructType): String = {
     val querySchemaOption = SerDeHelper.fromJson(internalSchemaStr)
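The patch above swaps a reflective constructor lookup (ReflectionUtils.newInstanceUnchecked, which resolves a constructor by argument types) for simply taking the class's first declared constructor and invoking it with the arguments in positional order. Below is a minimal, self-contained sketch of that positional-construction technique; the Reader class and newInstancePositionally helper are hypothetical names used only for illustration and are not part of the Hudi codebase.

import java.lang.reflect.Constructor

// Hypothetical stand-in for a class like VectorizedParquetRecordReader,
// which exposes a single public constructor taking primitive arguments.
class Reader(val useOffHeap: Boolean, val capacity: Int)

object PositionalCtorDemo {
  // Pick the first declared public constructor and invoke it with the
  // arguments in declaration order, instead of resolving a constructor by
  // parameter types. This avoids lookup failures when boxed argument types
  // (java.lang.Boolean, java.lang.Integer) do not match the primitive
  // parameter types declared on the constructor.
  def newInstancePositionally[T](clazz: Class[T], args: Any*): T = {
    val ctor: Constructor[_] = clazz.getConstructors.head
    ctor.newInstance(args.map(_.asInstanceOf[AnyRef]): _*).asInstanceOf[T]
  }

  def main(args: Array[String]): Unit = {
    // Reflection auto-unboxes the boxed arguments back to primitives here.
    val reader = newInstancePositionally(classOf[Reader], true, 4096)
    println(s"useOffHeap=${reader.useOffHeap}, capacity=${reader.capacity}")
  }
}

The trade-off is that positional invocation assumes the caller passes arguments in exactly the order the constructor declares them, and that the first constructor returned by getConstructors is the intended one; it trades type-based matching for positional fragility, which is why the commit frames it as a fallback.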
