This is an automated email from the ASF dual-hosted git repository.
leesf pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 969a5bf [MINOR] Fix typo, rename 'HooodieAvroDeserializer' to 'HoodieAvroDeserializer' (#4064)
969a5bf is described below
commit 969a5bf11e1c1e2f65b33cd75f36da76931332d0
Author: 董可伦 <[email protected]>
AuthorDate: Tue Nov 23 19:10:57 2021 +0800
[MINOR] Fix typo, rename 'HooodieAvroDeserializer' to 'HoodieAvroDeserializer' (#4064)
---
.../src/main/scala/org/apache/hudi/HoodieMergeOnReadRDD.scala | 8 ++++----
...HooodieAvroDeserializer.scala => HoodieAvroDeserializer.scala} | 2 +-
.../apache/spark/sql/hudi/command/payload/SqlTypedRecord.scala | 4 ++--
3 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/hudi/HoodieMergeOnReadRDD.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/hudi/HoodieMergeOnReadRDD.scala
index 44d39fa..f26cd88 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/hudi/HoodieMergeOnReadRDD.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/hudi/HoodieMergeOnReadRDD.scala
@@ -28,7 +28,7 @@ import org.apache.hudi.exception.HoodieException
import org.apache.hudi.hadoop.config.HoodieRealtimeConfig
import org.apache.hudi.hadoop.utils.HoodieInputFormatUtils.HOODIE_RECORD_KEY_COL_POS
import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.avro.{HoodieAvroSerializer, HooodieAvroDeserializer}
+import org.apache.spark.sql.avro.{HoodieAvroSerializer, HoodieAvroDeserializer}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{SpecificInternalRow, UnsafeProjection}
import org.apache.spark.sql.execution.datasources.PartitionedFile
@@ -119,7 +119,7 @@ class HoodieMergeOnReadRDD(@transient sc: SparkContext,
tableState.requiredStructSchema
.map(f => tableAvroSchema.getField(f.name).pos()).toList
private val recordBuilder = new GenericRecordBuilder(requiredAvroSchema)
- private val deserializer = HooodieAvroDeserializer(requiredAvroSchema, tableState.requiredStructSchema)
+ private val deserializer = HoodieAvroDeserializer(requiredAvroSchema, tableState.requiredStructSchema)
private val unsafeProjection = UnsafeProjection.create(tableState.requiredStructSchema)
private val logRecords = HoodieMergeOnReadRDD.scanLog(split, tableAvroSchema, config).getRecords
private val logRecordsKeyIterator = logRecords.keySet().iterator().asScala
@@ -158,7 +158,7 @@ class HoodieMergeOnReadRDD(@transient sc: SparkContext,
tableState.requiredStructSchema
.map(f => tableAvroSchema.getField(f.name).pos()).toList
private val recordBuilder = new GenericRecordBuilder(requiredAvroSchema)
- private val deserializer = HooodieAvroDeserializer(requiredAvroSchema, tableState.requiredStructSchema)
+ private val deserializer = HoodieAvroDeserializer(requiredAvroSchema, tableState.requiredStructSchema)
private val unsafeProjection = UnsafeProjection.create(tableState.requiredStructSchema)
private val logRecords = HoodieMergeOnReadRDD.scanLog(split, tableAvroSchema, config).getRecords
private val logRecordsKeyIterator = logRecords.keySet().iterator().asScala
@@ -204,7 +204,7 @@ class HoodieMergeOnReadRDD(@transient sc: SparkContext,
tableState.requiredStructSchema
.map(f => tableAvroSchema.getField(f.name).pos()).toList
private val serializer = HoodieAvroSerializer(tableState.tableStructSchema, tableAvroSchema, false)
- private val requiredDeserializer = HooodieAvroDeserializer(requiredAvroSchema, tableState.requiredStructSchema)
+ private val requiredDeserializer = HoodieAvroDeserializer(requiredAvroSchema, tableState.requiredStructSchema)
private val recordBuilder = new GenericRecordBuilder(requiredAvroSchema)
private val unsafeProjection = UnsafeProjection.create(tableState.requiredStructSchema)
private val logRecords = HoodieMergeOnReadRDD.scanLog(split, tableAvroSchema, config).getRecords
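For context on the call sites above: each reader in HoodieMergeOnReadRDD builds the deserializer from the required Avro schema plus the matching Catalyst schema, then applies an UnsafeProjection to the deserialized row. A minimal standalone sketch of that flow, using only constructs visible in this diff; the one-field schema is hypothetical, not from this commit:

    import org.apache.avro.Schema
    import org.apache.avro.generic.GenericRecordBuilder
    import org.apache.hudi.AvroConversionUtils
    import org.apache.spark.sql.avro.HoodieAvroDeserializer
    import org.apache.spark.sql.catalyst.InternalRow
    import org.apache.spark.sql.catalyst.expressions.UnsafeProjection

    object MergeOnReadFlowSketch extends App {
      // Hypothetical one-field schema standing in for requiredAvroSchema.
      val avroSchema = new Schema.Parser().parse(
        """{"type":"record","name":"Row","fields":[{"name":"key","type":"string"}]}""")
      // Derive the Catalyst schema the same way the surrounding Hudi code does.
      val structType = AvroConversionUtils.convertAvroSchemaToStructType(avroSchema)

      // Same construction as the renamed call sites in this diff.
      val deserializer = HoodieAvroDeserializer(avroSchema, structType)
      val record = new GenericRecordBuilder(avroSchema).set("key", "k1").build()

      // Deserialize to an InternalRow, then project to an UnsafeRow as the RDD does.
      val row = deserializer.deserializeData(record).asInstanceOf[InternalRow]
      val unsafeRow = UnsafeProjection.create(structType)(row)
      println(unsafeRow.getString(0)) // prints "k1"
    }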
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/avro/HooodieAvroDeserializer.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/avro/HoodieAvroDeserializer.scala
similarity index 94%
rename from hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/avro/HooodieAvroDeserializer.scala
rename to hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/avro/HoodieAvroDeserializer.scala
index ba911a7..cab1e54 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/avro/HooodieAvroDeserializer.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/avro/HoodieAvroDeserializer.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.types.DataType
* This is to be compatible with the type returned by Spark 3.1
* and other spark versions for AvroDeserializer
*/
-case class HooodieAvroDeserializer(rootAvroType: Schema, rootCatalystType: DataType)
+case class HoodieAvroDeserializer(rootAvroType: Schema, rootCatalystType: DataType)
extends AvroDeserializer(rootAvroType, rootCatalystType) {
def deserializeData(data: Any): Any = {
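The hunk ends at the deserializeData signature; the body sits outside the diff context. Per the class comment above, the wrapper exists because AvroDeserializer.deserialize returns Any on Spark 3.0 and earlier but Option[Any] from Spark 3.1 on, so deserializeData normalizes the result. A hedged sketch of that compatibility pattern, not necessarily the exact body:

    // Sketch only: normalize the two return shapes of AvroDeserializer.deserialize.
    def deserializeData(data: Any): Any =
      deserialize(data) match {
        case Some(row) => row   // Spark 3.1+: deserialize returns Option[Any]
        case other     => other // Spark 3.0 and earlier: deserialize returns Any
      }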
diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/payload/SqlTypedRecord.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/payload/SqlTypedRecord.scala
index 76f5caf..7497614 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/payload/SqlTypedRecord.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/payload/SqlTypedRecord.scala
@@ -22,7 +22,7 @@ import org.apache.avro.Schema
import org.apache.hudi.AvroConversionUtils
-import org.apache.spark.sql.avro.HooodieAvroDeserializer
+import org.apache.spark.sql.avro.HoodieAvroDeserializer
import org.apache.spark.sql.catalyst.InternalRow
/**
@@ -31,7 +31,7 @@ import org.apache.spark.sql.catalyst.InternalRow
class SqlTypedRecord(val record: IndexedRecord) extends IndexedRecord {
private lazy val sqlType = AvroConversionUtils.convertAvroSchemaToStructType(getSchema)
- private lazy val avroDeserializer = HooodieAvroDeserializer(record.getSchema, sqlType)
+ private lazy val avroDeserializer = HoodieAvroDeserializer(record.getSchema, sqlType)
private lazy val sqlRow = avroDeserializer.deserializeData(record).asInstanceOf[InternalRow]
override def put(i: Int, v: Any): Unit = {
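The lazy vals above mean the wrapped Avro record is only converted to an InternalRow on first SQL-typed access. A small usage sketch under stated assumptions: the schema and record are hypothetical, and field reads are assumed to be served through the lazily built sqlRow, as the sqlRow field suggests.

    import org.apache.avro.Schema
    import org.apache.avro.generic.GenericRecordBuilder
    import org.apache.spark.sql.hudi.command.payload.SqlTypedRecord

    // Hypothetical Avro record to wrap.
    val schema = new Schema.Parser().parse(
      """{"type":"record","name":"R","fields":[{"name":"id","type":"long"}]}""")
    val avroRecord = new GenericRecordBuilder(schema).set("id", 42L).build()

    // Wrapping defers deserialization until a field is first read.
    val typed = new SqlTypedRecord(avroRecord)
    val id = typed.get(0) // assumed to read through the lazily built sqlRow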