voonhous commented on code in PR #17833:
URL: https://github.com/apache/hudi/pull/17833#discussion_r2717459690
##########
hudi-spark-datasource/hudi-spark3-common/src/main/scala/org/apache/spark/sql/adapter/BaseSpark3Adapter.scala:
##########
@@ -176,4 +179,38 @@ abstract class BaseSpark3Adapter extends SparkAdapter with
Logging {
Dataset.ofRows(sqlContext.sparkSession,
applyFiltersToPlan(logicalRelation, requiredSchema, resolvedSchema,
relation.fileFormat.asInstanceOf[HoodieFormatTrait].getRequiredFilters))
}
+
+ override def getVariantDataType: Option[DataType] = {
+ // Spark 3.x does not support VariantType
+ None
+ }
+
+ override def isDataTypeEqualForParquet(requiredType: DataType, fileType:
DataType): Option[Boolean] = {
+ // Spark 3.x does not support VariantType, so return None to use default
logic
+ None
+ }
+
+ override def isVariantType(dataType: DataType): Boolean = {
+ // Spark 3.x does not support VariantType
+ false
+ }
+
+ override def createVariantValueWriter(
+ dataType: DataType,
+ writeValue: Consumer[Array[Byte]],
+ writeMetadata: Consumer[Array[Byte]]
+ ): BiConsumer[SpecializedGetters, Integer] = {
+    // Spark 3.x does not support VariantType
+ null
Review Comment:
Done
##########
hudi-spark-datasource/hudi-spark3-common/src/main/scala/org/apache/spark/sql/adapter/BaseSpark3Adapter.scala:
##########
@@ -176,4 +179,38 @@ abstract class BaseSpark3Adapter extends SparkAdapter with
Logging {
Dataset.ofRows(sqlContext.sparkSession,
applyFiltersToPlan(logicalRelation, requiredSchema, resolvedSchema,
relation.fileFormat.asInstanceOf[HoodieFormatTrait].getRequiredFilters))
}
+
+ override def getVariantDataType: Option[DataType] = {
+ // Spark 3.x does not support VariantType
+ None
+ }
+
+ override def isDataTypeEqualForParquet(requiredType: DataType, fileType:
DataType): Option[Boolean] = {
+ // Spark 3.x does not support VariantType, so return None to use default
logic
+ None
+ }
+
+ override def isVariantType(dataType: DataType): Boolean = {
+ // Spark 3.x does not support VariantType
+ false
+ }
+
+ override def createVariantValueWriter(
+ dataType: DataType,
+ writeValue: Consumer[Array[Byte]],
+ writeMetadata: Consumer[Array[Byte]]
+ ): BiConsumer[SpecializedGetters, Integer] = {
+    // Spark 3.x does not support VariantType
+ null
+ }
+
+ override def convertVariantFieldToParquetType(
+ dataType: DataType,
+ fieldName: String,
+ fieldSchema: HoodieSchema,
+ repetition: Repetition
+ ): Type = {
+ // Spark 3.x does not support VariantType
+ null
Review Comment:
Done
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]