the-other-tim-brown commented on code in PR #17573:
URL: https://github.com/apache/hudi/pull/17573#discussion_r2623545830


##########
hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/payload/ExpressionPayload.scala:
##########
@@ -483,15 +483,15 @@ object ExpressionPayload {
     avroDeserializerCache.get()
       .get(schema, new Function[Schema, HoodieAvroDeserializer] {
         override def apply(t: Schema): HoodieAvroDeserializer =
-          sparkAdapter.createAvroDeserializer(schema, convertAvroSchemaToStructType(schema))
+          sparkAdapter.createAvroDeserializer(HoodieSchema.fromAvroSchema(schema), convertHoodieSchemaToDataType(HoodieSchema.fromAvroSchema(schema)))
       })
   }
 
   private def getAvroSerializerFor(schema: Schema) = {

Review Comment:
   Let's make these caches take in `HoodieSchema` as well if possible



##########
hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/AlterHoodieTableChangeColumnCommand.scala:
##########
@@ -86,10 +85,10 @@ case class AlterHoodieTableChangeColumnCommand(
     Seq.empty[Row]
   }
 
-  private def validateSchema(newSchema: Schema, metaClient: HoodieTableMetaClient): Unit = {
+  private def validateSchema(newSchema: HoodieSchema, metaClient: HoodieTableMetaClient): Unit = {
     val schemaUtil = new TableSchemaResolver(metaClient)
-    val tableSchema = HoodieAvroUtils.createHoodieWriteSchema(schemaUtil.getTableAvroSchema(false))
-    if (!AvroSchemaUtils.isSchemaCompatible(tableSchema, newSchema)) {
+    val tableSchema = HoodieSchemaUtils.createHoodieWriteSchema(schemaUtil.getTableSchema(false).toString, false)

Review Comment:
   @rahil-c please address this as well



##########
hudi-spark-datasource/hudi-spark3-common/src/main/scala/org/apache/spark/sql/adapter/BaseSpark3Adapter.scala:
##########
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.adapter
 
-import org.apache.hudi.{AvroConversionUtils, DefaultSource, HoodiePartitionCDCFileGroupMapping, HoodiePartitionFileSliceMapping, Spark3HoodiePartitionCDCFileGroupMapping, Spark3HoodiePartitionFileSliceMapping}
+import org.apache.hudi.{AvroConversionUtils, DefaultSource, HoodiePartitionCDCFileGroupMapping, HoodiePartitionFileSliceMapping, HoodieSchemaConversionUtils, Spark3HoodiePartitionCDCFileGroupMapping, Spark3HoodiePartitionFileSliceMapping}

Review Comment:
   Is this causing a runtime issue? How did the other PR pass without the import?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to