This is an automated email from the ASF dual-hosted git repository.

leesf pushed a commit to branch task#69426984_upgrade_hudi_version
in repository https://gitbox.apache.org/repos/asf/hudi.git

commit 44bfd189327dd2707ddeab01680a903f0593ae0e
Author: mj375349 <[email protected]>
AuthorDate: Wed May 15 11:30:19 2024 +0800

    to #56799049 修改isHoodieTable spark3.5支持delta表
---
 .../src/main/scala/org/apache/spark/sql/hudi/SparkAdapter.scala     | 6 +++++-
 .../main/scala/org/apache/spark/sql/adapter/Spark3_5Adapter.scala   | 2 +-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/hudi-client/hudi-spark-client/src/main/scala/org/apache/spark/sql/hudi/SparkAdapter.scala b/hudi-client/hudi-spark-client/src/main/scala/org/apache/spark/sql/hudi/SparkAdapter.scala
index 1c617712477f..16f5c9974888 100644
--- a/hudi-client/hudi-spark-client/src/main/scala/org/apache/spark/sql/hudi/SparkAdapter.scala
+++ b/hudi-client/hudi-spark-client/src/main/scala/org/apache/spark/sql/hudi/SparkAdapter.scala
@@ -23,7 +23,6 @@ import org.apache.hadoop.fs.{FileStatus, Path}
 import org.apache.hudi.client.utils.SparkRowSerDe
 import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.storage.StoragePath
-
 import org.apache.avro.Schema
 import org.apache.hadoop.conf.Configuration
 import org.apache.spark.sql._
@@ -37,6 +36,7 @@ import org.apache.spark.sql.catalyst.planning.PhysicalOperation
 import org.apache.spark.sql.catalyst.plans.logical.{Command, LogicalPlan}
 import org.apache.spark.sql.catalyst.util.DateFormatter
 import org.apache.spark.sql.catalyst.{InternalRow, TableIdentifier}
+import org.apache.spark.sql.connector.catalog.Table
 import org.apache.spark.sql.execution.datasources._
 import org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat
 import org.apache.spark.sql.parser.HoodieExtendedParserInterface
@@ -154,6 +154,10 @@ trait SparkAdapter extends Serializable {
     }
   }
 
+  def isHoodieTable(table: Table): Boolean = {
+    table.getClass.getName.contains("HoodieInternalV2Table")
+  }
+
   def isHoodieTable(map: java.util.Map[String, String]): Boolean = {
     isHoodieTable(map.getOrDefault("provider", ""))
   }
diff --git a/hudi-spark-datasource/hudi-spark3.5.x/src/main/scala/org/apache/spark/sql/adapter/Spark3_5Adapter.scala b/hudi-spark-datasource/hudi-spark3.5.x/src/main/scala/org/apache/spark/sql/adapter/Spark3_5Adapter.scala
index 12beba9ba322..6fd51804ec33 100644
--- a/hudi-spark-datasource/hudi-spark3.5.x/src/main/scala/org/apache/spark/sql/adapter/Spark3_5Adapter.scala
+++ b/hudi-spark-datasource/hudi-spark3.5.x/src/main/scala/org/apache/spark/sql/adapter/Spark3_5Adapter.scala
@@ -55,7 +55,7 @@ class Spark3_5Adapter extends BaseSpark3Adapter {
         case plan if !plan.resolved => None
         // NOTE: When resolving Hudi table we allow [[Filter]]s and [[Project]]s be applied
         //       on top of it
-        case PhysicalOperation(_, _, DataSourceV2Relation(v2: V2TableWithV1Fallback, _, _, _, _)) if isHoodieTable(v2.v1Table) =>
+        case PhysicalOperation(_, _, DataSourceV2Relation(v2: V2TableWithV1Fallback, _, _, _, _)) if isHoodieTable(v2) =>
           Some(v2.v1Table)
         case _ => None
       }

Reply via email to