yihua commented on code in PR #13558:
URL: https://github.com/apache/hudi/pull/13558#discussion_r2229473782


##########
hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieSqlCommonUtils.scala:
##########
@@ -378,4 +378,17 @@ object HoodieSqlCommonUtils extends SparkAdapterSupport {
       throw new HoodieException(s"Got an invalid instant ($queryInstant)")
     }
   }
+
+  /**
+   * Check if Polaris catalog is enabled in the Spark session.
+   * @param sparkSession The Spark session
+   * @return true if Polaris catalog is configured, false otherwise
+   */
+  def isUsingPolarisCatalog(sparkSession: SparkSession): Boolean = {

Review Comment:
   Got it. I was thinking of the ultimate approach with a v2 catalog 
implementation, where the exec itself should not have the catalog check. This 
is OK for now.
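
For reference, here is a minimal sketch (not necessarily the PR's actual
implementation) of what such a check could look like, assuming the Polaris
catalog class name comes from the `hoodie.datasource.polaris.catalog.class`
config exercised in the test below:

```scala
import org.apache.spark.sql.SparkSession

def isUsingPolarisCatalog(sparkSession: SparkSession): Boolean = {
  // Assumed default, per the comment in the test below.
  val polarisCatalogClass = sparkSession.conf.get(
    "hoodie.datasource.polaris.catalog.class",
    "org.apache.polaris.spark.SparkCatalog")
  // The session is considered to use Polaris if any registered v2 catalog
  // (spark.sql.catalog.<name>) points at that class.
  sparkSession.conf.getAll.exists { case (key, value) =>
    key.startsWith("spark.sql.catalog.") && value == polarisCatalogClass
  }
}
```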



##########
hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/catalog/TestPolarisHoodieCatalogDelegation.scala:
##########
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hudi.catalog
+
+import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.connector.catalog.Identifier
+import org.apache.spark.sql.connector.expressions.Transform
+import org.apache.spark.sql.hudi.HoodieSqlCommonUtils
+import org.apache.spark.sql.types._
+import org.apache.spark.util.Utils
+import org.junit.jupiter.api.Assertions.{assertFalse, assertTrue}
+import org.mockito.ArgumentMatchers.any
+import org.mockito.Mockito.{never, spy, times, verify}
+import org.scalatest.funsuite.AnyFunSuite
+
+import java.io.File
+import java.util.UUID
+
+import scala.collection.JavaConverters._
+
+/**
+ * Test class dedicated to testing Polaris catalog delegation behavior in HoodieCatalog.
+ */
+class TestPolarisHoodieCatalogDelegation extends AnyFunSuite {
+
+  private def generateTableName: String = s"hudi_test_table_${UUID.randomUUID().toString.replace("-", "_")}"
+
+  private def withTempDir(f: File => Unit): Unit = {
+    val tempDir = Utils.createTempDir()
+    try {
+      f(tempDir)
+    } finally {
+      Utils.deleteRecursively(tempDir)
+    }
+  }
+
+  private def buildCustomSparkSession(tempDir: File, enablePolaris: Boolean = false): (SparkSession, HoodieCatalog, MockPolarisSparkCatalog) = {
+    val mockPolarisDelegate = spy(new MockPolarisSparkCatalog())
+
+    val sparkBuilder = SparkSession.builder()
+      .appName("TestPolarisHoodieCatalogDelegation")
+      .master("local[*]")
+      .config("spark.sql.warehouse.dir", tempDir.getCanonicalPath)
+      .config("spark.sql.extensions", 
"org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
+      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
+      .config("spark.sql.catalog.spark_catalog", 
"org.apache.spark.sql.hudi.catalog.HoodieCatalog")
+
+    if (enablePolaris) {
+      // In production the class should be org.apache.polaris.spark.SparkCatalog
+      // (which is the default value of the config hoodie.datasource.polaris.catalog.class).
+      // In testing, however, we verify that the config takes effect by using a mock catalog.
+      val testPolarisCatalogClass = "org.apache.spark.sql.hudi.catalog.MockPolarisSparkCatalog"
+      sparkBuilder.config("spark.sql.catalog.polaris_catalog", testPolarisCatalogClass)
+      sparkBuilder.config("hoodie.datasource.polaris.catalog.class", testPolarisCatalogClass)
+    }
+
+    // Create SparkSession first so it becomes the active session
+    val customSession = sparkBuilder.getOrCreate()
+
+    // Get the HoodieCatalog instance from the session
+    val hoodieCatalog = customSession.sessionState.catalogManager.v2SessionCatalog.asInstanceOf[HoodieCatalog]
+
+    // Set the mock delegate if Polaris is enabled
+    if (enablePolaris) {
+      hoodieCatalog.setDelegateCatalog(mockPolarisDelegate)
+    }

Review Comment:
   Sounds good. Let's enhance the comment to mention that this is meant to 
`mimic Polaris's Spark catalog behavior`.
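
For illustration, one possible wording for the enhanced comment suggested above
(a sketch only; the final phrasing is up to the PR author):

```scala
// Set the mock delegate if Polaris is enabled. MockPolarisSparkCatalog is a
// test-only stand-in that mimics Polaris's Spark catalog behavior, letting the
// test verify that HoodieCatalog delegates calls to it without a real Polaris
// deployment.
if (enablePolaris) {
  hoodieCatalog.setDelegateCatalog(mockPolarisDelegate)
}
```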


