XuQianJin-Stars commented on code in PR #8488:
URL: https://github.com/apache/hudi/pull/8488#discussion_r1172121964


##########
hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/HoodieCLIUtils.scala:
##########
@@ -27,21 +27,29 @@ import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable
-import org.apache.spark.sql.hudi.ProvidesHoodieConfig
+import org.apache.spark.sql.hudi.HoodieSqlCommonUtils.withSparkConf
 
 import scala.collection.JavaConverters.{collectionAsScalaIterableConverter, mapAsJavaMapConverter}
 
-object HoodieCLIUtils extends ProvidesHoodieConfig{
+object HoodieCLIUtils {
 
   def createHoodieClientFromPath(sparkSession: SparkSession,
                                  basePath: String,
-                                 conf: Map[String, String]): SparkRDDWriteClient[_] = {
+                                 conf: Map[String, String],
+                                 tableName: Option[String]): SparkRDDWriteClient[_] = {
     val metaClient = HoodieTableMetaClient.builder().setBasePath(basePath)
       .setConf(sparkSession.sessionState.newHadoopConf()).build()
     val schemaUtil = new TableSchemaResolver(metaClient)
     val schemaStr = schemaUtil.getTableAvroSchema(false).toString
+    // If tableName is provided, we need to add catalog props
+    val catalogProps = tableName match {

Review Comment:
   ```
       val hoodieCatalogTable = if (table.isDefined) {
         getHoodieCatalogTable(sparkSession, table.get.asInstanceOf[String])
       } else {
         metaClient.getTableConfig.getDatabaseName match {
           case databaseName: String => if (HoodieTableType.MERGE_ON_READ == tableType) {
             getHoodieCatalogTable(sparkSession, databaseName + "." + metaClient.getTableConfig.getTableName + MOR_SNAPSHOT_TABLE_SUFFIX)
           } else {
             getHoodieCatalogTable(sparkSession, databaseName + "." + metaClient.getTableConfig.getTableName)
           }
           case _ => getHoodieCatalogTable(sparkSession, metaClient.getTableConfig.getTableName)
         }
       }

       val finalParameters = HoodieWriterUtils.parametersWithWriteDefaults(buildHoodieConfig(hoodieCatalogTable) ++ conf)
   ```
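
   For context, `getHoodieCatalogTable` in the snippet above is the table-lookup helper this suggestion relies on; a minimal sketch of what such a helper could look like (the exact placement and signature are assumptions, not the final API):

   ```
       // Hypothetical sketch: resolve a (possibly database-qualified) table name
       // to a HoodieCatalogTable via the Spark session's SQL parser and catalog.
       def getHoodieCatalogTable(sparkSession: SparkSession, table: String): HoodieCatalogTable = {
         val tableId = sparkSession.sessionState.sqlParser.parseTableIdentifier(table)
         HoodieCatalogTable(sparkSession, tableId)
       }
   ```

   With something like that in place, a call such as `HoodieCLIUtils.createHoodieClientFromPath(spark, basePath, Map.empty, Some("db.tbl"))` (arguments are illustrative) would derive the catalog props from the named table, and otherwise fall back to the database/table name recorded in the table config.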


